diff --git a/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-03e95750-d1d7-4aba-ba4c-b80d732967351767624280770-2026_01_05-15.44.46.878/source.csv b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-03e95750-d1d7-4aba-ba4c-b80d732967351767624280770-2026_01_05-15.44.46.878/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..675a7f4fcac5e56bf526f9aa99d4a0a60a56fc11 --- /dev/null +++ b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-03e95750-d1d7-4aba-ba4c-b80d732967351767624280770-2026_01_05-15.44.46.878/source.csv @@ -0,0 +1,777 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,2,"src/preview/types.ts",0,0,"import * as vscode from 'vscode';\n\n/**\n * Action types that can be previewed and executed.\n */\nexport type Action =\n | { kind: 'showTextDocument' }\n | { kind: 'setSelections'; selections: Array<{ start: [number, number]; end: [number, number] }> }\n | { kind: 'editInsert'; position: [number, number]; text: string }\n | { kind: 'editDelete'; range: { start: [number, number]; end: [number, number] } }\n | { kind: 'editReplace'; range: { start: [number, number]; end: [number, number] }; text: string }\n | { kind: 'terminalShow' }\n | { kind: 'terminalSendText'; text: string }\n | { kind: 'openFile'; filePath: string; selections?: Array<{ start: [number, number]; end: [number, number] }> };\n\n/**\n * Convert action range to VS Code Range.\n */\nexport function toVscodeRange(range: { start: [number, number]; end: [number, number] }): vscode.Range {\n return new vscode.Range(\n new vscode.Position(range.start[0], range.start[1]),\n new vscode.Position(range.end[0], range.end[1])\n );\n}\n\n/**\n * Convert action position to VS Code Position.\n */\nexport function toVscodePosition(position: [number, number]): vscode.Position {\n return new vscode.Position(position[0], position[1]);\n}\n\n/**\n * Truncate text to a maximum length with ellipsis.\n */\nexport function truncate(text: string, maxLength: number): string {\n const oneLine = text.replace(/\r?\n/g, '↵');\n if (oneLine.length <= maxLength) {\n return oneLine;\n }\n return oneLine.slice(0, maxLength - 1) + '…';\n}\n\n\n",typescript,tab +2,330,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"3:44:46 PM [info] Activating crowd-code\n3:44:46 PM [info] Recording started\n3:44:46 PM [info] Initializing git provider using file system watchers...\n3:44:46 PM [info] Git repository found\n3:44:46 PM [info] Git provider initialized successfully\n3:44:46 PM [info] Initial git state: [object Object]\n",Log,tab +3,1315,"src/preview/types.ts",0,0,"",typescript,tab +4,43405,"src/preview/types.ts",0,0,"",typescript,tab +5,43704,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"",Log,tab +6,45202,"TERMINAL",0,0,"",,terminal_focus +7,45202,"src/preview/types.ts",0,0,"",typescript,tab +8,47014,"TERMINAL",0,0,"squeue",,terminal_command +9,47015,"TERMINAL",0,0,"]633;C JOBID USER PARTITION NODES CPUS ST SUBMIT_TIME START_TIME TIME TIME_LIMIT NODELIST(REASON)\r\n 39369 nick.lecht standard 1 128 R 2026-01-05T15:25:33 2026-01-05T15:25:34 19:59 12:00:00 hai007\r\n 39368 nick.lecht standard 1 128 R 2026-01-05T15:24:15 2026-01-05T15:24:15 21:18 12:00:00 hai006\r\n 39367 nick.lecht standard 1 128 R 2026-01-05T15:22:56 2026-01-05T15:22:56 22:37 12:00:00 hai002\r\n 39351 nishant.ku standard 3 624 R 2026-01-05T10:44:33 2026-01-05T10:44:33 5:01:00 1-00:00:00 
hai[003-005]\r\n]0;franz.srambical@hai-login2:~/crowd-pilot-extension",,terminal_output +10,52541,"TERMINAL",0,0,"salloc --gpus-per-node=8 --nodes=1 --ntasks-per-node=1 --cpus-per-task=10 --mem=100G --qos=low",,terminal_command +11,52590,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 39370\r\n",,terminal_output +12,52695,"TERMINAL",0,0,"salloc: Nodes hai001 are ready for job\r\n",,terminal_output +13,52969,"TERMINAL",0,0,"Running inside SLURM, Job ID 39370.\r\n",,terminal_output +14,53062,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/crowd-pilot-extension[?2004h[franz.srambical@hai001.haicore.berlin:~/crowd-pilot-extension] $ ",,terminal_output +15,55332,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +16,55443,"TERMINAL",0,0,"b': python /home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/preference_utils.py --mode threshold --path ../crowd-pilot-serializer/.crowd-pilot-preferences.jsonl --target 0.25 ",,terminal_output +17,55701,"TERMINAL",0,0,"\rfailed reverse-i-search)`bh': python /home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/preference_utils.py --mode threshold --path ../crowd-pilot-serializer/.crowd-pilot-preferences.jsonl --target 0.25",,terminal_output +18,55876,"TERMINAL",0,0,"\r ': python /home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/preference_utils.py --mode threshold --path ../crowd-pilot-serializer/.crowd-pilot-preferences.jsonl --tar[1@g",,terminal_output +19,66146,"TERMINAL",0,0,"\r': python /home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/preference_utils.py --mode threshold --path ../crowd-pilot-serializer/.crowd-pilot-preferences.jsonl --targ",,terminal_output +20,67226,"TERMINAL",0,0,"\r[franz.srambical@hai001.haicore.berlin:~/crowd-pilot-extension] $ python /home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/preference_utils.py --mode threshold --path ../crowd-pilot-serializer/.c[35@rowd-pilot-preferences.jsonl --targ\r\n\r",,terminal_output +21,67462,"TERMINAL",0,0,"\r\n\r",,terminal_output +22,68162,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +23,68651,"TERMINAL",0,0,"b': python /home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/preference_utils.py --mode threshold --path .crowd-pilot-preferences.jsonl --target 0.25\r",,terminal_output +24,68970,"TERMINAL",0,0,"\ra': bash /home/franz.srambical/slurm/dev/franz/berlin/crowd-pilot/crowd_pilot_serializer/spawner.sh\r[1@s': bas[1@h': bash",,terminal_output +25,70689,"TERMINAL",0,0,"erialize_128k_glm.sh\r",,terminal_output +26,71378,"TERMINAL",0,0,"qwen.sh\r",,terminal_output +27,71692,"TERMINAL",0,0,"",,terminal_output +28,72111,"TERMINAL",0,0,"/home/franz.srambical/slurm/dev/franz/berlin/crowd-pilot/crowd_pilot_serializer/serialize_32k_glm.sh\r",,terminal_output +29,72674,"TERMINAL",0,0,"qwen.sh\r",,terminal_output +30,72825,"TERMINAL",0,0,"16k_qwen.sh\r",,terminal_output +31,73122,"TERMINAL",0,0,"glm.sh\r",,terminal_output +32,73454,"TERMINAL",0,0,"qwen.sh\r",,terminal_output +33,73561,"TERMINAL",0,0,"8k_glm.sh \r",,terminal_output +34,73692,"TERMINAL",0,0,"[1@qwen\r",,terminal_output +35,74078,"TERMINAL",0,0,"4k_qwen.sh\rmiles/scripts/run-sft-torchrun.sh\r",,terminal_output +36,74222,"TERMINAL",0,0,"slurm/dev/franz/berlin/crowd-pilot/miles/test_lora_sglang.sh\r",,terminal_output +37,75027,"TERMINAL",0,0,"srun bash /home/franz.srambical/miles/scripts/run-sft-torchrun.sh\r",,terminal_output +38,75602,"TERMINAL",0,0,"\r[40@[franz.srambical@hai001.haicore.berlin:~/crowd-pilot-extension] $ srun bash",,terminal_output 
+39,76022,"TERMINAL",0,0,"\r(reverse-i-search)`': bash /home/franz.srambical/miles/scripts/run-sft-torchrun.sh\r",,terminal_output +40,76625,"TERMINAL",0,0,"\r[44@[franz.srambical@hai001.haicore.berlin:~/crowd-pilot-extension] $",,terminal_output +41,76896,"TERMINAL",0,0,"",,terminal_output +42,84506,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/tab_model/logs/franz/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/tab_model/logs/franz/%x_%j.log\n#SBATCH --job-name=crowd_pilot_sglang\n#SBATCH --mem=400GB\n#SBATCH --qos=normal\n\nexport HF_HOME=/fast/project/HFMI_SynergyUnit/tab_model/franz/hf_home/\n\nsource /home/franz.srambical/crowd-pilot-serializer-legacy/.venv/bin/activate\nmodule load CUDA/12.8\n\nmodel_path=""zai-org/GLM-4.5-Air""\npython3 -m sglang.launch_server --model-path $model_path --host 0.0.0.0 --log-requests \\n --tp-size 8 \\n --tool-call-parser glm45 \\n --reasoning-parser glm45 \\n --speculative-algorithm EAGLE \\n --speculative-num-steps 3 \\n --speculative-eagle-topk 1 \\n --speculative-num-draft-tokens 4 \\n --mem-fraction-static 0.9",shellscript,tab +43,86440,"TERMINAL",0,0,"b",,terminal_output +44,86678,"TERMINAL",0,0,"ash",,terminal_output +45,86856,"TERMINAL",0,0," ",,terminal_output +46,87030,"TERMINAL",0,0,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",,terminal_output +47,89253,"TERMINAL",0,0,"\r/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh\r\n[?2004l\r",,terminal_output +48,94825,"TERMINAL",0,0,"^CTraceback (most recent call last):\r\n File """", line 189, in _run_module_as_main\r\n File """", line 112, in _get_module_details\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/sglang/__init__.py"", line 5, in \r\n from sglang.lang.api import (\r\n ...<22 lines>...\r\n )\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/sglang/lang/api.py"", line 7, in \r\n from sglang.lang.backend.base_backend import BaseBackend\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/sglang/lang/backend/base_backend.py"", line 5, in \r\n from sglang.lang.interpreter import StreamExecutor\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/sglang/lang/interpreter.py"", line 35, in \r\n from sglang.utils import (\r\n ...<3 lines>...\r\n )\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/sglang/utils.py"", line 26, in \r\n from IPython.display import HTML, display\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/IPython/__init__.py"", line 57, in \r\n from .terminal.embed import embed\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/IPython/terminal/embed.py"", line 16, in \r\n from IPython.terminal.interactiveshell import TerminalInteractiveShell\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/IPython/terminal/interactiveshell.py"", line 49, in \r\n from .debugger import TerminalPdb, Pdb\r\n File 
""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/IPython/terminal/debugger.py"", line 6, in \r\n from IPython.core.completer import IPCompleter\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/IPython/core/completer.py"", line 258, in \r\n import jedi\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/jedi/__init__.py"", line 32, in \r\n from jedi.api import Script, Interpreter, set_debug_function, preload_module\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/jedi/api/__init__.py"", line 21, in \r\n from jedi.api import classes\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/jedi/api/classes.py"", line 24, in \r\n from jedi.inference.utils import unite\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/jedi/inference/__init__.py"", line 78, in \r\n from jedi.inference.syntax_tree import infer_expr_stmt, \\r\n check_tuple_assignments, tree_name_to_values\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/jedi/inference/syntax_tree.py"", line 28, in \r\n from jedi.inference.gradual import annotation\r\n File """", line 1360, in _find_and_load\r\n File """", line 1331, in _find_and_load_unlocked\r\n File """", line 935, in _load_unlocked\r\n File """", line 1022, in exec_module\r\n File """", line 1118, in get_code\r\n File """", line 1218, in get_data\r\nKeyboardInterrupt\r\n^C\r\n]0;franz.srambical@hai-login2:~/crowd-pilot-extension[?2004h[franz.srambical@hai001.haicore.berlin:~/crowd-pilot-extension] $ ",,terminal_output +49,95122,"TERMINAL",0,0,"bash /home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",,terminal_output +50,95697,"TERMINAL",0,0,"[franz.srambical@hai001.haicore.berlin:~/crowd-pilot-extension] $ bash /home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",,terminal_output +51,96992,"TERMINAL",0,0,"\r",,terminal_output +52,97154,"TERMINAL",0,0,"e",,terminal_output +53,97689,"TERMINAL",0,0,"xi",,terminal_output +54,97934,"TERMINAL",0,0,"t",,terminal_output +55,98023,"TERMINAL",0,0,"\r\n[?2004l\rexit\r\nsrun: error: hai001: task 0: Exited with exit code 130\r\nsalloc: Relinquishing job allocation 39370\r\n]0;franz.srambical@hai-login2:~/crowd-pilot-extension",,terminal_output +56,101081,"TERMINAL",0,0,"salloc --gpus-per-node=8 --nodes=1 --ntasks-per-node=1 --cpus-per-task=10 --mem=100G --qos=normal",,terminal_command +57,101123,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 39371\r\n",,terminal_output +58,101219,"TERMINAL",0,0,"salloc: Nodes hai001 are ready for job\r\n",,terminal_output +59,101499,"TERMINAL",0,0,"Running inside SLURM, Job ID 39371.\r\n",,terminal_output +60,101650,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/crowd-pilot-extension[?2004h[franz.srambical@hai001.haicore.berlin:~/crowd-pilot-extension] $ ",,terminal_output +61,103980,"TERMINAL",0,0,"e",,terminal_output +62,104252,"TERMINAL",0,0,"xi",,terminal_output +63,104355,"TERMINAL",0,0,"t",,terminal_output +64,104477,"TERMINAL",0,0,"\r\n[?2004l\rexit\r\nsalloc: Relinquishing job allocation 39371\r\n]0;franz.srambical@hai-login2:~/crowd-pilot-extension",,terminal_output +65,108739,"TERMINAL",0,0,"salloc --gpus-per-node=4 --nodes=1 --ntasks-per-node=1 
--cpus-per-task=10 --mem=100G --qos=normal",,terminal_command +66,108781,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 39372\r\n",,terminal_output +67,108881,"TERMINAL",0,0,"salloc: Nodes hai001 are ready for job\r\n",,terminal_output +68,109142,"TERMINAL",0,0,"Running inside SLURM, Job ID 39372.\r\n",,terminal_output +69,109221,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/crowd-pilot-extension[?2004h[franz.srambical@hai001.haicore.berlin:~/crowd-pilot-extension] $ ",,terminal_output +70,111116,"TERMINAL",0,0,"bb",,terminal_output +71,111544,"TERMINAL",0,0,"",,terminal_output +72,111805,"TERMINAL",0,0,"a",,terminal_output +73,111849,"TERMINAL",0,0,"sh",,terminal_output +74,112051,"TERMINAL",0,0," ",,terminal_output +75,112248,"TERMINAL",0,0,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",,terminal_output +76,112769,"TERMINAL",0,0,"\r/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh\r\n[?2004l\r",,terminal_output +77,129835,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",866,0,"",shellscript,selection_command +78,129921,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",835,0,"",shellscript,selection_command +79,130014,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",866,0,"",shellscript,selection_command +80,134030,"TERMINAL",0,0,"",,terminal_focus +81,135838,"TERMINAL",0,0,"squeue",,terminal_command +82,135840,"TERMINAL",0,0,"]633;C JOBID USER PARTITION NODES CPUS ST SUBMIT_TIME START_TIME TIME TIME_LIMIT NODELIST(REASON)\r\n 39372 franz.sram interacti 1 20 R 2026-01-05T15:46:35 2026-01-05T15:46:35 0:27 1-00:00:00 hai001\r\n 39369 nick.lecht standard 1 128 R 2026-01-05T15:25:33 2026-01-05T15:25:34 21:28 12:00:00 hai007\r\n 39368 nick.lecht standard 1 128 R 2026-01-05T15:24:15 2026-01-05T15:24:15 22:47 12:00:00 hai006\r\n 39367 nick.lecht standard 1 128 R 2026-01-05T15:22:56 2026-01-05T15:22:56 24:06 12:00:00 hai002\r\n 39351 nishant.ku standard 3 624 R 2026-01-05T10:44:33 2026-01-05T10:44:33 5:02:29 1-00:00:00 hai[003-005]\r\n]0;franz.srambical@hai-login2:~/crowd-pilot-extension",,terminal_output +83,138871,"TERMINAL",0,0,"[2026-01-05 15:47:05] WARNING server_args.py:711: The tool_call_parser 'glm45' is deprecated. Please use 'glm' instead.\r\n",,terminal_output +84,146558,"TERMINAL",0,0,"[2026-01-05 15:47:13] WARNING server_args.py:1406: Attention backend not explicitly specified. Use fa3 backend by default.\r\n[2026-01-05 15:47:13] WARNING server_args.py:1741: Max running requests is reset to 48 for speculative decoding. 
You can override this by explicitly setting --max-running-requests.\r\n[2026-01-05 15:47:13] WARNING server_args.py:1756: Overlap scheduler is disabled because of using eagle3 or standalone speculative decoding.\r\n",,terminal_output +85,148524,"TERMINAL",0,0,"[2026-01-05 15:47:15] server_args=ServerArgs(model_path='zai-org/GLM-4.5-Air', tokenizer_path='zai-org/GLM-4.5-Air', tokenizer_mode='auto', tokenizer_worker_num=1, skip_tokenizer_init=False, load_format='auto', model_loader_extra_config='{}', rl_quant_profile=None, trust_remote_code=False, context_length=None, is_embedding=False, enable_multimodal=None, revision=None, model_impl='auto', host='0.0.0.0', port=30000, fastapi_root_path='', grpc_mode=False, skip_server_warmup=False, warmups=None, nccl_port=None, checkpoint_engine_wait_weights_before_ready=False, dtype='auto', quantization=None, quantization_param_path=None, kv_cache_dtype='auto', enable_fp32_lm_head=False, modelopt_quant=None, modelopt_checkpoint_restore_path=None, modelopt_checkpoint_save_path=None, modelopt_export_path=None, quantize_and_serve=False, mem_fraction_static=0.9, max_running_requests=48, max_queued_requests=None, max_total_tokens=None, chunked_prefill_size=8192, max_prefill_tokens=16384, schedule_policy='fcfs', enable_priority_scheduling=False, abort_on_priority_when_disabled=False, schedule_low_priority_values_first=False, priority_scheduling_preemption_threshold=10, schedule_conservativeness=1.0, page_size=1, hybrid_kvcache_ratio=None, swa_full_tokens_ratio=0.8, disable_hybrid_swa_memory=False, radix_eviction_policy='lru', device='cuda', tp_size=8, pp_size=1, pp_max_micro_batch_size=None, stream_interval=1, stream_output=False, random_seed=945637946, constrained_json_whitespace_pattern=None, constrained_json_disable_any_whitespace=False, watchdog_timeout=300, dist_timeout=None, download_dir=None, base_gpu_id=0, gpu_id_step=1, sleep_on_idle=False, mm_process_config={}, log_level='info', log_level_http=None, log_requests=True, log_requests_level=2, crash_dump_folder=None, show_time_cost=False, enable_metrics=False, enable_metrics_for_all_schedulers=False, tokenizer_metrics_custom_labels_header='x-custom-labels', tokenizer_metrics_allowed_custom_labels=None, bucket_time_to_first_token=None, bucket_inter_token_latency=None, bucket_e2e_request_latency=None, collect_tokens_histogram=False, prompt_tokens_buckets=None, generation_tokens_buckets=None, gc_warning_threshold_secs=0.0, decode_log_interval=40, enable_request_time_stats_logging=False, kv_events_config=None, enable_trace=False, otlp_traces_endpoint='localhost:4317', export_metrics_to_file=False, export_metrics_to_file_dir=None, api_key=None, served_model_name='zai-org/GLM-4.5-Air', weight_version='default', chat_template=None, completion_template=None, file_storage_path='sglang_storage', enable_cache_report=False, reasoning_parser='glm45', tool_call_parser='glm', tool_server=None, sampling_defaults='model', dp_size=1, load_balance_method='round_robin', load_watch_interval=0.1, prefill_round_robin_balance=False, dist_init_addr=None, nnodes=1, node_rank=0, json_model_override_args='{}', preferred_sampling_params=None, enable_lora=None, max_lora_rank=None, lora_target_modules=None, lora_paths=None, max_loaded_loras=None, max_loras_per_batch=8, lora_eviction_policy='lru', lora_backend='csgmv', max_lora_chunk_size=16, attention_backend='fa3', decode_attention_backend=None, prefill_attention_backend=None, sampling_backend='flashinfer', grammar_backend='xgrammar', mm_attention_backend=None, 
fp8_gemm_runner_backend='auto', nsa_prefill_backend='flashmla_sparse', nsa_decode_backend='fa3', enable_flashinfer_autotune=False, speculative_algorithm='EAGLE', speculative_draft_model_path='zai-org/GLM-4.5-Air', speculative_draft_model_revision=None, speculative_draft_load_format=None, speculative_num_steps=3, speculative_eagle_topk=1, speculative_num_draft_tokens=4, speculative_accept_threshold_single=1.0, speculative_accept_threshold_acc=1.0, speculative_token_map=None, speculative_attention_mode='prefill', speculative_moe_runner_backend=None, speculative_moe_a2a_backend=None, speculative_ngram_min_match_window_size=1, speculative_ngram_max_match_window_size=12, speculative_ngram_min_bfs_breadth=1, speculative_ngram_max_bfs_breadth=10, speculative_ngram_match_type='BFS', speculative_ngram_branch_length=18, speculative_ngram_capacity=10000000, ep_size=1, moe_a2a_backend='none', moe_runner_backend='auto', flashinfer_mxfp4_moe_precision='default', enable_flashinfer_allreduce_fusion=False, deepep_mode='auto', ep_num_redundant_experts=0, ep_dispatch_algorithm=None, init_expert_location='trivial', enable_eplb=False, eplb_algorithm='auto', eplb_rebalance_num_iterations=1000, eplb_rebalance_layers_per_chunk=None, eplb_min_rebalancing_utilization_threshold=1.0, expert_distribution_recorder_mode=None, expert_distribution_recorder_buffer_size=1000, enable_expert_distribution_metrics=False, deepep_config=None, moe_dense_tp_size=None, elastic_ep_backend=None, mooncake_ib_device=None, max_mamba_cache_size=None, mamba_ssm_dtype='float32', mamba_full_memory_ratio=0.9, enable_hierarchical_cache=False, hicache_ratio=2.0, hicache_size=0, hicache_write_policy='write_through', hicache_io_backend='kernel', hicache_mem_layout='layer_first', hicache_storage_backend=None, hicache_storage_prefetch_policy='best_effort', hicache_storage_backend_extra_config=None, enable_lmcache=False, kt_weight_path=None, kt_method='AMXINT4', kt_cpuinfer=None, kt_threadpool_count=2, kt_num_gpu_experts=None, kt_max_deferred_experts_per_token=None, dllm_algorithm=None, dllm_algorithm_config=None, enable_double_sparsity=False, ds_channel_config_path=None, ds_heavy_channel_num=32, ds_heavy_token_num=256, ds_heavy_channel_type='qk', ds_sparse_decode_threshold=4096, cpu_offload_gb=0, offload_group_size=-1, offload_num_in_group=1, offload_prefetch_step=1, offload_mode='cpu', multi_item_scoring_delimiter=None, disable_radix_cache=False, cuda_graph_max_bs=512, cuda_graph_bs=[1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 40, 44, 48, 52, 56, 60, 64, 72, 80, 88, 96, 104, 112, 120, 128, 136, 144, 152, 160, 168, 176, 184, 192, 200, 208, 216, 224, 232, 240, 248, 256, 272, 288, 304, 320, 336, 352, 368, 384, 400, 416, 432, 448, 464, 480, 496, 512], disable_cuda_graph=False, disable_cuda_graph_padding=False, enable_profile_cuda_graph=False, enable_cudagraph_gc=False, enable_layerwise_nvtx_marker=False, enable_nccl_nvls=False, enable_symm_mem=False, disable_flashinfer_cutlass_moe_fp4_allgather=False, enable_tokenizer_batch_encode=False, disable_tokenizer_batch_decode=False, disable_outlines_disk_cache=False, disable_custom_all_reduce=False, enable_mscclpp=False, enable_torch_symm_mem=False, disable_overlap_schedule=True, enable_mixed_chunk=False, enable_dp_attention=False, enable_dp_lm_head=False, enable_two_batch_overlap=False, enable_single_batch_overlap=False, tbo_token_distribution_threshold=0.48, enable_torch_compile=False, enable_piecewise_cuda_graph=False, enable_torch_compile_debug_mode=False, 
torch_compile_max_bs=32, piecewise_cuda_graph_max_tokens=4096, piecewise_cuda_graph_tokens=[4, 8, 12, 16, 20, 24, 28, 32, 48, 64, 80, 96, 112, 128, 144, 160, 176, 192, 208, 224, 240, 256, 288, 320, 352, 384, 416, 448, 480, 512, 640, 768, 896, 1024, 1152, 1280, 1408, 1536, 1664, 1792, 1920, 2048, 2176, 2304, 2432, 2560, 2688, 2816, 2944, 3072, 3200, 3328, 3456, 3584, 3712, 3840, 3968, 4096], piecewise_cuda_graph_compiler='eager', torchao_config='', enable_nan_detection=False, enable_p2p_check=False, triton_attention_reduce_in_fp32=False, triton_attention_num_kv_splits=8, triton_attention_split_tile_size=None, num_continuous_decode_steps=1, delete_ckpt_after_loading=False, enable_memory_saver=False, enable_weights_cpu_backup=False, enable_draft_weights_cpu_backup=False, allow_auto_truncate=False, enable_custom_logit_processor=False, flashinfer_mla_disable_ragged=False, disable_shared_experts_fusion=False, disable_chunked_prefix_cache=False, disable_fast_image_processor=False, keep_mm_feature_on_device=False, enable_return_hidden_states=False, scheduler_recv_interval=1, numa_node=None, enable_deterministic_inference=False, rl_on_policy_target=None, enable_attn_tp_input_scattered=False, enable_nsa_prefill_context_parallel=False, enable_fused_qk_norm_rope=False, enable_dynamic_batch_tokenizer=False, dynamic_batch_tokenizer_batch_size=32, dynamic_batch_tokenizer_batch_timeout=0.002, debug_tensor_dump_output_folder=None, debug_tensor_dump_layers=None, debug_tensor_dump_input_file=None, debug_tensor_dump_inject=False, disaggregation_mode='null', disaggregation_transfer_backend='mooncake', disaggregation_bootstrap_port=8998, disaggregation_decode_tp=None, disaggregation_decode_dp=None, disaggregation_prefill_pp=1, disaggregation_ib_device=None, disaggregation_decode_enable_offload_kvcache=False, num_reserved_decode_tokens=512, disaggregation_decode_polling_interval=1, custom_weight_loader=[], weight_loader_disable_mmap=False, remote_instance_weight_loader_seed_instance_ip=None, remote_instance_weight_loader_seed_instance_service_port=None, remote_instance_weight_loader_send_weights_group_ports=None, enable_pdmux=False, pdmux_config_path=None, sm_group_num=8, mm_max_concurrent_calls=32, mm_per_request_timeout=10.0, enable_broadcast_mm_inputs_process=False, decrypted_config_file=None, decrypted_draft_config_file=None, mm_enable_dp_encoder=False, forward_hooks=None)\r\n",,terminal_output +86,149412,"TERMINAL",0,0,"[2026-01-05 15:47:16] Using default HuggingFace chat template with detected content format: openai\r\n",,terminal_output +87,165402,"TERMINAL",0,0,"srun",,terminal_focus +88,169609,"TERMINAL",0,0,"[2026-01-05 15:47:36 TP1] Init torch distributed begin.\r\n",,terminal_output +89,170637,"TERMINAL",0,0,"[2026-01-05 15:47:37 TP5] Init torch distributed begin.\r\n",,terminal_output +90,170718,"TERMINAL",0,0,"[2026-01-05 15:47:37 TP5] Context: self.device='cuda' self.gpu_id=5 os.environ.get('CUDA_VISIBLE_DEVICES')='0,1,2,3' self.tp_rank=5 self.tp_size=8\r\n[2026-01-05 15:47:37 TP5] Scheduler hit an exception: Traceback (most recent call last):\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/sglang/srt/managers/scheduler.py"", line 2680, in run_scheduler_process\r\n scheduler = Scheduler(\r\n server_args,\r\n ...<5 lines>...\r\n dp_rank,\r\n )\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/sglang/srt/managers/scheduler.py"", line 320, in __init__\r\n self.tp_worker = TpModelWorker(\r\n 
~~~~~~~~~~~~~^\r\n server_args=server_args,\r\n ^^^^^^^^^^^^^^^^^^^^^^^^\r\n ...<5 lines>...\r\n nccl_port=port_args.nccl_port,\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n )\r\n ^\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/sglang/srt/managers/tp_worker.py"", line 248, in __init__\r\n self._model_runner = ModelRunner(\r\n ~~~~~~~~~~~^\r\n model_config=self.model_config,\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n ...<13 lines>...\r\n token_to_kv_pool_allocator=token_to_kv_pool_allocator,\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n )\r\n ^\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/sglang/srt/model_executor/model_runner.py"", line 342, in __init__\r\n min_per_gpu_memory = self.init_torch_distributed()\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/sglang/srt/model_executor/model_runner.py"", line 616, in init_torch_distributed\r\n torch.get_device_module(self.device).set_device(self.gpu_id)\r\n ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^\r\n File ""/fast/home/franz.srambical/crowd-pilot-serializer-legacy/.venv/lib/python3.13/site-packages/torch/cuda/__init__.py"", line 567, in set_device\r\n torch._C._cuda_setDevice(device)\r\n ~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^\r\ntorch.AcceleratorError: CUDA error: invalid device ordinal\r\nGPU device may be out of range, do you have enough GPUs?\r\nCUDA kernel errors might be asynchronously reported at some other API call, so the stacktrace below might be incorrect.\r\nFor debugging consider passing CUDA_LAUNCH_BLOCKING=1\r\nCompile with `TORCH_USE_CUDA_DSA` to enable device-side assertions.\r\n\r\n\r\n[2026-01-05 15:47:37] Received sigquit from a child process. 
It usually means the child failed.\r\n",,terminal_output +91,171462,"TERMINAL",0,0,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh: line 28: 1335034 Killed python3 -m sglang.launch_server --model-path $model_path --host 0.0.0.0 --log-requests --tp-size 8 --tool-call-parser glm45 --reasoning-parser glm45 --speculative-algorithm EAGLE --speculative-num-steps 3 --speculative-eagle-topk 1 --speculative-num-draft-tokens 4 --mem-fraction-static 0.9\r\n]0;franz.srambical@hai-login2:~/crowd-pilot-extension[?2004h[franz.srambical@hai001.haicore.berlin:~/crowd-pilot-extension] $ ",,terminal_output +92,239931,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",835,0,"",shellscript,selection_command +93,240017,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",805,0,"",shellscript,selection_command +94,240183,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",771,0,"",shellscript,selection_command +95,240286,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",742,0,"",shellscript,selection_command +96,240416,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",712,0,"",shellscript,selection_command +97,240583,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",684,0,"",shellscript,selection_command +98,241068,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",682,0,"",shellscript,selection_command +99,241684,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",682,1,"4",shellscript,content +100,243017,"TERMINAL",0,0,"bash /home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",,terminal_output +101,243189,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +102,250329,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",0,0,"",shellscript,selection_command +103,253553,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",20,0,"",shellscript,selection_command +104,253676,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",21,0,"",shellscript,selection_command +105,253805,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",39,0,"",shellscript,selection_command +106,253919,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",67,0,"",shellscript,selection_command +107,254323,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",89,0,"",shellscript,selection_command +108,254635,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",65,0,"",shellscript,selection_command +109,254732,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",37,0,"",shellscript,selection_command +110,254951,"TERMINAL",0,0,"[2026-01-05 15:49:01] WARNING server_args.py:711: The tool_call_parser 'glm45' is deprecated. 
Please use 'glm' instead.\r\n",,terminal_output +111,255386,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",65,0,"",shellscript,selection_command +112,255543,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",89,0,"",shellscript,selection_command +113,255688,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",115,0,"",shellscript,selection_command +114,255848,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",136,0,"",shellscript,selection_command +115,257524,"TERMINAL",0,0,"[2026-01-05 15:49:04] WARNING server_args.py:1406: Attention backend not explicitly specified. Use fa3 backend by default.\r\n[2026-01-05 15:49:04] WARNING server_args.py:1741: Max running requests is reset to 48 for speculative decoding. You can override this by explicitly setting --max-running-requests.\r\n[2026-01-05 15:49:04] WARNING server_args.py:1756: Overlap scheduler is disabled because of using eagle3 or standalone speculative decoding.\r\n",,terminal_output +116,258084,"TERMINAL",0,0,"[2026-01-05 15:49:04] server_args=ServerArgs(model_path='zai-org/GLM-4.5-Air', tokenizer_path='zai-org/GLM-4.5-Air', tokenizer_mode='auto', tokenizer_worker_num=1, skip_tokenizer_init=False, load_format='auto', model_loader_extra_config='{}', rl_quant_profile=None, trust_remote_code=False, context_length=None, is_embedding=False, enable_multimodal=None, revision=None, model_impl='auto', host='0.0.0.0', port=30000, fastapi_root_path='', grpc_mode=False, skip_server_warmup=False, warmups=None, nccl_port=None, checkpoint_engine_wait_weights_before_ready=False, dtype='auto', quantization=None, quantization_param_path=None, kv_cache_dtype='auto', enable_fp32_lm_head=False, modelopt_quant=None, modelopt_checkpoint_restore_path=None, modelopt_checkpoint_save_path=None, modelopt_export_path=None, quantize_and_serve=False, mem_fraction_static=0.9, max_running_requests=48, max_queued_requests=None, max_total_tokens=None, chunked_prefill_size=8192, max_prefill_tokens=16384, schedule_policy='fcfs', enable_priority_scheduling=False, abort_on_priority_when_disabled=False, schedule_low_priority_values_first=False, priority_scheduling_preemption_threshold=10, schedule_conservativeness=1.0, page_size=1, hybrid_kvcache_ratio=None, swa_full_tokens_ratio=0.8, disable_hybrid_swa_memory=False, radix_eviction_policy='lru', device='cuda', tp_size=4, pp_size=1, pp_max_micro_batch_size=None, stream_interval=1, stream_output=False, random_seed=429590888, constrained_json_whitespace_pattern=None, constrained_json_disable_any_whitespace=False, watchdog_timeout=300, dist_timeout=None, download_dir=None, base_gpu_id=0, gpu_id_step=1, sleep_on_idle=False, mm_process_config={}, log_level='info', log_level_http=None, log_requests=True, log_requests_level=2, crash_dump_folder=None, show_time_cost=False, enable_metrics=False, enable_metrics_for_all_schedulers=False, tokenizer_metrics_custom_labels_header='x-custom-labels', tokenizer_metrics_allowed_custom_labels=None, bucket_time_to_first_token=None, bucket_inter_token_latency=None, bucket_e2e_request_latency=None, collect_tokens_histogram=False, prompt_tokens_buckets=None, generation_tokens_buckets=None, gc_warning_threshold_secs=0.0, decode_log_interval=40, enable_request_time_stats_logging=False, kv_events_config=None, enable_trace=False, otlp_traces_endpoint='localhost:4317', export_metrics_to_file=False, export_metrics_to_file_dir=None, 
api_key=None, served_model_name='zai-org/GLM-4.5-Air', weight_version='default', chat_template=None, completion_template=None, file_storage_path='sglang_storage', enable_cache_report=False, reasoning_parser='glm45', tool_call_parser='glm', tool_server=None, sampling_defaults='model', dp_size=1, load_balance_method='round_robin', load_watch_interval=0.1, prefill_round_robin_balance=False, dist_init_addr=None, nnodes=1, node_rank=0, json_model_override_args='{}', preferred_sampling_params=None, enable_lora=None, max_lora_rank=None, lora_target_modules=None, lora_paths=None, max_loaded_loras=None, max_loras_per_batch=8, lora_eviction_policy='lru', lora_backend='csgmv', max_lora_chunk_size=16, attention_backend='fa3', decode_attention_backend=None, prefill_attention_backend=None, sampling_backend='flashinfer', grammar_backend='xgrammar', mm_attention_backend=None, fp8_gemm_runner_backend='auto', nsa_prefill_backend='flashmla_sparse', nsa_decode_backend='fa3', enable_flashinfer_autotune=False, speculative_algorithm='EAGLE', speculative_draft_model_path='zai-org/GLM-4.5-Air', speculative_draft_model_revision=None, speculative_draft_load_format=None, speculative_num_steps=3, speculative_eagle_topk=1, speculative_num_draft_tokens=4, speculative_accept_threshold_single=1.0, speculative_accept_threshold_acc=1.0, speculative_token_map=None, speculative_attention_mode='prefill', speculative_moe_runner_backend=None, speculative_moe_a2a_backend=None, speculative_ngram_min_match_window_size=1, speculative_ngram_max_match_window_size=12, speculative_ngram_min_bfs_breadth=1, speculative_ngram_max_bfs_breadth=10, speculative_ngram_match_type='BFS', speculative_ngram_branch_length=18, speculative_ngram_capacity=10000000, ep_size=1, moe_a2a_backend='none', moe_runner_backend='auto', flashinfer_mxfp4_moe_precision='default', enable_flashinfer_allreduce_fusion=False, deepep_mode='auto', ep_num_redundant_experts=0, ep_dispatch_algorithm=None, init_expert_location='trivial', enable_eplb=False, eplb_algorithm='auto', eplb_rebalance_num_iterations=1000, eplb_rebalance_layers_per_chunk=None, eplb_min_rebalancing_utilization_threshold=1.0, expert_distribution_recorder_mode=None, expert_distribution_recorder_buffer_size=1000, enable_expert_distribution_metrics=False, deepep_config=None, moe_dense_tp_size=None, elastic_ep_backend=None, mooncake_ib_device=None, max_mamba_cache_size=None, mamba_ssm_dtype='float32', mamba_full_memory_ratio=0.9, enable_hierarchical_cache=False, hicache_ratio=2.0, hicache_size=0, hicache_write_policy='write_through', hicache_io_backend='kernel', hicache_mem_layout='layer_first', hicache_storage_backend=None, hicache_storage_prefetch_policy='best_effort', hicache_storage_backend_extra_config=None, enable_lmcache=False, kt_weight_path=None, kt_method='AMXINT4', kt_cpuinfer=None, kt_threadpool_count=2, kt_num_gpu_experts=None, kt_max_deferred_experts_per_token=None, dllm_algorithm=None, dllm_algorithm_config=None, enable_double_sparsity=False, ds_channel_config_path=None, ds_heavy_channel_num=32, ds_heavy_token_num=256, ds_heavy_channel_type='qk', ds_sparse_decode_threshold=4096, cpu_offload_gb=0, offload_group_size=-1, offload_num_in_group=1, offload_prefetch_step=1, offload_mode='cpu', multi_item_scoring_delimiter=None, disable_radix_cache=False, cuda_graph_max_bs=512, cuda_graph_bs=[1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 40, 44, 48, 52, 56, 60, 64, 72, 80, 88, 96, 104, 112, 120, 128, 136, 144, 152, 160, 168, 176, 184, 192, 200, 208, 216, 224, 232, 240, 248, 
256, 272, 288, 304, 320, 336, 352, 368, 384, 400, 416, 432, 448, 464, 480, 496, 512], disable_cuda_graph=False, disable_cuda_graph_padding=False, enable_profile_cuda_graph=False, enable_cudagraph_gc=False, enable_layerwise_nvtx_marker=False, enable_nccl_nvls=False, enable_symm_mem=False, disable_flashinfer_cutlass_moe_fp4_allgather=False, enable_tokenizer_batch_encode=False, disable_tokenizer_batch_decode=False, disable_outlines_disk_cache=False, disable_custom_all_reduce=False, enable_mscclpp=False, enable_torch_symm_mem=False, disable_overlap_schedule=True, enable_mixed_chunk=False, enable_dp_attention=False, enable_dp_lm_head=False, enable_two_batch_overlap=False, enable_single_batch_overlap=False, tbo_token_distribution_threshold=0.48, enable_torch_compile=False, enable_piecewise_cuda_graph=False, enable_torch_compile_debug_mode=False, torch_compile_max_bs=32, piecewise_cuda_graph_max_tokens=4096, piecewise_cuda_graph_tokens=[4, 8, 12, 16, 20, 24, 28, 32, 48, 64, 80, 96, 112, 128, 144, 160, 176, 192, 208, 224, 240, 256, 288, 320, 352, 384, 416, 448, 480, 512, 640, 768, 896, 1024, 1152, 1280, 1408, 1536, 1664, 1792, 1920, 2048, 2176, 2304, 2432, 2560, 2688, 2816, 2944, 3072, 3200, 3328, 3456, 3584, 3712, 3840, 3968, 4096], piecewise_cuda_graph_compiler='eager', torchao_config='', enable_nan_detection=False, enable_p2p_check=False, triton_attention_reduce_in_fp32=False, triton_attention_num_kv_splits=8, triton_attention_split_tile_size=None, num_continuous_decode_steps=1, delete_ckpt_after_loading=False, enable_memory_saver=False, enable_weights_cpu_backup=False, enable_draft_weights_cpu_backup=False, allow_auto_truncate=False, enable_custom_logit_processor=False, flashinfer_mla_disable_ragged=False, disable_shared_experts_fusion=False, disable_chunked_prefix_cache=False, disable_fast_image_processor=False, keep_mm_feature_on_device=False, enable_return_hidden_states=False, scheduler_recv_interval=1, numa_node=None, enable_deterministic_inference=False, rl_on_policy_target=None, enable_attn_tp_input_scattered=False, enable_nsa_prefill_context_parallel=False, enable_fused_qk_norm_rope=False, enable_dynamic_batch_tokenizer=False, dynamic_batch_tokenizer_batch_size=32, dynamic_batch_tokenizer_batch_timeout=0.002, debug_tensor_dump_output_folder=None, debug_tensor_dump_layers=None, debug_tensor_dump_input_file=None, debug_tensor_dump_inject=False, disaggregation_mode='null', disaggregation_transfer_backend='mooncake', disaggregation_bootstrap_port=8998, disaggregation_decode_tp=None, disaggregation_decode_dp=None, disaggregation_prefill_pp=1, disaggregation_ib_device=None, disaggregation_decode_enable_offload_kvcache=False, num_reserved_decode_tokens=512, disaggregation_decode_polling_interval=1, custom_weight_loader=[], weight_loader_disable_mmap=False, remote_instance_weight_loader_seed_instance_ip=None, remote_instance_weight_loader_seed_instance_service_port=None, remote_instance_weight_loader_send_weights_group_ports=None, enable_pdmux=False, pdmux_config_path=None, sm_group_num=8, mm_max_concurrent_calls=32, mm_per_request_timeout=10.0, enable_broadcast_mm_inputs_process=False, decrypted_config_file=None, decrypted_draft_config_file=None, mm_enable_dp_encoder=False, forward_hooks=None)\r\n",,terminal_output +117,258901,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",136,1,"]",shellscript,content +118,258950,"TERMINAL",0,0,"[2026-01-05 15:49:05] Using default HuggingFace chat template with detected content format: 
openai\r\n",,terminal_output +119,262904,"/home/franz.srambical/slurm/jobs/franz/berlin/crowd-pilot/start_sglang_server_glm4_5_air.sh",136,1,"4",shellscript,content +120,277094,"TERMINAL",0,0,"[2026-01-05 15:49:23 TP2] Init torch distributed begin.\r\n",,terminal_output +121,277281,"TERMINAL",0,0,"[2026-01-05 15:49:24 TP0] Init torch distributed begin.\r\n",,terminal_output +122,282083,"TERMINAL",0,0,"[2026-01-05 15:49:28 TP1] Init torch distributed begin.\r\n",,terminal_output +123,287296,"TERMINAL",0,0,"[2026-01-05 15:49:34 TP3] Init torch distributed begin.\r\n",,terminal_output +124,287501,"TERMINAL",0,0,"[Gloo] Rank 1 is connected to 3 peer ranks. Expected number of connected peer ranks is : 3\r\n[Gloo] Rank 0 is connected to 3 peer ranks. Expected number of connected peer ranks is : 3\r\n[Gloo] Rank 2 is connected to 3 peer ranks. Expected number of connected peer ranks is : 3\r\n[Gloo] Rank 3 is connected to 3 peer ranks. Expected number of connected peer ranks is : 3\r\n",,terminal_output +125,287577,"TERMINAL",0,0,"[Gloo] Rank 1 is connected to 3 peer ranks. Expected number of connected peer ranks is : 3\r\n[Gloo] Rank 3 is connected to 3 peer ranks. Expected number of connected peer ranks is : 3\r\n[Gloo] Rank 0 is connected to 3 peer ranks. Expected number of connected peer ranks is : 3\r\n[Gloo] Rank 2 is connected to 3 peer ranks. Expected number of connected peer ranks is : 3\r\n[2026-01-05 15:49:34 TP0] sglang is using nccl==2.27.5\r\n",,terminal_output +126,288576,"TERMINAL",0,0,"[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n[Gloo] Rank 0 is connected to 3 peer ranks. Expected number of connected peer ranks is : 3\r\n[Gloo] Rank 1 is connected to 3 peer ranks. Expected number of connected peer ranks is : 3\r\n[Gloo] Rank 3 is connected to 3 peer ranks. Expected number of connected peer ranks is : 3\r\n[Gloo] Rank 2 is connected to 3 peer ranks. Expected number of connected peer ranks is : 3\r\n[2026-01-05 15:49:35 TP0] Init torch distributed ends. mem usage=1.25 GB\r\n[2026-01-05 15:49:35 TP3] Init torch distributed ends. mem usage=1.06 GB\r\n[2026-01-05 15:49:35 TP2] Init torch distributed ends. mem usage=1.29 GB\r\n[2026-01-05 15:49:35 TP1] Init torch distributed ends. 
mem usage=1.29 GB\r\n",,terminal_output +127,293879,"TERMINAL",0,0,"[2026-01-05 15:49:40 TP1] Ignore import error when loading sglang.srt.models.mindspore: name 'ms' is not defined\r\n[2026-01-05 15:49:40 TP3] Ignore import error when loading sglang.srt.models.mindspore: name 'ms' is not defined\r\n[2026-01-05 15:49:40 TP0] Ignore import error when loading sglang.srt.models.mindspore: name 'ms' is not defined\r\n",,terminal_output +128,295269,"TERMINAL",0,0,"[2026-01-05 15:49:42 TP2] Ignore import error when loading sglang.srt.models.mindspore: name 'ms' is not defined\r\n",,terminal_output +129,299858,"TERMINAL",0,0,"[2026-01-05 15:49:46 TP3] Load weight begin. avail mem=77.61 GB\r\n[2026-01-05 15:49:46 TP2] Load weight begin. avail mem=77.38 GB\r\n[2026-01-05 15:49:46 TP1] Load weight begin. avail mem=77.38 GB\r\n[2026-01-05 15:49:46 TP0] Load weight begin. avail mem=77.42 GB\r\n[2026-01-05 15:49:46 TP0] Shared experts fusion optimization enabled.\r\n",,terminal_output +130,300693,"TERMINAL",0,0,"[2026-01-05 15:49:47 TP0] Found local HF snapshot for zai-org/GLM-4.5-Air at /fast/project/HFMI_SynergyUnit/tab_model/franz/hf_home/hub/models--zai-org--GLM-4.5-Air/snapshots/a24ceef6ce4f3536971efe9b778bdaa1bab18daa; skipping download.\r\n",,terminal_output +131,300836,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 0% Completed | 0/47 [00:00 {\n const detail = formatActionDetail(action);\n \n const items: vscode.QuickPickItem[] = [\n { \n label: '$(check) Accept', \n description: 'Execute this action',\n detail: detail\n },\n { \n label: '$(x) Dismiss', \n description: 'Cancel this suggestion'\n },\n ];\n\n const result = await vscode.window.showQuickPick(items, {\n title: 'Pending Suggestion',\n placeHolder: getActionSummary(action),\n ignoreFocusOut: false,\n });\n\n if (result?.label.includes('Accept')) {\n return 'accept';\n }\n if (result?.label.includes('Dismiss')) {\n return 'dismiss';\n }\n return null;\n}\n\n/**\n * Get a short summary of the action for the quick pick placeholder.\n */\nfunction getActionSummary(action: Action): string {\n switch (action.kind) {\n case 'terminalSendText':\n return `Run terminal command`;\n case 'openFile':\n const fileName = action.filePath.split(/[/\\]/).pop() || action.filePath;\n return `Open file: ${fileName}`;\n case 'setSelections':\n return `Move cursor to line ${action.selections[0].start[0] + 1}`;\n case 'editInsert':\n return 'Insert text';\n case 'editReplace':\n return 'Replace text';\n case 'editDelete':\n return `Delete lines ${action.range.start[0] + 1}–${action.range.end[0] + 1}`;\n case 'terminalShow':\n return 'Show terminal';\n case 'showTextDocument':\n return 'Show document';\n default:\n return 'Execute action';\n }\n}\n\n/**\n * Format the full action detail for display in quick pick.\n */\nfunction formatActionDetail(action: Action): string {\n switch (action.kind) {\n case 'terminalSendText':\n return action.text;\n case 'openFile':\n if (action.selections?.[0]) {\n const line = action.selections[0].start[0] + 1;\n return `${action.filePath}:${line}`;\n }\n return action.filePath;\n case 'setSelections':\n const sel = action.selections[0];\n return `Line ${sel.start[0] + 1}, Column ${sel.start[1] + 1}`;\n case 'editInsert':\n return truncate(action.text, 200);\n case 'editReplace':\n return truncate(action.text, 200);\n case 'editDelete':\n return `Lines ${action.range.start[0] + 1} to ${action.range.end[0] + 1}`;\n default:\n return '';\n }\n}\n\n\n\n",typescript,tab +145,389833,"TERMINAL",0,0,"\rLoading safetensors 
checkpoint shards: 28% Completed | 13/47 [01:28<03:56, 6.95s/it]\r\n",,terminal_output +146,396960,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 30% Completed | 14/47 [01:36<03:50, 6.99s/it]\r\n",,terminal_output +147,397332,"src/preview/quickPick.ts",1343,0,"",typescript,selection_keyboard +148,397632,"src/preview/quickPick.ts",2960,0,"",typescript,selection_keyboard +149,398309,"src/preview/quickPick.ts",1343,0,"",typescript,selection_keyboard +150,398528,"src/preview/quickPick.ts",0,0,"",typescript,selection_keyboard +151,400366,"src/preview/hoverProvider.ts",0,0,"import * as vscode from 'vscode';\nimport { Action, truncate } from './types';\n\n/**\n * Provides hover tooltips for meta-action indicators.\n * Shows full content when hovering over truncated terminal commands, etc.\n */\nexport class MetaActionHoverProvider implements vscode.HoverProvider {\n private action: Action | null = null;\n private anchorLine: number | null = null;\n\n /**\n * Set the current action and its anchor line for hover detection.\n */\n setAction(action: Action, anchorLine: number): void {\n this.action = action;\n this.anchorLine = anchorLine;\n }\n\n /**\n * Clear the current action.\n */\n clearAction(): void {\n this.action = null;\n this.anchorLine = null;\n }\n\n /**\n * Provide hover content when user hovers over the indicator area.\n */\n provideHover(\n document: vscode.TextDocument,\n position: vscode.Position,\n token: vscode.CancellationToken\n ): vscode.ProviderResult {\n if (!this.action || this.anchorLine === null) {\n return null;\n }\n\n // Check if hovering on the anchor line\n if (position.line !== this.anchorLine) {\n return null;\n }\n\n // Check if hovering past the line content (in the decoration area)\n const lineLength = document.lineAt(position.line).text.length;\n if (position.character < lineLength) {\n return null;\n }\n\n // Build hover content based on action type\n const content = this.buildHoverContent();\n if (!content) {\n return null;\n }\n\n return new vscode.Hover(content);\n }\n\n /**\n * Build markdown content for the hover based on action type.\n */\n private buildHoverContent(): vscode.MarkdownString | null {\n if (!this.action) {\n return null;\n }\n\n const md = new vscode.MarkdownString();\n md.isTrusted = true;\n\n switch (this.action.kind) {\n case 'terminalSendText':\n md.appendMarkdown('**Terminal Command**\n\n');\n md.appendCodeblock(this.action.text, 'bash');\n md.appendMarkdown('\n\n*Press Tab to execute, Esc to dismiss*');\n return md;\n\n case 'openFile':\n md.appendMarkdown('**Open File**\n\n');\n md.appendMarkdown(`\`${this.action.filePath}\``);\n if (this.action.selections?.[0]) {\n const line = this.action.selections[0].start[0] + 1;\n md.appendMarkdown(` at line ${line}`);\n }\n md.appendMarkdown('\n\n*Press Tab to open, Esc to dismiss*');\n return md;\n\n case 'setSelections':\n const targetLine = this.action.selections[0].start[0] + 1;\n md.appendMarkdown('**Move Cursor**\n\n');\n md.appendMarkdown(`Go to line ${targetLine}`);\n md.appendMarkdown('\n\n*Press Tab to move, Esc to dismiss*');\n return md;\n\n case 'editInsert':\n md.appendMarkdown('**Insert Text**\n\n');\n md.appendCodeblock(this.action.text, 'plaintext');\n md.appendMarkdown('\n\n*Press Tab to insert, Esc to dismiss*');\n return md;\n\n case 'editReplace':\n md.appendMarkdown('**Replace Text**\n\n');\n md.appendCodeblock(this.action.text, 'plaintext');\n md.appendMarkdown('\n\n*Press Tab to replace, Esc to dismiss*');\n return md;\n\n case 'editDelete':\n const startLine 
= this.action.range.start[0] + 1;\n const endLine = this.action.range.end[0] + 1;\n md.appendMarkdown('**Delete Text**\n\n');\n md.appendMarkdown(`Lines ${startLine}–${endLine}`);\n md.appendMarkdown('\n\n*Press Tab to delete, Esc to dismiss*');\n return md;\n\n default:\n return null;\n }\n }\n}\n\n\n\n",typescript,tab +152,402623,"src/preview/hoverProvider.ts",34,0,"",typescript,selection_command +153,402779,"src/preview/hoverProvider.ts",41,0,"",typescript,selection_command +154,402937,"src/preview/hoverProvider.ts",43,0,"",typescript,selection_command +155,403246,"src/preview/hoverProvider.ts",49,0,"",typescript,selection_command +156,403337,"src/preview/hoverProvider.ts",51,0,"",typescript,selection_command +157,403934,"src/preview/hoverProvider.ts",51,9,"",typescript,content +158,403934,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 32% Completed | 15/47 [01:43<03:43, 6.99s/it]\r\n",,terminal_output +159,404346,"src/preview/hoverProvider.ts",50,0,"",typescript,selection_command +160,404450,"src/preview/hoverProvider.ts",49,0,"",typescript,selection_command +161,404817,"src/preview/hoverProvider.ts",49,1,"",typescript,content +162,410878,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 34% Completed | 16/47 [01:50<03:36, 6.98s/it]\r\n",,terminal_output +163,417618,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 36% Completed | 17/47 [01:56<03:27, 6.91s/it]\r\n",,terminal_output +164,424590,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 38% Completed | 18/47 [02:03<03:21, 6.93s/it]\r\n",,terminal_output +165,431456,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 40% Completed | 19/47 [02:10<03:13, 6.91s/it]\r\n",,terminal_output +166,438676,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 43% Completed | 20/47 [02:17<03:09, 7.01s/it]\r\n",,terminal_output +167,445816,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 45% Completed | 21/47 [02:24<03:03, 7.05s/it]\r\n",,terminal_output +168,453156,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 47% Completed | 22/47 [02:32<02:57, 7.11s/it]\r\n",,terminal_output +169,453645,"src/preview/hoverProvider.ts",1463,0,"",typescript,selection_keyboard +170,453877,"src/preview/hoverProvider.ts",49,0,"",typescript,selection_keyboard +171,454143,"src/preview/hoverProvider.ts",0,0,"",typescript,selection_keyboard +172,459687,"src/preview/hoverProvider.ts",1447,0,"",typescript,selection_keyboard +173,460194,"src/preview/hoverProvider.ts",3299,0,"",typescript,selection_keyboard +174,460275,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 49% Completed | 23/47 [02:39<02:51, 7.13s/it]\r\n",,terminal_output +175,464667,"src/preview/hoverProvider.ts",4043,0,"",typescript,selection_keyboard +176,466297,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 51% Completed | 24/47 [02:45<02:35, 6.78s/it]\r\n",,terminal_output +177,466872,"src/preview/hoverProvider.ts",2098,0,"",typescript,selection_keyboard +178,467061,"src/preview/hoverProvider.ts",705,0,"",typescript,selection_keyboard +179,468948,"src/preview/hoverProvider.ts",2098,0,"",typescript,selection_keyboard +180,471095,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 53% Completed | 25/47 [02:50<02:16, 6.21s/it]\r\n",,terminal_output +181,472158,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 55% Completed | 26/47 [02:51<01:37, 4.67s/it]\r\n",,terminal_output +182,476690,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 57% Completed | 27/47 [02:55<01:32, 4.63s/it]\r\n",,terminal_output 
+183,481581,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 60% Completed | 28/47 [03:00<01:29, 4.70s/it]\r\n",,terminal_output +184,486460,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 62% Completed | 29/47 [03:05<01:25, 4.73s/it]\r\n",,terminal_output +185,490485,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 64% Completed | 30/47 [03:09<01:17, 4.54s/it]\r\n",,terminal_output +186,494680,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 66% Completed | 31/47 [03:13<01:11, 4.44s/it]\r\n",,terminal_output +187,499638,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 70% Completed | 33/47 [03:18<00:48, 3.47s/it]\r\n",,terminal_output +188,503794,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 72% Completed | 34/47 [03:22<00:48, 3.70s/it]\r\n",,terminal_output +189,504094,"src/preview/hoverProvider.ts",0,0,"",typescript,selection_command +190,508463,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 74% Completed | 35/47 [03:27<00:47, 3.94s/it]\r\n",,terminal_output +191,511662,"src/preview/hoverProvider.ts",34,0,"",typescript,selection_command +192,511923,"src/preview/hoverProvider.ts",68,0,"",typescript,selection_command +193,511923,"src/preview/hoverProvider.ts",69,0,"",typescript,selection_command +194,511987,"src/preview/hoverProvider.ts",73,0,"",typescript,selection_command +195,511987,"src/preview/hoverProvider.ts",128,0,"",typescript,selection_command +196,512011,"src/preview/hoverProvider.ts",203,0,"",typescript,selection_command +197,512116,"src/preview/hoverProvider.ts",207,0,"",typescript,selection_command +198,512116,"src/preview/hoverProvider.ts",278,0,"",typescript,selection_command +199,512117,"src/preview/hoverProvider.ts",320,0,"",typescript,selection_command +200,512176,"src/preview/hoverProvider.ts",366,0,"",typescript,selection_command +201,512337,"src/preview/hoverProvider.ts",367,0,"",typescript,selection_command +202,513024,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 77% Completed | 36/47 [03:32<00:45, 4.11s/it]\r\n",,terminal_output +203,517915,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 79% Completed | 37/47 [03:36<00:42, 4.29s/it]\r\n",,terminal_output +204,522811,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 81% Completed | 38/47 [03:41<00:38, 4.33s/it]\r\n",,terminal_output +205,526988,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 83% Completed | 39/47 [03:46<00:35, 4.49s/it]\r\n",,terminal_output +206,531606,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 85% Completed | 40/47 [03:50<00:31, 4.53s/it]\r\n",,terminal_output +207,536122,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 87% Completed | 41/47 [03:55<00:27, 4.52s/it]\r\n",,terminal_output +208,538449,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 89% Completed | 42/47 [03:57<00:19, 3.86s/it]\r\n",,terminal_output +209,538978,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 91% Completed | 43/47 [03:58<00:11, 2.85s/it]\r\n",,terminal_output +210,539442,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 94% Completed | 44/47 [03:58<00:06, 2.15s/it]\r\n",,terminal_output +211,539870,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 96% Completed | 45/47 [03:59<00:03, 1.65s/it]\r\n",,terminal_output +212,540366,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 98% Completed | 46/47 [03:59<00:01, 1.29s/it]\r\n",,terminal_output +213,540819,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 100% Completed | 47/47 [03:59<00:00, 
1.04s/it]\r\n\rLoading safetensors checkpoint shards: 100% Completed | 47/47 [03:59<00:00, 5.10s/it]\r\n\r\n",,terminal_output +214,540890,"TERMINAL",0,0,"[2026-01-05 15:53:47 TP0] Load weight end. type=Glm4MoeForCausalLM, dtype=torch.bfloat16, avail mem=27.52 GB, mem usage=49.90 GB.\r\n[2026-01-05 15:53:47 TP2] Load weight end. type=Glm4MoeForCausalLM, dtype=torch.bfloat16, avail mem=27.48 GB, mem usage=49.90 GB.\r\n",,terminal_output +215,541623,"TERMINAL",0,0,"[2026-01-05 15:53:48 TP3] Load weight end. type=Glm4MoeForCausalLM, dtype=torch.bfloat16, avail mem=27.71 GB, mem usage=49.90 GB.\r\n",,terminal_output +216,542030,"TERMINAL",0,0,"[2026-01-05 15:53:48 TP1] Load weight end. type=Glm4MoeForCausalLM, dtype=torch.bfloat16, avail mem=27.48 GB, mem usage=49.90 GB.\r\n[2026-01-05 15:53:48 TP0] Using KV cache dtype: torch.bfloat16\r\n",,terminal_output +217,543434,"TERMINAL",0,0,"[2026-01-05 15:53:50 TP1] KV Cache is allocated. #tokens: 450343, K size: 9.88 GB, V size: 9.88 GB\r\n[2026-01-05 15:53:50 TP2] KV Cache is allocated. #tokens: 450343, K size: 9.88 GB, V size: 9.88 GB\r\n[2026-01-05 15:53:50 TP1] Memory pool end. avail mem=7.68 GB\r\n[2026-01-05 15:53:50 TP0] KV Cache is allocated. #tokens: 450343, K size: 9.88 GB, V size: 9.88 GB\r\n[2026-01-05 15:53:50 TP3] KV Cache is allocated. #tokens: 450343, K size: 9.88 GB, V size: 9.88 GB\r\n[2026-01-05 15:53:50 TP0] Memory pool end. avail mem=7.73 GB\r\n[2026-01-05 15:53:50 TP3] Memory pool end. avail mem=7.92 GB\r\n[2026-01-05 15:53:50 TP2] Memory pool end. avail mem=7.68 GB\r\n",,terminal_output +218,543496,"TERMINAL",0,0,"[2026-01-05 15:53:50 TP0] Capture cuda graph begin. This can take up to several minutes. avail mem=7.64 GB\r\n[2026-01-05 15:53:50 TP0] Capture cuda graph bs [1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 40, 44, 48]\r\n[2026-01-05 15:53:50 TP1] Capture cuda graph begin. This can take up to several minutes. avail mem=7.59 GB\r\n[2026-01-05 15:53:50 TP3] Capture cuda graph begin. This can take up to several minutes. avail mem=7.82 GB\r\n[2026-01-05 15:53:50 TP2] Capture cuda graph begin. This can take up to several minutes. 
avail mem=7.59 GB\r\n",,terminal_output +219,543827,"TERMINAL",0,0,"\r 0%| | 0/23 [00:00 {\n if (!this.currentAction) {\n return null;\n }\n return showPendingActionQuickPick(this.currentAction);\n }\n\n /**\n * Dispose all resources.\n */\n dispose(): void {\n this.decorationPool.dispose();\n this.hoverProviderDisposable?.dispose();\n }\n\n // -------------------- Preview Renderers --------------------\n\n /**\n * Check if an action can use inline completion (Case 1).\n * Case 1: Pure insertion at/after cursor position - use inline completion API.\n * Case 2: Anything else - use text decorators.\n */\n private canUseInlineCompletion(action: Action, editor: vscode.TextEditor): boolean {\n const cursor = editor.selection.active;\n \n if (action.kind === 'editInsert') {\n const insertPos = toVscodePosition(action.position);\n // Can use inline if inserting on same line at/after cursor, or on a later line\n if (insertPos.line > cursor.line) {\n return true;\n }\n if (insertPos.line === cursor.line && insertPos.character >= cursor.character) {\n return true;\n }\n return false;\n }\n \n if (action.kind === 'editReplace') {\n const range = toVscodeRange(action.range);\n // Can use inline only if the range starts at/after cursor (pure addition scenario)\n // AND the range is empty (no deletion)\n if (range.isEmpty && range.start.isAfterOrEqual(cursor)) {\n return true;\n }\n return false;\n }\n \n return false;\n }\n\n /**\n * Show preview for text insertion.\n * Case 1: Insert at/after cursor → inline completion (ghost text)\n * Case 2: Insert before cursor → decorations\n */\n private showInsertPreview(action: { kind: 'editInsert'; position: [number, number]; text: string }, editor?: vscode.TextEditor): void {\n if (!editor) {\n return;\n }\n \n const insertPos = toVscodePosition(action.position);\n const anchorLine = Math.min(action.position[0], editor.document.lineCount - 1);\n \n if (this.canUseInlineCompletion(action, editor)) {\n // Case 1: Use inline completion - clean ghost text\n this.inlineProvider.setAction(action);\n } else {\n // Case 2: Use decorations - show green insertion block\n this.showInsertionBlock(editor, anchorLine, action.text);\n }\n \n // Set up hover provider for detailed view\n this.hoverProvider.setAction(action, anchorLine);\n }\n\n /**\n * Show preview for text replacement.\n * Case 1: Empty range at/after cursor → inline completion (ghost text)\n * Case 2: Pure insertion (no deletions) → show only inserted text inline in green\n * Case 3: Has deletions → decorations (red deletion + green addition)\n * - If coherent (single substring replacement): show green inline after red\n * - If not coherent (scattered changes): show green block on next line\n */\n private showReplacePreview(action: { kind: 'editReplace'; range: { start: [number, number]; end: [number, number] }; text: string }, editor?: vscode.TextEditor): void {\n if (!editor) {\n return;\n }\n\n const range = toVscodeRange(action.range);\n \n if (this.canUseInlineCompletion(action, editor)) {\n // Case 1: Use inline completion only - no decorations\n this.inlineProvider.setAction(action);\n } else {\n const oldText = editor.document.getText(range);\n \n // Case 2: Check for pure insertion first (no deletions)\n const pureInsertion = analyzePureInsertion(editor.document, range, action.text);\n if (pureInsertion.isPureInsertion && pureInsertion.insertionPosition && pureInsertion.insertionText) {\n // Pure insertion: show only the new text inline in green (no red)\n this.showInlineInsertion(editor, 
pureInsertion.insertionPosition, pureInsertion.insertionText);\n } else {\n // Case 3: Has deletions - show red strikethrough\n const deletionRanges = computeDeletionRanges(editor.document, range, action.text);\n \n if (deletionRanges.length > 0) {\n const decorationOptions: vscode.DecorationOptions[] = deletionRanges.map(r => ({\n range: r\n }));\n this.decorationPool.setDecorations(editor, 'deletion-char', decorationOptions);\n } else if (!range.isEmpty) {\n // Highlight entire range if no char-level diff but range is not empty\n this.decorationPool.setDecorations(editor, 'deletion', [{ range }]);\n }\n \n // Green highlight on text being added - only if there's actual new content\n // Don't show if it's purely a deletion (new text is subset of old text)\n if (hasInsertions(oldText, action.text)) {\n // Check if this is a coherent single-substring replacement\n const coherent = analyzeCoherentReplacement(editor.document, range, action.text);\n \n if (coherent.isCoherent && coherent.deletionRange && coherent.insertionText) {\n // Coherent: show green text inline right after the red deletion\n this.showInlineInsertion(editor, coherent.deletionRange.end, coherent.insertionText);\n } else {\n // Not coherent: show green block on next line\n this.showInsertionBlock(editor, range.end.line, action.text);\n }\n }\n }\n }\n\n // Set hover provider for full details\n this.hoverProvider.setAction(action, range.start.line);\n }\n\n /**\n * Show inserted text inline at a specific position (right after deleted text).\n * Used for coherent single-substring replacements.\n */\n private showInlineInsertion(editor: vscode.TextEditor, position: vscode.Position, text: string): void {\n // Format text for display\n const displayText = text.replace(/\n/g, '↵').replace(/\t/g, '→');\n const truncatedText = truncate(displayText, 60);\n \n const decorationOptions: vscode.DecorationOptions[] = [{\n range: new vscode.Range(position, position),\n renderOptions: {\n after: {\n contentText: truncatedText,\n color: COLORS.insertion.foreground,\n backgroundColor: COLORS.insertion.background,\n fontStyle: 'normal',\n border: '1px solid',\n borderColor: COLORS.insertion.border,\n }\n }\n }];\n\n this.decorationPool.setDecorations(editor, 'insertion-inline', decorationOptions);\n }\n\n /**\n * Show the new/inserted text with green highlight as a block after the specified line.\n */\n private showInsertionBlock(editor: vscode.TextEditor, afterLine: number, text: string): void {\n const anchorLine = Math.min(afterLine, editor.document.lineCount - 1);\n const anchorPos = new vscode.Position(anchorLine, Number.MAX_SAFE_INTEGER);\n \n // Format text for display (escape for CSS content)\n const displayText = text.replace(/\n/g, '↵').replace(/\t/g, '→');\n const truncatedText = truncate(displayText, 80);\n \n const decorationOptions: vscode.DecorationOptions[] = [{\n range: new vscode.Range(anchorPos, anchorPos),\n renderOptions: {\n after: {\n contentText: ` + ${truncatedText}`,\n color: COLORS.insertion.foreground,\n backgroundColor: COLORS.insertion.background,\n fontStyle: 'normal',\n margin: '0 0 0 2ch',\n border: '1px solid',\n borderColor: COLORS.insertion.border,\n }\n }\n }];\n\n this.decorationPool.setDecorations(editor, 'insertion-block', decorationOptions);\n }\n\n /**\n * Show preview for text deletion with strikethrough decoration.\n */\n private showDeletePreview(action: { kind: 'editDelete'; range: { start: [number, number]; end: [number, number] } }, editor?: vscode.TextEditor): void {\n if (!editor) {\n 
return;\n }\n\n const range = toVscodeRange(action.range);\n \n // Highlight the deletion range\n this.decorationPool.setDecorations(editor, 'deletion', [{ range }]);\n\n // Set hover provider\n this.hoverProvider.setAction(action, range.start.line);\n }\n\n /**\n * Show preview for terminal command with indicator decoration.\n */\n private showTerminalCommandPreview(action: { kind: 'terminalSendText'; text: string }, editor?: vscode.TextEditor): void {\n if (!editor) {\n return;\n }\n\n const anchorLine = this.getVisibleAnchorLine(editor);\n const cmdPreview = truncate(action.text, 60);\n \n this.showMetaIndicator(editor, anchorLine, '$(terminal)', `Run: ${cmdPreview}`, COLORS.terminal);\n this.hoverProvider.setAction(action, anchorLine);\n }\n\n /**\n * Show preview for cursor movement with indicator decoration.\n */\n private showCursorMovePreview(action: { kind: 'setSelections'; selections: Array<{ start: [number, number]; end: [number, number] }> }, editor?: vscode.TextEditor): void {\n if (!editor) {\n return;\n }\n\n const targetLine = action.selections[0].start[0];\n const targetPos = new vscode.Position(targetLine, action.selections[0].start[1]);\n const isTargetVisible = editor.visibleRanges.some(r => r.contains(targetPos));\n\n let anchorLine: number;\n let icon: string;\n let label: string;\n\n if (isTargetVisible) {\n // Target is visible, show indicator at target\n anchorLine = targetLine;\n icon = '$(arrow-right)';\n label = 'Move cursor here';\n } else {\n // Target is off-screen, show indicator at edge of visible area\n anchorLine = this.getVisibleAnchorLine(editor);\n const direction = targetLine < anchorLine ? '↑' : '↓';\n icon = `$(arrow-${targetLine < anchorLine ? 'up' : 'down'})`;\n label = `Go to line ${targetLine + 1}`;\n }\n\n this.showMetaIndicator(editor, anchorLine, icon, label, COLORS.cursorMove);\n this.hoverProvider.setAction(action, anchorLine);\n }\n\n /**\n * Show preview for file switch with indicator decoration.\n */\n private showFileSwitchPreview(action: { kind: 'openFile'; filePath: string; selections?: Array<{ start: [number, number]; end: [number, number] }> }, editor?: vscode.TextEditor): void {\n if (!editor) {\n return;\n }\n\n const anchorLine = this.getVisibleAnchorLine(editor);\n const fileName = action.filePath.split(/[/\\]/).pop() || action.filePath;\n const targetLine = action.selections?.[0]?.start[0];\n \n const label = targetLine !== undefined\n ? 
`Open: ${fileName}:${targetLine + 1}`\n : `Open: ${fileName}`;\n\n this.showMetaIndicator(editor, anchorLine, '$(file)', label, COLORS.fileSwitch);\n this.hoverProvider.setAction(action, anchorLine);\n }\n\n // -------------------- Helper Methods --------------------\n\n /**\n * Show a meta-action indicator decoration at the specified line.\n */\n private showMetaIndicator(\n editor: vscode.TextEditor,\n line: number,\n icon: string,\n label: string,\n color: vscode.ThemeColor\n ): void {\n const anchorPos = new vscode.Position(line, Number.MAX_SAFE_INTEGER);\n const range = new vscode.Range(anchorPos, anchorPos);\n\n const decorationOptions: vscode.DecorationOptions[] = [{\n range,\n renderOptions: {\n after: {\n contentText: ` ${icon} ${label}`,\n color: color,\n fontStyle: 'italic',\n margin: '0 0 0 2ch',\n }\n }\n }];\n\n this.decorationPool.setDecorations(editor, 'meta-indicator', decorationOptions);\n }\n\n /**\n * Get a visible anchor line for decorations.\n * Returns the line of the cursor if visible, or a line at the edge of the visible area.\n */\n private getVisibleAnchorLine(editor: vscode.TextEditor): number {\n const cursor = editor.selection.active;\n const isVisible = editor.visibleRanges.some(r => r.contains(cursor));\n\n if (isVisible) {\n return cursor.line;\n }\n\n if (editor.visibleRanges.length > 0) {\n const firstVisible = editor.visibleRanges[0];\n const lastVisible = editor.visibleRanges[editor.visibleRanges.length - 1];\n\n if (cursor.isBefore(firstVisible.start)) {\n return firstVisible.start.line;\n } else {\n return lastVisible.end.line;\n }\n }\n\n return 0;\n }\n\n}\n\n",typescript,tab +252,571070,"src/preview/index.ts",3372,0,"",typescript,selection_command +253,571072,"TERMINAL",0,0,"\rCapturing batches (bs=8 avail_mem=6.97 GB): 70%|██████████████████████████████████████████████████████████████████████▎ | 16/23 [00:27<00:02, 2.55it/s]\rCapturing batches (bs=7 avail_mem=6.96 GB): 70%|██████████████████████████████████████████████████████████████████████▎ | 16/23 [00:27<00:02, 2.55it/s]",,terminal_output +254,571802,"TERMINAL",0,0,"\rCapturing batches (bs=7 avail_mem=6.96 GB): 74%|██████████████████████████████████████████████████████████████████████████▋ | 17/23 [00:27<00:02, 2.04it/s]\rCapturing batches (bs=6 avail_mem=6.94 GB): 74%|██████████████████████████████████████████████████████████████████████████▋ | 17/23 [00:27<00:02, 2.04it/s]",,terminal_output +255,571996,"TERMINAL",0,0,"\rCapturing batches (bs=6 avail_mem=6.94 GB): 78%|███████████████████████████████████████████████████████████████████████████████ | 18/23 [00:28<00:02, 2.49it/s]\rCapturing batches (bs=5 avail_mem=6.93 GB): 78%|███████████████████████████████████████████████████████████████████████████████ | 18/23 [00:28<00:02, 2.49it/s]",,terminal_output +256,572126,"TERMINAL",0,0,"\rCapturing batches (bs=5 avail_mem=6.93 GB): 83%|███████████████████████████████████████████████████████████████████████████████████▍ | 19/23 [00:28<00:01, 2.95it/s]\rCapturing batches (bs=4 avail_mem=6.91 GB): 83%|███████████████████████████████████████████████████████████████████████████████████▍ | 19/23 [00:28<00:01, 2.95it/s]",,terminal_output +257,572332,"TERMINAL",0,0,"\rCapturing batches (bs=4 avail_mem=6.91 GB): 87%|███████████████████████████████████████████████████████████████████████████████████████▊ | 20/23 [00:28<00:00, 3.34it/s]\rCapturing batches (bs=3 avail_mem=6.90 GB): 87%|███████████████████████████████████████████████████████████████████████████████████████▊ | 20/23 [00:28<00:00, 
3.34it/s]",,terminal_output +258,572527,"TERMINAL",0,0,"\rCapturing batches (bs=3 avail_mem=6.90 GB): 91%|████████████████████████████████████████████████████████████████████████████████████████████▏ | 21/23 [00:28<00:00, 3.72it/s]\rCapturing batches (bs=2 avail_mem=6.88 GB): 91%|████████████████████████████████████████████████████████████████████████████████████████████▏ | 21/23 [00:28<00:00, 3.72it/s]",,terminal_output +259,572797,"TERMINAL",0,0,"\rCapturing batches (bs=2 avail_mem=6.88 GB): 96%|████████████████████████████████████████████████████████████████████████████████████████████████▌ | 22/23 [00:28<00:00, 4.02it/s]\rCapturing batches (bs=1 avail_mem=6.87 GB): 96%|████████████████████████████████████████████████████████████████████████████████████████████████▌ | 22/23 [00:28<00:00, 4.02it/s]",,terminal_output +260,572983,"TERMINAL",0,0,"\rCapturing batches (bs=1 avail_mem=6.87 GB): 100%|█████████████████████████████████████████████████████████████████████████████████████████████████████| 23/23 [00:29<00:00, 4.33it/s]\rCapturing batches (bs=1 avail_mem=6.87 GB): 100%|█████████████████████████████████████████████████████████████████████████████████████████████████████| 23/23 [00:29<00:00, 1.26s/it]\r\n[2026-01-05 15:54:19 TP0] Registering 2139 cuda graph addresses\r\n",,terminal_output +261,573415,"TERMINAL",0,0,"[2026-01-05 15:54:20 TP3] Capture cuda graph end. Time elapsed: 29.94 s. mem usage=0.78 GB. avail mem=7.04 GB.\r\n[2026-01-05 15:54:20 TP0] Capture cuda graph end. Time elapsed: 29.98 s. mem usage=0.78 GB. avail mem=6.85 GB.\r\n[2026-01-05 15:54:20 TP1] Capture cuda graph end. Time elapsed: 29.97 s. mem usage=0.78 GB. avail mem=6.81 GB.\r\n",,terminal_output +262,573579,"TERMINAL",0,0,"[2026-01-05 15:54:20 TP2] Capture cuda graph end. Time elapsed: 30.02 s. mem usage=0.78 GB. avail mem=6.81 GB.\r\n",,terminal_output +263,574561,"TERMINAL",0,0,"[2026-01-05 15:54:21 TP0] Init torch distributed begin.\r\n[2026-01-05 15:54:21 TP1] Init torch distributed begin.\r\n[2026-01-05 15:54:21 TP2] Init torch distributed begin.\r\n[2026-01-05 15:54:21 TP3] Init torch distributed begin.\r\n[2026-01-05 15:54:21 TP0] Init torch distributed ends. mem usage=0.00 GB\r\n[2026-01-05 15:54:21 TP3] Init torch distributed ends. mem usage=0.00 GB\r\n[2026-01-05 15:54:21 TP2] Init torch distributed ends. mem usage=0.00 GB\r\n[2026-01-05 15:54:21 TP1] Init torch distributed ends. mem usage=0.00 GB\r\n[2026-01-05 15:54:21 TP0] Load weight begin. avail mem=6.85 GB\r\n[2026-01-05 15:54:21 TP3] Load weight begin. avail mem=7.04 GB\r\n[2026-01-05 15:54:21 TP1] Load weight begin. avail mem=6.81 GB\r\n[2026-01-05 15:54:21 TP2] Load weight begin. 
avail mem=6.81 GB\r\n[2026-01-05 15:54:21 TP0] Found local HF snapshot for zai-org/GLM-4.5-Air at /fast/project/HFMI_SynergyUnit/tab_model/franz/hf_home/hub/models--zai-org--GLM-4.5-Air/snapshots/a24ceef6ce4f3536971efe9b778bdaa1bab18daa; skipping download.\r\n",,terminal_output +264,574627,"TERMINAL",0,0,"\rLoading safetensors checkpoint shards: 0% Completed | 0/47 [00:00 {\n if (!this.enabled || !this.action) {\n return [];\n }\n\n // Handle insertions\n if (this.action.kind === 'editInsert') {\n const insertPos = toVscodePosition(this.action.position);\n \n // Always provide the completion - VS Code will show it at the insert position\n // The ghost text appears at the range specified, not necessarily at cursor\n const item = new vscode.InlineCompletionItem(\n this.action.text,\n new vscode.Range(insertPos, insertPos)\n );\n \n return [item];\n }\n\n // Handle replacements\n if (this.action.kind === 'editReplace') {\n const range = toVscodeRange(this.action.range);\n \n // Always provide the completion for replacements\n const item = new vscode.InlineCompletionItem(\n this.action.text,\n range // This will replace the content in range when accepted\n );\n \n return [item];\n }\n\n return [];\n }\n\n /**\n * Check if the current action's position is near the given cursor position.\n * Used to determine if we need a fallback indicator.\n */\n isActionNearCursor(cursorLine: number): boolean {\n if (!this.action) return false;\n \n if (this.action.kind === 'editInsert') {\n return Math.abs(this.action.position[0] - cursorLine) <= 1;\n }\n \n if (this.action.kind === 'editReplace') {\n return cursorLine >= this.action.range.start[0] - 1 && \n cursorLine <= this.action.range.end[0] + 1;\n }\n \n return false;\n }\n}\n\n",typescript,tab +285,582815,"TERMINAL",0,0,"\r 0%| | 0/23 [00:00<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 'and will be removed in a future version. 
Please use \'/model_info\' instead.\n[2026-01-05 15:54:32] INFO: 127.0.0.1:41114 - ""GET /get_model_info HTTP/1.1"" 200 OK\n[2026-01-05 15:54:32] Receive: obj=GenerateReqInput(validation_time=None, received_time=None, received_time_perf=None, rid=\'38fb5799baf646329e6cd84e75c92c27\', http_worker_ipc=None, text=\'The capital city of France is\', input_ids=None, input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={\'temperature\': 0, \'max_new_tokens\': 8}, return_logprob=False, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=False, stream=False, log_metrics=True, return_hidden_states=False, modalities=None, session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, backgroun\n... [truncated]\n/nothink<|assistant|>\n', input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 
510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 7530, 1975, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98965, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 36428, 19511, 90473, 369, 8822, 24983, 33562, 624, 257, 220, 21, 197, 353, 36097, 2480, 2213, 979, 68108, 916, 59341, 15010, 11288, 11, 4992, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 15807, 2512, 34209, 5179, 5169, 55008, 3839, 1975, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 17086, 2460, 25, 1372, 760, 845, 284, 845, 280, 262, 220, 98965, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 8577, 5536, 295, 26444, 29186, 73534, 25, 220, 220, 103498, 4, 45225, 760, 220, 100235, 14, 101655, 508, 100441, 25, 99951, 27, 98503, 25, 101655, 11, 220, 220, 18, 13, 103992, 82, 14, 275, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 7530, 1975, 5179, 21239, 760, 10918, 481, 77, 364, 17, 11, 99241, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 17, 21864, 314, 5586, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 36428, 19511, 90473, 369, 8822, 24983, 33562, 624, 257, 220, 21, 197, 353, 36097, 2480, 2213, 979, 68108, 916, 59341, 15010, 11288, 11, 4992, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 15807, 2512, 34209, 5179, 5169, 55008, 3839, 1975, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 17086, 2460, 25, 1372, 760, 845, 284, 845, 280, 262, 220, 98965, 1572, 262, 220, 98886, 2760, 1554, 262, 220, 99366, 9356, 353, 2573, 279, 1482, 1917, 323, 1181, 17086, 1555, 369, 19511, 17957, 624, 262, 220, 99367, 9356, 735, 262, 220, 99082, 2760, 738, 2512, 15294, 25, 5586, 11, 17086, 2460, 25, 1372, 1648, 737, 341, 262, 220, 99317, 7472, 419, 12389, 284, 1917, 280, 262, 220, 99419, 7472, 419, 44106, 2460, 284, 17086, 2460, 280, 262, 220, 99243, 2760, 456, 262, 220, 98729, 1572, 262, 220, 98360, 2760, 1554, 262, 220, 99146, 9356, 353, 12017, 279, 1482] ... 
[11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 99618, 20, 23, 102626, 20, 99082, 99698, 24, 101562, 24, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 22, 16, 13, 101663, 24, 19, 101411, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 100899, 21, 13, 101478, 17, 18, 100067, 20, 99366, 11, 9276, 1131, 3235, 100286, 65, 19, 67, 18, 66, 98668, 65, 19, 68, 20, 69, 23, 65, 24, 65, 18, 631, 99619, 34285, 17, 65, 99146, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 
220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 46978, 3353, 455, 5047, 3109, 6, 374, 31457, 323, 686, 387, 6963, 304, 264, 3853, 2319, 13, 5209, 990, 3353, 2528, 3109, 6, 4518, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 30649, 25, 257, 220, 115925, 13, 15, 13, 15, 13, 16, 25, 19, 98965, 99367, 481, 330, 3806, 608, 455, 5047, 3109, 10125, 14, 16, 13, 16, 1, 220, 98867, 10397, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 5856, 11, 3949, 3009, 5856, 11, 3949, 3009, 76167, 5856, 11, 9276, 1131, 100919, 10793, 102486, 24, 24, 65, 2577, 21, 101562, 18, 100104, 68, 21, 4385, 104029, 68, 100899, 66, 103825, 66, 99951, 516, 1758, 40163, 83114, 5856, 11, 1467, 1131, 785, 6722, 3283, 315, 9621, 374, 516, 1946, 8076, 5856, 11, 1946, 23939, 82, 5856, 11, 2168, 1769, 5856, 11, 2766, 1769, 5856, 11, 7699, 1769, 5856, 11, 24816, 6745, 12848, 34388, 1210, 220, 15, 11, 364, 2810, 5921, 28735, 1210, 220, 23, 2137, 470, 5224, 19861, 5608, 11, 1487, 19861, 4906, 6043, 10824, 16, 11, 1909, 5224, 776, 1279, 4273, 28, 15, 11, 3950, 8076, 5224, 19861, 5856, 11, 470, 4326, 1243, 5224, 776, 1279, 5608, 11, 4269, 5608, 11, 1487, 37490, 3618, 11, 470, 26653, 22911, 5608, 11, 13220, 1361, 5856, 11, 3797, 6745, 5856, 11, 326, 6215, 2638, 5856, 11, 326, 6215, 842, 5856, 11, 2526, 5224, 275, 49805, 5856, 11, 26838, 12842, 5856, 11, 26838, 8715, 5856, 11, 26838, 24927, 5856, 11, 26838, 14435, 3097, 5856, 11, 16876, 54821, 2368, 5856, 11, 32559, 5608, 11, 821, 60201, 20375, 5856, 11, 1182, 70, 581, 77, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '13,23p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99366, 11, 99619, 79, 1248, 73022, 151336], 'meta_info': {'id': '2727d90add2c4b79a8f9066792381927', 'finish_reason': {'type': 'stop', 
'matched': 151336}, 'prompt_tokens': 9836, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-8.11782301752828e-05, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.13528956472873688, 4616, 'cat'), (-0.0009533389820717275, 481, ' -'), (0.0, 77, 'n'), (-2.7418097943154862e-06, 608, ' /'), (-1.1920928244535389e-07, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-5.245195097813848e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-1.1920928244535389e-07, 79888, '-extension'), (-0.003493992844596505, 13428, '/src'), (-0.002355421893298626, 14, '/'), (-0.06695890426635742, 27082, 'preview'), (-0.9591017961502075, 14, '/'), (-0.13149096071720123, 5057, 'inline'), (-7.748573807475623e-06, 5179, 'Provider'), (-1.1920928244535389e-07, 21239, '.ts'), (-0.000226472009671852, 760, ' |'), (-1.5139465176616795e-05, 10918, ' sed'), (0.0, 481, ' -'), (-1.5735502529423684e-05, 77, 'n'), (-1.1920928244535389e-07, 364, "" '""), (-1.4733916521072388, 99366, '13'), (-3.576278118089249e-07, 11, ','), (-1.2152299880981445, 99619, '23'), (0.0, 79, 'p'), (0.0, 1248, ""'\n""), (-0.0005529781919904053, 73022, '```'), (-0.0003868784988299012, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 616, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 12.905144691467285, 'response_sent_to_client_ts': 1767624885.6246538}}\r\n[2026-01-05 15:54:45] INFO: 10.86.2.252:48684 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +306,599737,"src/preview/inlineProvider.ts",251,0,"",typescript,selection_command +307,600061,"TERMINAL",0,0,"[2026-01-05 15:54:46] Receive: obj=GenerateReqInput(validation_time=2.385815605521202e-05, received_time=1767624886.7673194, received_time_perf=2479771.440245113, rid='e636e1323f6244548eae3c384491da5d', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 
279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 257, 220, 17, 21864, 314, 5586, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 36428, 19511, 90473, 369, 8822, 24983, 33562, 624, 257, 220, 21, 197, 353, 36097, 2480, 2213, 979, 68108, 916, 59341, 15010, 11288, 11, 4992, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 15807, 2512, 34209, 5179, 5169, 55008, 3839, 1975, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 17086, 2460, 25, 1372, 760, 845, 284, 845, 280, 262, 220, 98965, 1572, 262, 220, 98886, 2760, 1554, 262, 220, 99366, 9356, 353, 2573, 279, 1482, 1917, 323, 1181, 17086, 1555, 369, 19511, 17957, 624, 262, 220, 99367, 9356, 735, 262, 220, 99082, 2760, 738, 2512, 15294, 25, 5586, 11, 17086, 2460, 25, 1372, 1648, 737, 341, 262, 220, 99317, 7472, 419, 12389, 284, 1917, 280, 262, 220, 99419, 7472, 419, 44106, 2460, 284, 17086, 2460, 280, 262, 220, 99243, 2760, 456, 262, 220, 98729, 1572, 262, 220, 98360, 2760, 1554, 262, 220, 99146, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99241, 9356, 735, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 8577, 5536, 295, 26444, 29186, 73534, 25, 220, 220, 102114, 4, 45225, 760, 220, 100632, 14, 101655, 508, 100441, 25, 101175, 27, 98503, 25, 100461, 11, 220, 220, 19, 13, 98965, 82, 14, 275, 921, 8577, 5536, 295, 26444, 29186, 73534, 25, 220, 220, 102626, 4, 45225, 760, 220, 101140, 14, 101655, 508, 100441, 25, 100632, 27, 98503, 25, 101961, 11, 220, 220, 19, 13, 100104, 82, 14, 275, 921, 8577, 5536, 295, 26444, 29186, 73534, 25, 220, 220, 104340, 4, 45225, 760, 220, 100919, 
14, 101655, 508, 100441, 25, 102340, 27, 98503, 25, 100919, 11, 220, 220, 19, 13, 100702, 82, 14, 275, 921, 8577, 5536, 295, 26444, 29186, 73534, 25, 220, 220, 104127, 4, 45225, 760, 220, 101294, 14, 101655, 508, 100441, 25, 101562, 27, 98503, 25, 100235, 11, 220, 220, 19, 13, 101474, 82, 14, 275, 921, 8577, 5536, 295, 26444, 29186, 73534, 25, 220, 220, 102284, 4, 45225, 760, 220, 99698, 14, 101655, 508, 100441, 25, 99200, 27, 98503, 25, 100557, 11, 220, 220, 19, 13, 102721, 82, 14, 275, 921, 8577, 5536, 295, 26444, 29186, 73534, 25, 220, 220, 103878, 4, 45225, 760, 220, 102340, 14, 101655, 508, 100441, 25, 101130, 27, 98503, 25, 99951, 11, 220, 220, 19, 13, 102501, 82, 14, 275, 921, 8577, 5536, 295, 26444, 29186, 73534, 25, 220, 220, 103502, 4, 45225, 760, 220, 101961, 14, 101655, 508, 100441, 25, 102486, 27, 98503, 25, 98729, 11] ... [99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 46978, 3353, 455, 5047, 3109, 6, 374, 31457, 323, 686, 387, 6963, 304, 264, 3853, 2319, 13, 5209, 990, 3353, 2528, 3109, 6, 4518, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 30649, 25, 257, 220, 115925, 13, 15, 13, 15, 13, 16, 25, 19, 98965, 99367, 481, 330, 3806, 608, 455, 5047, 3109, 10125, 14, 16, 13, 16, 1, 220, 98867, 10397, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 5856, 11, 3949, 3009, 5856, 11, 3949, 3009, 76167, 5856, 11, 9276, 1131, 100919, 10793, 102486, 24, 24, 65, 2577, 21, 101562, 18, 100104, 68, 21, 4385, 104029, 68, 100899, 66, 103825, 66, 99951, 516, 1758, 40163, 83114, 5856, 11, 1467, 1131, 785, 6722, 3283, 315, 9621, 374, 516, 1946, 8076, 5856, 11, 1946, 23939, 82, 5856, 11, 2168, 1769, 5856, 11, 2766, 1769, 5856, 11, 7699, 1769, 5856, 11, 24816, 6745, 12848, 34388, 1210, 220, 15, 11, 364, 2810, 5921, 28735, 1210, 220, 23, 2137, 470, 5224, 19861, 5608, 11, 1487, 19861, 4906, 6043, 10824, 16, 11, 1909, 5224, 776, 1279, 4273, 28, 15, 11, 3950, 8076, 5224, 19861, 5856, 11, 470, 4326, 1243, 5224, 776, 1279, 5608, 11, 4269, 5608, 11, 1487, 37490, 3618, 11, 470, 26653, 22911, 5608, 11, 13220, 1361, 5856, 11, 3797, 6745, 5856, 11, 326, 6215, 2638, 5856, 11, 326, 6215, 842, 5856, 11, 2526, 5224, 275, 49805, 5856, 11, 26838, 12842, 5856, 11, 26838, 8715, 5856, 11, 26838, 24927, 5856, 11, 26838, 14435, 3097, 5856, 11, 16876, 54821, 2368, 5856, 11, 32559, 5608, 11, 821, 60201, 20375, 5856, 11, 1182, 70, 581, 77, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100919, 19, 21, 100067, 99698, 18, 24, 100919, 100104, 18, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 22, 17, 13, 22, 98729, 19, 101175, 19, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 100899, 22, 13, 101294, 17, 100235, 22, 22, 11, 9276, 1131, 99951, 17, 22, 67, 100067, 718, 17, 66, 19, 65, 102626, 64, 23, 69, 100067, 21, 21, 102626, 99619, 23, 122444, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 
11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, 
modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 15:54:46 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 579, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +308,600329,"TERMINAL",0,0,"[2026-01-05 15:54:47 TP0] Prefill batch, #new-seq: 1, #new-token: 1497, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +309,601120,"TERMINAL",0,0,"[2026-01-05 15:54:47] Finish: obj=GenerateReqInput(validation_time=2.385815605521202e-05, received_time=1767624886.7673194, received_time_perf=2479771.440245113, rid='e636e1323f6244548eae3c384491da5d', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=1.3846904039382935e-05, received_time=1767624872.7194324, received_time_perf=2479757.3923577, rid='2727d90add2c4b79a8f9066792381927', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, \n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 257, 220, 17, 21864, 314, 5586, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 36428, 19511, 90473, 369, 8822, 24983, 
33562, 624, 257, 220, 21, 197, 353, 36097, 2480, 2213, 979, 68108, 916, 59341, 15010, 11288, 11, 4992, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 15807, 2512, 34209, 5179, 5169, 55008, 3839, 1975, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 17086, 2460, 25, 1372, 760, 845, 284, 845, 280, 262, 220, 98965, 1572, 262, 220, 98886, 2760, 1554, 262, 220, 99366, 9356, 353, 2573, 279, 1482, 1917, 323, 1181, 17086, 1555, 369, 19511, 17957, 624, 262, 220, 99367, 9356, 735, 262, 220, 99082, 2760, 738, 2512, 15294, 25, 5586, 11, 17086, 2460, 25, 1372, 1648, 737, 341, 262, 220, 99317, 7472, 419, 12389, 284, 1917, 280, 262, 220, 99419, 7472, 419, 44106, 2460, 284, 17086, 2460, 280, 262, 220, 99243, 2760, 456, 262, 220, 98729, 1572, 262, 220, 98360, 2760, 1554, 262, 220, 99146, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99241, 9356, 735, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 8577, 5536, 295, 26444, 29186, 73534, 25, 220, 220, 102114, 4, 45225, 760, 220, 100632, 14, 101655, 508, 100441, 25, 101175, 27, 98503, 25, 100461, 11, 220, 220, 19, 13, 98965, 82, 14, 275, 921, 8577, 5536, 295, 26444, 29186, 73534, 25, 220, 220, 102626, 4, 45225, 760, 220, 101140, 14, 101655, 508, 100441, 25, 100632, 27, 98503, 25, 101961, 11, 220, 220, 19, 13, 100104, 82, 14, 275, 921, 8577, 5536, 295, 26444, 29186, 73534, 25, 220, 220, 104340, 4, 45225, 760, 220, 100919, 14, 101655, 508, 100441, 25, 102340, 27, 98503, 25, 100919, 11, 220, 220, 19, 13, 100702, 82, 14, 275, 921, 8577, 5536, 295, 26444, 29186, 73534, 25, 220, 220, 104127, 4, 45225, 760, 220, 101294, 14, 101655, 508, 100441, 25, 101562, 27, 98503, 25, 100235, 11, 220, 220, 19, 13, 101474, 82, 14, 275, 921, 8577, 5536, 295, 26444, 29186, 73534, 25, 220, 220, 102284, 4, 45225, 760, 220, 99698, 14, 101655, 508, 100441, 25, 99200, 27, 98503, 25, 100557, 11, 220, 220, 19, 13, 102721, 82, 14, 275, 921, 8577, 5536, 295, 26444, 29186, 73534, 25, 220, 220, 103878, 4, 45225, 760, 220, 102340, 14, 101655, 508, 100441, 25, 101130, 27, 98503, 25, 99951, 11, 220, 220, 19, 13, 102501, 82, 14, 275, 921, 8577, 5536, 295, 26444, 29186, 73534, 25, 220, 220, 103502, 4, 45225, 760, 220, 101961, 14, 101655, 508, 100441, 25, 102486, 27, 98503, 25, 98729, 11] ... 
[99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 46978, 3353, 455, 5047, 3109, 6, 374, 31457, 323, 686, 387, 6963, 304, 264, 3853, 2319, 13, 5209, 990, 3353, 2528, 3109, 6, 4518, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 30649, 25, 257, 220, 115925, 13, 15, 13, 15, 13, 16, 25, 19, 98965, 99367, 481, 330, 3806, 608, 455, 5047, 3109, 10125, 14, 16, 13, 16, 1, 220, 98867, 10397, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 5856, 11, 3949, 3009, 5856, 11, 3949, 3009, 76167, 5856, 11, 9276, 1131, 100919, 10793, 102486, 24, 24, 65, 2577, 21, 101562, 18, 100104, 68, 21, 4385, 104029, 68, 100899, 66, 103825, 66, 99951, 516, 1758, 40163, 83114, 5856, 11, 1467, 1131, 785, 6722, 3283, 315, 9621, 374, 516, 1946, 8076, 5856, 11, 1946, 23939, 82, 5856, 11, 2168, 1769, 5856, 11, 2766, 1769, 5856, 11, 7699, 1769, 5856, 11, 24816, 6745, 12848, 34388, 1210, 220, 15, 11, 364, 2810, 5921, 28735, 1210, 220, 23, 2137, 470, 5224, 19861, 5608, 11, 1487, 19861, 4906, 6043, 10824, 16, 11, 1909, 5224, 776, 1279, 4273, 28, 15, 11, 3950, 8076, 5224, 19861, 5856, 11, 470, 4326, 1243, 5224, 776, 1279, 5608, 11, 4269, 5608, 11, 1487, 37490, 3618, 11, 470, 26653, 22911, 5608, 11, 13220, 1361, 5856, 11, 3797, 6745, 5856, 11, 326, 6215, 2638, 5856, 11, 326, 6215, 842, 5856, 11, 2526, 5224, 275, 49805, 5856, 11, 26838, 12842, 5856, 11, 26838, 8715, 5856, 11, 26838, 24927, 5856, 11, 26838, 14435, 3097, 5856, 11, 16876, 54821, 2368, 5856, 11, 32559, 5608, 11, 821, 60201, 20375, 5856, 11, 1182, 70, 581, 77, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100919, 19, 21, 100067, 99698, 18, 24, 100919, 100104, 18, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 22, 17, 13, 22, 98729, 19, 101175, 19, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 100899, 22, 13, 101294, 17, 100235, 22, 22, 11, 9276, 1131, 99951, 17, 22, 67, 100067, 718, 17, 66, 19, 65, 102626, 64, 23, 69, 100067, 21, 21, 102626, 99619, 23, 122444, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 
11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/index.ts | sed -n '1,15p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 760, 10918, 481, 77, 364, 16, 11, 99082, 79, 1248, 73022, 151336], 'meta_info': {'id': 'e636e1323f6244548eae3c384491da5d', 'finish_reason': {'type': 'stop', 'matched': 151336}, 
'prompt_tokens': 10268, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-4.756337511935271e-05, 73022, '```'), (0.0, 45937, 'bash'), (-5.960462772236497e-07, 198, '\n'), (-0.18812669813632965, 4616, 'cat'), (-0.01953856088221073, 481, ' -'), (-2.264974000354414e-06, 77, 'n'), (-1.2636104656849056e-05, 608, ' /'), (-1.1920922133867862e-06, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-6.05564855504781e-05, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-8.344646857949556e-07, 79888, '-extension'), (-0.0035770260728895664, 13428, '/src'), (-0.0066371941938996315, 14, '/'), (-0.21595770120620728, 27082, 'preview'), (-1.3029091358184814, 9019, '/index'), (-2.3841855067985307e-07, 21239, '.ts'), (-0.0009459549910388887, 760, ' |'), (-0.0005965837044641376, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (-1.0728830375228426e-06, 364, "" '""), (-0.9073053002357483, 16, '1'), (-3.576278118089249e-07, 11, ','), (-1.8862333297729492, 99082, '15'), (-2.3841855067985307e-07, 79, 'p'), (0.0, 1248, ""'\n""), (-0.00015841660206206143, 73022, '```'), (-6.318072337307967e-06, 151336, '<|user|>')], 'completion_tokens': 38, 'cached_tokens': 579, 'spec_accept_rate': 0.8181818181818182, 'spec_accept_length': 3.4545454545454546, 'spec_verify_ct': 11, 'spec_accept_token_num': 27, 'spec_draft_token_num': 33, 'e2e_latency': 1.040208339691162, 'response_sent_to_client_ts': 1767624887.807604}}\r\n[2026-01-05 15:54:47] INFO: 10.86.2.252:34906 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +310,601723,"src/preview/inlineProvider.ts",338,0,"",typescript,selection_command +311,602007,"TERMINAL",0,0,"[2026-01-05 15:54:48] Receive: obj=GenerateReqInput(validation_time=2.6152003556489944e-05, received_time=1767624888.7539015, received_time_perf=2479773.426826926, rid='eebef0858f5b472dbbaec8ff89391b5c', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 
1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 101175, 37715, 12969, 28, 22, 13, 99317, 18831, 1648, 220, 220, 99366, 4, 91, 51338, 51338, 51338, 15187, 10412, 237, 5238, 5180, 760, 220, 18, 14, 99619, 508, 98503, 25, 99619, 27, 100286, 25, 101562, 11, 220, 220, 20, 13, 101135, 82, 14, 275, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 99064, 37715, 12969, 28, 22, 13, 99367, 18831, 1648, 220, 220, 99419, 4, 91, 51338, 51338, 51338, 51338, 15187, 10412, 234, 5238, 4293, 760, 220, 19, 14, 99619, 508, 98503, 25, 99619, 27, 100286, 25, 100539, 11, 220, 220, 18, 13, 101474, 82, 14, 275, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 7530, 1975, 5179, 21239, 760, 10918, 481, 77, 364, 99366, 11, 100702, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99366, 9356, 353, 2573, 279, 1482, 1917, 323, 1181, 17086, 1555, 369, 19511, 17957, 624, 262, 220, 99367, 9356, 735, 262, 220, 99082, 2760, 738, 2512, 15294, 25, 5586, 11, 17086, 2460, 25, 1372, 1648, 737, 341, 262, 220, 99317, 7472, 419, 12389, 284, 1917, 280, 262, 220, 99419, 7472, 419, 44106, 2460, 284, 17086, 2460, 280, 262, 220, 99243, 2760, 456, 262, 220, 98729, 1572, 262, 220, 98360, 2760, 1554, 262, 220, 99146, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99241, 9356, 735, 262, 220, 99619, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99590, 7472, 419, 12389, 284, 845, 280, 262, 220, 99446, 7472, 419, 44106, 2460, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 
220, 100104, 9356, 353, 39350, 19511, 2213, 979, 1196, 305, 8966, 916, 279, 20396, 3082, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 3410, 34209, 1006, 262, 220, 101175, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 100702, 7472, 2309, 25, 55008, 21900, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 99317, 37715, 12969, 28, 22, 13, 100441, 18831, 1648, 220, 220, 100933, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 15187, 10412, 232, 6374, 760, 220, 98965, 14, 99619, 508, 98503, 25, 99446, 27, 98503, 25, 100002, 11, 220, 220, 17, 13, 101294, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 23, 37715, 12969, 28, 21, 13, 103205, 18831, 1648, 220, 220, 101411, 4, 91, 51338] ... [220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101562, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100919, 20, 23, 99082, 99618, 20, 20, 99146, 115937, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 23, 21, 13, 102269, 22, 18, 98729, 19, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 102114, 16, 13, 19, 99698, 127399, 114240, 11, 9276, 1131, 68, 21, 100632, 68, 118843, 18, 69, 21, 99590, 100461, 19, 23, 68, 5918, 18, 66, 100919, 19, 101474, 16, 3235, 20, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 
11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, 
custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 15:54:48 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 584, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +312,602353,"TERMINAL",0,0,"[2026-01-05 15:54:49 TP0] Prefill batch, #new-seq: 1, #new-token: 1550, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +313,602549,"TERMINAL",0,0,"[2026-01-05 15:54:49] Finish: obj=GenerateReqInput(validation_time=2.6152003556489944e-05, received_time=1767624888.7539015, received_time_perf=2479773.426826926, rid='eebef0858f5b472dbbaec8ff89391b5c', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=2.385815605521202e-05, received_time=1767624886.7673194, received_time_perf=2479771.440245113, rid='e636e1323f6244548eae3c384491da5d', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 101175, 37715, 12969, 28, 22, 13, 99317, 18831, 1648, 220, 220, 99366, 4, 91, 51338, 51338, 51338, 15187, 10412, 237, 5238, 5180, 760, 
220, 18, 14, 99619, 508, 98503, 25, 99619, 27, 100286, 25, 101562, 11, 220, 220, 20, 13, 101135, 82, 14, 275, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 99064, 37715, 12969, 28, 22, 13, 99367, 18831, 1648, 220, 220, 99419, 4, 91, 51338, 51338, 51338, 51338, 15187, 10412, 234, 5238, 4293, 760, 220, 19, 14, 99619, 508, 98503, 25, 99619, 27, 100286, 25, 100539, 11, 220, 220, 18, 13, 101474, 82, 14, 275, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 7530, 1975, 5179, 21239, 760, 10918, 481, 77, 364, 99366, 11, 100702, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99366, 9356, 353, 2573, 279, 1482, 1917, 323, 1181, 17086, 1555, 369, 19511, 17957, 624, 262, 220, 99367, 9356, 735, 262, 220, 99082, 2760, 738, 2512, 15294, 25, 5586, 11, 17086, 2460, 25, 1372, 1648, 737, 341, 262, 220, 99317, 7472, 419, 12389, 284, 1917, 280, 262, 220, 99419, 7472, 419, 44106, 2460, 284, 17086, 2460, 280, 262, 220, 99243, 2760, 456, 262, 220, 98729, 1572, 262, 220, 98360, 2760, 1554, 262, 220, 99146, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99241, 9356, 735, 262, 220, 99619, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99590, 7472, 419, 12389, 284, 845, 280, 262, 220, 99446, 7472, 419, 44106, 2460, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 39350, 19511, 2213, 979, 1196, 305, 8966, 916, 279, 20396, 3082, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 3410, 34209, 1006, 262, 220, 101175, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 100702, 7472, 2309, 25, 55008, 21900, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 99317, 37715, 12969, 28, 22, 13, 100441, 18831, 1648, 220, 220, 100933, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 15187, 10412, 232, 6374, 760, 220, 98965, 14, 99619, 508, 98503, 25, 99446, 27, 98503, 25, 100002, 11, 220, 220, 17, 13, 101294, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 23, 37715, 12969, 28, 21, 13, 103205, 18831, 1648, 220, 220, 101411, 4, 91, 51338] ... 
[220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101562, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100919, 20, 23, 99082, 99618, 20, 20, 99146, 115937, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 23, 21, 13, 102269, 22, 18, 98729, 19, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 102114, 16, 13, 19, 99698, 127399, 114240, 11, 9276, 1131, 68, 21, 100632, 68, 118843, 18, 69, 21, 99590, 100461, 19, 23, 68, 5918, 18, 66, 100919, 19, 101474, 16, 3235, 20, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 
24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/hoverProvider.ts | sed -n '1,30p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 7530, 1975, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 99064, 79, 1248, 73022, 151336], 'meta_info': {'id': 'eebef0858f5b472dbbaec8ff89391b5c', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 10326, 'weight_version': 'default', 
'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-1.0847986231965479e-05, 73022, '```'), (0.0, 45937, 'bash'), (-3.576278118089249e-07, 198, '\n'), (-0.1467658132314682, 4616, 'cat'), (-0.0233097355812788, 481, ' -'), (-6.318072337307967e-06, 77, 'n'), (-4.410646579344757e-05, 608, ' /'), (-1.1086402082582936e-05, 5117, 'home'), (-2.3841855067985307e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (-1.1920928244535389e-07, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-9.238292841473594e-05, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-2.145764938177308e-06, 79888, '-extension'), (-0.0053452192805707455, 13428, '/src'), (-0.005099741276353598, 14, '/'), (-0.09587249159812927, 27082, 'preview'), (-1.8349952697753906, 7530, '/h'), (0.0, 1975, 'over'), (-0.00012730741582345217, 5179, 'Provider'), (-3.576278118089249e-07, 21239, '.ts'), (-0.055846650153398514, 760, ' |'), (-0.00013422065239865333, 10918, ' sed'), (-1.1920928244535389e-07, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (0.0, 364, "" '""), (-0.5513038039207458, 16, '1'), (-4.768370445162873e-07, 11, ','), (-2.1257781982421875, 99064, '30'), (-1.5497195136049413e-06, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-9.274052717955783e-05, 73022, '```'), (-3.6954811548639555e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 584, 'spec_accept_rate': 0.8055555555555556, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 29, 'spec_draft_token_num': 36, 'e2e_latency': 0.5759742259979248, 'response_sent_to_client_ts': 1767624889.3299422}}\r\n[2026-01-05 15:54:49] INFO: 10.86.2.252:34910 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +314,606437,"src/preview/hoverProvider.ts",0,0,"",typescript,tab +315,606571,"src/preview/hoverProvider.ts",454,0,"",typescript,selection_command +316,606646,"TERMINAL",0,0,"[2026-01-05 15:54:53] Receive: obj=GenerateReqInput(validation_time=1.810118556022644e-05, received_time=1767624893.3196416, received_time_perf=2479777.992567036, rid='f9f114c35b784b4fa3bfd1c8a6113a0d', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 
1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 99317, 37715, 12969, 28, 22, 13, 100441, 18831, 1648, 220, 220, 100933, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 15187, 10412, 232, 6374, 760, 220, 98965, 14, 99619, 508, 98503, 25, 99446, 27, 98503, 25, 100002, 11, 220, 220, 17, 13, 101294, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 23, 37715, 12969, 28, 21, 13, 103205, 18831, 1648, 220, 220, 101411, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 10412, 232, 9695, 760, 220, 99082, 14, 99619, 508, 98503, 25, 99916, 27, 98503, 25, 100286, 11, 220, 220, 19, 13, 100772, 275, 2687, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 198, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 311, 51397, 1851, 6046, 11, 311, 51397, 1851, 3812, 11, 56390, 335, 504, 2756, 9239, 1010, 257, 220, 18, 21864, 314, 63561, 10546, 11, 86102, 335, 504, 2756, 31951, 804, 1010, 257, 220, 19, 21864, 314, 97156, 47, 23891, 25246, 5179, 335, 504, 2756, 5057, 5179, 1010, 257, 220, 20, 21864, 314, 15807, 2512, 34209, 5179, 335, 504, 2756, 17558, 5179, 1010, 257, 220, 21, 21864, 314, 1473, 31886, 2512, 24248, 36761, 11, 
17231, 36761, 2077, 335, 504, 2756, 27669, 36761, 1010, 257, 220, 22, 21864, 314, 12558, 1912, 52279, 74313, 11, 702, 13771, 908, 11, 23578, 7339, 37343, 68070, 11, 23578, 57220, 13771, 290, 335, 504, 4927, 6031, 3446, 3092, 1010, 257, 220, 23, 1572, 257, 220, 24, 197, 322, 1032, 65347, 4494, 198, 262, 220, 98668, 59028, 314, 5586, 11, 311, 51397, 1851, 6046, 11, 311, 51397, 1851, 3812, 11, 56390, 335, 504, 2756, 9239, 1010, 262, 220, 98965, 59028, 314, 17231, 36761, 2077, 335, 504, 2756, 27669, 36761, 1010, 262, 220, 98886, 1572, 262, 220, 99366, 197, 1747, 262, 220, 99367, 197, 353, 2363, 1134, 279, 17105, 3689, 369, 11821, 6168, 624, 262, 220, 99082, 197, 353, 11322, 4343, 1459, 369, 678, 17105, 20852, 624, 262, 220, 99317, 197, 735, 262, 220, 99419, 59028, 536, 31110, 2043, 341, 262, 220, 99243, 2760, 869, 20187, 10546, 25, 63561, 10546, 280, 262, 220, 98729, 2760, 869, 7381, 5179, 25, 97156, 47, 23891, 25246, 5179, 280, 262, 220, 98360, 2760, 869, 19511, 5179, 25, 15807, 2512, 34209, 5179, 280, 262] ... [220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 100933, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 21, 99082, 98867, 100235, 20, 21, 100933, 100809, 19, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 23, 23, 13, 100899, 18, 100067, 16, 20, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 102114, 18, 13, 19, 99916, 23, 99916, 24, 99916, 11, 9276, 1131, 2127, 57253, 100562, 20, 23, 69, 20, 65, 101655, 17, 1999, 4645, 757, 23, 542, 103502, 18, 104327, 65, 20, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 
121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 7530, 1975, 5179, 21239, 760, 10918, 481, 77, 364, 99366, 11, 100702, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99366, 9356, 353, 2573, 279, 1482, 1917, 323, 1181, 17086, 1555, 369, 19511, 17957, 624, 262, 220, 99367, 9356, 735, 262, 220, 99082, 2760, 738, 2512, 15294, 25, 5586, 11, 17086, 2460, 25, 1372, 1648, 737, 341, 262, 220, 99317, 7472, 419, 12389, 284, 1917, 280, 262, 220, 99419, 7472, 419, 44106, 2460, 284, 17086, 2460, 280, 262, 220, 99243, 2760, 456, 262, 220, 98729, 1572, 262, 220, 98360, 2760, 1554, 262, 220, 99146, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99241, 9356, 735, 262, 220, 99619, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99590, 7472, 419, 12389, 284, 845, 280, 262, 220, 99446, 7472, 419, 44106, 2460, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 39350, 19511, 2213, 979, 1196, 305, 8966, 916, 279, 20396, 3082, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 3410, 34209, 1006, 262, 220, 101175, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 100702, 7472, 2309, 25, 55008, 21900, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': 
None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 15:54:53 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 590, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +317,606719,"TERMINAL",0,0,"[2026-01-05 15:54:53] Receive: obj=GenerateReqInput(validation_time=1.5246216207742691e-05, received_time=1767624893.4695299, received_time_perf=2479778.14245516, rid='c8b7d5e9597d447bb428865958b11fce', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 
5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 99317, 37715, 12969, 28, 22, 13, 100441, 18831, 1648, 220, 220, 100933, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 15187, 10412, 232, 6374, 760, 220, 98965, 14, 99619, 508, 98503, 25, 99446, 27, 98503, 25, 100002, 11, 220, 220, 17, 13, 101294, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 23, 37715, 12969, 28, 21, 13, 103205, 18831, 1648, 220, 220, 101411, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 10412, 232, 9695, 760, 220, 99082, 14, 99619, 508, 98503, 25, 99916, 27, 98503, 25, 100286, 11, 220, 220, 19, 13, 100772, 275, 2687, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 198, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 311, 51397, 1851, 6046, 11, 311, 51397, 1851, 3812, 11, 56390, 335, 504, 2756, 9239, 1010, 257, 220, 18, 21864, 314, 63561, 10546, 11, 86102, 335, 504, 2756, 31951, 804, 1010, 257, 220, 19, 21864, 314, 97156, 47, 23891, 25246, 5179, 335, 504, 2756, 5057, 5179, 1010, 257, 220, 20, 21864, 314, 15807, 2512, 34209, 5179, 335, 504, 2756, 17558, 5179, 1010, 257, 220, 21, 21864, 314, 1473, 31886, 2512, 24248, 36761, 11, 17231, 36761, 2077, 335, 504, 2756, 27669, 36761, 1010, 257, 220, 22, 21864, 314, 12558, 1912, 52279, 74313, 11, 702, 13771, 908, 11, 23578, 7339, 37343, 68070, 11, 23578, 57220, 13771, 290, 335, 504, 4927, 6031, 3446, 3092, 1010, 257, 220, 23, 1572, 257, 220, 24, 197, 322, 1032, 65347, 4494, 198, 262, 220, 98668, 59028, 314, 5586, 11, 311, 51397, 1851, 6046, 11, 311, 51397, 1851, 3812, 11, 56390, 335, 504, 2756, 9239, 1010, 262, 220, 98965, 59028, 314, 17231, 36761, 2077, 335, 504, 2756, 27669, 36761, 1010, 262, 220, 98886, 1572, 262, 220, 99366, 197, 1747, 262, 220, 99367, 197, 353, 2363, 1134, 279, 17105, 3689, 369, 11821, 6168, 624, 262, 220, 99082, 197, 353, 11322, 4343, 1459, 369, 678, 17105, 20852, 624, 262, 220, 99317, 197, 735, 262, 220, 99419, 59028, 536, 31110, 2043, 341, 262, 220, 99243, 2760, 869, 20187, 10546, 25, 63561, 10546, 280, 262, 220, 98729, 2760, 869, 7381, 5179, 25, 97156, 47, 23891, 25246, 5179, 280, 262, 220, 98360, 2760, 869, 19511, 5179, 25, 15807, 2512, 34209, 5179, 280, 262] ... 
[220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 100933, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 21, 99082, 98867, 100235, 20, 21, 100933, 100809, 19, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 23, 23, 13, 100899, 18, 100067, 16, 20, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 102114, 18, 13, 19, 99916, 23, 99916, 24, 99916, 11, 9276, 1131, 2127, 57253, 100562, 20, 23, 69, 20, 65, 101655, 17, 1999, 4645, 757, 23, 542, 103502, 18, 104327, 65, 20, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 
11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 7530, 1975, 5179, 21239, 760, 10918, 481, 77, 364, 99366, 11, 100702, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99366, 9356, 353, 2573, 279, 1482, 1917, 323, 1181, 17086, 1555, 369, 19511, 17957, 624, 262, 220, 99367, 9356, 735, 262, 220, 99082, 2760, 738, 2512, 15294, 25, 5586, 11, 17086, 2460, 25, 1372, 1648, 737, 341, 262, 220, 99317, 7472, 419, 12389, 284, 1917, 280, 262, 220, 99419, 7472, 419, 44106, 2460, 284, 17086, 2460, 280, 262, 220, 99243, 2760, 456, 262, 220, 98729, 1572, 262, 220, 98360, 2760, 1554, 262, 220, 99146, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99241, 9356, 735, 262, 220, 99619, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99590, 7472, 419, 12389, 284, 845, 280, 262, 220, 99446, 7472, 419, 44106, 2460, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 39350, 19511, 2213, 979, 1196, 305, 8966, 916, 279, 20396, 3082, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 3410, 34209, 1006, 262, 220, 101175, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 100702, 7472, 2309, 25, 55008, 21900, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +318,606892,"TERMINAL",0,0,"[2026-01-05 15:54:53] Receive: obj=GenerateReqInput(validation_time=3.424100577831268e-05, received_time=1767624893.5998654, received_time_perf=2479778.272790539, rid='530196c5e7d64ea68db71b49ad81a0e9', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 
7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 760, 10918, 481, 77, 364, 103946, 11, 114240, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103946, 20789, 1438, 280, 262, 220, 103992, 7472, 456, 262, 220, 101804, 2760, 456, 262, 220, 102487, 1572, 262, 220, 
103205, 2760, 1554, 262, 220, 101663, 9356, 353, 12017, 678, 17105, 3689, 624, 262, 220, 100809, 9356, 735, 256, 220, 99457, 2760, 2797, 4555, 737, 341, 256, 220, 107609, 7472, 419, 2285, 26367, 10546, 7426, 2403, 543, 256, 220, 109871, 7472, 419, 29307, 5179, 7426, 2512, 543, 256, 220, 110248, 7472, 419, 77795, 5179, 7426, 2512, 543, 256, 220, 109803, 7472, 419, 4952, 2512, 284, 845, 280, 256, 220, 108345, 7472, 419, 26579, 284, 895, 280, 256, 220, 109626, 2760, 456, 256, 220, 110733, 1572, 256, 220, 108479, 2760, 1554, 256, 220, 110610, 9356, 353, 4248, 421, 264, 17105, 374, 5023, 9430, 624, 256, 220, 104550, 9356, 735, 256, 220, 111659, 2760, 71197, 4555, 2710, 341, 256, 220, 110800, 7472, 470, 419, 26579, 280, 256, 220, 114240, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 22, 37715, 12969, 28, 21, 13, 102487, 18831, 1648, 220, 220, 100096, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 10412, 236, 4597, 760, 220, 99317, 14, 99619, 508, 98503, 25, 99951, 27, 98503, 25, 100772, 11, 220, 220, 17, 13, 101130, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 21, 13, 103878, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100104, 27, 98503, 25, 98503, 11, 220, 220, 16, 13, 99916, 82, 14, 275, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98729, 28964, 15, 60, 8450, 287, 220, 121860, 24, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 18, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103992, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 22, 13, 100590, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 15, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13] ... 
[99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 7530, 1975, 5179, 21239, 760, 10918, 481, 77, 364, 99366, 11, 100702, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99366, 9356, 353, 2573, 279, 1482, 1917, 323, 1181, 17086, 1555, 369, 19511, 17957, 624, 262, 220, 99367, 9356, 735, 262, 220, 99082, 2760, 738, 2512, 15294, 25, 5586, 11, 17086, 2460, 25, 1372, 1648, 737, 341, 262, 220, 99317, 7472, 419, 12389, 284, 1917, 280, 262, 220, 99419, 7472, 419, 44106, 2460, 284, 17086, 2460, 280, 262, 220, 99243, 2760, 456, 262, 220, 98729, 1572, 262, 220, 98360, 2760, 1554, 262, 220, 99146, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99241, 9356, 735, 262, 220, 99619, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99590, 7472, 419, 12389, 284, 845, 280, 262, 220, 99446, 7472, 419, 44106, 2460, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 39350, 19511, 2213, 979, 1196, 305, 8966, 916, 279, 20396, 3082, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 3410, 34209, 1006, 262, 220, 101175, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 100702, 7472, 2309, 25, 55008, 21900, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 102721, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 98668, 112891, 101130, 21, 15, 99241, 21, 101723, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 24, 18, 13, 18, 98729, 21, 102340, 21, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 102114, 22, 13, 100809, 17, 101917, 22, 100441, 21, 11, 9276, 1131, 69, 24, 69, 114365, 66, 100235, 65, 100928, 19, 65, 19, 3632, 18, 80999, 16, 66, 23, 64, 21, 98965, 18, 64, 15, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 
22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +319,606963,"TERMINAL",0,0,"[2026-01-05 15:54:53 TP0] Prefill batch, #new-seq: 3, #new-token: 8192, #cached-token: 9388, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +320,607160,"TERMINAL",0,0,"[2026-01-05 15:54:53 TP0] Prefill batch, #new-seq: 1, #new-token: 4823, #cached-token: 0, token usage: 0.03, #running-req: 2, #queue-req: 0, \r\n",,terminal_output +321,607682,"TERMINAL",0,0,"[2026-01-05 
15:54:54 TP0] Decode batch, #running-req: 3, #token: 19776, token usage: 0.04, accept len: 3.30, accept rate: 0.82, cuda graph: True, gen throughput (token/s): 7.45, #queue-req: 0, \r\n",,terminal_output +322,607812,"TERMINAL",0,0,"[2026-01-05 15:54:54] Finish: obj=GenerateReqInput(validation_time=3.424100577831268e-05, received_time=1767624893.5998654, received_time_perf=2479778.272790539, rid='530196c5e7d64ea68db71b49ad81a0e9', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=1.810118556022644e-05, received_time=1767624893.3196416, received_time_perf=2479777.992567036, rid='f9f114c35b784b4fa3bfd1c8a6113a0d', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 760, 10918, 481, 77, 
364, 103946, 11, 114240, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103946, 20789, 1438, 280, 262, 220, 103992, 7472, 456, 262, 220, 101804, 2760, 456, 262, 220, 102487, 1572, 262, 220, 103205, 2760, 1554, 262, 220, 101663, 9356, 353, 12017, 678, 17105, 3689, 624, 262, 220, 100809, 9356, 735, 256, 220, 99457, 2760, 2797, 4555, 737, 341, 256, 220, 107609, 7472, 419, 2285, 26367, 10546, 7426, 2403, 543, 256, 220, 109871, 7472, 419, 29307, 5179, 7426, 2512, 543, 256, 220, 110248, 7472, 419, 77795, 5179, 7426, 2512, 543, 256, 220, 109803, 7472, 419, 4952, 2512, 284, 845, 280, 256, 220, 108345, 7472, 419, 26579, 284, 895, 280, 256, 220, 109626, 2760, 456, 256, 220, 110733, 1572, 256, 220, 108479, 2760, 1554, 256, 220, 110610, 9356, 353, 4248, 421, 264, 17105, 374, 5023, 9430, 624, 256, 220, 104550, 9356, 735, 256, 220, 111659, 2760, 71197, 4555, 2710, 341, 256, 220, 110800, 7472, 470, 419, 26579, 280, 256, 220, 114240, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 22, 37715, 12969, 28, 21, 13, 102487, 18831, 1648, 220, 220, 100096, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 10412, 236, 4597, 760, 220, 99317, 14, 99619, 508, 98503, 25, 99951, 27, 98503, 25, 100772, 11, 220, 220, 17, 13, 101130, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 21, 13, 103878, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100104, 27, 98503, 25, 98503, 11, 220, 220, 16, 13, 99916, 82, 14, 275, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98729, 28964, 15, 60, 8450, 287, 220, 121860, 24, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 18, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103992, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 22, 13, 100590, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 15, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13] ... 
[99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 7530, 1975, 5179, 21239, 760, 10918, 481, 77, 364, 99366, 11, 100702, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99366, 9356, 353, 2573, 279, 1482, 1917, 323, 1181, 17086, 1555, 369, 19511, 17957, 624, 262, 220, 99367, 9356, 735, 262, 220, 99082, 2760, 738, 2512, 15294, 25, 5586, 11, 17086, 2460, 25, 1372, 1648, 737, 341, 262, 220, 99317, 7472, 419, 12389, 284, 1917, 280, 262, 220, 99419, 7472, 419, 44106, 2460, 284, 17086, 2460, 280, 262, 220, 99243, 2760, 456, 262, 220, 98729, 1572, 262, 220, 98360, 2760, 1554, 262, 220, 99146, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99241, 9356, 735, 262, 220, 99619, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99590, 7472, 419, 12389, 284, 845, 280, 262, 220, 99446, 7472, 419, 44106, 2460, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 39350, 19511, 2213, 979, 1196, 305, 8966, 916, 279, 20396, 3082, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 3410, 34209, 1006, 262, 220, 101175, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 100702, 7472, 2309, 25, 55008, 21900, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 102721, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 98668, 112891, 101130, 21, 15, 99241, 21, 101723, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 24, 18, 13, 18, 98729, 21, 102340, 21, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 102114, 22, 13, 100809, 17, 101917, 22, 100441, 21, 11, 9276, 1131, 69, 24, 69, 114365, 66, 100235, 65, 100928, 19, 65, 19, 3632, 18, 80999, 16, 66, 23, 64, 21, 98965, 18, 64, 15, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 
22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/index.ts | sed -n '93,113p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 760, 10918, 481, 77, 364, 103946, 11, 114240, 79, 1248, 73022, 151336], 'meta_info': {'id': '530196c5e7d64ea68db71b49ad81a0e9', 'finish_reason': {'type': 
'stop', 'matched': 151336}, 'prompt_tokens': 9411, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-7.354942499659956e-05, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.019839072600007057, 4616, 'cat'), (-0.0005544078885577619, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-3.576278118089249e-07, 608, ' /'), (-1.1920928244535389e-07, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.4305104514278355e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0011970981722697616, 13428, '/src'), (-0.00021908267808612436, 14, '/'), (-0.0002469711471349001, 27082, 'preview'), (-1.2297084331512451, 9019, '/index'), (-2.3841855067985307e-07, 21239, '.ts'), (-0.000667468411847949, 760, ' |'), (-8.4638240878121e-06, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (0.0, 364, "" '""), (-0.13195358216762543, 103946, '93'), (0.0, 11, ','), (-7.486063259420916e-05, 114240, '113'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-0.000226472009671852, 73022, '```'), (-9.274052717955783e-05, 151336, '<|user|>')], 'completion_tokens': 38, 'cached_tokens': 607, 'spec_accept_rate': 0.9333333333333333, 'spec_accept_length': 3.8, 'spec_verify_ct': 10, 'spec_accept_token_num': 28, 'spec_draft_token_num': 30, 'e2e_latency': 0.9227092266082764, 'response_sent_to_client_ts': 1767624894.522666}}\r\n[2026-01-05 15:54:54] INFO: 10.86.2.252:34942 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n[2026-01-05 15:54:54] Finish: obj=GenerateReqInput(validation_time=1.810118556022644e-05, received_time=1767624893.3196416, received_time_perf=2479777.992567036, rid='f9f114c35b784b4fa3bfd1c8a6113a0d', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""8588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198\n... 
[truncated]\n/nothink<|assistant|>\n\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/hoverProvider.ts | sed -n '13,33p'\n```<|user|>\n\n 13\t * Set the current action and its anchor line for hover detection.\n 14\t */\n 15\t setAction(action: Action, anchorLine: number): void {\n 16\t this.action = action;\n 17\t this.anchorLine = anchorLine;\n 18\t }\n 19\t\n 20\t /**\n 21\t * Clear the current action.\n 22\t */\n 23\t clearAction(): void {\n 24\t this.action = null;\n 25\t this.anchorLine = null;\n 26\t }\n 27\t\n 28\t /**\n 29\t * Provide hover content when user hovers over the indicator area.\n 30\t */\n 31\t provideHover(\n 32\t document: vscode.TextDocument,\n 33\t position: vscode.Position,\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 
364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 99317, 37715, 12969, 28, 22, 13, 100441, 18831, 1648, 220, 220, 100933, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 15187, 10412, 232, 6374, 760, 220, 98965, 14, 99619, 508, 98503, 25, 99446, 27, 98503, 25, 100002, 11, 220, 220, 17, 13, 101294, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 23, 37715, 12969, 28, 21, 13, 103205, 18831, 1648, 220, 220, 101411, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 10412, 232, 9695, 760, 220, 99082, 14, 99619, 508, 98503, 25, 99916, 27, 98503, 25, 100286, 11, 220, 220, 19, 13, 100772, 275, 2687, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 198, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 311, 51397, 1851, 6046, 11, 311, 51397, 1851, 3812, 11, 56390, 335, 504, 2756, 9239, 1010, 257, 220, 18, 21864, 314, 63561, 10546, 11, 86102, 335, 504, 2756, 31951, 804, 1010, 257, 220, 19, 21864, 314, 97156, 47, 23891, 25246, 5179, 335, 504, 2756, 5057, 5179, 1010, 257, 220, 20, 21864, 314, 15807, 2512, 34209, 5179, 335, 504, 2756, 17558, 5179, 1010, 257, 220, 21, 21864, 314, 1473, 31886, 2512, 24248, 36761, 11, 17231, 36761, 2077, 335, 504, 2756, 27669, 36761, 1010, 257, 220, 22, 21864, 314, 12558, 1912, 52279, 74313, 11, 702, 13771, 908, 11, 23578, 7339, 37343, 68070, 11, 23578, 57220, 13771, 290, 335, 504, 4927, 6031, 3446, 3092, 1010, 257, 220, 23, 1572, 257, 220, 24, 197, 322, 1032, 65347, 4494, 198, 262, 220, 98668, 59028, 314, 5586, 11, 311, 51397, 1851, 6046, 11, 311, 51397, 1851, 3812, 11, 56390, 335, 504, 2756, 9239, 1010, 262, 220, 98965, 59028, 314, 17231, 36761, 2077, 335, 504, 2756, 27669, 36761, 1010, 262, 220, 98886, 1572, 262, 220, 99366, 197, 1747, 262, 220, 99367, 197, 353, 2363, 1134, 279, 17105, 3689, 369, 11821, 6168, 624, 262, 220, 99082, 197, 353, 11322, 4343, 1459, 369, 678, 17105, 20852, 624, 262, 220, 99317, 197, 735, 262, 220, 99419, 59028, 536, 31110, 2043, 341, 262, 220, 99243, 2760, 869, 20187, 10546, 25, 63561, 10546, 280, 262, 220, 98729, 2760, 869, 7381, 5179, 25, 97156, 47, 23891, 25246, 5179, 280, 262, 220, 98360, 2760, 869, 19511, 5179, 25, 15807, 2512, 34209, 5179, 280, 262] ... 
[220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 100933, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 21, 99082, 98867, 100235, 20, 21, 100933, 100809, 19, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 23, 23, 13, 100899, 18, 100067, 16, 20, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 102114, 18, 13, 19, 99916, 23, 99916, 24, 99916, 11, 9276, 1131, 2127, 57253, 100562, 20, 23, 69, 20, 65, 101655, 17, 1999, 4645, 757, 23, 542, 103502, 18, 104327, 65, 20, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 
11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 7530, 1975, 5179, 21239, 760, 10918, 481, 77, 364, 99366, 11, 100702, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99366, 9356, 353, 2573, 279, 1482, 1917, 323, 1181, 17086, 1555, 369, 19511, 17957, 624, 262, 220, 99367, 9356, 735, 262, 220, 99082, 2760, 738, 2512, 15294, 25, 5586, 11, 17086, 2460, 25, 1372, 1648, 737, 341, 262, 220, 99317, 7472, 419, 12389, 284, 1917, 280, 262, 220, 99419, 7472, 419, 44106, 2460, 284, 17086, 2460, 280, 262, 220, 99243, 2760, 456, 262, 220, 98729, 1572, 262, 220, 98360, 2760, 1554, 262, 220, 99146, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99241, 9356, 735, 262, 220, 99619, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99590, 7472, 419, 12389, 284, 845, 280, 262, 220, 99446, 7472, 419, 44106, 2460, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 39350, 19511, 2213, 979, 1196, 305, 8966, 916, 279, 20396, 3082, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 3410, 34209, 1006, 262, 220, 101175, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 100702, 7472, 2309, 25, 55008, 21900, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/index.ts | sed -n '200,220p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 760, 10918, 481, 77, 364, 98867, 11, 108499, 79, 1248, 73022, 151336], 'meta_info': {'id': 'f9f114c35b784b4fa3bfd1c8a6113a0d', 'finish_reason': {'type': 
'stop', 'matched': 151336}, 'prompt_tokens': 10887, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-2.098061486321967e-05, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.02431314066052437, 4616, 'cat'), (-0.0027619570028036833, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-2.7418097943154862e-06, 608, ' /'), (-3.576278118089249e-07, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (-1.1920928244535389e-07, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-5.8412379075889476e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-2.3841855067985307e-07, 79888, '-extension'), (-0.0016862234333530068, 13428, '/src'), (-0.002520481590181589, 14, '/'), (-0.00150781380943954, 27082, 'preview'), (-1.0988423824310303, 9019, '/index'), (-1.1920922133867862e-06, 21239, '.ts'), (-0.0013546108966693282, 760, ' |'), (-0.00019727191829588264, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (-1.6689286894688848e-06, 364, "" '""), (-0.48713618516921997, 98867, '200'), (-7.152555099310121e-07, 11, ','), (-0.025450007990002632, 108499, '220'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-9.274052717955783e-05, 73022, '```'), (-5.960462772236497e-07, 151336, '<|user|>')], 'completion_tokens': 38, 'cached_tokens': 590, 'spec_accept_rate': 0.7222222222222222, 'spec_accept_length': 3.1666666666666665, 'spec_verify_ct': 12, 'spec_accept_token_num': 26, 'spec_draft_token_num': 36, 'e2e_latency': 1.2357442378997803, 'response_sent_to_client_ts': 1767624894.555546}}\r\n[2026-01-05 15:54:54] Finish: obj=GenerateReqInput(validation_time=1.5246216207742691e-05, received_time=1767624893.4695299, received_time_perf=2479778.14245516, rid='c8b7d5e9597d447bb428865958b11fce', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""8588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198\n... 
[truncated]\n/nothink<|assistant|>\n\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/hoverProvider.ts | sed -n '13,33p'\n```<|user|>\n\n 13\t * Set the current action and its anchor line for hover detection.\n 14\t */\n 15\t setAction(action: Action, anchorLine: number): void {\n 16\t this.action = action;\n 17\t this.anchorLine = anchorLine;\n 18\t }\n 19\t\n 20\t /**\n 21\t * Clear the current action.\n 22\t */\n 23\t clearAction(): void {\n 24\t this.action = null;\n 25\t this.anchorLine = null;\n 26\t }\n 27\t\n 28\t /**\n 29\t * Provide hover content when user hovers over the indicator area.\n 30\t */\n 31\t provideHover(\n 32\t document: vscode.TextDocument,\n 33\t position: vscode.Position,\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 
364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 99317, 37715, 12969, 28, 22, 13, 100441, 18831, 1648, 220, 220, 100933, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 15187, 10412, 232, 6374, 760, 220, 98965, 14, 99619, 508, 98503, 25, 99446, 27, 98503, 25, 100002, 11, 220, 220, 17, 13, 101294, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 23, 37715, 12969, 28, 21, 13, 103205, 18831, 1648, 220, 220, 101411, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 10412, 232, 9695, 760, 220, 99082, 14, 99619, 508, 98503, 25, 99916, 27, 98503, 25, 100286, 11, 220, 220, 19, 13, 100772, 275, 2687, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 198, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 311, 51397, 1851, 6046, 11, 311, 51397, 1851, 3812, 11, 56390, 335, 504, 2756, 9239, 1010, 257, 220, 18, 21864, 314, 63561, 10546, 11, 86102, 335, 504, 2756, 31951, 804, 1010, 257, 220, 19, 21864, 314, 97156, 47, 23891, 25246, 5179, 335, 504, 2756, 5057, 5179, 1010, 257, 220, 20, 21864, 314, 15807, 2512, 34209, 5179, 335, 504, 2756, 17558, 5179, 1010, 257, 220, 21, 21864, 314, 1473, 31886, 2512, 24248, 36761, 11, 17231, 36761, 2077, 335, 504, 2756, 27669, 36761, 1010, 257, 220, 22, 21864, 314, 12558, 1912, 52279, 74313, 11, 702, 13771, 908, 11, 23578, 7339, 37343, 68070, 11, 23578, 57220, 13771, 290, 335, 504, 4927, 6031, 3446, 3092, 1010, 257, 220, 23, 1572, 257, 220, 24, 197, 322, 1032, 65347, 4494, 198, 262, 220, 98668, 59028, 314, 5586, 11, 311, 51397, 1851, 6046, 11, 311, 51397, 1851, 3812, 11, 56390, 335, 504, 2756, 9239, 1010, 262, 220, 98965, 59028, 314, 17231, 36761, 2077, 335, 504, 2756, 27669, 36761, 1010, 262, 220, 98886, 1572, 262, 220, 99366, 197, 1747, 262, 220, 99367, 197, 353, 2363, 1134, 279, 17105, 3689, 369, 11821, 6168, 624, 262, 220, 99082, 197, 353, 11322, 4343, 1459, 369, 678, 17105, 20852, 624, 262, 220, 99317, 197, 735, 262, 220, 99419, 59028, 536, 31110, 2043, 341, 262, 220, 99243, 2760, 869, 20187, 10546, 25, 63561, 10546, 280, 262, 220, 98729, 2760, 869, 7381, 5179, 25, 97156, 47, 23891, 25246, 5179, 280, 262, 220, 98360, 2760, 869, 19511, 5179, 25, 15807, 2512, 34209, 5179, 280, 262] ... 
[220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 100933, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 21, 99082, 98867, 100235, 20, 21, 100933, 100809, 19, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 23, 23, 13, 100899, 18, 100067, 16, 20, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 102114, 18, 13, 19, 99916, 23, 99916, 24, 99916, 11, 9276, 1131, 2127, 57253, 100562, 20, 23, 69, 20, 65, 101655, 17, 1999, 4645, 757, 23, 542, 103502, 18, 104327, 65, 20, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11\n[... 7418 bytes truncated to respect terminal scrollback settings ...]\n",,terminal_output +323,610935,"src/preview/inlineProvider.ts",0,0,"",typescript,tab +324,611282,"TERMINAL",0,0,"[2026-01-05 15:54:57] Receive: obj=GenerateReqInput(validation_time=2.0763371139764786e-05, received_time=1767624897.9665122, received_time_perf=2479782.639437459, rid='e014fd3062d249cf917c8984ccd31c73', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 
15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 760, 10918, 481, 77, 364, 103946, 11, 114240, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103946, 20789, 1438, 280, 262, 220, 103992, 7472, 456, 262, 220, 101804, 2760, 456, 262, 220, 102487, 1572, 262, 220, 103205, 2760, 1554, 262, 220, 101663, 9356, 353, 12017, 678, 17105, 3689, 624, 262, 220, 100809, 9356, 735, 256, 220, 99457, 2760, 2797, 4555, 737, 341, 256, 220, 107609, 7472, 419, 2285, 26367, 10546, 7426, 2403, 543, 256, 220, 109871, 7472, 419, 29307, 5179, 7426, 2512, 543, 256, 220, 110248, 7472, 419, 77795, 5179, 7426, 2512, 543, 256, 220, 109803, 7472, 419, 4952, 2512, 284, 845, 280, 256, 220, 108345, 7472, 419, 26579, 284, 895, 280, 256, 220, 109626, 2760, 456, 256, 220, 110733, 1572, 256, 220, 108479, 2760, 1554, 256, 220, 110610, 9356, 353, 4248, 421, 264, 17105, 374, 5023, 9430, 624, 256, 220, 104550, 9356, 735, 256, 220, 111659, 2760, 71197, 4555, 2710, 341, 256, 220, 110800, 7472, 470, 419, 26579, 280, 256, 220, 114240, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 22, 37715, 12969, 28, 21, 13, 102487, 18831, 1648, 220, 220, 100096, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 10412, 236, 4597, 760, 220, 99317, 14, 99619, 508, 98503, 25, 99951, 27, 98503, 25, 100772, 11, 220, 220, 17, 13, 101130, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 21, 13, 103878, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100104, 27, 98503, 25, 98503, 11, 220, 220, 
16, 13, 99916, 82, 14, 275, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98729, 28964, 15, 60, 8450, 287, 220, 121860, 24, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 18, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103992, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 22, 13, 100590, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 15, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13] ... [11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 102721, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 18, 13, 19, 99590, 99457, 20, 102114, 23, 100557, 99916, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 24, 18, 13, 20, 100809, 23, 101411, 19, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 102114, 23, 13, 99951, 17, 22, 100067, 20, 101294, 11, 9276, 1131, 20, 99064, 121818, 66, 20, 68, 22, 67, 102636, 12502, 101840, 1999, 103437, 65, 101474, 329, 104340, 64, 15, 68, 24, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 
99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98886, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 311, 51397, 1851, 6046, 11, 311, 51397, 1851, 3812, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 624, 257, 220, 21, 197, 353, 1096, 4990, 10614, 916, 27971, 594, 30523, 323, 4278, 389, 4287, 5128, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 8967, 25, 2710, 284, 830, 280, 262, 220, 98965, 1572, 262, 220, 98886, 2760, 1554, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, 
priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 15:54:58 TP0] Prefill batch, #new-seq: 1, #new-token: 872, #cached-token: 9406, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +325,611459,"TERMINAL",0,0,"[2026-01-05 15:54:58] Finish: obj=GenerateReqInput(validation_time=2.0763371139764786e-05, received_time=1767624897.9665122, received_time_perf=2479782.639437459, rid='e014fd3062d249cf917c8984ccd31c73', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... 
[truncated]\n/nothink<|assistant|>\n\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '1,12p'\n```<|user|>\n\n 1\timport * as vscode from 'vscode';\n 2\timport { Action, toVscodeRange, toVscodePosition } from './types';\n 3\t\n 4\t/**\n 5\t * Provides inline completion items (ghost text) for code edit actions.\n 6\t * This takes priority over Cursor's hints and works on empty lines.\n 7\t */\n 8\texport class CrowdPilotInlineProvider implements vscode.InlineCompletionItemProvider {\n 9\t private action: Action | null = null;\n 10\t private enabled: boolean = true;\n 11\t\n 12\t /**\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 
32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 760, 10918, 481, 77, 364, 103946, 11, 114240, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103946, 20789, 1438, 280, 262, 220, 103992, 7472, 456, 262, 220, 101804, 2760, 456, 262, 220, 102487, 1572, 262, 220, 103205, 2760, 1554, 262, 220, 101663, 9356, 353, 12017, 678, 17105, 3689, 624, 262, 220, 100809, 9356, 735, 256, 220, 99457, 2760, 2797, 4555, 737, 341, 256, 220, 107609, 7472, 419, 2285, 26367, 10546, 7426, 2403, 543, 256, 220, 109871, 7472, 419, 29307, 5179, 7426, 2512, 543, 256, 220, 110248, 7472, 419, 77795, 5179, 7426, 2512, 543, 256, 220, 109803, 7472, 419, 4952, 2512, 284, 845, 280, 256, 220, 108345, 7472, 419, 26579, 284, 895, 280, 256, 220, 109626, 2760, 456, 256, 220, 110733, 1572, 256, 220, 108479, 2760, 1554, 256, 220, 110610, 9356, 353, 4248, 421, 264, 17105, 374, 5023, 9430, 624, 256, 220, 104550, 9356, 735, 256, 220, 111659, 2760, 71197, 4555, 2710, 341, 256, 220, 110800, 7472, 470, 419, 26579, 280, 256, 220, 114240, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 22, 37715, 12969, 28, 21, 13, 102487, 18831, 1648, 220, 220, 100096, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 10412, 236, 4597, 760, 220, 99317, 14, 99619, 508, 98503, 25, 99951, 27, 98503, 25, 100772, 11, 220, 220, 17, 13, 101130, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 21, 13, 103878, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100104, 27, 98503, 25, 98503, 11, 220, 220, 16, 13, 99916, 82, 14, 275, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98729, 28964, 15, 60, 8450, 287, 220, 121860, 24, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 18, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103992, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 22, 13, 100590, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 15, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13] ... 
[11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 102721, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 18, 13, 19, 99590, 99457, 20, 102114, 23, 100557, 99916, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 24, 18, 13, 20, 100809, 23, 101411, 19, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 102114, 23, 13, 99951, 17, 22, 100067, 20, 101294, 11, 9276, 1131, 20, 99064, 121818, 66, 20, 68, 22, 67, 102636, 12502, 101840, 1999, 103437, 65, 101474, 329, 104340, 64, 15, 68, 24, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 
110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98886, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 311, 51397, 1851, 6046, 11, 311, 51397, 1851, 3812, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 624, 257, 220, 21, 197, 353, 1096, 4990, 10614, 916, 27971, 594, 30523, 323, 4278, 389, 4287, 5128, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 8967, 25, 2710, 284, 830, 280, 262, 220, 98965, 1572, 262, 220, 98886, 2760, 1554, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/index.ts | sed -n '93,113p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 760, 10918, 481, 77, 364, 103946, 11, 114240, 79, 1248, 73022, 151336], 'meta_info': {'id': 'e014fd3062d249cf917c8984ccd31c73', 'finish_reason': {'type': 'stop', 'matched': 151336}, 
'prompt_tokens': 10278, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.0002703301142901182, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.07394606620073318, 4616, 'cat'), (-0.0016171961324289441, 481, ' -'), (-2.3841855067985307e-07, 77, 'n'), (-1.1920922133867862e-06, 608, ' /'), (-2.3841855067985307e-07, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-5.125986263010418e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.001975491177290678, 13428, '/src'), (-0.00027640812913887203, 14, '/'), (-0.00021050144277978688, 27082, 'preview'), (-0.8974650502204895, 9019, '/index'), (-1.1920928244535389e-07, 21239, '.ts'), (-0.0011449456214904785, 760, ' |'), (-1.645074735279195e-05, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (0.0, 364, "" '""), (-0.17496192455291748, 103946, '93'), (0.0, 11, ','), (-7.271502545336261e-05, 114240, '113'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-0.0002706876548472792, 73022, '```'), (-6.48477507638745e-05, 151336, '<|user|>')], 'completion_tokens': 38, 'cached_tokens': 9406, 'spec_accept_rate': 0.8181818181818182, 'spec_accept_length': 3.4545454545454546, 'spec_verify_ct': 11, 'spec_accept_token_num': 27, 'spec_draft_token_num': 33, 'e2e_latency': 0.2793252468109131, 'response_sent_to_client_ts': 1767624898.2458985}}\r\n[2026-01-05 15:54:58] INFO: 10.86.2.252:55772 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +326,720264,"src/preview/inlineProvider.ts",1641,0,"",typescript,selection_keyboard +327,720558,"TERMINAL",0,0,"[2026-01-05 15:56:47] Receive: obj=GenerateReqInput(validation_time=2.7895905077457428e-05, received_time=1767625007.2952058, received_time_perf=2479891.968131687, rid='38e46419dae544fdbf695b389fc3f3b2', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 
3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 760, 10918, 481, 77, 364, 103946, 11, 114240, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103946, 20789, 1438, 280, 262, 220, 103992, 7472, 456, 262, 220, 101804, 2760, 456, 262, 220, 102487, 1572, 262, 220, 103205, 2760, 1554, 262, 220, 101663, 9356, 353, 12017, 678, 17105, 3689, 624, 262, 220, 100809, 9356, 735, 256, 220, 99457, 2760, 2797, 4555, 737, 341, 256, 220, 107609, 7472, 419, 2285, 26367, 10546, 7426, 2403, 543, 256, 220, 109871, 7472, 419, 29307, 5179, 7426, 2512, 543, 256, 220, 110248, 7472, 419, 77795, 5179, 7426, 2512, 543, 256, 220, 109803, 7472, 419, 4952, 2512, 284, 845, 280, 256, 220, 108345, 7472, 419, 26579, 284, 895, 280, 256, 220, 109626, 2760, 456, 256, 220, 110733, 1572, 256, 220, 108479, 2760, 1554, 256, 220, 110610, 9356, 353, 4248, 421, 264, 17105, 374, 5023, 9430, 624, 256, 220, 104550, 9356, 735, 256, 220, 111659, 2760, 71197, 4555, 2710, 341, 256, 220, 110800, 7472, 470, 419, 26579, 280, 256, 220, 114240, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 22, 37715, 12969, 28, 21, 13, 102487, 18831, 1648, 220, 220, 100096, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 10412, 236, 4597, 760, 220, 99317, 14, 99619, 508, 98503, 25, 99951, 27, 98503, 25, 100772, 11, 220, 220, 17, 13, 101130, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 21, 13, 103878, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 
51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100104, 27, 98503, 25, 98503, 11, 220, 220, 16, 13, 99916, 82, 14, 275, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98729, 28964, 15, 60, 8450, 287, 220, 121860, 24, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 18, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103992, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 22, 13, 100590, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 15, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13] ... [27971, 594, 30523, 323, 4278, 389, 4287, 5128, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 8967, 25, 2710, 284, 830, 280, 262, 220, 98965, 1572, 262, 220, 98886, 2760, 1554, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 102486, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100372, 21, 100702, 22, 114240, 24, 102269, 19, 100928, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 24, 22, 13, 24, 101478, 124184, 17, 11, 3949, 3009, 76167, 28, 99590, 22, 126189, 17, 13, 21, 101294, 19, 101140, 100461, 24, 11, 9276, 1131, 68, 15, 99367, 6902, 124540, 17, 67, 99590, 24, 9787, 24, 99419, 66, 23, 101663, 19, 95035, 100557, 66, 103388, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 
120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 100933, 11, 101840, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 13751, 5656, 908, 198, 262, 220, 101917, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 13771, 863, 341, 262, 220, 102486, 13056, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 101729, 197, 3374, 262, 220, 102573, 13056, 442, 23177, 3410, 279, 9750, 481, 30530, 6119, 686, 1473, 432, 518, 279, 5656, 2309, 198, 262, 220, 99618, 13056, 442, 576, 19828, 1467, 7951, 518, 279, 2088, 5189, 11, 537, 14302, 518, 8127, 198, 262, 220, 103595, 13056, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 103319, 20789, 419, 12389, 2788, 345, 262, 220, 103302, 20789, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102636, 13056, 1439, 262, 220, 101411, 197, 3374, 262, 220, 101478, 13056, 470, 508, 1203, 935, 262, 220, 102952, 7472, 456, 262, 220, 101840, 1572, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, 
return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 15:56:47 TP0] Prefill batch, #new-seq: 1, #new-token: 956, #cached-token: 10273, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +328,720799,"TERMINAL",0,0,"[2026-01-05 15:56:47] Finish: obj=GenerateReqInput(validation_time=2.7895905077457428e-05, received_time=1767625007.2952058, received_time_perf=2479891.968131687, rid='38e46419dae544fdbf695b389fc3f3b2', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""<|user|>\n\n 48\t context: vscode.InlineCompletionContext,\n 49\t token: vscode.CancellationToken\n 50\t ): vscode.ProviderResult {\n 51\t if (!this.enabled || !this.action) {\n 52\t return [];\n 53\t }\n 54\t\n 55\t // Handle insertions\n 56\t if (this.action.kind === 'editInsert') {\n 57\t const insertPos = toVscodePosition(this.action.position);\n 58\t \n 59\t // Always provide the completion - VS Code will show it at the insert position\n 60\t // The ghost text appears at the range specified, not necessarily at cursor\n 61\t const item = new vscode.InlineCompletionItem(\n 62\t this.action.text,\n 63\t new vscode.Range(insertPos, insertPos)\n 64\t );\n 65\t \n 66\t return [item];\n 67\t }\n 68\t\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 
4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 760, 10918, 481, 77, 364, 103946, 11, 114240, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103946, 20789, 1438, 280, 262, 220, 103992, 7472, 456, 262, 220, 101804, 2760, 456, 262, 220, 102487, 1572, 262, 220, 103205, 2760, 1554, 262, 220, 101663, 9356, 353, 12017, 678, 17105, 3689, 624, 262, 220, 100809, 9356, 735, 256, 220, 99457, 2760, 2797, 4555, 737, 341, 256, 220, 107609, 7472, 419, 2285, 26367, 10546, 7426, 2403, 543, 256, 220, 109871, 7472, 419, 29307, 5179, 7426, 2512, 543, 256, 220, 110248, 7472, 419, 77795, 5179, 7426, 2512, 543, 256, 220, 109803, 7472, 419, 4952, 2512, 284, 845, 280, 256, 220, 108345, 7472, 419, 26579, 284, 895, 280, 256, 220, 109626, 2760, 456, 256, 220, 110733, 1572, 256, 220, 108479, 2760, 1554, 256, 220, 110610, 9356, 353, 4248, 421, 264, 17105, 374, 5023, 9430, 624, 256, 220, 104550, 9356, 735, 256, 220, 111659, 2760, 71197, 4555, 2710, 341, 256, 220, 110800, 7472, 470, 419, 26579, 280, 256, 220, 114240, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 22, 37715, 12969, 28, 21, 13, 102487, 18831, 1648, 220, 220, 100096, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 10412, 236, 4597, 760, 220, 99317, 14, 99619, 508, 98503, 25, 99951, 27, 98503, 25, 100772, 11, 220, 220, 17, 13, 101130, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 21, 13, 103878, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100104, 27, 98503, 25, 98503, 11, 220, 220, 16, 13, 99916, 82, 14, 275, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98729, 28964, 15, 60, 8450, 287, 220, 121860, 24, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 18, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103992, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 22, 13, 100590, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 15, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13] ... 
[27971, 594, 30523, 323, 4278, 389, 4287, 5128, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 8967, 25, 2710, 284, 830, 280, 262, 220, 98965, 1572, 262, 220, 98886, 2760, 1554, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 102486, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100372, 21, 100702, 22, 114240, 24, 102269, 19, 100928, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 24, 22, 13, 24, 101478, 124184, 17, 11, 3949, 3009, 76167, 28, 99590, 22, 126189, 17, 13, 21, 101294, 19, 101140, 100461, 24, 11, 9276, 1131, 68, 15, 99367, 6902, 124540, 17, 67, 99590, 24, 9787, 24, 99419, 66, 23, 101663, 19, 95035, 100557, 66, 103388, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 
108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 100933, 11, 101840, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 13751, 5656, 908, 198, 262, 220, 101917, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 13771, 863, 341, 262, 220, 102486, 13056, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 101729, 197, 3374, 262, 220, 102573, 13056, 442, 23177, 3410, 279, 9750, 481, 30530, 6119, 686, 1473, 432, 518, 279, 5656, 2309, 198, 262, 220, 99618, 13056, 442, 576, 19828, 1467, 7951, 518, 279, 2088, 5189, 11, 537, 14302, 518, 8127, 198, 262, 220, 103595, 13056, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 103319, 20789, 419, 12389, 2788, 345, 262, 220, 103302, 20789, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102636, 13056, 1439, 262, 220, 101411, 197, 3374, 262, 220, 101478, 13056, 470, 508, 1203, 935, 262, 220, 102952, 7472, 456, 262, 220, 101840, 1572, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '68,88p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101840, 11, 101252, 79, 1248, 
73022, 151336], 'meta_info': {'id': '38e46419dae544fdbf695b389fc3f3b2', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 11229, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-8.022463589441031e-05, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.00949934870004654, 4616, 'cat'), (-0.0003238391946069896, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-1.1920928244535389e-07, 608, ' /'), (0.0, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-7.152555099310121e-07, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0008124388405121863, 13428, '/src'), (-0.0001512651506345719, 14, '/'), (-1.847726889536716e-05, 27082, 'preview'), (-0.3270418643951416, 14, '/'), (-0.06679009646177292, 5057, 'inline'), (0.0, 5179, 'Provider'), (0.0, 21239, '.ts'), (-0.00018940561858471483, 760, ' |'), (-4.768370445162873e-07, 10918, ' sed'), (-1.1920928244535389e-07, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-1.1920928244535389e-07, 364, "" '""), (-0.31771427392959595, 101840, '68'), (-1.1920928244535389e-07, 11, ','), (-0.02132067270576954, 101252, '88'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-4.541770613286644e-05, 73022, '```'), (-0.00015841660206206143, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 10273, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 0.2925395965576172, 'response_sent_to_client_ts': 1767625007.5878165}}\r\n[2026-01-05 15:56:47] INFO: 10.86.2.252:34154 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +329,722364,"src/preview/inlineProvider.ts",1571,0,"",typescript,selection_command +330,722601,"src/preview/inlineProvider.ts",1522,0,"",typescript,selection_command +331,722638,"src/preview/inlineProvider.ts",1493,0,"",typescript,selection_command +332,722679,"src/preview/inlineProvider.ts",1492,0,"",typescript,selection_command +333,722679,"TERMINAL",0,0,"[2026-01-05 15:56:49] Receive: obj=GenerateReqInput(validation_time=1.571793109178543e-05, received_time=1767625009.3940308, received_time_perf=2479894.066956104, rid='06eb7c39844c4194acf39c2973a8d92b', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 
2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 760, 10918, 481, 77, 364, 103946, 11, 114240, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103946, 20789, 1438, 280, 262, 220, 103992, 7472, 456, 262, 220, 101804, 2760, 456, 262, 220, 102487, 1572, 262, 220, 103205, 2760, 1554, 262, 220, 101663, 9356, 353, 12017, 678, 17105, 3689, 624, 262, 220, 100809, 9356, 735, 256, 220, 99457, 2760, 2797, 4555, 737, 341, 256, 220, 107609, 7472, 419, 2285, 26367, 10546, 7426, 2403, 543, 256, 220, 109871, 7472, 419, 29307, 5179, 7426, 2512, 543, 256, 220, 110248, 7472, 419, 77795, 5179, 7426, 2512, 543, 256, 220, 109803, 7472, 419, 4952, 2512, 284, 845, 280, 256, 220, 108345, 7472, 419, 26579, 284, 895, 280, 256, 220, 109626, 2760, 456, 256, 220, 110733, 1572, 256, 220, 108479, 2760, 1554, 256, 220, 110610, 9356, 353, 4248, 421, 264, 17105, 374, 5023, 9430, 624, 256, 220, 104550, 9356, 735, 256, 220, 111659, 2760, 71197, 4555, 2710, 341, 256, 220, 110800, 7472, 470, 419, 26579, 280, 256, 220, 114240, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 22, 37715, 12969, 28, 21, 13, 102487, 18831, 1648, 220, 
220, 100096, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 10412, 236, 4597, 760, 220, 99317, 14, 99619, 508, 98503, 25, 99951, 27, 98503, 25, 100772, 11, 220, 220, 17, 13, 101130, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 21, 13, 103878, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100104, 27, 98503, 25, 98503, 11, 220, 220, 16, 13, 99916, 82, 14, 275, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98729, 28964, 15, 60, 8450, 287, 220, 121860, 24, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 18, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103992, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 22, 13, 100590, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 15, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13] ... [24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 100933, 11, 101840, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 13751, 5656, 908, 198, 262, 220, 101917, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 13771, 863, 341, 262, 220, 102486, 13056, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 101729, 197, 3374, 262, 220, 102573, 13056, 442, 23177, 3410, 279, 9750, 481, 30530, 6119, 686, 1473, 432, 518, 279, 5656, 2309, 198, 262, 220, 99618, 13056, 442, 576, 19828, 1467, 7951, 518, 279, 2088, 5189, 11, 537, 14302, 518, 8127, 198, 262, 220, 103595, 13056, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 103319, 20789, 419, 12389, 2788, 345, 262, 220, 103302, 20789, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102636, 13056, 1439, 262, 220, 101411, 197, 3374, 262, 220, 101478, 13056, 470, 508, 1203, 935, 262, 220, 102952, 7472, 456, 262, 220, 101840, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101655, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100928, 24, 20, 100067, 99200, 22, 22, 100461, 22, 101961, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 
100840, 22, 13, 100104, 20, 120547, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 16, 13, 24, 101840, 118901, 101840, 22, 11, 9276, 1131, 100919, 68, 101562, 19, 98729, 88444, 20, 101723, 93437, 69, 21, 101804, 65, 100919, 24, 8315, 18, 69, 18, 65, 17, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 
'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 15:56:49 TP0] Prefill batch, #new-seq: 1, #new-token: 685, #cached-token: 11224, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +334,722711,"src/preview/inlineProvider.ts",1482,0,"",typescript,selection_command +335,722734,"src/preview/inlineProvider.ts",1459,0,"",typescript,selection_command +336,722753,"src/preview/inlineProvider.ts",1414,0,"",typescript,selection_command +337,722788,"src/preview/inlineProvider.ts",1322,0,"",typescript,selection_command +338,722823,"src/preview/inlineProvider.ts",1282,0,"",typescript,selection_command +339,722887,"TERMINAL",0,0,"[2026-01-05 15:56:49 TP0] Decode batch, #running-req: 1, #token: 0, token usage: 0.00, accept len: 3.23, accept rate: 0.81, cuda graph: True, gen throughput (token/s): 1.46, #queue-req: 0, \r\n[2026-01-05 15:56:49] Finish: obj=GenerateReqInput(validation_time=1.571793109178543e-05, received_time=1767625009.3940308, received_time_perf=2479894.066956104, rid='06eb7c39844c4194acf39c2973a8d92b', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""time=2.7895905077457428e-05, received_time=1767625007.2952058, received_time_perf=2479891.968131687, rid='38e46419dae544fdbf695b389fc3f3b2', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198\n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 
518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 760, 10918, 481, 77, 364, 103946, 11, 114240, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103946, 20789, 1438, 280, 262, 220, 103992, 7472, 456, 262, 220, 101804, 2760, 456, 262, 220, 102487, 1572, 262, 220, 103205, 2760, 1554, 262, 220, 101663, 9356, 353, 12017, 678, 17105, 3689, 624, 262, 220, 100809, 9356, 735, 256, 220, 99457, 2760, 2797, 4555, 737, 341, 256, 220, 107609, 7472, 419, 2285, 26367, 10546, 7426, 2403, 543, 256, 220, 109871, 7472, 419, 29307, 5179, 7426, 2512, 543, 256, 220, 110248, 7472, 419, 77795, 5179, 7426, 2512, 543, 256, 220, 109803, 7472, 419, 4952, 2512, 284, 845, 280, 256, 220, 108345, 7472, 419, 26579, 284, 895, 280, 256, 220, 109626, 2760, 456, 256, 220, 110733, 1572, 256, 220, 108479, 2760, 1554, 256, 220, 110610, 9356, 353, 4248, 421, 264, 17105, 374, 5023, 9430, 624, 256, 220, 104550, 9356, 735, 256, 220, 111659, 2760, 71197, 4555, 2710, 341, 256, 220, 110800, 7472, 470, 419, 26579, 280, 256, 220, 114240, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 22, 37715, 12969, 28, 21, 13, 102487, 18831, 1648, 220, 220, 100096, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 10412, 236, 4597, 760, 220, 99317, 14, 99619, 508, 98503, 25, 99951, 27, 98503, 25, 100772, 11, 220, 220, 17, 13, 101130, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 21, 13, 103878, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100104, 27, 98503, 25, 98503, 11, 220, 220, 16, 13, 99916, 82, 14, 275, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98729, 28964, 15, 60, 8450, 287, 220, 121860, 24, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 18, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103992, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 22, 13, 100590, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 15, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13] ... 
[24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 100933, 11, 101840, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 13751, 5656, 908, 198, 262, 220, 101917, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 13771, 863, 341, 262, 220, 102486, 13056, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 101729, 197, 3374, 262, 220, 102573, 13056, 442, 23177, 3410, 279, 9750, 481, 30530, 6119, 686, 1473, 432, 518, 279, 5656, 2309, 198, 262, 220, 99618, 13056, 442, 576, 19828, 1467, 7951, 518, 279, 2088, 5189, 11, 537, 14302, 518, 8127, 198, 262, 220, 103595, 13056, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 103319, 20789, 419, 12389, 2788, 345, 262, 220, 103302, 20789, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102636, 13056, 1439, 262, 220, 101411, 197, 3374, 262, 220, 101478, 13056, 470, 508, 1203, 935, 262, 220, 102952, 7472, 456, 262, 220, 101840, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101655, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100928, 24, 20, 100067, 99200, 22, 22, 100461, 22, 101961, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 22, 13, 100104, 20, 120547, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 16, 13, 24, 101840, 118901, 101840, 22, 11, 9276, 1131, 100919, 68, 101562, 19, 98729, 88444, 20, 101723, 93437, 69, 21, 101804, 65, 100919, 24, 8315, 18, 69, 18, 65, 17, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 
11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/index.ts | sed -n '93,113p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 760, 10918, 481, 77, 364, 103946, 11, 114240, 79, 1248, 73022, 151336], 'meta_info': {'id': 
'06eb7c39844c4194acf39c2973a8d92b', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 11909, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-1.8596476365928538e-05, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.012803140096366405, 4616, 'cat'), (-0.0003240775258745998, 481, ' -'), (0.0, 77, 'n'), (-1.1920928244535389e-07, 608, ' /'), (0.0, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-8.344646857949556e-07, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0004798214649781585, 13428, '/src'), (-0.00010561384988250211, 14, '/'), (-6.770858453819528e-05, 27082, 'preview'), (-1.9469228982925415, 9019, '/index'), (-1.1920928244535389e-07, 21239, '.ts'), (-0.0007929041748866439, 760, ' |'), (-1.1086402082582936e-05, 10918, ' sed'), (-7.152555099310121e-07, 481, ' -'), (0.0, 77, 'n'), (0.0, 364, "" '""), (-0.12778699398040771, 103946, '93'), (0.0, 11, ','), (-3.45700973412022e-05, 114240, '113'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-9.274052717955783e-05, 73022, '```'), (-0.00032360086333937943, 151336, '<|user|>')], 'completion_tokens': 38, 'cached_tokens': 11224, 'spec_accept_rate': 0.8181818181818182, 'spec_accept_length': 3.4545454545454546, 'spec_verify_ct': 11, 'spec_accept_token_num': 27, 'spec_draft_token_num': 33, 'e2e_latency': 0.26985979080200195, 'response_sent_to_client_ts': 1767625009.6639795}}\r\n[2026-01-05 15:56:49] INFO: 10.86.2.252:34162 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +340,723185,"TERMINAL",0,0,"[2026-01-05 15:56:49] Receive: obj=GenerateReqInput(validation_time=1.460174098610878e-05, received_time=1767625009.8186285, received_time_perf=2479894.491553614, rid='4718b125998e4b4b8fc7fc377eaa053d', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 
1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 760, 10918, 481, 77, 364, 103946, 11, 114240, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103946, 20789, 1438, 280, 262, 220, 103992, 7472, 456, 262, 220, 101804, 2760, 456, 262, 220, 102487, 1572, 262, 220, 103205, 2760, 1554, 262, 220, 101663, 9356, 353, 12017, 678, 17105, 3689, 624, 262, 220, 100809, 9356, 735, 256, 220, 99457, 2760, 2797, 4555, 737, 341, 256, 220, 107609, 7472, 419, 2285, 26367, 10546, 7426, 2403, 543, 256, 220, 109871, 7472, 419, 29307, 5179, 7426, 2512, 543, 256, 220, 110248, 7472, 419, 77795, 5179, 7426, 2512, 543, 256, 220, 109803, 7472, 419, 4952, 2512, 284, 845, 280, 256, 220, 108345, 7472, 419, 26579, 284, 895, 280, 256, 220, 109626, 2760, 456, 256, 220, 110733, 1572, 256, 220, 108479, 2760, 1554, 256, 220, 110610, 9356, 353, 4248, 421, 264, 17105, 374, 5023, 9430, 624, 256, 220, 104550, 9356, 735, 256, 220, 111659, 2760, 71197, 4555, 2710, 341, 256, 220, 110800, 7472, 470, 419, 26579, 280, 256, 220, 114240, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 22, 37715, 12969, 28, 21, 13, 102487, 18831, 1648, 220, 220, 100096, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 10412, 236, 4597, 760, 220, 99317, 14, 99619, 508, 98503, 25, 99951, 27, 98503, 25, 100772, 11, 220, 220, 17, 13, 101130, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 21, 13, 103878, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 
51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100104, 27, 98503, 25, 98503, 11, 220, 220, 16, 13, 99916, 82, 14, 275, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98729, 28964, 15, 60, 8450, 287, 220, 121860, 24, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 18, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103992, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 22, 13, 100590, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 15, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13] ... [11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102486, 16, 102626, 18, 110610, 120392, 20, 102088, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 101294, 19, 15, 99064, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 100539, 21, 101804, 21, 109803, 11, 9276, 1131, 100539, 3065, 22, 66, 101294, 23, 101723, 66, 19, 98729, 19, 62153, 101294, 66, 100104, 22, 18, 64, 23, 67, 103825, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 
11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 28964, 15, 60, 49872, 7162, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 5839, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 98503, 11, 4193, 2422, 25, 220, 18, 13, 99619, 11, 4193, 4379, 25, 220, 15, 13, 104340, 11, 24335, 4771, 25, 3007, 11, 4081, 63308, 320, 5839, 2687, 1648, 220, 16, 13, 101562, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 35357, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102486, 16, 102626, 18, 110610, 120392, 20, 102088, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 101294, 19, 15, 99064, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 100539, 21, 101804, 21, 109803, 11, 9276, 1131, 100539, 3065, 22, 66, 101294, 23, 101723, 66, 19, 98729, 19, 62153, 101294, 66, 100104, 22, 18, 64, 23, 67, 103825, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 428, 151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], 
session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 15:56:49 TP0] Prefill batch, #new-seq: 1, #new-token: 1003, #cached-token: 11904, token usage: 0.03, #running-req: 0, #queue-req: 0, \r\n[2026-01-05 15:56:49] Receive: obj=GenerateReqInput(validation_time=1.3567041605710983e-05, received_time=1767625009.8534846, received_time_perf=2479894.52640959, rid='5ccc6b0037604f3fb0c3c2167528c802', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 
77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 760, 10918, 481, 77, 364, 103946, 11, 114240, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103946, 20789, 1438, 280, 262, 220, 103992, 7472, 456, 262, 220, 101804, 2760, 456, 262, 220, 102487, 1572, 262, 220, 103205, 2760, 1554, 262, 220, 101663, 9356, 353, 12017, 678, 17105, 3689, 624, 262, 220, 100809, 9356, 735, 256, 220, 99457, 2760, 2797, 4555, 737, 341, 256, 220, 107609, 7472, 419, 2285, 26367, 10546, 7426, 2403, 543, 256, 220, 109871, 7472, 419, 29307, 5179, 7426, 2512, 543, 256, 220, 110248, 7472, 419, 77795, 5179, 7426, 2512, 543, 256, 220, 109803, 7472, 419, 4952, 2512, 284, 845, 280, 256, 220, 108345, 7472, 419, 26579, 284, 895, 280, 256, 220, 109626, 2760, 456, 256, 220, 110733, 1572, 256, 220, 108479, 2760, 1554, 256, 220, 110610, 9356, 353, 4248, 421, 264, 17105, 374, 5023, 9430, 624, 256, 220, 104550, 9356, 735, 256, 220, 111659, 2760, 71197, 4555, 2710, 341, 256, 220, 110800, 7472, 470, 419, 26579, 280, 256, 220, 114240, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 22, 37715, 12969, 28, 21, 13, 102487, 18831, 1648, 220, 220, 100096, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 10412, 236, 4597, 760, 220, 99317, 14, 99619, 508, 98503, 25, 99951, 27, 98503, 25, 100772, 11, 220, 220, 17, 13, 101130, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 21, 13, 103878, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100104, 27, 98503, 25, 98503, 11, 220, 220, 16, 13, 99916, 82, 14, 275, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98729, 28964, 15, 60, 8450, 287, 220, 121860, 24, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 18, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103992, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 22, 13, 100590, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 15, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13] ... 
[11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102486, 16, 102626, 18, 110610, 120392, 20, 102088, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 101294, 19, 15, 99064, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 100539, 21, 101804, 21, 109803, 11, 9276, 1131, 100539, 3065, 22, 66, 101294, 23, 101723, 66, 19, 98729, 19, 62153, 101294, 66, 100104, 22, 18, 64, 23, 67, 103825, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 
100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 28964, 15, 60, 49872, 7162, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 5839, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 98503, 11, 4193, 2422, 25, 220, 18, 13, 99619, 11, 4193, 4379, 25, 220, 15, 13, 104340, 11, 24335, 4771, 25, 3007, 11, 4081, 63308, 320, 5839, 2687, 1648, 220, 16, 13, 101562, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 35357, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102486, 16, 102626, 18, 110610, 120392, 20, 102088, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 101294, 19, 15, 99064, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 100539, 21, 101804, 21, 109803, 11, 9276, 1131, 100539, 3065, 22, 66, 101294, 23, 101723, 66, 19, 98729, 19, 62153, 101294, 66, 100104, 22, 18, 64, 23, 67, 103825, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 428, 151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 15:56:49 TP0] Prefill batch, #new-seq: 1, #new-token: 2, #cached-token: 12905, token usage: 0.03, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +341,723487,"TERMINAL",0,0,"[2026-01-05 15:56:50] Finish: obj=GenerateReqInput(validation_time=1.460174098610878e-05, received_time=1767625009.8186285, received_time_perf=2479894.491553614, rid='4718b125998e4b4b8fc7fc377eaa053d', http_worker_ipc=None, 
text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ': 0, token usage: 0.00, accept len: 3.23, accept rate: 0.81, cuda graph: True, gen throughput (token/s): 1.46, #queue-req: 0, \n[2026-01-05 15:56:49] Finish: obj=GenerateReqInput(validation_time=1.571793109178543e-05, received_time=1767625009.3940308, received_time_perf=2479894.066956104, rid=\'06eb7c39844c4194acf39c2973a8d92b\', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n \'START,ENDp\'`, which produces 6-character right-aligned line numbers\n... 
[truncated]\n/nothink<|assistant|>\n', input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 760, 10918, 481, 77, 
364, 103946, 11, 114240, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103946, 20789, 1438, 280, 262, 220, 103992, 7472, 456, 262, 220, 101804, 2760, 456, 262, 220, 102487, 1572, 262, 220, 103205, 2760, 1554, 262, 220, 101663, 9356, 353, 12017, 678, 17105, 3689, 624, 262, 220, 100809, 9356, 735, 256, 220, 99457, 2760, 2797, 4555, 737, 341, 256, 220, 107609, 7472, 419, 2285, 26367, 10546, 7426, 2403, 543, 256, 220, 109871, 7472, 419, 29307, 5179, 7426, 2512, 543, 256, 220, 110248, 7472, 419, 77795, 5179, 7426, 2512, 543, 256, 220, 109803, 7472, 419, 4952, 2512, 284, 845, 280, 256, 220, 108345, 7472, 419, 26579, 284, 895, 280, 256, 220, 109626, 2760, 456, 256, 220, 110733, 1572, 256, 220, 108479, 2760, 1554, 256, 220, 110610, 9356, 353, 4248, 421, 264, 17105, 374, 5023, 9430, 624, 256, 220, 104550, 9356, 735, 256, 220, 111659, 2760, 71197, 4555, 2710, 341, 256, 220, 110800, 7472, 470, 419, 26579, 280, 256, 220, 114240, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 22, 37715, 12969, 28, 21, 13, 102487, 18831, 1648, 220, 220, 100096, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 10412, 236, 4597, 760, 220, 99317, 14, 99619, 508, 98503, 25, 99951, 27, 98503, 25, 100772, 11, 220, 220, 17, 13, 101130, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 21, 13, 103878, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100104, 27, 98503, 25, 98503, 11, 220, 220, 16, 13, 99916, 82, 14, 275, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98729, 28964, 15, 60, 8450, 287, 220, 121860, 24, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 18, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103992, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 22, 13, 100590, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 15, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13] ... 
[11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102486, 16, 102626, 18, 110610, 120392, 20, 102088, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 101294, 19, 15, 99064, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 100539, 21, 101804, 21, 109803, 11, 9276, 1131, 100539, 3065, 22, 66, 101294, 23, 101723, 66, 19, 98729, 19, 62153, 101294, 66, 100104, 22, 18, 64, 23, 67, 103825, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 
100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 28964, 15, 60, 49872, 7162, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 5839, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 98503, 11, 4193, 2422, 25, 220, 18, 13, 99619, 11, 4193, 4379, 25, 220, 15, 13, 104340, 11, 24335, 4771, 25, 3007, 11, 4081, 63308, 320, 5839, 2687, 1648, 220, 16, 13, 101562, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 35357, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102486, 16, 102626, 18, 110610, 120392, 20, 102088, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 101294, 19, 15, 99064, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 100539, 21, 101804, 21, 109803, 11, 9276, 1131, 100539, 3065, 22, 66, 101294, 23, 101723, 66, 19, 98729, 19, 62153, 101294, 66, 100104, 22, 18, 64, 23, 67, 103825, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 428, 151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '48,68p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 100933, 11, 101840, 79, 1248, 73022, 151336], 'meta_info': {'id': '4718b125998e4b4b8fc7fc377eaa053d', 
'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 12907, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.0016456407029181719, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.04534756764769554, 4616, 'cat'), (-0.0019445574143901467, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-1.6689286894688848e-06, 608, ' /'), (-1.1920928244535389e-07, 5117, 'home'), (-1.1920928244535389e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.6689286894688848e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-1.1920928244535389e-07, 79888, '-extension'), (-0.0008104139124043286, 13428, '/src'), (-0.0001641377166379243, 14, '/'), (-0.00028618055512197316, 27082, 'preview'), (-0.3520166575908661, 14, '/'), (-0.09417729079723358, 5057, 'inline'), (-1.0728830375228426e-06, 5179, 'Provider'), (0.0, 21239, '.ts'), (-0.0003238391946069896, 760, ' |'), (-7.033323527139146e-06, 10918, ' sed'), (-2.3841855067985307e-07, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (0.0, 364, "" '""), (-0.8768115639686584, 100933, '48'), (0.0, 11, ','), (-0.0006634180317632854, 101840, '68'), (0.0, 79, 'p'), (-1.4305104514278355e-06, 1248, ""'\n""), (-0.0009446449112147093, 73022, '```'), (-7.760223525110632e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 11904, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 0.4549131393432617, 'response_sent_to_client_ts': 1767625010.2737079}}\r\n[2026-01-05 15:56:50] Finish: obj=GenerateReqInput(validation_time=1.3567041605710983e-05, received_time=1767625009.8534846, received_time_perf=2479894.52640959, rid='5ccc6b0037604f3fb0c3c2167528c802', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
': 0, token usage: 0.00, accept len: 3.23, accept rate: 0.81, cuda graph: True, gen throughput (token/s): 1.46, #queue-req: 0, \n[2026-01-05 15:56:49] Finish: obj=GenerateReqInput(validation_time=1.571793109178543e-05, received_time=1767625009.3940308, received_time_perf=2479894.066956104, rid=\'06eb7c39844c4194acf39c2973a8d92b\', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n \'START,ENDp\'`, which produces 6-character right-aligned line numbers\n... [truncated]\n/nothink<|assistant|>\n', input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 
279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 760, 10918, 481, 77, 364, 103946, 11, 114240, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103946, 20789, 1438, 280, 262, 220, 103992, 7472, 456, 262, 220, 101804, 2760, 456, 262, 220, 102487, 1572, 262, 220, 103205, 2760, 1554, 262, 220, 101663, 9356, 353, 12017, 678, 17105, 3689, 624, 262, 220, 100809, 9356, 735, 256, 220, 99457, 2760, 2797, 4555, 737, 341, 256, 220, 107609, 7472, 419, 2285, 26367, 10546, 7426, 2403, 543, 256, 220, 109871, 7472, 419, 29307, 5179, 7426, 2512, 543, 256, 220, 110248, 7472, 419, 77795, 5179, 7426, 2512, 543, 256, 220, 109803, 7472, 419, 4952, 2512, 284, 845, 280, 256, 220, 108345, 7472, 419, 26579, 284, 895, 280, 256, 220, 109626, 2760, 456, 256, 220, 110733, 1572, 256, 220, 108479, 2760, 1554, 256, 220, 110610, 9356, 353, 4248, 421, 264, 17105, 374, 5023, 9430, 624, 256, 220, 104550, 9356, 735, 256, 220, 111659, 2760, 71197, 4555, 2710, 341, 256, 220, 110800, 7472, 470, 419, 26579, 280, 256, 220, 114240, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 22, 37715, 12969, 28, 21, 13, 102487, 18831, 1648, 220, 220, 100096, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 10412, 236, 4597, 760, 220, 99317, 14, 99619, 508, 98503, 25, 99951, 27, 98503, 25, 100772, 11, 220, 220, 17, 13, 101130, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 21, 13, 103878, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100104, 27, 98503, 25, 98503, 11, 220, 220, 16, 13, 99916, 82, 14, 275, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98729, 28964, 15, 60, 8450, 287, 220, 121860, 24, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 18, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103992, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 22, 13, 100590, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 15, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13] ... 
[11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102486, 16, 102626, 18, 110610, 120392, 20, 102088, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 101294, 19, 15, 99064, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 100539, 21, 101804, 21, 109803, 11, 9276, 1131, 100539, 3065, 22, 66, 101294, 23, 101723, 66, 19, 98729, 19, 62153, 101294, 66, 100104, 22, 18, 64, 23, 67, 103825, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 
100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 28964, 15, 60, 49872, 7162, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 5839, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 98503, 11, 4193, 2422, 25, 220, 18, 13, 99619, 11, 4193, 4379, 25, 220, 15, 13, 104340, 11, 24335, 4771, 25, 3007, 11, 4081, 63308, 320, 5839, 2687, 1648, 220, 16, 13, 101562, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 35357, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102486, 16, 102626, 18, 110610, 120392, 20, 102088, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 101294, 19, 15, 99064, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 100539, 21, 101804, 21, 109803, 11, 9276, 1131, 100539, 3065, 22, 66, 101294, 23, 101723, 66, 19, 98729, 19, 62153, 101294, 66, 100104, 22, 18, 64, 23, 67, 103825, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 428, 151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/index.ts | sed -n '93,113p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 9019, 21239, 760, 10918, 481, 77, 364, 103946, 11, 114240, 79, 1248, 73022, 151336], 'meta_info': {'id': '5ccc6b0037604f3fb0c3c2167528c802', 'finish_reason': 
{'type': 'stop', 'matched': 151336}, 'prompt_tokens': 12907, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.001642546383664012, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.04355461522936821, 4616, 'cat'), (-0.0016251702327281237, 481, ' -'), (-2.3841855067985307e-07, 77, 'n'), (-2.264974000354414e-06, 608, ' /'), (-1.1920928244535389e-07, 5117, 'home'), (-1.1920928244535389e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.6689286894688848e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-1.1920928244535389e-07, 79888, '-extension'), (-0.0006762839620932937, 13428, '/src'), (-0.0001731960946926847, 14, '/'), (-0.0002798642381094396, 27082, 'preview'), (-1.799684762954712, 9019, '/index'), (-1.1920928244535389e-07, 21239, '.ts'), (-0.0006687788409180939, 760, ' |'), (-1.5258672647178173e-05, 10918, ' sed'), (-2.3841855067985307e-07, 481, ' -'), (0.0, 77, 'n'), (0.0, 364, "" '""), (-0.13669270277023315, 103946, '93'), (0.0, 11, ','), (-1.7762025890988298e-05, 114240, '113'), (0.0, 79, 'p'), (-8.344646857949556e-07, 1248, ""'\n""), (-0.0007901645149104297, 73022, '```'), (-0.00013255194062367082, 151336, '<|user|>')], 'completion_tokens': 38, 'cached_tokens': 12905, 'spec_accept_rate': 0.8181818181818182, 'spec_accept_length': 3.4545454545454546, 'spec_verify_ct': 11, 'spec_accept_token_num': 27, 'spec_draft_token_num': 33, 'e2e_latency': 0.42016148567199707, 'response_sent_to_client_ts': 1767625010.275222}}\r\n[2026-01-05 15:56:50] INFO: 10.86.2.252:34180 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +342,732163,"src/preview/inlineProvider.ts",1233,0,"",typescript,selection_command +343,732261,"src/preview/inlineProvider.ts",1198,0,"",typescript,selection_command +344,732364,"src/preview/inlineProvider.ts",1159,0,"",typescript,selection_command +345,732449,"TERMINAL",0,0,"[2026-01-05 15:56:59] Receive: obj=GenerateReqInput(validation_time=2.072099596261978e-05, received_time=1767625019.1935859, received_time_perf=2479903.86651138, rid='c50a64ed29b24f1d8435ee7ff49f59ce', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 
97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 21, 13, 103878, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100104, 27, 98503, 25, 98503, 11, 220, 220, 16, 13, 99916, 82, 14, 275, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98729, 28964, 15, 60, 8450, 287, 220, 121860, 24, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 18, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103992, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 22, 13, 100590, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 15, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 101663, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 21, 13, 102284, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 16, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103205, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 21, 13, 104340, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 17, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 
99064, 13, 100772, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 21, 13, 104340, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 15, 60, 15678, 7834, 4237, 3161, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 16, 60, 15678, 7834, 4237, 3161, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 17, 60, 15678, 7834, 4237, 3161, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 18, 60, 15678, 7834, 4237, 3161, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 15, 60, 15678, 7834, 4237, 10330, 13, 1833, 10426, 28, 15, 13, 98503, 18831, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 18, 60, 15678, 7834, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651] ... [382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 19, 99618, 127031, 100614, 23, 21, 98668, 23, 100928, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 23, 99243, 21, 99869, 20, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 101474, 16, 101130, 18, 21, 99367, 11, 9276, 1131, 101655, 16, 23, 65, 109641, 100809, 23, 68, 19, 65, 19, 65, 23, 8315, 22, 8315, 101140, 22, 68, 5305, 100002, 18, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 
99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101140, 11, 102486, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 13751, 5656, 908, 198, 262, 220, 101917, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 13771, 863, 341, 262, 220, 102486, 13056, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 
'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 15:56:59 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 590, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +346,732518,"TERMINAL",0,0,"[2026-01-05 15:56:59] Receive: obj=GenerateReqInput(validation_time=1.53309665620327e-05, received_time=1767625019.290924, received_time_perf=2479903.963849417, rid='ef3d0b5485f64e4fa1d328375acee249', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 
23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 21, 13, 103878, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100104, 27, 98503, 25, 98503, 11, 220, 220, 16, 13, 99916, 82, 14, 275, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98729, 28964, 15, 60, 8450, 287, 220, 121860, 24, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 18, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103992, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 22, 13, 100590, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 15, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 101663, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 21, 13, 102284, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 16, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103205, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 21, 13, 104340, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 17, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 99064, 13, 100772, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 21, 13, 104340, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 15, 60, 15678, 7834, 4237, 3161, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 16, 60, 15678, 7834, 4237, 3161, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 17, 60, 15678, 7834, 4237, 3161, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 18, 60, 15678, 7834, 4237, 3161, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 15, 60, 15678, 7834, 4237, 10330, 13, 1833, 10426, 28, 15, 13, 98503, 18831, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 18, 60, 15678, 7834, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651] ... 
[382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 19, 99618, 127031, 100614, 23, 21, 98668, 23, 100928, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 23, 99243, 21, 99869, 20, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 101474, 16, 101130, 18, 21, 99367, 11, 9276, 1131, 101655, 16, 23, 65, 109641, 100809, 23, 68, 19, 65, 19, 65, 23, 8315, 22, 8315, 101140, 22, 68, 5305, 100002, 18, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 
101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101140, 11, 102486, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 13751, 5656, 908, 198, 262, 220, 101917, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 13771, 863, 341, 262, 220, 102486, 13056, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +347,732717,"src/preview/inlineProvider.ts",1125,0,"",typescript,selection_command +348,732726,"TERMINAL",0,0,"[2026-01-05 15:56:59] Receive: obj=GenerateReqInput(validation_time=1.4069955796003342e-05, received_time=1767625019.4937465, received_time_perf=2479904.166671736, rid='0806c75a3bce4c30b7d4c26c92228eb7', http_worker_ipc=None, text=None, 
input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 7530, 1975, 5179, 21239, 760, 10918, 481, 77, 364, 99366, 11, 100702, 79, 
1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99366, 9356, 353, 2573, 279, 1482, 1917, 323, 1181, 17086, 1555, 369, 19511, 17957, 624, 262, 220, 99367, 9356, 735, 262, 220, 99082, 2760, 738, 2512, 15294, 25, 5586, 11, 17086, 2460, 25, 1372, 1648, 737, 341, 262, 220, 99317, 7472, 419, 12389, 284, 1917, 280, 262, 220, 99419, 7472, 419, 44106, 2460, 284, 17086, 2460, 280, 262, 220, 99243, 2760, 456, 262, 220, 98729, 1572, 262, 220, 98360, 2760, 1554, 262, 220, 99146, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99241, 9356, 735, 262, 220, 99619, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99590, 7472, 419, 12389, 284, 845, 280, 262, 220, 99446, 7472, 419, 44106, 2460, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 39350, 19511, 2213, 979, 1196, 305, 8966, 916, 279, 20396, 3082, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 3410, 34209, 1006, 262, 220, 101175, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 100702, 7472, 2309, 25, 55008, 21900, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 100933, 37715, 12969, 28, 20, 13, 100441, 18831, 1648, 256, 220, 15, 4, 91, 5238, 15417, 760, 220, 15, 14, 99619, 508, 98503, 25, 98503, 89226, 937, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 99243, 37715, 12969, 28, 19, 13, 104127, 18831, 1648, 220, 220, 99064, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 10412, 233, 42464, 760, 220, 22, 14, 99619, 508, 98503, 25, 100772, 27, 98503, 25, 100772, 11, 220, 220, 20, 13, 101140, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 19, 13, 104340, 18831, 1648, 220, 220, 102487, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 10412, 234, 262, 760, 220, 99241, 14, 99619, 508, 98503, 25, 100772, 27, 98503, 25, 98503, 11, 220, 99419, 13, 101478, 275, 2687, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 27669] ... 
[11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101140, 11, 102486, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 13751, 5656, 908, 198, 262, 220, 101917, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 13771, 863, 341, 262, 220, 102486, 13056, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 102573, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100372, 17, 100614, 24, 20, 102487, 99916, 16, 126189, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 99200, 16, 24, 13, 98729, 18, 101729, 20, 24, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 100067, 18, 13, 23, 101478, 20, 98965, 100919, 11, 9276, 1131, 66, 99200, 64, 102636, 291, 100104, 65, 99590, 69, 16, 67, 23, 102088, 20, 2127, 22, 542, 101474, 69, 102573, 346, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 
100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 15:56:59 TP0] Prefill batch, #new-seq: 2, #new-token: 8192, #cached-token: 8781, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +349,733070,"TERMINAL",0,0,"[2026-01-05 15:56:59] Receive: obj=GenerateReqInput(validation_time=1.3747718185186386e-05, received_time=1767625019.7926326, received_time_perf=2479904.465557746, rid='b84987a20bf54451a198dc2f539dad92', 
http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 100933, 37715, 12969, 28, 20, 13, 100441, 18831, 1648, 256, 220, 15, 4, 91, 5238, 15417, 760, 220, 15, 14, 99619, 508, 98503, 25, 98503, 89226, 
937, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 99243, 37715, 12969, 28, 19, 13, 104127, 18831, 1648, 220, 220, 99064, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 10412, 233, 42464, 760, 220, 22, 14, 99619, 508, 98503, 25, 100772, 27, 98503, 25, 100772, 11, 220, 220, 20, 13, 101140, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 19, 13, 104340, 18831, 1648, 220, 220, 102487, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 10412, 234, 262, 760, 220, 99241, 14, 99619, 508, 98503, 25, 100772, 27, 98503, 25, 98503, 11, 220, 99419, 13, 101478, 275, 2687, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 27669, 36761, 21239, 760, 10918, 481, 77, 364, 16, 11, 98965, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 56390, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 5714, 315, 279, 3974, 3735, 16216, 624, 257, 220, 21, 197, 735, 257, 220, 22, 59028, 943, 17231, 36761, 2077, 284, 364, 10325, 6, 760, 364, 80672, 6, 760, 845, 280, 257, 220, 23, 1572, 257, 220, 24, 197, 1747, 262, 220, 98668, 197, 353, 6928, 264, 3974, 3735, 13220, 369, 279, 15268, 1917, 624, 262, 220, 98965, 197, 353, 12193, 979, 15010, 374, 10730, 323, 47286, 646, 944, 387, 6839, 624, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 19, 13, 104340, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100590, 27, 98503, 25, 98503, 11, 220, 220, 19, 13, 100928, 275, 2687, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99869, 28964, 15, 60, 8450, 287, 220, 115547, 24335, 4771, 14220, 198, 58] ... 
[11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 102573, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 99698, 21, 100809, 20, 102486, 24, 103306, 100702, 19, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 99200, 16, 24, 13, 101474, 18, 22, 101562, 20, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 100067, 19, 13, 122569, 102952, 16, 22, 100632, 11, 9276, 1131, 15, 99695, 21, 66, 100899, 64, 18, 65, 346, 19, 66, 99064, 65, 22, 67, 19, 66, 99916, 66, 24, 99241, 99869, 3065, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 
220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 15:56:59 TP0] Prefill batch, #new-seq: 2, #new-token: 8192, #cached-token: 616, token usage: 0.03, #running-req: 1, #queue-req: 1, \r\n",,terminal_output +350,733351,"TERMINAL",0,0,"[2026-01-05 15:57:00 TP0] Prefill batch, #new-seq: 1, #new-token: 6390, #cached-token: 0, token usage: 0.05, #running-req: 2, #queue-req: 1, \r\n",,terminal_output +351,733642,"TERMINAL",0,0,"[2026-01-05 15:57:00 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 590, token usage: 0.06, #running-req: 3, #queue-req: 0, 
\r\n",,terminal_output +352,733903,"TERMINAL",0,0,"[2026-01-05 15:57:00 TP0] Prefill batch, #new-seq: 1, #new-token: 5495, #cached-token: 0, token usage: 0.08, #running-req: 3, #queue-req: 0, \r\n",,terminal_output +353,734367,"TERMINAL",0,0,"[2026-01-05 15:57:01] Finish: obj=GenerateReqInput(validation_time=1.3747718185186386e-05, received_time=1767625019.7926326, received_time_perf=2479904.465557746, rid='b84987a20bf54451a198dc2f539dad92', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=1.4069955796003342e-05, received_time=1767625019.4937465, received_time_perf=2479904.166671736, rid='0806c75a3bce4c30b7d4c26c92228eb7', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 100933, 37715, 12969, 28, 20, 13, 100441, 18831, 1648, 256, 220, 15, 4, 91, 5238, 15417, 760, 220, 15, 14, 99619, 508, 98503, 25, 98503, 
89226, 937, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 99243, 37715, 12969, 28, 19, 13, 104127, 18831, 1648, 220, 220, 99064, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 21410, 10412, 233, 42464, 760, 220, 22, 14, 99619, 508, 98503, 25, 100772, 27, 98503, 25, 100772, 11, 220, 220, 20, 13, 101140, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 19, 13, 104340, 18831, 1648, 220, 220, 102487, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 10412, 234, 262, 760, 220, 99241, 14, 99619, 508, 98503, 25, 100772, 27, 98503, 25, 98503, 11, 220, 99419, 13, 101478, 275, 2687, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 27669, 36761, 21239, 760, 10918, 481, 77, 364, 16, 11, 98965, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 56390, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 5714, 315, 279, 3974, 3735, 16216, 624, 257, 220, 21, 197, 735, 257, 220, 22, 59028, 943, 17231, 36761, 2077, 284, 364, 10325, 6, 760, 364, 80672, 6, 760, 845, 280, 257, 220, 23, 1572, 257, 220, 24, 197, 1747, 262, 220, 98668, 197, 353, 6928, 264, 3974, 3735, 13220, 369, 279, 15268, 1917, 624, 262, 220, 98965, 197, 353, 12193, 979, 15010, 374, 10730, 323, 47286, 646, 944, 387, 6839, 624, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 19, 13, 104340, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100590, 27, 98503, 25, 98503, 11, 220, 220, 19, 13, 100928, 275, 2687, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99869, 28964, 15, 60, 8450, 287, 220, 115547, 24335, 4771, 14220, 198, 58] ... 
[11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 102573, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 99698, 21, 100809, 20, 102486, 24, 103306, 100702, 19, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 99200, 16, 24, 13, 101474, 18, 22, 101562, 20, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 100067, 19, 13, 122569, 102952, 16, 22, 100632, 11, 9276, 1131, 15, 99695, 21, 66, 100899, 64, 18, 65, 346, 19, 66, 99064, 65, 22, 67, 19, 66, 99916, 66, 24, 99241, 99869, 3065, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 
220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '68,88p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101840, 11, 101252, 79, 1248, 73022, 151336], 'meta_info': {'id': 'b84987a20bf54451a198dc2f539dad92', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 14277, 'weight_version': 'default', 
'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-7.343022298300639e-05, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.021463146433234215, 4616, 'cat'), (-0.0033011725172400475, 481, ' -'), (-3.576278118089249e-07, 77, 'n'), (-2.9802276912960224e-06, 608, ' /'), (0.0, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-4.768360213347478e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0011531615164130926, 13428, '/src'), (-0.000636255950666964, 14, '/'), (-0.0033593906555324793, 27082, 'preview'), (-0.1097329705953598, 14, '/'), (-0.0238471832126379, 5057, 'inline'), (-1.5497195136049413e-06, 5179, 'Provider'), (-1.1920928244535389e-07, 21239, '.ts'), (-0.00015853578224778175, 760, ' |'), (-5.483612312673358e-06, 10918, ' sed'), (0.0, 481, ' -'), (-3.576278118089249e-07, 77, 'n'), (-1.1920928244535389e-07, 364, "" '""), (-1.3281558752059937, 101840, '68'), (-2.3841855067985307e-07, 11, ','), (-0.056654516607522964, 101252, '88'), (-1.1920928244535389e-07, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-0.00032360086333937943, 73022, '```'), (-0.00013255194062367082, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 590, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 1.338683843612671, 'response_sent_to_client_ts': 1767625021.131391}}\r\n[2026-01-05 15:57:01] INFO: 10.86.2.252:47908 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n[2026-01-05 15:57:01] Finish: obj=GenerateReqInput(validation_time=2.072099596261978e-05, received_time=1767625019.1935859, received_time_perf=2479903.86651138, rid='c50a64ed29b24f1d8435ee7ff49f59ce', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""dout>/nothink<|assistant|>\n\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '37,57p'\n```<|user|>\n\n 37\t */\n 38\t setEnabled(enabled: boolean): void {\n 39\t this.enabled = enabled;\n 40\t }\n 41\t\n 42\t /**\n 43\t * Provide inline completion items.\n 44\t */\n 45\t provideInlineCompletionItems(\n 46\t document: vscode.TextDocument,\n 47\t position: vscode.Position,\n 48\t context: vscode.InlineCompletionContext,\n 49\t token: vscode.CancellationToken\n 50\t ): vscode.ProviderResult {\n 51\t if (!this.enabled || !this.action) {\n 52\t return [];\n 53\t }\n 54\t\n 55\t // Handle insertions\n 56\t if (this.action.kind === 'editInsert') {\n 57\t const insertPos = toVscodePosition(this.action.position);\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 
32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 21, 13, 103878, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100104, 27, 98503, 25, 98503, 11, 220, 220, 16, 13, 99916, 82, 14, 275, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98729, 28964, 15, 60, 8450, 287, 220, 121860, 24, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 18, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103992, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 22, 13, 100590, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 15, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 101663, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 21, 13, 102284, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 16, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103205, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 21, 13, 104340, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 17, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 99064, 13, 100772, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 21, 13, 104340, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 15, 60, 15678, 7834, 4237, 3161, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 16, 60, 15678, 7834, 4237, 3161, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 17, 60, 15678, 7834, 4237, 3161, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 18, 60, 15678, 7834, 4237, 3161, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 15, 60, 15678, 7834, 4237, 10330, 13, 1833, 10426, 28, 15, 13, 98503, 18831, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 18, 60, 15678, 7834, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651] ... 
[382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 19, 99618, 127031, 100614, 23, 21, 98668, 23, 100928, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 23, 99243, 21, 99869, 20, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 101474, 16, 101130, 18, 21, 99367, 11, 9276, 1131, 101655, 16, 23, 65, 109641, 100809, 23, 68, 19, 65, 19, 65, 23, 8315, 22, 8315, 101140, 22, 68, 5305, 100002, 18, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 
101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101140, 11, 102486, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 13751, 5656, 908, 198, 262, 220, 101917, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 13771, 863, 341, 262, 220, 102486, 13056, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/hoverProvider.ts | sed -n '1,21p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 7530, 1975, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 99146, 79, 1248, 73022, 151336], 
'meta_info': {'id': 'c50a64ed29b24f1d8435ee7ff49f59ce', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 13565, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.0002586507180240005, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.050319597125053406, 4616, 'cat'), (-0.008035357110202312, 481, ' -'), (-2.3841855067985307e-07, 77, 'n'), (-5.245195097813848e-06, 608, ' /'), (-7.152555099310121e-07, 5117, 'home'), (-3.576278118089249e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (-1.1920928244535389e-07, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-2.634490556374658e-05, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-4.768370445162873e-07, 79888, '-extension'), (-0.003389805555343628, 13428, '/src'), (-0.000947503256611526, 14, '/'), (-0.011660278774797916, 27082, 'preview'), (-1.4455231428146362, 7530, '/h'), (-2.50339189733495e-06, 1975, 'over'), (-3.2305197237292305e-05, 5179, 'Provider'), (-2.3841855067985307e-07, 21239, '.ts'), (-0.013671335764229298, 760, ' |'), (-3.814689989667386e-06, 10918, ' sed'), (-5.364403477869928e-06, 481, ' -'), (0.0, 77, 'n'), (0.0, 364, "" '""), (-1.2768726348876953, 16, '1'), (-4.768370445162873e-07, 11, ','), (-0.12451309710741043, 99146, '21'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-0.0002706876548472792, 73022, '```'), (-1.0847986231965479e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 590, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 1.960096836090088, 'response_sent_to_client_ts': 1767625021.1539273}}\r\n[2026-01-05 15:57:01] Finish: obj=GenerateReqInput(validation_time=1.53309665620327e-05, received_time=1767625019.290924, received_time_perf=2479903.963849417, rid='ef3d0b5485f64e4fa1d328375acee249', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""dout>/nothink<|assistant|>\n\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '37,57p'\n```<|user|>\n\n 37\t */\n 38\t setEnabled(enabled: boolean): void {\n 39\t this.enabled = enabled;\n 40\t }\n 41\t\n 42\t /**\n 43\t * Provide inline completion items.\n 44\t */\n 45\t provideInlineCompletionItems(\n 46\t document: vscode.TextDocument,\n 47\t position: vscode.Position,\n 48\t context: vscode.InlineCompletionContext,\n 49\t token: vscode.CancellationToken\n 50\t ): vscode.ProviderResult {\n 51\t if (!this.enabled || !this.action) {\n 52\t return [];\n 53\t }\n 54\t\n 55\t // Handle insertions\n 56\t if (this.action.kind === 'editInsert') {\n 57\t const insertPos = toVscodePosition(this.action.position);\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 
32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 21, 13, 103878, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100104, 27, 98503, 25, 98503, 11, 220, 220, 16, 13, 99916, 82, 14, 275, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98729, 28964, 15, 60, 8450, 287, 220, 121860, 24, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 18, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103992, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 22, 13, 100590, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 15, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 101663, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 21, 13, 102284, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 16, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 100104, 13, 103205, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 21, 13, 104340, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 98360, 28964, 17, 60, 39669, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 99064, 13, 100772, 274, 13, 1833, 10426, 28, 15, 13, 100928, 18831, 13, 37715, 1833, 28, 21, 13, 104340, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 15, 60, 15678, 7834, 4237, 3161, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 16, 60, 15678, 7834, 4237, 3161, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 17, 60, 15678, 7834, 4237, 3161, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 18, 60, 15678, 7834, 4237, 3161, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 15, 60, 15678, 7834, 4237, 10330, 13, 1833, 10426, 28, 15, 13, 98503, 18831, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99146, 28964, 18, 60, 15678, 7834, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651] ... 
[382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 19, 99618, 127031, 100614, 23, 21, 98668, 23, 100928, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 23, 99243, 21, 99869, 20, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 101474, 16, 101130, 18, 21, 99367, 11, 9276, 1131, 101655, 16, 23, 65, 109641, 100809, 23, 68, 19, 65, 19, 65, 23, 8315, 22, 8315, 101140, 22, 68, 5305, 100002, 18, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16\n[... 24757 bytes truncated to respect terminal scrollback settings ...]\n",,terminal_output +354,740605,"src/preview/inlineProvider.ts",1125,33," provideInlineCompletionItems(",typescript,selection_command +355,740889,"TERMINAL",0,0,"[2026-01-05 15:57:07] Receive: obj=GenerateReqInput(validation_time=1.885322853922844e-05, received_time=1767625027.6340504, received_time_perf=2479912.306975693, rid='97df92f70f0941f2a3cc2d050f0dabb2', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 
32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 56390, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 5714, 315, 279, 3974, 3735, 16216, 624, 257, 220, 21, 197, 735, 257, 220, 22, 59028, 943, 17231, 36761, 2077, 284, 364, 10325, 6, 760, 364, 80672, 6, 760, 845, 280, 257, 220, 23, 1572, 257, 220, 24, 197, 1747, 262, 220, 98668, 197, 353, 6928, 264, 3974, 3735, 13220, 369, 279, 15268, 1917, 624, 262, 220, 98965, 197, 353, 12193, 979, 15010, 374, 10730, 323, 47286, 646, 944, 387, 6839, 624, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 19, 13, 104340, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100590, 27, 98503, 25, 98503, 11, 220, 220, 19, 13, 100928, 275, 2687, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99869, 28964, 15, 60, 8450, 287, 220, 115547, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 100104, 28964, 15, 60, 39669, 9955, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 20, 13, 103093, 274, 13, 1833, 10426, 28, 15, 13, 101562, 18831, 13, 37715, 1833, 28, 19, 13, 104340, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 100104, 28964, 15, 60, 39669, 9955, 13029, 24335, 4771, 3161, 13, 1096, 646, 1896, 705, 311, 3807, 4420, 13, 37715, 1833, 28, 19, 13, 104340, 18831, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 100104, 28964, 16, 60, 39669, 9955, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 20, 13, 103093, 274, 13, 1833, 10426, 28, 15, 13, 101562, 18831, 13, 37715, 1833, 28, 19, 13, 102269, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 100104, 28964, 16, 60, 39669, 9955, 13029, 
24335, 4771, 3161, 13, 1096, 646, 1896, 705, 311, 3807, 4420, 13, 37715, 1833, 28, 19, 13, 102269, 18831, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 100104, 28964, 17, 60, 39669, 9955, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 20, 13, 100096, 274, 13, 1833, 10426, 28, 15, 13, 101562, 18831, 13, 37715, 1833, 28, 19, 13, 102269, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082] ... [11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 102573, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101140, 19, 102114, 16, 23, 99243, 20, 99243, 21, 100919, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 99200, 16, 24, 13, 102626, 17, 21, 101175, 21, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 100067, 19, 13, 19, 101411, 101130, 22, 22, 101562, 11, 9276, 1131, 65, 23, 101474, 103878, 64, 98360, 13225, 20, 101723, 102624, 64, 100759, 7628, 17, 69, 20, 101294, 55469, 103825, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 
11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 15:57:07 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 586, 
token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +356,741407,"TERMINAL",0,0,"[2026-01-05 15:57:07 TP0] Prefill batch, #new-seq: 1, #new-token: 5953, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +357,741583,"TERMINAL",0,0,"[2026-01-05 15:57:08] Finish: obj=GenerateReqInput(validation_time=1.885322853922844e-05, received_time=1767625027.6340504, received_time_perf=2479912.306975693, rid='97df92f70f0941f2a3cc2d050f0dabb2', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=1.3747718185186386e-05, received_time=1767625019.7926326, received_time_perf=2479904.465557746, rid='b84987a20bf54451a198dc2f539dad92', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 56390, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 
197, 1747, 257, 220, 20, 197, 353, 5714, 315, 279, 3974, 3735, 16216, 624, 257, 220, 21, 197, 735, 257, 220, 22, 59028, 943, 17231, 36761, 2077, 284, 364, 10325, 6, 760, 364, 80672, 6, 760, 845, 280, 257, 220, 23, 1572, 257, 220, 24, 197, 1747, 262, 220, 98668, 197, 353, 6928, 264, 3974, 3735, 13220, 369, 279, 15268, 1917, 624, 262, 220, 98965, 197, 353, 12193, 979, 15010, 374, 10730, 323, 47286, 646, 944, 387, 6839, 624, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 19, 13, 104340, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 100590, 27, 98503, 25, 98503, 11, 220, 220, 19, 13, 100928, 275, 2687, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99869, 28964, 15, 60, 8450, 287, 220, 115547, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 100104, 28964, 15, 60, 39669, 9955, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 20, 13, 103093, 274, 13, 1833, 10426, 28, 15, 13, 101562, 18831, 13, 37715, 1833, 28, 19, 13, 104340, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 100104, 28964, 15, 60, 39669, 9955, 13029, 24335, 4771, 3161, 13, 1096, 646, 1896, 705, 311, 3807, 4420, 13, 37715, 1833, 28, 19, 13, 104340, 18831, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 100104, 28964, 16, 60, 39669, 9955, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 20, 13, 103093, 274, 13, 1833, 10426, 28, 15, 13, 101562, 18831, 13, 37715, 1833, 28, 19, 13, 102269, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 100104, 28964, 16, 60, 39669, 9955, 13029, 24335, 4771, 3161, 13, 1096, 646, 1896, 705, 311, 3807, 4420, 13, 37715, 1833, 28, 19, 13, 102269, 18831, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 100104, 28964, 17, 60, 39669, 9955, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 20, 13, 100096, 274, 13, 1833, 10426, 28, 15, 13, 101562, 18831, 13, 37715, 1833, 28, 19, 13, 102269, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082] ... 
[11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 102573, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101140, 19, 102114, 16, 23, 99243, 20, 99243, 21, 100919, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 99200, 16, 24, 13, 102626, 17, 21, 101175, 21, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 100067, 19, 13, 19, 101411, 101130, 22, 22, 101562, 11, 9276, 1131, 65, 23, 101474, 103878, 64, 98360, 13225, 20, 101723, 102624, 64, 100759, 7628, 17, 69, 20, 101294, 55469, 103825, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 
220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '1,12p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98886, 79, 1248, 73022, 151336], 'meta_info': {'id': '97df92f70f0941f2a3cc2d050f0dabb2', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 14731, 'weight_version': 
'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.00022921319759916514, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.08096983283758163, 4616, 'cat'), (-0.006734176073223352, 481, ' -'), (-9.536738616588991e-07, 77, 'n'), (-1.0251946150674485e-05, 608, ' /'), (-1.1920928244535389e-07, 5117, 'home'), (-2.3841855067985307e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.6689286894688848e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0019516960019245744, 13428, '/src'), (-0.0013378250878304243, 14, '/'), (-0.04711297154426575, 27082, 'preview'), (-0.15803663432598114, 14, '/'), (-0.003851141082122922, 5057, 'inline'), (-2.0265558760002023e-06, 5179, 'Provider'), (0.0, 21239, '.ts'), (-0.0009454786195419729, 760, ' |'), (-6.4490144723095e-05, 10918, ' sed'), (0.0, 481, ' -'), (-4.768370445162873e-07, 77, 'n'), (-2.3841855067985307e-07, 364, "" '""), (-1.160483717918396, 16, '1'), (-5.245195097813848e-06, 11, ','), (-0.26524072885513306, 98886, '12'), (-2.3841855067985307e-07, 79, 'p'), (-3.576278118089249e-07, 1248, ""'\n""), (-0.0006610354175791144, 73022, '```'), (-2.658331868587993e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 586, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 0.742457389831543, 'response_sent_to_client_ts': 1767625028.3765674}}\r\n[2026-01-05 15:57:08] INFO: 10.86.2.252:38636 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +358,817495,"src/preview/inlineProvider.ts",1125,0,"",typescript,selection_command +359,817779,"TERMINAL",0,0,"[2026-01-05 15:58:24] Receive: obj=GenerateReqInput(validation_time=2.3699365556240082e-05, received_time=1767625104.5270398, received_time_perf=2479989.199965454, rid='a29e5a40bd2e44e5b4b757d8691a78af', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 
437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 198, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 311, 51397, 1851, 6046, 11, 311, 51397, 1851, 3812, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 624, 257, 220, 21, 197, 353, 1096, 4990, 10614, 916, 27971, 594, 30523, 323, 4278, 389, 4287, 5128, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 8967, 25, 2710, 284, 830, 280, 262, 220, 98965, 1572, 262, 220, 98886, 2760, 1554, 262, 220, 99366, 9356, 353, 2573, 279, 1482, 1917, 311, 3037, 438, 7381, 9750, 624, 262, 220, 99367, 9356, 735, 262, 220, 99082, 2760, 738, 2512, 15294, 25, 5586, 1648, 737, 341, 262, 220, 99317, 7472, 419, 12389, 284, 1917, 280, 262, 220, 99419, 7472, 442, 30174, 30530, 6119, 311, 312, 65012, 7381, 3459, 908, 198, 262, 220, 99243, 7472, 55008, 33739, 7769, 4062, 492, 8866, 12389, 29307, 50, 3799, 28455, 1157, 262, 220, 98729, 2760, 456, 262, 220, 98360, 1572, 262, 220, 99146, 2760, 1554, 262, 220, 99241, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99619, 9356, 735, 262, 220, 99590, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99446, 7472, 419, 12389, 284, 845, 280, 262, 220, 99916, 2760, 
456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 2126, 279, 1482, 1917, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 633, 2512, 4555, 5586, 760, 845, 341, 262, 220, 101175, 7472, 470, 419, 12389, 280, 262, 220, 100702, 2760, 456, 262, 220, 101135, 1572, 262, 220, 100235, 2760, 1554, 262, 220, 100632, 9356, 353, 18535, 476, 11151, 279, 9106, 624, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309] ... [103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102486, 25, 100372, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101252, 20, 18, 99241, 102284, 18, 24, 99241, 23, 101723, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 99200, 17, 22, 13, 21, 101135, 15, 99200, 19, 11, 3949, 3009, 76167, 28, 99590, 22, 100809, 16, 17, 13, 124540, 24, 100899, 103093, 18, 11, 9276, 1131, 103205, 2940, 103825, 69, 100096, 69, 100614, 19, 16, 69, 17, 64, 18, 638, 17, 67, 15, 99200, 69, 15, 67, 12517, 17, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 
220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, 
custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 15:58:24 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 607, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +360,818055,"TERMINAL",0,0,"[2026-01-05 15:58:24 TP0] Prefill batch, #new-seq: 1, #new-token: 6106, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +361,818404,"TERMINAL",0,0,"[2026-01-05 15:58:25 TP0] Decode batch, #running-req: 1, #token: 14924, token usage: 0.03, accept len: 3.37, accept rate: 0.84, cuda graph: True, gen throughput (token/s): 3.04, #queue-req: 0, \r\n",,terminal_output +362,818524,"TERMINAL",0,0,"[2026-01-05 15:58:25] Finish: obj=GenerateReqInput(validation_time=2.3699365556240082e-05, received_time=1767625104.5270398, received_time_perf=2479989.199965454, rid='a29e5a40bd2e44e5b4b757d8691a78af', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=1.885322853922844e-05, received_time=1767625027.6340504, received_time_perf=2479912.306975693, rid='97df92f70f0941f2a3cc2d050f0dabb2', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 198, 73022, 
151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 311, 51397, 1851, 6046, 11, 311, 51397, 1851, 3812, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 624, 257, 220, 21, 197, 353, 1096, 4990, 10614, 916, 27971, 594, 30523, 323, 4278, 389, 4287, 5128, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 8967, 25, 2710, 284, 830, 280, 262, 220, 98965, 1572, 262, 220, 98886, 2760, 1554, 262, 220, 99366, 9356, 353, 2573, 279, 1482, 1917, 311, 3037, 438, 7381, 9750, 624, 262, 220, 99367, 9356, 735, 262, 220, 99082, 2760, 738, 2512, 15294, 25, 5586, 1648, 737, 341, 262, 220, 99317, 7472, 419, 12389, 284, 1917, 280, 262, 220, 99419, 7472, 442, 30174, 30530, 6119, 311, 312, 65012, 7381, 3459, 908, 198, 262, 220, 99243, 7472, 55008, 33739, 7769, 4062, 492, 8866, 12389, 29307, 50, 3799, 28455, 1157, 262, 220, 98729, 2760, 456, 262, 220, 98360, 1572, 262, 220, 99146, 2760, 1554, 262, 220, 99241, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99619, 9356, 735, 262, 220, 99590, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99446, 7472, 419, 12389, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 2126, 279, 1482, 1917, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 633, 2512, 4555, 5586, 760, 845, 341, 262, 220, 101175, 7472, 470, 419, 12389, 280, 262, 220, 100702, 2760, 456, 262, 220, 101135, 1572, 262, 220, 100235, 2760, 1554, 262, 220, 100632, 9356, 353, 18535, 476, 11151, 279, 9106, 624, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309] ... 
[103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102486, 25, 100372, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101252, 20, 18, 99241, 102284, 18, 24, 99241, 23, 101723, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 99200, 17, 22, 13, 21, 101135, 15, 99200, 19, 11, 3949, 3009, 76167, 28, 99590, 22, 100809, 16, 17, 13, 124540, 24, 100899, 103093, 18, 11, 9276, 1131, 103205, 2940, 103825, 69, 100096, 69, 100614, 19, 16, 69, 17, 64, 18, 638, 17, 67, 15, 99200, 69, 15, 67, 12517, 17, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 
24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '69,89p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 103093, 11, 103502, 79, 1248, 73022, 151336], 'meta_info': {'id': 'a29e5a40bd2e44e5b4b757d8691a78af', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 14905, 'weight_version': 
'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.0001230164198204875, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.021905258297920227, 4616, 'cat'), (-0.00033480284037068486, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-8.344646857949556e-07, 608, ' /'), (0.0, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-2.3841855067985307e-07, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.00193896540440619, 13428, '/src'), (-0.00011634149996098131, 14, '/'), (-0.000557267339900136, 27082, 'preview'), (-0.07834383100271225, 14, '/'), (-0.0013103241799399257, 5057, 'inline'), (-1.1920928244535389e-07, 5179, 'Provider'), (0.0, 21239, '.ts'), (-0.0004627825692296028, 760, ' |'), (-4.339123915997334e-05, 10918, ' sed'), (0.0, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (0.0, 364, "" '""), (-1.1477540731430054, 103093, '69'), (-1.1920928244535389e-07, 11, ','), (-0.017452361062169075, 103502, '89'), (0.0, 79, 'p'), (0.0, 1248, ""'\n""), (-0.00015841660206206143, 73022, '```'), (-0.0005530973430722952, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 607, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 0.7762701511383057, 'response_sent_to_client_ts': 1767625105.3033721}}\r\n[2026-01-05 15:58:25] INFO: 10.86.2.252:40236 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +363,864434,"src/preview/inlineProvider.ts",0,3153,"",typescript,content +364,864496,"src/preview/inlineProvider.ts",0,0,"import * as vscode from 'vscode';\nimport { Action, toVscodeRange, toVscodePosition } from './types';\n\n/**\n * Provides inline completion items (ghost text) for code edit actions.\n * This takes priority over Cursor's hints and works on empty lines.\n */\nexport class CrowdPilotInlineProvider implements vscode.InlineCompletionItemProvider {\n private action: Action | null = null;\n private enabled: boolean = true;\n\n /**\n * Set the current action to display as inline completion.\n */\n setAction(action: Action): void {\n this.action = action;\n // Trigger VS Code to re-query inline completions\n vscode.commands.executeCommand('editor.action.inlineSuggest.trigger');\n }\n\n /**\n * Clear the current action.\n */\n clearAction(): void {\n this.action = null;\n }\n\n /**\n * Get the current action.\n */\n getAction(): Action | null {\n return this.action;\n }\n\n /**\n * Enable or disable the provider.\n */\n setEnabled(enabled: boolean): void {\n this.enabled = enabled;\n }\n\n /**\n * Provide inline completion items.\n */\n provideInlineCompletionItems(\n document: vscode.TextDocument,\n position: vscode.Position,\n context: vscode.InlineCompletionContext,\n token: vscode.CancellationToken\n ): vscode.ProviderResult {\n if (!this.enabled || !this.action) {\n return [];\n }\n\n // Only handle pure insertions (not replacements)\n // Replacements are handled by decorations to properly show what's being deleted\n if (this.action.kind !== 'editInsert') {\n return [];\n }\n\n const insertPos = toVscodePosition(this.action.position);\n \n // Only provide completion if insert position is at or after the cursor\n // VS Code's inline completion API shows ghost text at/after cursor position\n if 
(insertPos.isBefore(position)) {\n return [];\n }\n \n const item = new vscode.InlineCompletionItem(\n this.action.text,\n new vscode.Range(insertPos, insertPos)\n );\n \n return [item];\n }\n\n /**\n * Check if the current action's position is near the given cursor position.\n * Used to determine if we need a fallback indicator.\n */\n isActionNearCursor(cursorLine: number): boolean {\n if (!this.action) return false;\n \n if (this.action.kind === 'editInsert') {\n return Math.abs(this.action.position[0] - cursorLine) <= 1;\n }\n \n if (this.action.kind === 'editReplace') {\n return cursorLine >= this.action.range.start[0] - 1 && \n cursorLine <= this.action.range.end[0] + 1;\n }\n \n return false;\n }\n}\n\n\n",typescript,content +365,864498,"src/preview/inlineProvider.ts",2863,2,"",typescript,content +366,864833,"TERMINAL",0,0,"[2026-01-05 15:59:11] Receive: obj=GenerateReqInput(validation_time=2.2106803953647614e-05, received_time=1767625151.529731, received_time_perf=2480036.20265658, rid='8c7ac8947ea84ebab0e1382c5ef16f10', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 
481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 100933, 37715, 12969, 28, 19, 13, 101478, 18831, 1648, 256, 220, 15, 4, 91, 5238, 15417, 760, 220, 15, 14, 99619, 508, 98503, 25, 98503, 89226, 937, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 19, 13, 100461, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 98503, 27, 98503, 25, 98503, 11, 220, 102088, 13, 99317, 275, 2687, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 15, 60, 8450, 287, 220, 103093, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 17, 60, 39669, 9955, 13029, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 16, 13, 99951, 274, 13, 1833, 10426, 28, 15, 13, 100632, 18831, 13, 37715, 1833, 28, 19, 13, 102340, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 18, 60, 39669, 9955, 13029, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 16, 13, 100562, 274, 13, 1833, 10426, 28, 15, 13, 100632, 18831, 13, 37715, 1833, 28, 19, 13, 102636, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 16, 60, 39669, 9955, 13029, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 16, 13, 101135, 274, 13, 1833, 10426, 28, 15, 13, 100632, 18831, 13, 37715, 1833, 28, 19, 13, 102340, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 15, 60, 39669, 9955, 13029, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 16, 13, 100235, 274, 13, 1833, 10426, 28, 15, 13, 100632, 18831, 13, 37715, 1833, 28, 19, 13, 100461, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 15, 60, 1932, 10779, 4273, 28735, 28, 111156, 101135, 18, 11, 11873, 291, 43082, 483, 2368, 28, 23, 98729, 17, 11, 1932, 43082, 483, 28735, 28, 114491, 104029, 11, 1932, 37139, 37022, 28, 100933, 11, 2266, 6043, 28, 118901, 100372, 17, 11, 2500, 35450, 12969, 28, 19, 13, 100461, 18831, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16] ... 
[8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 31193, 481, 72, 364, 101130, 11, 104160, 66, 5661, 286, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 62115, 286, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 14901, 14901, 6, 82, 1660, 11057, 5661, 286, 421, 320, 574, 12389, 36442, 4376, 7127, 14901, 6, 3587, 13771, 14901, 14901, 863, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 5661, 286, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 
61534, 286, 3044, 286, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 5661, 286, 442, 30530, 6119, 14901, 14901, 6, 82, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 5661, 286, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 286, 3044, 286, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 7, 5661, 310, 419, 12389, 2788, 42626, 310, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 62115, 286, 6903, 5661, 286, 3044, 286, 470, 508, 1203, 5265, 6, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 1009, 8250, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 102856, 11, 103498, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 262, 220, 103437, 13056, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102650, 7472, 1439, 262, 220, 103388, 88166, 262, 220, 103498, 7472, 470, 508, 1203, 935, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 15:59:11 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 594, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +367,865068,"TERMINAL",0,0,"[2026-01-05 15:59:11] Receive: obj=GenerateReqInput(validation_time=1.4983117580413818e-05, received_time=1767625151.830202, 
received_time_perf=2480036.50312739, rid='2cbad6f94e9649a9bca629dae4f6166c', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 100933, 37715, 12969, 28, 19, 13, 101478, 18831, 1648, 256, 220, 
15, 4, 91, 5238, 15417, 760, 220, 15, 14, 99619, 508, 98503, 25, 98503, 89226, 937, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 19, 13, 100461, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 98503, 27, 98503, 25, 98503, 11, 220, 102088, 13, 99317, 275, 2687, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 15, 60, 8450, 287, 220, 103093, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 17, 60, 39669, 9955, 13029, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 16, 13, 99951, 274, 13, 1833, 10426, 28, 15, 13, 100632, 18831, 13, 37715, 1833, 28, 19, 13, 102340, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 18, 60, 39669, 9955, 13029, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 16, 13, 100562, 274, 13, 1833, 10426, 28, 15, 13, 100632, 18831, 13, 37715, 1833, 28, 19, 13, 102636, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 16, 60, 39669, 9955, 13029, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 16, 13, 101135, 274, 13, 1833, 10426, 28, 15, 13, 100632, 18831, 13, 37715, 1833, 28, 19, 13, 102340, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 15, 60, 39669, 9955, 13029, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 16, 13, 100235, 274, 13, 1833, 10426, 28, 15, 13, 100632, 18831, 13, 37715, 1833, 28, 19, 13, 100461, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 15, 60, 1932, 10779, 4273, 28735, 28, 111156, 101135, 18, 11, 11873, 291, 43082, 483, 2368, 28, 23, 98729, 17, 11, 1932, 43082, 483, 28735, 28, 114491, 104029, 11, 1932, 37139, 37022, 28, 100933, 11, 2266, 6043, 28, 118901, 100372, 17, 11, 2500, 35450, 12969, 28, 19, 13, 100461, 18831, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16] ... 
[7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 5661, 286, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 286, 3044, 286, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 7, 5661, 310, 419, 12389, 2788, 42626, 310, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 62115, 286, 6903, 5661, 286, 3044, 286, 470, 508, 1203, 5265, 6, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 1009, 8250, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 102856, 11, 103498, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 262, 220, 103437, 13056, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102650, 7472, 1439, 262, 220, 103388, 88166, 262, 220, 103498, 7472, 470, 508, 1203, 935, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102573, 25, 98965, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 112098, 21, 99695, 101294, 20, 100632, 19, 102269, 16, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99082, 16, 13, 20, 100104, 22, 100557, 11, 3949, 3009, 76167, 28, 99590, 23, 98503, 18, 21, 13, 115937, 101411, 21, 101729, 11, 9276, 1131, 23, 66, 22, 580, 103502, 19, 22, 12502, 104029, 3065, 370, 15, 68, 115547, 17, 66, 20, 823, 99317, 69, 98668, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 
100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 15:59:11 TP0] Prefill batch, #new-seq: 2, #new-token: 8192, #cached-token: 8785, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +368,865446,"TERMINAL",0,0,"[2026-01-05 15:59:12 TP0] Prefill batch, #new-seq: 1, #new-token: 4963, #cached-token: 0, token usage: 0.03, #running-req: 1, #queue-req: 0, \r\n",,terminal_output 
+369,865861,"TERMINAL",0,0,"[2026-01-05 15:59:12] Finish: obj=GenerateReqInput(validation_time=2.2106803953647614e-05, received_time=1767625151.529731, received_time_perf=2480036.20265658, rid='8c7ac8947ea84ebab0e1382c5ef16f10', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""ical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '54,74p'\n```<|user|>\n\n 54\t\n 55\t // Only handle pure insertions (not replacements)\n 56\t // Replacements are handled by decorations to properly show what's being deleted\n 57\t if (this.action.kind !== 'editInsert') {\n 58\t return [];\n 59\t }\n 60\t\n 61\t const insertPos = toVscodePosition(this.action.position);\n 62\t \n 63\t // Only provide completion if insert position is at or after the cursor\n 64\t // VS Code's inline completion API shows ghost text at/after cursor position\n 65\t if (insertPos.isBefore(position)) {\n 66\t return [];\n 67\t }\n 68\t \n 69\t const item = new vscode.InlineCompletionItem(\n 70\t this.action.text,\n 71\t new vscode.Range(insertPos, insertPos)\n 72\t );\n 73\t \n 74\t return [item];\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 
1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 100933, 37715, 12969, 28, 19, 13, 101478, 18831, 1648, 256, 220, 15, 4, 91, 5238, 15417, 760, 220, 15, 14, 99619, 508, 98503, 25, 98503, 89226, 937, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 19, 13, 100461, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 98503, 27, 98503, 25, 98503, 11, 220, 102088, 13, 99317, 275, 2687, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 15, 60, 8450, 287, 220, 103093, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 17, 60, 39669, 9955, 13029, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 16, 13, 99951, 274, 13, 1833, 10426, 28, 15, 13, 100632, 18831, 13, 37715, 1833, 28, 19, 13, 102340, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 18, 60, 39669, 9955, 13029, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 16, 13, 100562, 274, 13, 1833, 10426, 28, 15, 13, 100632, 18831, 13, 37715, 1833, 28, 19, 13, 102636, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 16, 60, 39669, 9955, 13029, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 16, 13, 101135, 274, 13, 1833, 10426, 28, 15, 13, 100632, 18831, 13, 37715, 1833, 28, 19, 13, 102340, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 
15, 60, 39669, 9955, 13029, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 16, 13, 100235, 274, 13, 1833, 10426, 28, 15, 13, 100632, 18831, 13, 37715, 1833, 28, 19, 13, 100461, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 15, 60, 1932, 10779, 4273, 28735, 28, 111156, 101135, 18, 11, 11873, 291, 43082, 483, 2368, 28, 23, 98729, 17, 11, 1932, 43082, 483, 28735, 28, 114491, 104029, 11, 1932, 37139, 37022, 28, 100933, 11, 2266, 6043, 28, 118901, 100372, 17, 11, 2500, 35450, 12969, 28, 19, 13, 100461, 18831, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16] ... [8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 
98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 31193, 481, 72, 364, 101130, 11, 104160, 66, 5661, 286, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 62115, 286, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 14901, 14901, 6, 82, 1660, 11057, 5661, 286, 421, 320, 574, 12389, 36442, 4376, 7127, 14901, 6, 3587, 13771, 14901, 14901, 863, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 5661, 286, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 61534, 286, 3044, 286, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 5661, 286, 442, 30530, 6119, 14901, 14901, 6, 82, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 5661, 286, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 286, 3044, 286, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 7, 5661, 310, 419, 12389, 2788, 42626, 310, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 62115, 286, 6903, 5661, 286, 3044, 286, 470, 508, 1203, 5265, 6, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 1009, 8250, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 102856, 11, 103498, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 262, 220, 103437, 13056, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102650, 7472, 1439, 262, 220, 103388, 88166, 262, 220, 103498, 7472, 470, 508, 1203, 935, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, 
token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '75,95p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 100899, 11, 101804, 79, 1248, 73022, 151336], 'meta_info': {'id': '8c7ac8947ea84ebab0e1382c5ef16f10', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 15023, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.0001081169830285944, 73022, '```'), (0.0, 45937, 'bash'), (-3.576278118089249e-07, 198, '\n'), (-0.1875956952571869, 4616, 'cat'), (-0.016403932124376297, 481, ' -'), (-1.0728830375228426e-06, 77, 'n'), (-2.5748875486897305e-05, 608, ' /'), (-1.9073468138230965e-06, 5117, 'home'), (-1.1920928244535389e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-7.271740287251305e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-9.536738616588991e-07, 79888, '-extension'), (-0.008274913765490055, 13428, '/src'), (-0.0007177399238571525, 14, '/'), (-0.016568683087825775, 27082, 'preview'), (-0.14272180199623108, 14, '/'), (-0.006987184751778841, 5057, 'inline'), (-1.0609570381348021e-05, 5179, 'Provider'), (-1.4305104514278355e-06, 21239, '.ts'), (-0.0002711643755901605, 760, ' |'), (-6.556489552167477e-06, 10918, ' sed'), (-1.5497195136049413e-06, 481, ' -'), (0.0, 77, 'n'), (0.0, 364, "" '""), (-0.8761998414993286, 100899, '75'), (-4.446407547220588e-05, 11, ','), (-0.1488351672887802, 101804, '95'), (-1.1920928244535389e-07, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-0.0002706876548472792, 73022, '```'), (-4.410734163684538e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 594, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 1.0683925151824951, 'response_sent_to_client_ts': 1767625152.598187}}\r\n[2026-01-05 15:59:12] Finish: obj=GenerateReqInput(validation_time=1.4983117580413818e-05, received_time=1767625151.830202, received_time_perf=2480036.50312739, rid='2cbad6f94e9649a9bca629dae4f6166c', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... 
tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=2.2106803953647614e-05, received_time=1767625151.529731, received_time_perf=2480036.20265658, rid='8c7ac8947ea84ebab0e1382c5ef16f10', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, \n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 
8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 100933, 37715, 12969, 28, 19, 13, 101478, 18831, 1648, 256, 220, 15, 4, 91, 5238, 15417, 760, 220, 15, 14, 99619, 508, 98503, 25, 98503, 89226, 937, 275, 2687, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 40314, 1677, 44532, 320, 1279, 28, 16, 37715, 12969, 28, 19, 13, 100461, 18831, 1648, 220, 99457, 4, 91, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 51338, 15187, 91, 220, 99619, 14, 99619, 508, 98503, 25, 98503, 27, 98503, 25, 98503, 11, 220, 102088, 13, 99317, 275, 2687, 921, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 15, 60, 8450, 287, 220, 103093, 24335, 4771, 14220, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 17, 60, 39669, 9955, 13029, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 16, 13, 99951, 274, 13, 1833, 10426, 28, 15, 13, 100632, 18831, 13, 37715, 1833, 28, 19, 13, 102340, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 18, 60, 39669, 9955, 13029, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 16, 13, 100562, 274, 13, 1833, 10426, 28, 15, 13, 100632, 18831, 13, 37715, 1833, 28, 19, 13, 102636, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 16, 60, 39669, 9955, 13029, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 16, 13, 101135, 274, 13, 1833, 10426, 28, 15, 13, 100632, 18831, 13, 37715, 1833, 28, 19, 13, 102340, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 15, 60, 39669, 9955, 13029, 24335, 4771, 835, 13, 4120, 25255, 25, 220, 16, 13, 100235, 274, 13, 1833, 10426, 28, 15, 13, 100632, 18831, 13, 37715, 1833, 28, 19, 13, 100461, 18831, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 99064, 28964, 15, 60, 1932, 10779, 4273, 28735, 28, 111156, 101135, 18, 11, 11873, 291, 43082, 483, 2368, 28, 23, 98729, 17, 11, 1932, 43082, 483, 28735, 28, 114491, 104029, 11, 1932, 37139, 37022, 28, 100933, 11, 2266, 6043, 28, 118901, 100372, 17, 11, 
2500, 35450, 12969, 28, 19, 13, 100461, 18831, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16] ... [7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 5661, 286, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 286, 3044, 286, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 7, 5661, 310, 419, 12389, 2788, 42626, 310, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 62115, 286, 6903, 5661, 286, 3044, 286, 470, 508, 1203, 5265, 6, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 1009, 8250, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 102856, 11, 103498, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 262, 220, 103437, 13056, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102650, 7472, 1439, 262, 220, 103388, 88166, 262, 220, 103498, 7472, 470, 508, 1203, 935, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102573, 25, 98965, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 112098, 21, 99695, 101294, 20, 100632, 19, 102269, 16, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99082, 16, 13, 20, 100104, 22, 100557, 11, 3949, 3009, 76167, 28, 99590, 23, 98503, 18, 21, 13, 115937, 101411, 21, 101729, 11, 9276, 1131, 23, 66, 22, 580, 103502, 19, 22, 12502, 104029, 3065, 370, 15, 68, 115547, 17, 66, 20, 823, 99317, 69, 98668, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 
100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n 
/home/franz.srambical/crowd-pilot-extension/src/preview/decorationProvider.ts | sed -n '1,20p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 26367, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98360, 79, 1248, 73022, 151336], 'meta_info': {'id': '2cbad6f94e9649a9bca629dae4f6166c', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 15703, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.000382707774406299, 73022, '```'), (0.0, 45937, 'bash'), (-1.1920928244535389e-07, 198, '\n'), (-0.23864342272281647, 4616, 'cat'), (-0.01657501421868801, 481, ' -'), (-8.344646857949556e-07, 77, 'n'), (-3.611976353568025e-05, 608, ' /'), (-2.9802276912960224e-06, 5117, 'home'), (-1.1920928244535389e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-4.6491513785440475e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-5.960462772236497e-07, 79888, '-extension'), (-0.00826746504753828, 13428, '/src'), (-0.0007441850611940026, 14, '/'), (-0.0023801589850336313, 27082, 'preview'), (-1.0655856132507324, 22490, '/de'), (-0.09361784160137177, 26367, 'coration'), (-0.009304730221629143, 5179, 'Provider'), (-3.099436753473128e-06, 21239, '.ts'), (-6.496695277746767e-05, 760, ' |'), (-3.576278118089249e-07, 10918, ' sed'), (-0.013669924810528755, 481, ' -'), (0.0, 77, 'n'), (0.0, 364, "" '""), (-0.027281716465950012, 16, '1'), (0.0, 11, ','), (-0.5977448225021362, 98360, '20'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-7.760223525110632e-05, 73022, '```'), (-4.410734163684538e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 8785, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 0.7845337390899658, 'response_sent_to_client_ts': 1767625152.6147966}}\r\n[2026-01-05 15:59:12] INFO: 10.86.2.252:42494 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +370,880939,"src/preview/inlineProvider.ts",0,2862,"",typescript,content +371,881000,"src/preview/inlineProvider.ts",0,0,"import * as vscode from 'vscode';\nimport { Action, toVscodeRange, toVscodePosition } from './types';\n\n/**\n * Provides inline completion items (ghost text) for code edit actions.\n * This takes priority over Cursor's hints and works on empty lines.\n */\nexport class CrowdPilotInlineProvider implements vscode.InlineCompletionItemProvider {\n private action: Action | null = null;\n private enabled: boolean = true;\n\n /**\n * Set the current action to display as inline completion.\n */\n setAction(action: Action): void {\n this.action = action;\n // Trigger VS Code to re-query inline completions\n vscode.commands.executeCommand('editor.action.inlineSuggest.trigger');\n }\n\n /**\n * Clear the current action.\n */\n clearAction(): void {\n this.action = null;\n }\n\n /**\n * Get the current action.\n */\n getAction(): Action | null {\n return this.action;\n }\n\n /**\n * Enable or disable the provider.\n */\n setEnabled(enabled: boolean): void {\n this.enabled = enabled;\n }\n\n /**\n * Provide inline completion items.\n */\n provideInlineCompletionItems(\n document: vscode.TextDocument,\n position: vscode.Position,\n context: 
vscode.InlineCompletionContext,\n token: vscode.CancellationToken\n ): vscode.ProviderResult {\n if (!this.enabled || !this.action) {\n return [];\n }\n\n // Only handle pure insertions (not replacements)\n // Replacements are handled by decorations to properly show what's being deleted\n if (this.action.kind !== 'editInsert') {\n return [];\n }\n\n const insertPos = toVscodePosition(this.action.position);\n \n // Only provide completion if insert position is at or after the cursor\n // VS Code's inline completion API shows ghost text at/after cursor position\n if (insertPos.isBefore(position)) {\n return [];\n }\n \n const item = new vscode.InlineCompletionItem(\n this.action.text,\n new vscode.Range(insertPos, insertPos)\n );\n \n return [item];\n }\n\n /**\n * Check if the current action's position is near the given cursor position.\n * Used to determine if we need a fallback indicator.\n */\n isActionNearCursor(cursorLine: number): boolean {\n if (!this.action) {\n return false;\n }\n \n if (this.action.kind === 'editInsert') {\n return Math.abs(this.action.position[0] - cursorLine) <= 1;\n }\n \n if (this.action.kind === 'editReplace') {\n return cursorLine >= this.action.range.start[0] - 1 && \n cursorLine <= this.action.range.end[0] + 1;\n }\n \n return false;\n }\n}\n\n\n",typescript,content +372,881002,"src/preview/inlineProvider.ts",2887,2,"",typescript,content +373,885695,"src/preview/inlineProvider.ts",0,2886,"",typescript,content +374,885824,"src/preview/inlineProvider.ts",0,0,"import * as vscode from 'vscode';\nimport { Action, toVscodePosition } from './types';\n\n/**\n * Provides inline completion items (ghost text) for code edit actions.\n * This takes priority over Cursor's hints and works on empty lines.\n */\nexport class CrowdPilotInlineProvider implements vscode.InlineCompletionItemProvider {\n private action: Action | null = null;\n private enabled: boolean = true;\n\n /**\n * Set the current action to display as inline completion.\n */\n setAction(action: Action): void {\n this.action = action;\n // Trigger VS Code to re-query inline completions\n vscode.commands.executeCommand('editor.action.inlineSuggest.trigger');\n }\n\n /**\n * Clear the current action.\n */\n clearAction(): void {\n this.action = null;\n }\n\n /**\n * Get the current action.\n */\n getAction(): Action | null {\n return this.action;\n }\n\n /**\n * Enable or disable the provider.\n */\n setEnabled(enabled: boolean): void {\n this.enabled = enabled;\n }\n\n /**\n * Provide inline completion items.\n */\n provideInlineCompletionItems(\n document: vscode.TextDocument,\n position: vscode.Position,\n context: vscode.InlineCompletionContext,\n token: vscode.CancellationToken\n ): vscode.ProviderResult {\n if (!this.enabled || !this.action) {\n return [];\n }\n\n // Only handle pure insertions (not replacements)\n // Replacements are handled by decorations to properly show what's being deleted\n if (this.action.kind !== 'editInsert') {\n return [];\n }\n\n const insertPos = toVscodePosition(this.action.position);\n \n // Only provide completion if insert position is at or after the cursor\n // VS Code's inline completion API shows ghost text at/after cursor position\n if (insertPos.isBefore(position)) {\n return [];\n }\n \n const item = new vscode.InlineCompletionItem(\n this.action.text,\n new vscode.Range(insertPos, insertPos)\n );\n \n return [item];\n }\n\n /**\n * Check if the current action's position is near the given cursor position.\n * Used to determine if we need a fallback indicator.\n 
*/\n isActionNearCursor(cursorLine: number): boolean {\n if (!this.action) {\n return false;\n }\n \n if (this.action.kind === 'editInsert') {\n return Math.abs(this.action.position[0] - cursorLine) <= 1;\n }\n \n if (this.action.kind === 'editReplace') {\n return cursorLine >= this.action.range.start[0] - 1 && \n cursorLine <= this.action.range.end[0] + 1;\n }\n \n return false;\n }\n}\n\n\n",typescript,content +375,885826,"src/preview/inlineProvider.ts",2872,2,"",typescript,content +376,905419,"TERMINAL",0,0,"[2026-01-05 15:59:52] Receive: obj=GenerateReqInput(validation_time=2.4422071874141693e-05, received_time=1767625192.177766, received_time_perf=2480076.85069166, rid='609529067ca3470f80336373bf48b0bb', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 
22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 99618, 20, 23, 102626, 20, 99082, 99698, 24, 101562, 24, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 22, 16, 13, 101663, 24, 19, 101411, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 100899, 21, 13, 101478, 17, 18, 100067, 20, 99366, 11, 9276, 1131, 3235, 100286, 65, 19, 67, 18, 66, 98668, 65, 19, 68, 20, 69, 23, 65, 24, 65, 18, 631, 99619, 34285, 17, 65, 99146, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498] ... 
[99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 31193, 481, 72, 364, 17, 11, 104160, 66, 5661, 474, 314, 5586, 11, 311, 51397, 1851, 3812, 335, 504, 7127, 14901, 6, 1725, 9239, 14901, 14901, 6967, 5661, 5661, 3663, 5661, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 13, 5661, 353, 1096, 4990, 10614, 916, 27971, 14901, 14901, 6, 82, 30523, 323, 4278, 389, 4287, 5128, 13, 5661, 639, 5661, 1533, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 94714, 262, 869, 1917, 25, 5586, 760, 845, 284, 845, 55409, 262, 869, 8967, 25, 2710, 284, 830, 55409, 5661, 262, 4165, 5661, 257, 353, 2573, 279, 1482, 1917, 311, 3037, 438, 7381, 9750, 13, 5661, 257, 639, 5661, 262, 738, 2512, 15294, 25, 5586, 1648, 737, 94714, 286, 419, 12389, 284, 1917, 55409, 286, 442, 30174, 30530, 6119, 311, 312, 65012, 7381, 3459, 908, 5661, 286, 55008, 33739, 7769, 4062, 32604, 14901, 6, 8866, 12389, 29307, 50, 3799, 28455, 14901, 14901, 4667, 5661, 262, 335, 5661, 5661, 262, 4165, 5661, 257, 353, 12017, 279, 1482, 1917, 13, 5661, 257, 639, 5661, 262, 2797, 2512, 4555, 737, 94714, 286, 419, 12389, 284, 845, 55409, 262, 335, 5661, 5661, 262, 4165, 5661, 257, 353, 2126, 279, 1482, 1917, 13, 5661, 257, 639, 5661, 262, 633, 2512, 4555, 5586, 760, 845, 94714, 286, 470, 419, 12389, 55409, 262, 335, 5661, 5661, 262, 4165, 5661, 257, 353, 18535, 476, 11151, 279, 9106, 13, 5661, 257, 639, 5661, 262, 738, 5462, 87027, 25, 2710, 1648, 737, 94714, 286, 419, 22086, 284, 8967, 55409, 262, 335, 5661, 5661, 262, 4165, 5661, 257, 353, 39350, 7381, 9750, 3589, 13, 5661, 257, 639, 5661, 262, 3410, 25246, 33030, 4353, 7, 5661, 286, 2197, 25, 55008, 1979, 7524, 42626, 286, 2309, 25, 55008, 21900, 42626, 286, 2266, 25, 55008, 5337, 1056, 33030, 1972, 42626, 286, 3950, 25, 55008, 727, 23860, 5661, 262, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 94714, 286, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 5661, 286, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 62115, 286, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 14901, 14901, 6, 82, 1660, 11057, 5661, 286, 421, 320, 574, 12389, 36442, 4376, 7127, 14901, 6, 3587, 13771, 14901, 14901, 863, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 5661, 286, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 61534, 286, 3044, 286, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 5661, 286, 
442, 30530, 6119, 14901, 14901, 6, 82, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 5661, 286, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 286, 3044, 286, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 7, 5661, 310, 419, 12389, 2788, 42626, 310, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 62115, 286, 6903, 5661, 286, 3044, 286, 470, 508, 1203, 5265, 5661, 262, 335, 5661, 5661, 262, 4165, 5661, 257, 353, 4248, 421, 279, 1482, 1917, 14901, 14901, 6, 82, 2309, 374, 3143, 279, 2661, 8127, 2309, 13, 5661, 257, 353, 12193, 311, 8252, 421, 582, 1184, 264, 32619, 20396, 13, 5661, 257, 639, 5661, 262, 374, 2512, 51621, 14533, 27583, 2460, 25, 1372, 1648, 2710, 94714, 286, 421, 1505, 574, 12389, 8, 94714, 310, 470, 895, 55409, 286, 335, 6, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 1009, 8250, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 100702, 11, 102721, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 100702, 2760, 456, 262, 220, 101135, 1572, 262, 220, 100235, 2760, 1554, 262, 220, 100632, 9356, 353, 18535, 476, 11151, 279, 9106, 624, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 15:59:52 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 584, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +377,905721,"TERMINAL",0,0,"[2026-01-05 15:59:52 TP0] 
Prefill batch, #new-seq: 1, #new-token: 6253, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +378,906187,"TERMINAL",0,0,"[2026-01-05 15:59:52] Finish: obj=GenerateReqInput(validation_time=2.4422071874141693e-05, received_time=1767625192.177766, received_time_perf=2480076.85069166, rid='609529067ca3470f80336373bf48b0bb', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""\\\n if (!this.action) {\\\n return false;\\\n }' /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts && cat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '33,53p'\n```<|user|>\n\n 33\t }\n 34\t\n 35\t /**\n 36\t * Enable or disable the provider.\n 37\t */\n 38\t setEnabled(enabled: boolean): void {\n 39\t this.enabled = enabled;\n 40\t }\n 41\t\n 42\t /**\n 43\t * Provide inline completion items.\n 44\t */\n 45\t provideInlineCompletionItems(\n 46\t document: vscode.TextDocument,\n 47\t position: vscode.Position,\n 48\t context: vscode.InlineCompletionContext,\n 49\t token: vscode.CancellationToken\n 50\t ): vscode.ProviderResult {\n 51\t if (!this.enabled || !this.action) {\n 52\t return [];\n 53\t }\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 
1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 99618, 20, 23, 102626, 20, 99082, 99698, 24, 101562, 24, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 22, 16, 13, 101663, 24, 19, 101411, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 100899, 21, 13, 101478, 17, 18, 100067, 20, 99366, 11, 9276, 1131, 3235, 100286, 65, 19, 67, 18, 66, 98668, 65, 19, 68, 20, 69, 23, 65, 24, 65, 18, 631, 99619, 34285, 17, 65, 99146, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 
17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498] ... [99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 31193, 481, 72, 364, 17, 11, 104160, 66, 5661, 474, 314, 5586, 11, 311, 51397, 1851, 3812, 335, 504, 7127, 14901, 6, 1725, 9239, 14901, 14901, 6967, 5661, 5661, 3663, 5661, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 13, 5661, 353, 1096, 4990, 10614, 916, 27971, 14901, 14901, 6, 82, 30523, 323, 4278, 389, 4287, 5128, 13, 5661, 639, 5661, 1533, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 94714, 262, 869, 1917, 25, 5586, 760, 845, 284, 845, 55409, 262, 869, 8967, 25, 2710, 284, 830, 55409, 5661, 262, 4165, 5661, 257, 353, 2573, 279, 1482, 1917, 311, 3037, 438, 7381, 9750, 13, 5661, 257, 639, 5661, 262, 738, 2512, 15294, 25, 5586, 1648, 737, 94714, 286, 419, 12389, 284, 1917, 55409, 286, 442, 30174, 30530, 6119, 311, 312, 65012, 7381, 3459, 908, 5661, 286, 55008, 33739, 7769, 4062, 32604, 14901, 6, 8866, 12389, 29307, 50, 3799, 28455, 14901, 14901, 4667, 5661, 262, 335, 5661, 5661, 262, 4165, 5661, 257, 353, 12017, 279, 1482, 1917, 13, 5661, 257, 639, 5661, 262, 2797, 2512, 4555, 737, 94714, 286, 419, 12389, 284, 845, 55409, 262, 335, 5661, 5661, 262, 4165, 5661, 257, 353, 2126, 279, 1482, 1917, 13, 5661, 257, 639, 5661, 262, 633, 2512, 4555, 5586, 760, 845, 94714, 286, 470, 419, 12389, 55409, 262, 335, 5661, 5661, 262, 4165, 5661, 257, 353, 18535, 476, 11151, 279, 9106, 13, 5661, 257, 639, 5661, 262, 738, 5462, 87027, 25, 2710, 1648, 737, 94714, 286, 419, 22086, 284, 8967, 55409, 262, 335, 5661, 5661, 262, 4165, 5661, 257, 353, 39350, 7381, 9750, 3589, 13, 5661, 257, 639, 5661, 262, 3410, 25246, 33030, 4353, 7, 5661, 286, 2197, 25, 55008, 1979, 7524, 42626, 286, 2309, 25, 55008, 21900, 42626, 286, 2266, 25, 
55008, 5337, 1056, 33030, 1972, 42626, 286, 3950, 25, 55008, 727, 23860, 5661, 262, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 94714, 286, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 5661, 286, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 62115, 286, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 14901, 14901, 6, 82, 1660, 11057, 5661, 286, 421, 320, 574, 12389, 36442, 4376, 7127, 14901, 6, 3587, 13771, 14901, 14901, 863, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 5661, 286, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 61534, 286, 3044, 286, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 5661, 286, 442, 30530, 6119, 14901, 14901, 6, 82, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 5661, 286, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 286, 3044, 286, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 7, 5661, 310, 419, 12389, 2788, 42626, 310, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 62115, 286, 6903, 5661, 286, 3044, 286, 470, 508, 1203, 5265, 5661, 262, 335, 5661, 5661, 262, 4165, 5661, 257, 353, 4248, 421, 279, 1482, 1917, 14901, 14901, 6, 82, 2309, 374, 3143, 279, 2661, 8127, 2309, 13, 5661, 257, 353, 12193, 311, 8252, 421, 582, 1184, 264, 32619, 20396, 13, 5661, 257, 639, 5661, 262, 374, 2512, 51621, 14533, 27583, 2460, 25, 1372, 1648, 2710, 94714, 286, 421, 1505, 574, 12389, 8, 94714, 310, 470, 895, 55409, 286, 335, 6, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 1009, 8250, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 100702, 11, 102721, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 100702, 2760, 456, 262, 220, 101135, 1572, 262, 220, 100235, 2760, 1554, 262, 220, 100632, 9356, 353, 18535, 476, 11151, 279, 9106, 624, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': 
None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '80,100p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99695, 11, 99457, 79, 1248, 73022, 151336], 'meta_info': {'id': '609529067ca3470f80336373bf48b0bb', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 15029, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.00034409802174195647, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.5330263376235962, 4616, 'cat'), (-0.011461838148534298, 481, ' -'), (-3.576278118089249e-07, 77, 'n'), (-3.099436753473128e-06, 608, ' /'), (-5.960462772236497e-07, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-3.576278118089249e-07, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0001408954558428377, 13428, '/src'), (-0.00015925093612167984, 14, '/'), (-0.00562560698017478, 27082, 'preview'), (-0.00944538414478302, 14, '/'), (-0.0006792622152715921, 5057, 'inline'), (-1.1920922133867862e-06, 5179, 'Provider'), (0.0, 21239, '.ts'), (-0.0788983404636383, 760, ' |'), (-0.0008629412623122334, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (-2.0265558760002023e-06, 364, "" '""), (-2.009467601776123, 99695, '80'), (-0.00012587709352374077, 11, ','), (-0.14982955157756805, 99457, '100'), (-2.3841855067985307e-07, 79, 'p'), (-7.152555099310121e-07, 1248, ""'\n""), (-0.0007901645149104297, 73022, '```'), (-5.364403477869928e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 584, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 0.7560441493988037, 'response_sent_to_client_ts': 1767625192.9338765}}\r\n[2026-01-05 15:59:52] INFO: 10.86.2.252:40862 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +379,927034,"src/preview/inlineProvider.ts",2871,0,"",typescript,selection_command +380,927323,"TERMINAL",0,0,"[2026-01-05 16:00:14] Receive: obj=GenerateReqInput(validation_time=2.0868144929409027e-05, received_time=1767625214.0633364, received_time_perf=2480098.736261882, rid='ccdbd148fff34261b7d1764c2fd2a4d1', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 
12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 46978, 3353, 455, 5047, 3109, 6, 374, 31457, 323, 686, 387, 6963, 304, 264, 3853, 2319, 13, 5209, 990, 3353, 2528, 3109, 6, 4518, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 30649, 25, 257, 220, 115925, 13, 15, 13, 15, 13, 16, 25, 19, 98965, 99367, 481, 330, 3806, 608, 455, 5047, 3109, 10125, 14, 16, 13, 16, 1, 220, 98867, 10397, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 37228, 25, 2839, 28, 
30989, 27145, 2505, 78775, 3009, 5856, 11, 3949, 3009, 5856, 11, 3949, 3009, 76167, 5856, 11, 9276, 1131, 100919, 10793, 102486, 24, 24, 65, 2577, 21, 101562, 18, 100104, 68, 21, 4385, 104029, 68, 100899, 66, 103825, 66, 99951, 516, 1758, 40163, 83114, 5856, 11, 1467, 1131, 785, 6722, 3283, 315, 9621, 374, 516, 1946, 8076, 5856, 11, 1946, 23939, 82, 5856, 11, 2168, 1769, 5856, 11, 2766, 1769, 5856, 11, 7699, 1769, 5856, 11, 24816, 6745, 12848, 34388, 1210, 220, 15, 11, 364, 2810, 5921, 28735, 1210, 220, 23, 2137, 470, 5224, 19861, 5608, 11, 1487, 19861, 4906, 6043, 10824, 16, 11, 1909, 5224, 776, 1279, 4273, 28, 15, 11, 3950, 8076, 5224, 19861, 5856, 11, 470, 4326, 1243, 5224, 776, 1279, 5608, 11, 4269, 5608, 11, 1487, 37490, 3618, 11, 470, 26653, 22911, 5608, 11, 13220, 1361, 5856, 11, 3797, 6745, 5856, 11, 326, 6215, 2638, 5856, 11, 326, 6215, 842, 5856, 11, 2526, 5224, 275, 49805, 5856, 11, 26838, 12842, 5856, 11, 26838, 8715, 5856, 11, 26838, 24927, 5856, 11, 26838, 14435, 3097, 5856, 11, 16876, 54821, 2368, 5856, 11, 32559, 5608, 11, 821, 60201, 20375, 5856, 11, 1182, 70, 581, 77, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100919, 19, 21, 100067, 99698, 18, 24, 100919, 100104, 18, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 22, 17, 13, 22, 98729, 19, 101175, 19, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 100899, 22, 13, 101294, 17, 100235, 22, 22, 11, 9276, 1131, 99951, 17, 22, 67, 100067, 718, 17, 66, 19, 65, 102626, 64, 23, 69, 100067, 21, 21, 102626, 99619, 23, 122444, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220] ... 
[77, 364, 100702, 11, 102721, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 100702, 2760, 456, 262, 220, 101135, 1572, 262, 220, 100235, 2760, 1554, 262, 220, 100632, 9356, 353, 18535, 476, 11151, 279, 9106, 624, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102573, 25, 102501, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 101723, 17, 124080, 126182, 19, 99367, 121416, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 98729, 17, 13, 124898, 22, 101478, 11, 3949, 3009, 76167, 28, 99590, 23, 125744, 21, 13, 122589, 103093, 16, 101478, 11, 9276, 1131, 99618, 24, 20, 100104, 100539, 22, 924, 101135, 22, 15, 69, 99695, 18, 100632, 18, 103388, 13225, 100933, 65, 15, 6066, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 
100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101252, 11, 100809, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 101252, 7472, 456, 262, 220, 103502, 88166, 262, 220, 100067, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 23044, 863, 341, 262, 220, 104327, 13056, 470, 8127, 2460, 2604, 419, 12389, 30029, 4962, 58, 15, 60, 481, 220, 16, 1009, 715, 262, 220, 103825, 197, 4293, 8127, 2460, 2651, 419, 12389, 30029, 5073, 58, 15, 60, 488, 220, 16, 280, 262, 220, 103946, 7472, 456, 262, 220, 103992, 88166, 262, 220, 101804, 7472, 470, 895, 280, 262, 220, 102487, 2760, 456, 262, 220, 103205, 197, 532, 262, 220, 101663, 1572, 262, 220, 100809, 1572, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:00:14 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 598, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +381,927602,"TERMINAL",0,0,"[2026-01-05 16:00:14 TP0] Prefill batch, #new-seq: 1, #new-token: 6379, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, 
\r\n",,terminal_output +382,927989,"TERMINAL",0,0,"[2026-01-05 16:00:14 TP0] Decode batch, #running-req: 1, #token: 15190, token usage: 0.03, accept len: 3.10, accept rate: 0.77, cuda graph: True, gen throughput (token/s): 1.44, #queue-req: 0, \r\n",,terminal_output +383,928327,"src/preview/inlineProvider.ts",1267,0,"",typescript,selection_keyboard +384,928614,"TERMINAL",0,0,"[2026-01-05 16:00:15 TP0] Decode batch, #running-req: 1, #token: 15316, token usage: 0.03, accept len: 3.15, accept rate: 0.79, cuda graph: True, gen throughput (token/s): 222.29, #queue-req: 0, \r\n[2026-01-05 16:00:15] Receive: obj=GenerateReqInput(validation_time=1.6937032341957092e-05, received_time=1767625215.3570883, received_time_perf=2480100.030013762, rid='efadbe24e2164053afdba64e6c3a6863', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 
608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101562, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100919, 20, 23, 99082, 99618, 20, 20, 99146, 115937, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 23, 21, 13, 102269, 22, 18, 98729, 19, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 102114, 16, 13, 19, 99698, 127399, 114240, 11, 9276, 1131, 68, 21, 100632, 68, 118843, 18, 69, 21, 99590, 100461, 19, 23, 68, 5918, 18, 66, 100919, 19, 101474, 16, 3235, 20, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11] ... 
[574, 12389, 36442, 2049, 364, 3587, 23044, 863, 341, 262, 220, 104327, 13056, 470, 8127, 2460, 2604, 419, 12389, 30029, 4962, 58, 15, 60, 481, 220, 16, 1009, 715, 262, 220, 103825, 197, 4293, 8127, 2460, 2651, 419, 12389, 30029, 5073, 58, 15, 60, 488, 220, 16, 280, 262, 220, 103946, 7472, 456, 262, 220, 103992, 88166, 262, 220, 101804, 7472, 470, 895, 280, 262, 220, 102487, 2760, 456, 262, 220, 103205, 197, 532, 262, 220, 101663, 1572, 262, 220, 100809, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99367, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100562, 21, 23, 99367, 101474, 17, 24, 99698, 100067, 17, 22, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99146, 19, 13, 100539, 18, 18, 100632, 19, 11, 3949, 3009, 76167, 28, 99590, 23, 98503, 24, 23, 13, 22, 100632, 99916, 16, 101252, 17, 11, 9276, 1131, 638, 1999, 67, 121919, 14083, 101135, 17, 103595, 65, 22, 67, 123853, 19, 66, 17, 6902, 17, 64, 19, 67, 16, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 
220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101294, 11, 102573, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:00:15 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.03, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +385,928897,"TERMINAL",0,0,"[2026-01-05 16:00:15 TP0] Prefill batch, #new-seq: 1, #new-token: 6318, #cached-token: 0, token usage: 0.05, #running-req: 1, #queue-req: 0, \r\n",,terminal_output 
+386,929410,"TERMINAL",0,0,"[2026-01-05 16:00:16] Finish: obj=GenerateReqInput(validation_time=2.0868144929409027e-05, received_time=1767625214.0633364, received_time_perf=2480098.736261882, rid='ccdbd148fff34261b7d1764c2fd2a4d1', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... "", 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, \n... [truncated]\n/nothink<|assistant|>\n\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '88,99p'\n```<|user|>\n\n 88\t }\n 89\t \n 90\t if (this.action.kind === 'editReplace') {\n 91\t return cursorLine >= this.action.range.start[0] - 1 && \n 92\t cursorLine <= this.action.range.end[0] + 1;\n 93\t }\n 94\t \n 95\t return false;\n 96\t }\n 97\t}\n 98\t\n 99\t\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 
315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 46978, 3353, 455, 5047, 3109, 6, 374, 31457, 323, 686, 387, 6963, 304, 264, 3853, 2319, 13, 5209, 990, 3353, 2528, 3109, 6, 4518, 624, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 30649, 25, 257, 220, 115925, 13, 15, 13, 15, 13, 16, 25, 19, 98965, 99367, 481, 330, 3806, 608, 455, 5047, 3109, 10125, 14, 16, 13, 16, 1, 220, 98867, 10397, 198, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 5856, 11, 3949, 3009, 5856, 11, 3949, 3009, 76167, 5856, 11, 9276, 1131, 100919, 10793, 102486, 24, 24, 65, 2577, 21, 101562, 18, 100104, 68, 21, 4385, 104029, 68, 100899, 66, 103825, 66, 99951, 516, 1758, 40163, 83114, 5856, 11, 1467, 1131, 785, 6722, 3283, 315, 9621, 374, 516, 1946, 8076, 5856, 11, 1946, 23939, 82, 5856, 11, 2168, 1769, 5856, 11, 2766, 1769, 5856, 11, 7699, 1769, 5856, 11, 24816, 6745, 12848, 34388, 1210, 220, 15, 11, 364, 2810, 5921, 28735, 1210, 220, 23, 2137, 470, 5224, 19861, 5608, 11, 1487, 19861, 4906, 6043, 10824, 16, 11, 1909, 5224, 776, 1279, 4273, 28, 15, 11, 3950, 8076, 5224, 19861, 5856, 11, 470, 4326, 1243, 5224, 776, 1279, 5608, 11, 4269, 5608, 11, 1487, 37490, 3618, 11, 470, 26653, 22911, 5608, 11, 13220, 1361, 5856, 11, 3797, 6745, 5856, 11, 326, 6215, 2638, 5856, 11, 326, 6215, 842, 5856, 11, 2526, 5224, 275, 49805, 5856, 11, 26838, 12842, 5856, 11, 26838, 8715, 5856, 11, 26838, 24927, 5856, 11, 26838, 14435, 3097, 5856, 11, 16876, 54821, 2368, 5856, 11, 32559, 5608, 
11, 821, 60201, 20375, 5856, 11, 1182, 70, 581, 77, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100919, 19, 21, 100067, 99698, 18, 24, 100919, 100104, 18, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 22, 17, 13, 22, 98729, 19, 101175, 19, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 100899, 22, 13, 101294, 17, 100235, 22, 22, 11, 9276, 1131, 99951, 17, 22, 67, 100067, 718, 17, 66, 19, 65, 102626, 64, 23, 69, 100067, 21, 21, 102626, 99619, 23, 122444, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220] ... [77, 364, 100702, 11, 102721, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 100702, 2760, 456, 262, 220, 101135, 1572, 262, 220, 100235, 2760, 1554, 262, 220, 100632, 9356, 353, 18535, 476, 11151, 279, 9106, 624, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102573, 25, 102501, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 101723, 17, 124080, 126182, 19, 99367, 121416, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 98729, 17, 13, 124898, 22, 101478, 11, 3949, 3009, 76167, 28, 99590, 23, 125744, 21, 13, 122589, 103093, 16, 101478, 11, 9276, 1131, 99618, 24, 20, 100104, 100539, 22, 924, 101135, 22, 15, 69, 99695, 18, 100632, 18, 103388, 13225, 100933, 65, 15, 6066, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 
100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101252, 11, 100809, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 101252, 7472, 456, 262, 220, 103502, 88166, 262, 220, 100067, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 23044, 863, 341, 262, 220, 104327, 13056, 470, 8127, 2460, 2604, 419, 12389, 30029, 4962, 58, 15, 60, 481, 220, 16, 1009, 715, 262, 220, 103825, 197, 4293, 8127, 2460, 2651, 419, 12389, 30029, 5073, 58, 15, 60, 488, 220, 16, 280, 262, 220, 103946, 7472, 456, 262, 220, 103992, 88166, 262, 220, 101804, 7472, 470, 895, 280, 262, 220, 102487, 2760, 456, 262, 220, 103205, 197, 532, 262, 220, 101663, 1572, 262, 220, 100809, 1572, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, 
return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\nsed -i '88,99c\\\n }\\\n \\\n // For insertions, check if cursor is near the insertion point\\\n if (this.action.kind === 'editInsert') {\\\n const insertLine = this.action.position[0];\\\n return Math.abs(cursorLine - insertLine) <= 1;\\\n }\\\n \\\n // For replacements, check if cursor is within or near the replacement range\\\n if (this.action.kind === 'editReplace') {\\\n return cursorLine >= this.action.range.start[0] - 1 && \\\n cursorLine <= this.action.range.end[0] + 1;\\\n }\\\n \\\n return false;\\\n }\\\n}' /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts && cat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '88,99p'\n```"", 'output_ids': [198, 73022, 45937, 198, 31193, 481, 72, 364, 101252, 11, 100809, 66, 5661, 286, 335, 5661, 286, 3044, 286, 442, 1752, 5656, 908, 11, 1779, 421, 8127, 374, 3143, 279, 35747, 1459, 5661, 286, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 13771, 863, 94714, 310, 733, 5656, 2460, 284, 419, 12389, 6187, 58, 15, 5265, 5661, 310, 470, 4149, 14562, 27583, 2460, 481, 5656, 2460, 8, 2651, 220, 16, 55409, 286, 335, 5661, 286, 3044, 286, 442, 1752, 52703, 11, 1779, 421, 8127, 374, 2878, 476, 3143, 279, 13714, 2088, 5661, 286, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 23044, 863, 94714, 310, 470, 8127, 2460, 2604, 419, 12389, 30029, 4962, 58, 15, 60, 481, 220, 16, 1009, 3044, 4293, 8127, 2460, 2651, 419, 12389, 30029, 5073, 58, 15, 60, 488, 220, 16, 55409, 286, 335, 5661, 286, 3044, 286, 470, 895, 55409, 262, 335, 5661, 11655, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 1009, 8250, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101252, 11, 100809, 79, 1248, 73022, 151336], 'meta_info': {'id': 'ccdbd148fff34261b7d1764c2fd2a4d1', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 15169, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.00034171465085819364, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.34893059730529785, 31193, 'sed'), (0.0, 481, ' -'), (-6.48477507638745e-05, 72, 'i'), (-0.0020306934602558613, 364, "" '""), (-1.7033129930496216, 101252, '88'), (-0.09193392097949982, 11, ','), (-0.5323030352592468, 100809, '99'), (-0.0673322081565857, 66, 'c'), (-0.0002329078270122409, 5661, '\\\n'), (-0.008343127556145191, 286, ' '), (-0.722920298576355, 335, ' }'), (-0.0014016337227076292, 5661, '\\\n'), (-0.048647623509168625, 286, ' '), (-0.003046873025596142, 3044, ' \\\n'), (-1.1920928244535389e-07, 286, ' '), (-0.47609075903892517, 442, ' //'), (-0.3868712782859802, 1752, ' For'), (-0.0898522362112999, 5656, ' insert'), (-0.1832706481218338, 908, 'ions'), (-0.0005644158809445798, 11, ','), (-0.22187435626983643, 1779, ' check'), (-0.0004884004010818899, 421, ' if'), 
(-0.12685097754001617, 8127, ' cursor'), (-3.6238969187252223e-05, 374, ' is'), (-0.30260980129241943, 3143, ' near'), (-0.006060793064534664, 279, ' the'), (-0.7930671572685242, 35747, ' insertion'), (-0.8442103862762451, 1459, ' point'), (-0.10293956845998764, 5661, '\\\n'), (-1.1920928244535389e-07, 286, ' '), (-0.08909780532121658, 421, ' if'), (-1.3112935448589269e-05, 320, ' ('), (-2.5987286790041253e-05, 574, 'this'), (0.0, 12389, '.action'), (-1.156323378381785e-05, 36442, '.kind'), (-2.264974000354414e-06, 2049, ' ==='), (-5.185469490243122e-05, 364, "" '""), (-3.099436753473128e-06, 3587, 'edit'), (-0.00019000156316906214, 13771, 'Insert'), (-0.002305232686921954, 863, ""')""), (-4.458328112377785e-05, 94714, ' {\\\n'), (-1.1920922133867862e-06, 310, ' '), (-0.6981557011604309, 733, ' const'), (-0.27671390771865845, 5656, ' insert'), (-0.004727021791040897, 2460, 'Line'), (-4.887569048150908e-06, 284, ' ='), (-0.0198151133954525, 419, ' this'), (-6.508615479106084e-05, 12389, '.action'), (-1.9073304429184645e-05, 6187, '.position'), (-0.04736153781414032, 58, '['), (-0.15502093732357025, 15, '0'), (-0.00023493390472140163, 5265, '];'), (-7.152531907195225e-06, 5661, '\\\n'), (-5.960462772236497e-07, 310, ' '), (-0.003542578313499689, 470, ' return'), (-0.02786862663924694, 4149, ' Math'), (0.0, 14562, '.abs'), (-0.005620746873319149, 27583, '(cursor'), (0.0, 2460, 'Line'), (0.0, 481, ' -'), (0.0, 5656, ' insert'), (0.0, 2460, 'Line'), (0.0, 8, ')'), (-4.0531076592742465e-06, 2651, ' <='), (-4.053033626405522e-05, 220, ' '), (-0.3069133162498474, 16, '1'), (-0.0023249757941812277, 55409, ';\\\n'), (0.0, 286, ' '), (-1.1920928244535389e-07, 335, ' }'), (-2.1576648578047752e-05, 5661, '\\\n'), (-0.00018285033002030104, 286, ' '), (-0.033062245696783066, 3044, ' \\\n'), (0.0, 286, ' '), (-0.1187867522239685, 442, ' //'), (-8.77341881277971e-05, 1752, ' For'), (-0.0012024560710415244, 52703, ' replacements'), (-4.911301948595792e-05, 11, ','), (-0.00042703570215962827, 1779, ' check'), (-5.61460001335945e-05, 421, ' if'), (-5.960462772236497e-07, 8127, ' cursor'), (-0.0006082591717131436, 374, ' is'), (-0.6339299082756042, 2878, ' within'), (-0.16347534954547882, 476, ' or'), (-0.2995389699935913, 3143, ' near'), (-0.0001530530134914443, 279, ' the'), (-0.2740061283111572, 13714, ' replacement'), (-3.3378546504536644e-06, 2088, ' range'), (-3.2186455882765586e-06, 5661, '\\\n'), (0.0, 286, ' '), (-1.168244216387393e-05, 421, ' if'), (0.0, 320, ' ('), (0.0, 574, 'this'), (0.0, 12389, '.action'), (0.0, 36442, '.kind'), (-2.3841855067985307e-07, 2049, ' ==='), (0.0, 364, "" '""), (0.0, 3587, 'edit'), (-1.311301275563892e-06, 23044, 'Replace'), (-2.3841855067985307e-07, 863, ""')""), (-8.344646857949556e-07, 94714, ' {\\\n'), (0.0, 310, ' '), (-0.016326766461133957, 470, ' return'), (-1.0728830375228426e-06, 8127, ' cursor'), (0.0, 2460, 'Line'), (0.0, 2604, ' >='), (-7.152555099310121e-07, 419, ' this'), (0.0, 12389, '.action'), (-1.0728830375228426e-06, 30029, '.range'), (-1.1920922133867862e-06, 4962, '.start'), (-7.152555099310121e-07, 58, '['), (0.0, 15, '0'), (0.0, 60, ']'), (-1.8596476365928538e-05, 481, ' -'), (-1.4305104514278355e-06, 220, ' '), (-1.7881377516459906e-06, 16, '1'), (-0.0009450022480450571, 1009, ' &&'), (-0.014943092130124569, 3044, ' \\\n'), (-1.0728830375228426e-06, 4293, ' '), (0.0, 8127, ' cursor'), (0.0, 2460, 'Line'), (-4.768370445162873e-07, 2651, ' <='), (0.0, 419, ' this'), (-2.3841855067985307e-07, 12389, '.action'), (-1.4305012882687151e-05, 30029, 
'.range'), (-6.9141146923357155e-06, 5073, '.end'), (0.0, 58, '['), (0.0, 15, '0'), (-2.145764938177308e-06, 60, ']'), (0.0, 488, ' +'), (0.0, 220, ' '), (0.0, 16, '1'), (-1.1920928244535389e-07, 55409, ';\\\n'), (0.0, 286, ' '), (0.0, 335, ' }'), (-3.814689989667386e-06, 5661, '\\\n'), (-1.585470999998506e-05, 286, ' '), (-0.00015901254664640874, 3044, ' \\\n'), (0.0, 286, ' '), (-0.0019300420535728335, 470, ' return'), (-0.0002728328399825841, 895, ' false'), (-0.0018294993788003922, 55409, ';\\\n'), (-3.576272320060525e-06, 262, ' '), (-7.510157047363464e-06, 335, ' }'), (-0.00013302871957421303, 5661, '\\\n'), (-0.02675887942314148, 11655, ""}'""), (-3.397406908334233e-05, 608, ' /'), (0.0, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (-1.1920928244535389e-07, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (0.0, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (0.0, 13428, '/src'), (0.0, 14, '/'), (0.0, 27082, 'preview'), (-2.145764938177308e-06, 14, '/'), (0.0, 5057, 'inline'), (0.0, 5179, 'Provider'), (0.0, 21239, '.ts'), (-9.405170567333698e-05, 1009, ' &&'), (-1.4305104514278355e-06, 8250, ' cat'), (0.0, 481, ' -'), (0.0, 77, 'n'), (0.0, 608, ' /'), (0.0, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (0.0, 2899, '/c'), (0.0, 651, 'row'), (-1.1920928244535389e-07, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (0.0, 13428, '/src'), (0.0, 14, '/'), (0.0, 27082, 'preview'), (-3.099436753473128e-06, 14, '/'), (0.0, 5057, 'inline'), (0.0, 5179, 'Provider'), (0.0, 21239, '.ts'), (-3.6954811548639555e-06, 760, ' |'), (-2.3841855067985307e-07, 10918, ' sed'), (-5.364403477869928e-06, 481, ' -'), (0.0, 77, 'n'), (0.0, 364, "" '""), (-0.47875064611434937, 101252, '88'), (0.0, 11, ','), (-0.17539525032043457, 100809, '99'), (-1.1920928244535389e-07, 79, 'p'), (-7.152555099310121e-07, 1248, ""'\n""), (-3.1709168979432434e-05, 73022, '```'), (-1.549708758830093e-05, 151336, '<|user|>')], 'completion_tokens': 207, 'cached_tokens': 598, 'spec_accept_rate': 0.7121212121212122, 'spec_accept_length': 3.1363636363636362, 'spec_verify_ct': 66, 'spec_accept_token_num': 141, 'spec_draft_token_num': 198, 'e2e_latency': 2.1077144145965576, 'response_sent_to_client_ts': 1767625216.1712046}}\r\n",,terminal_output +387,929807,"TERMINAL",0,0,"[2026-01-05 16:00:16 TP0] Decode batch, #running-req: 1, #token: 15213, token usage: 0.03, accept len: 3.13, accept rate: 0.78, cuda graph: True, gen throughput (token/s): 144.02, #queue-req: 0, \r\n",,terminal_output +388,930023,"src/preview/inlineProvider.ts",1307,0,"",typescript,selection_command +389,930276,"src/preview/inlineProvider.ts",1399,0,"",typescript,selection_command +390,930292,"src/preview/inlineProvider.ts",1444,0,"",typescript,selection_command +391,930318,"TERMINAL",0,0,"[2026-01-05 16:00:17 TP0] Decode batch, #running-req: 1, #token: 15344, token usage: 0.03, accept len: 3.27, accept rate: 0.82, cuda graph: True, gen throughput (token/s): 230.28, #queue-req: 0, \r\n[2026-01-05 16:00:17] Receive: obj=GenerateReqInput(validation_time=2.5319866836071014e-05, received_time=1767625217.0528352, received_time_perf=2480101.725760564, rid='c1a8b0dad167477d812c7c35265c2347', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 
5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 100933, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 21, 99082, 98867, 100235, 20, 21, 100933, 100809, 19, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 23, 23, 13, 100899, 18, 100067, 16, 20, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 
102114, 18, 13, 19, 99916, 23, 99916, 24, 99916, 11, 9276, 1131, 2127, 57253, 100562, 20, 23, 69, 20, 65, 101655, 17, 1999, 4645, 757, 23, 542, 103502, 18, 104327, 65, 20, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104] ... 
[112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101294, 11, 102573, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99082, 28964, 15, 60, 49872, 7162, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 5839, 25, 220, 122876, 99317, 11, 3950, 10426, 25, 220, 15, 13, 100441, 11, 4193, 2422, 25, 220, 18, 13, 99082, 11, 4193, 4379, 25, 220, 15, 13, 102626, 11, 24335, 4771, 25, 3007, 11, 4081, 63308, 320, 5839, 2687, 1648, 220, 123564, 13, 100104, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99082, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 103093, 18, 100096, 18, 99619, 19, 122414, 100096, 24, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99146, 20, 13, 100235, 22, 100562, 23, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 15, 13, 15, 99064, 15, 99366, 102269, 17, 11, 9276, 1131, 823, 329, 1371, 99590, 68, 122250, 99698, 20, 18, 2577, 88714, 102636, 68, 21, 66, 18, 64, 101840, 21, 18, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 
11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:00:17 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.03, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +392,930327,"src/preview/inlineProvider.ts",1467,0,"",typescript,selection_command +393,930354,"src/preview/inlineProvider.ts",1477,0,"",typescript,selection_command 
+394,930435,"src/preview/inlineProvider.ts",1478,0,"",typescript,selection_command +395,930609,"TERMINAL",0,0,"[2026-01-05 16:00:17 TP0] Prefill batch, #new-seq: 1, #new-token: 6258, #cached-token: 0, token usage: 0.05, #running-req: 1, #queue-req: 0, \r\n[2026-01-05 16:00:17] Receive: obj=GenerateReqInput(validation_time=1.7181970179080963e-05, received_time=1767625217.382863, received_time_perf=2480102.055788188, rid='967a3eccd3924b8db8a6d74b3fd44cc7', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 
26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 7530, 1975, 5179, 21239, 760, 10918, 481, 77, 364, 99366, 11, 100702, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99366, 9356, 353, 2573, 279, 1482, 1917, 323, 1181, 17086, 1555, 369, 19511, 17957, 624, 262, 220, 99367, 9356, 735, 262, 220, 99082, 2760, 738, 2512, 15294, 25, 5586, 11, 17086, 2460, 25, 1372, 1648, 737, 341, 262, 220, 99317, 7472, 419, 12389, 284, 1917, 280, 262, 220, 99419, 7472, 419, 44106, 2460, 284, 17086, 2460, 280, 262, 220, 99243, 2760, 456, 262, 220, 98729, 1572, 262, 220, 98360, 2760, 1554, 262, 220, 99146, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99241, 9356, 735, 262, 220, 99619, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99590, 7472, 419, 12389, 284, 845, 280, 262, 220, 99446, 7472, 419, 44106, 2460, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 39350, 19511, 2213, 979, 1196, 305, 8966, 916, 279, 20396, 3082, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 3410, 34209, 1006, 262, 220, 101175, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 100702, 7472, 2309, 25, 55008, 21900, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 102721, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 98668, 112891, 101130, 21, 15, 99241, 21, 101723, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 24, 18, 13, 18, 98729, 21, 102340, 21, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 102114, 22, 13, 100809, 17, 101917, 22, 100441, 21, 11, 9276, 1131, 69, 24, 69, 114365, 66, 100235, 65, 100928, 19, 65, 19, 3632, 18, 80999, 16, 66, 23, 64, 21, 98965, 18, 64, 15, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965] ... 
[11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 28964, 15, 60, 49872, 7162, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 5839, 25, 220, 122876, 101723, 11, 3950, 10426, 25, 220, 15, 13, 100441, 11, 4193, 2422, 25, 220, 18, 13, 99951, 11, 4193, 4379, 25, 220, 15, 13, 104160, 11, 24335, 4771, 25, 3007, 11, 4081, 63308, 320, 5839, 2687, 1648, 220, 112596, 13, 99869, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 20, 100557, 101663, 21, 101840, 18, 99618, 22, 107609, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99146, 22, 13, 100002, 17, 23, 100235, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 16, 13, 22, 99446, 22, 99618, 101917, 19, 11, 9276, 1131, 66, 16, 64, 23, 65, 15, 55469, 124047, 101655, 22, 67, 23, 98886, 66, 22, 66, 100235, 17, 101411, 66, 99619, 19, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 
98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +396,930691,"TERMINAL",0,0,"[2026-01-05 16:00:17] Receive: obj=GenerateReqInput(validation_time=2.2584106773138046e-05, received_time=1767625217.4639854, received_time_perf=2480102.13691057, rid='56f87aa312c542e0a35070282c2a96a7', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 
8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 7530, 1975, 5179, 21239, 760, 10918, 481, 77, 364, 99366, 11, 100702, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99366, 9356, 353, 2573, 279, 1482, 1917, 323, 1181, 17086, 1555, 369, 19511, 17957, 624, 262, 220, 99367, 9356, 735, 262, 220, 99082, 2760, 738, 2512, 15294, 25, 5586, 11, 17086, 2460, 25, 1372, 1648, 737, 341, 262, 
220, 99317, 7472, 419, 12389, 284, 1917, 280, 262, 220, 99419, 7472, 419, 44106, 2460, 284, 17086, 2460, 280, 262, 220, 99243, 2760, 456, 262, 220, 98729, 1572, 262, 220, 98360, 2760, 1554, 262, 220, 99146, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99241, 9356, 735, 262, 220, 99619, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99590, 7472, 419, 12389, 284, 845, 280, 262, 220, 99446, 7472, 419, 44106, 2460, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 39350, 19511, 2213, 979, 1196, 305, 8966, 916, 279, 20396, 3082, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 3410, 34209, 1006, 262, 220, 101175, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 100702, 7472, 2309, 25, 55008, 21900, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 102721, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 98668, 112891, 101130, 21, 15, 99241, 21, 101723, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 24, 18, 13, 18, 98729, 21, 102340, 21, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 102114, 22, 13, 100809, 17, 101917, 22, 100441, 21, 11, 9276, 1131, 69, 24, 69, 114365, 66, 100235, 65, 100928, 19, 65, 19, 3632, 18, 80999, 16, 66, 23, 64, 21, 98965, 18, 64, 15, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965] ... 
[11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 28964, 15, 60, 49872, 7162, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 5839, 25, 220, 122876, 101723, 11, 3950, 10426, 25, 220, 15, 13, 100441, 11, 4193, 2422, 25, 220, 18, 13, 99951, 11, 4193, 4379, 25, 220, 15, 13, 104160, 11, 24335, 4771, 25, 3007, 11, 4081, 63308, 320, 5839, 2687, 1648, 220, 112596, 13, 99869, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 20, 100557, 101663, 21, 101840, 18, 99618, 22, 107609, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99146, 22, 13, 100002, 17, 23, 100235, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 16, 13, 22, 99446, 22, 99618, 101917, 19, 11, 9276, 1131, 66, 16, 64, 23, 65, 15, 55469, 124047, 101655, 22, 67, 23, 98886, 66, 22, 66, 100235, 17, 101411, 66, 99619, 19, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 
98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +397,931216,"TERMINAL",0,0,"[2026-01-05 16:00:18 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 814, token usage: 0.07, #running-req: 2, #queue-req: 1, \r\n",,terminal_output +398,931503,"TERMINAL",0,0,"[2026-01-05 16:00:18 TP0] Prefill batch, #new-seq: 2, #new-token: 8192, #cached-token: 9005, token usage: 0.08, #running-req: 2, #queue-req: 0, \r\n",,terminal_output +399,931845,"TERMINAL",0,0,"[2026-01-05 16:00:18 TP0] Prefill batch, #new-seq: 1, #new-token: 3773, #cached-token: 0, token usage: 0.10, 
#running-req: 3, #queue-req: 0, \r\n",,terminal_output +400,932288,"TERMINAL",0,0,"[2026-01-05 16:00:19] Finish: obj=GenerateReqInput(validation_time=2.5319866836071014e-05, received_time=1767625217.0528352, received_time_perf=2480101.725760564, rid='c1a8b0dad167477d812c7c35265c2347', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... "": 15316, token usage: 0.03, accept len: 3.15, accept rate: 0.79, cuda graph: True, gen throughput (token/s): 222.29, #queue-req: 0, \n[2026-01-05 16:00:15] Receive: obj=GenerateReqInput(validation_time=1.6937032341957092e-05, received_time=1767625215.3570883, received_time_perf=2480100.030013762, rid='efadbe24e2164053afdba64e6c3a6863', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, \n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 100933, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 21, 99082, 98867, 100235, 
20, 21, 100933, 100809, 19, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 23, 23, 13, 100899, 18, 100067, 16, 20, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 102114, 18, 13, 19, 99916, 23, 99916, 24, 99916, 11, 9276, 1131, 2127, 57253, 100562, 20, 23, 69, 20, 65, 101655, 17, 1999, 4645, 757, 23, 542, 103502, 18, 104327, 65, 20, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104] ... 
[112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101294, 11, 102573, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99082, 28964, 15, 60, 49872, 7162, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 5839, 25, 220, 122876, 99317, 11, 3950, 10426, 25, 220, 15, 13, 100441, 11, 4193, 2422, 25, 220, 18, 13, 99082, 11, 4193, 4379, 25, 220, 15, 13, 102626, 11, 24335, 4771, 25, 3007, 11, 4081, 63308, 320, 5839, 2687, 1648, 220, 123564, 13, 100104, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99082, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 103093, 18, 100096, 18, 99619, 19, 122414, 100096, 24, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99146, 20, 13, 100235, 22, 100562, 23, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 15, 13, 15, 99064, 15, 99366, 102269, 17, 11, 9276, 1131, 823, 329, 1371, 99590, 68, 122250, 99698, 20, 18, 2577, 88714, 102636, 68, 21, 66, 18, 64, 101840, 21, 18, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 
11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '60,80p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99618, 11, 99695, 79, 1248, 73022, 151336], 'meta_info': {'id': 
'c1a8b0dad167477d812c7c35265c2347', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 15046, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.0003599472693167627, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.3372364938259125, 4616, 'cat'), (-0.013687563128769398, 481, ' -'), (-1.0728830375228426e-06, 77, 'n'), (-3.099436753473128e-06, 608, ' /'), (-1.311301275563892e-06, 5117, 'home'), (-2.3841855067985307e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-2.861018856492592e-06, 2899, '/c'), (-1.1920928244535389e-07, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0010091932490468025, 13428, '/src'), (-0.0005427317810244858, 14, '/'), (-0.011527478694915771, 27082, 'preview'), (-0.17416410148143768, 14, '/'), (-0.012341560795903206, 5057, 'inline'), (-3.4689302992774174e-05, 5179, 'Provider'), (-4.768370445162873e-07, 21239, '.ts'), (-0.0023076115176081657, 760, ' |'), (-0.0001411338453181088, 10918, ' sed'), (-2.3841855067985307e-07, 481, ' -'), (-3.2186455882765586e-06, 77, 'n'), (-1.1920928244535389e-07, 364, "" '""), (-1.5980873107910156, 99618, '60'), (-1.0728830375228426e-06, 11, ','), (-0.06009805575013161, 99695, '80'), (-1.1920928244535389e-07, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-9.274052717955783e-05, 73022, '```'), (-2.2172682292875834e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 596, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 1.9962780475616455, 'response_sent_to_client_ts': 1767625219.049547}}\r\n[2026-01-05 16:00:19] Finish: obj=GenerateReqInput(validation_time=1.7181970179080963e-05, received_time=1767625217.382863, received_time_perf=2480102.055788188, rid='967a3eccd3924b8db8a6d74b3fd44cc7', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
"": 15344, token usage: 0.03, accept len: 3.27, accept rate: 0.82, cuda graph: True, gen throughput (token/s): 230.28, #queue-req: 0, \n[2026-01-05 16:00:17] Receive: obj=GenerateReqInput(validation_time=2.5319866836071014e-05, received_time=1767625217.0528352, received_time_perf=2480101.725760564, rid='c1a8b0dad167477d812c7c35265c2347', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, \n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 
5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 7530, 1975, 5179, 21239, 760, 10918, 481, 77, 364, 99366, 11, 100702, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99366, 9356, 353, 2573, 279, 1482, 1917, 323, 1181, 17086, 1555, 369, 19511, 17957, 624, 262, 220, 99367, 9356, 735, 262, 220, 99082, 2760, 738, 2512, 15294, 25, 5586, 11, 17086, 2460, 25, 1372, 1648, 737, 341, 262, 220, 99317, 7472, 419, 12389, 284, 1917, 280, 262, 220, 99419, 7472, 419, 44106, 2460, 284, 17086, 2460, 280, 262, 220, 99243, 2760, 456, 262, 220, 98729, 1572, 262, 220, 98360, 2760, 1554, 262, 220, 99146, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99241, 9356, 735, 262, 220, 99619, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99590, 7472, 419, 12389, 284, 845, 280, 262, 220, 99446, 7472, 419, 44106, 2460, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 39350, 19511, 2213, 979, 1196, 305, 8966, 916, 279, 20396, 3082, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 3410, 34209, 1006, 262, 220, 101175, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 100702, 7472, 2309, 25, 55008, 21900, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 102721, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 98668, 112891, 101130, 21, 15, 99241, 21, 101723, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 24, 18, 13, 18, 98729, 21, 102340, 21, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 102114, 22, 13, 100809, 17, 101917, 22, 100441, 21, 11, 9276, 1131, 69, 24, 69, 114365, 66, 100235, 65, 100928, 19, 65, 19, 3632, 18, 80999, 16, 66, 23, 64, 21, 98965, 18, 64, 15, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965] ... 
[11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 28964, 15, 60, 49872, 7162, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 5839, 25, 220, 122876, 101723, 11, 3950, 10426, 25, 220, 15, 13, 100441, 11, 4193, 2422, 25, 220, 18, 13, 99951, 11, 4193, 4379, 25, 220, 15, 13, 104160, 11, 24335, 4771, 25, 3007, 11, 4081, 63308, 320, 5839, 2687, 1648, 220, 112596, 13, 99869, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 20, 100557, 101663, 21, 101840, 18, 99618, 22, 107609, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99146, 22, 13, 100002, 17, 23, 100235, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 16, 13, 22, 99446, 22, 99618, 101917, 19, 11, 9276, 1131, 66, 16, 64, 23, 65, 15, 55469, 124047, 101655, 22, 67, 23, 98886, 66, 22, 66, 100235, 17, 101411, 66, 99619, 19, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 
98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '83,103p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 104127, 11, 110248, 79, 1248, 73022, 151336], 'meta_info': {'id': '967a3eccd3924b8db8a6d74b3fd44cc7', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 14988, 'weight_version': 'default', 
'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.0001714082609396428, 73022, '```'), (0.0, 45937, 'bash'), (-1.1920928244535389e-07, 198, '\n'), (-0.2014058232307434, 4616, 'cat'), (-0.011468320153653622, 481, ' -'), (-5.960462772236497e-07, 77, 'n'), (-1.0728830375228426e-06, 608, ' /'), (-1.1920928244535389e-07, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (-1.1920928244535389e-07, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-7.152555099310121e-07, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0019848900847136974, 13428, '/src'), (-0.0004663571889977902, 14, '/'), (-0.005742955021560192, 27082, 'preview'), (-0.2495354264974594, 14, '/'), (-0.026609497144818306, 5057, 'inline'), (-8.535020606359467e-05, 5179, 'Provider'), (-1.1920922133867862e-06, 21239, '.ts'), (-0.0013505632523447275, 760, ' |'), (-0.00022206225548870862, 10918, ' sed'), (0.0, 481, ' -'), (-1.0967194612021558e-05, 77, 'n'), (0.0, 364, "" '""), (-2.6663737297058105, 104127, '83'), (-4.0531076592742465e-06, 11, ','), (-1.5372048616409302, 110248, '103'), (-3.576278118089249e-07, 79, 'p'), (-2.3841855067985307e-07, 1248, ""'\n""), (-0.00018952481332235038, 73022, '```'), (-0.001129108713939786, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 814, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 1.6664173603057861, 'response_sent_to_client_ts': 1767625219.0517914}}\r\n[2026-01-05 16:00:19] Finish: obj=GenerateReqInput(validation_time=2.2584106773138046e-05, received_time=1767625217.4639854, received_time_perf=2480102.13691057, rid='56f87aa312c542e0a35070282c2a96a7', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
"": 15344, token usage: 0.03, accept len: 3.27, accept rate: 0.82, cuda graph: True, gen throughput (token/s): 230.28, #queue-req: 0, \n[2026-01-05 16:00:17] Receive: obj=GenerateReqInput(validation_time=2.5319866836071014e-05, received_time=1767625217.0528352, received_time_perf=2480101.725760564, rid='c1a8b0dad167477d812c7c35265c2347', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, \n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 
5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 7530, 1975, 5179, 21239, 760, 10918, 481, 77, 364, 99366, 11, 100702, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99366, 9356, 353, 2573, 279, 1482, 1917, 323, 1181, 17086, 1555, 369, 19511, 17957, 624, 262, 220, 99367, 9356, 735, 262, 220, 99082, 2760, 738, 2512, 15294, 25, 5586, 11, 17086, 2460, 25, 1372, 1648, 737, 341, 262, 220, 99317, 7472, 419, 12389, 284, 1917, 280, 262, 220, 99419, 7472, 419, 44106, 2460, 284, 17086, 2460, 280, 262, 220, 99243, 2760, 456, 262, 220, 98729, 1572, 262, 220, 98360, 2760, 1554, 262, 220, 99146, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99241, 9356, 735, 262, 220, 99619, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99590, 7472, 419, 12389, 284, 845, 280, 262, 220, 99446, 7472, 419, 44106, 2460, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 39350, 19511, 2213, 979, 1196, 305, 8966, 916, 279, 20396, 3082, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 3410, 34209, 1006, 262, 220, 101175, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 100702, 7472, 2309, 25, 55008, 21900, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 102721, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 98668, 112891, 101130, 21, 15, 99241, 21, 101723, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 24, 18, 13, 18, 98729, 21, 102340, 21, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 102114, 22, 13, 100809, 17, 101917, 22, 100441, 21, 11, 9276, 1131, 69, 24, 69, 114365, 66, 100235, 65, 100928, 19, 65, 19, 3632, 18, 80999, 16, 66, 23, 64, 21, 98965, 18, 64, 15, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965] ... 
[11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 1006\n[... 7264 bytes truncated to respect terminal scrollback settings ...]\n",,terminal_output +401,933939,"src/preview/inlineProvider.ts",1536,0,"",typescript,selection_command +402,934194,"src/preview/inlineProvider.ts",1625,0,"",typescript,selection_command +403,934229,"src/preview/inlineProvider.ts",1674,0,"",typescript,selection_command +404,934229,"TERMINAL",0,0,"[2026-01-05 16:00:20] Receive: obj=GenerateReqInput(validation_time=1.8578022718429565e-05, received_time=1767625220.9683833, received_time_perf=2480105.64130862, rid='27682577a0f44559a5b506b5899aeef5', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 
32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 102721, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 98668, 112891, 101130, 21, 15, 99241, 21, 101723, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 24, 18, 13, 18, 98729, 21, 102340, 21, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 102114, 22, 13, 100809, 17, 101917, 22, 100441, 21, 11, 9276, 1131, 69, 24, 69, 114365, 66, 100235, 65, 100928, 19, 65, 19, 3632, 18, 80999, 16, 66, 23, 64, 21, 98965, 18, 64, 15, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 
102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11] ... [11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 21, 99446, 23, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100002, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 99243, 98729, 22, 15, 99419, 100067, 23, 100614, 21, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99146, 22, 13, 100919, 17, 102807, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 
121570, 17, 13, 100002, 20, 100928, 23, 117786, 11, 9276, 1131, 102487, 22, 64, 18, 70783, 67, 101294, 17, 19, 65, 23, 1999, 23, 64, 21, 67, 103498, 65, 18, 6902, 101723, 638, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 22, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, 
bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:00:21 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +405,934259,"src/preview/inlineProvider.ts",1697,0,"",typescript,selection_command +406,934283,"src/preview/inlineProvider.ts",1707,0,"",typescript,selection_command +407,934364,"src/preview/inlineProvider.ts",1708,0,"",typescript,selection_command +408,934365,"src/preview/inlineProvider.ts",1774,0,"",typescript,selection_command +409,934491,"TERMINAL",0,0,"[2026-01-05 16:00:21 TP0] Prefill batch, #new-seq: 1, #new-token: 6619, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +410,934589,"src/preview/inlineProvider.ts",1783,0,"",typescript,selection_command +411,934645,"TERMINAL",0,0,"[2026-01-05 16:00:21] Receive: obj=GenerateReqInput(validation_time=2.0687934011220932e-05, received_time=1767625221.3950489, received_time_perf=2480106.067974301, rid='ecd76b0c76fd42298c2fc16ba3c5bfef', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 
18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 311, 51397, 1851, 6046, 11, 311, 51397, 1851, 3812, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 624, 257, 220, 21, 197, 353, 1096, 4990, 10614, 916, 27971, 594, 30523, 323, 4278, 389, 4287, 5128, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 8967, 25, 2710, 284, 830, 280, 262, 220, 98965, 1572, 262, 220, 98886, 2760, 1554, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 102486, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100372, 21, 100702, 22, 114240, 24, 102269, 19, 100928, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 24, 22, 13, 24, 101478, 124184, 17, 11, 3949, 3009, 76167, 28, 99590, 22, 126189, 17, 13, 21, 101294, 19, 101140, 100461, 24, 11, 9276, 1131, 68, 15, 99367, 6902, 124540, 17, 67, 99590, 24, 9787, 24, 99419, 66, 23, 101663, 19, 95035, 100557, 66, 103388, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11] ... 
[25, 98503, 25, 98360, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102284, 22, 99695, 17, 99951, 16, 23, 101961, 101804, 21, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99241, 15, 13, 24, 101840, 100919, 18, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 20, 13, 21, 102340, 123786, 103319, 11, 9276, 1131, 99951, 21, 23, 99446, 102114, 64, 15, 69, 19, 100461, 102573, 64, 20, 65, 99200, 21, 65, 101729, 24, 24, 5918, 823, 20, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 
376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99200, 11, 100096, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99146, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 101478, 16, 24, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +412,934781,"TERMINAL",0,0,"[2026-01-05 16:00:21 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 603, token usage: 0.03, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +413,934949,"TERMINAL",0,0,"[2026-01-05 16:00:21] Receive: obj=GenerateReqInput(validation_time=1.8737278878688812e-05, received_time=1767625221.6955338, received_time_perf=2480106.368459049, 
rid='292eb81c56134651887e59013c9ba495', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 
14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 100933, 11, 101840, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 13751, 5656, 908, 198, 262, 220, 101917, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 13771, 863, 341, 262, 220, 102486, 13056, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 101729, 197, 3374, 262, 220, 102573, 13056, 442, 23177, 3410, 279, 9750, 481, 30530, 6119, 686, 1473, 432, 518, 279, 5656, 2309, 198, 262, 220, 99618, 13056, 442, 576, 19828, 1467, 7951, 518, 279, 2088, 5189, 11, 537, 14302, 518, 8127, 198, 262, 220, 103595, 13056, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 103319, 20789, 419, 12389, 2788, 345, 262, 220, 103302, 20789, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102636, 13056, 1439, 262, 220, 101411, 197, 3374, 262, 220, 101478, 13056, 470, 508, 1203, 935, 262, 220, 102952, 7472, 456, 262, 220, 101840, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101655, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100928, 24, 20, 100067, 99200, 22, 22, 100461, 22, 101961, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 22, 13, 100104, 20, 120547, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 16, 13, 24, 101840, 118901, 101840, 22, 11, 9276, 1131, 100919, 68, 101562, 19, 98729, 88444, 20, 101723, 93437, 69, 21, 101804, 65, 100919, 24, 8315, 18, 69, 18, 65, 17, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11] ... 
[608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99200, 11, 100096, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99146, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 101478, 16, 24, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99146, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100539, 23, 102626, 18, 99698, 16, 115760, 100614, 18, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99241, 16, 13, 101294, 20, 100590, 23, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 21, 13, 100539, 22, 103205, 19, 122559, 11, 9276, 1131, 37012, 102269, 65, 15, 66, 102269, 6902, 19, 99241, 101663, 66, 17, 8315, 99317, 4645, 18, 66, 20, 13225, 823, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 
100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +414,935056,"TERMINAL",0,0,"[2026-01-05 16:00:21 TP0] Prefill batch, #new-seq: 1, #new-token: 6238, #cached-token: 0, token usage: 0.05, #running-req: 1, #queue-req: 1, \r\n",,terminal_output +415,935445,"TERMINAL",0,0,"[2026-01-05 16:00:22 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 611, token usage: 0.07, #running-req: 2, #queue-req: 0, 
\r\n",,terminal_output +416,935683,"TERMINAL",0,0,"[2026-01-05 16:00:22 TP0] Prefill batch, #new-seq: 1, #new-token: 6087, #cached-token: 0, token usage: 0.08, #running-req: 2, #queue-req: 0, \r\n",,terminal_output +417,935888,"TERMINAL",0,0,"[2026-01-05 16:00:22 TP0] Decode batch, #running-req: 3, #token: 44179, token usage: 0.10, accept len: 3.09, accept rate: 0.77, cuda graph: True, gen throughput (token/s): 42.99, #queue-req: 0, \r\n",,terminal_output +418,936085,"TERMINAL",0,0,"[2026-01-05 16:00:22] Finish: obj=GenerateReqInput(validation_time=1.8578022718429565e-05, received_time=1767625220.9683833, received_time_perf=2480105.64130862, rid='27682577a0f44559a5b506b5899aeef5', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""n: 6258, #cached-token: 0, token usage: 0.05, #running-req: 1, #queue-req: 0, \n[2026-01-05 16:00:17] Receive: obj=GenerateReqInput(validation_time=1.7181970179080963e-05, received_time=1767625217.382863, received_time_perf=2480102.055788188, rid='967a3eccd3924b8db8a6d74b3fd44cc7', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 7\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 102721, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 98668, 112891, 101130, 
21, 15, 99241, 21, 101723, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 24, 18, 13, 18, 98729, 21, 102340, 21, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 102114, 22, 13, 100809, 17, 101917, 22, 100441, 21, 11, 9276, 1131, 69, 24, 69, 114365, 66, 100235, 65, 100928, 19, 65, 19, 3632, 18, 80999, 16, 66, 23, 64, 21, 98965, 18, 64, 15, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11] ... 
[11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 21, 99446, 23, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100002, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 99243, 98729, 22, 15, 99419, 100067, 23, 100614, 21, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99146, 22, 13, 100919, 17, 102807, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 17, 13, 100002, 20, 100928, 23, 117786, 11, 9276, 1131, 102487, 22, 64, 18, 70783, 67, 101294, 17, 19, 65, 23, 1999, 23, 64, 21, 67, 103498, 65, 18, 6902, 101723, 638, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 
11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 22, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '1,20p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98360, 79, 1248, 73022, 151336], 'meta_info': {'id': '27682577a0f44559a5b506b5899aeef5', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 15407, 'weight_version': 'default', 'total_retractions': 0, 
'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.00046659549116156995, 73022, '```'), (0.0, 45937, 'bash'), (-4.768370445162873e-07, 198, '\n'), (-0.2869880497455597, 4616, 'cat'), (-0.03311990946531296, 481, ' -'), (-1.1920922133867862e-06, 77, 'n'), (-8.940656698541716e-06, 608, ' /'), (-4.768370445162873e-07, 5117, 'home'), (-1.1920928244535389e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (-1.1920928244535389e-07, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-7.152531907195225e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-1.1920928244535389e-07, 79888, '-extension'), (-0.0013962768716737628, 13428, '/src'), (-0.0006497179274447262, 14, '/'), (-0.0003408804477658123, 27082, 'preview'), (-0.01766999624669552, 14, '/'), (-0.04118524119257927, 5057, 'inline'), (-0.00038532938924618065, 5179, 'Provider'), (-8.344646857949556e-07, 21239, '.ts'), (-0.004706257954239845, 760, ' |'), (-0.00022230061586014926, 10918, ' sed'), (0.0, 481, ' -'), (-3.6954811548639555e-06, 77, 'n'), (-4.768370445162873e-07, 364, "" '""), (-0.7515575885772705, 16, '1'), (-3.814689989667386e-06, 11, ','), (-0.9210524559020996, 98360, '20'), (-4.768370445162873e-07, 79, 'p'), (-3.576278118089249e-07, 1248, ""'\n""), (-9.274052717955783e-05, 73022, '```'), (-7.629365427419543e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 596, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 1.8785560131072998, 'response_sent_to_client_ts': 1767625222.8471963}}\r\n[2026-01-05 16:00:22] Finish: obj=GenerateReqInput(validation_time=1.8737278878688812e-05, received_time=1767625221.6955338, received_time_perf=2480106.368459049, rid='292eb81c56134651887e59013c9ba495', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""time=2.0687934011220932e-05, received_time=1767625221.3950489, received_time_perf=2480106.067974301, rid='ecd76b0c76fd42298c2fc16ba3c5bfef', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198\n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 
518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 100933, 11, 101840, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 13751, 5656, 908, 198, 262, 220, 101917, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 13771, 863, 341, 262, 220, 102486, 13056, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 101729, 197, 3374, 262, 220, 102573, 13056, 442, 23177, 3410, 279, 9750, 481, 30530, 6119, 686, 1473, 432, 518, 279, 5656, 2309, 198, 262, 220, 99618, 13056, 442, 576, 19828, 1467, 7951, 518, 279, 2088, 5189, 11, 537, 14302, 518, 8127, 198, 262, 220, 103595, 13056, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 103319, 20789, 419, 12389, 2788, 345, 262, 220, 103302, 20789, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102636, 13056, 1439, 262, 220, 101411, 197, 3374, 262, 220, 101478, 13056, 470, 508, 1203, 935, 262, 220, 102952, 7472, 456, 262, 220, 101840, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101655, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100928, 24, 20, 100067, 99200, 22, 22, 100461, 22, 101961, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 22, 13, 100104, 20, 120547, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 16, 13, 24, 101840, 118901, 101840, 22, 11, 9276, 1131, 100919, 68, 101562, 19, 98729, 88444, 20, 101723, 93437, 69, 21, 101804, 65, 100919, 24, 8315, 18, 69, 18, 65, 17, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11] ... 
[608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99200, 11, 100096, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99146, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 101478, 16, 24, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99146, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100539, 23, 102626, 18, 99698, 16, 115760, 100614, 18, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99241, 16, 13, 101294, 20, 100590, 23, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 21, 13, 100539, 22, 103205, 19, 122559, 11, 9276, 1131, 37012, 102269, 65, 15, 66, 102269, 6902, 19, 99241, 101663, 66, 17, 8315, 99317, 4645, 18, 66, 20, 13225, 823, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 
100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '1,10p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98668, 79, 1248, 73022, 151336], 'meta_info': {'id': 
'292eb81c56134651887e59013c9ba495', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 14890, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.0010531361913308501, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.23332513868808746, 4616, 'cat'), (-0.006725413724780083, 481, ' -'), (-1.7881377516459906e-06, 77, 'n'), (-1.6689286894688848e-06, 608, ' /'), (-1.1920928244535389e-07, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-3.576278118089249e-07, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.000832449528388679, 13428, '/src'), (-0.00013660451804753393, 14, '/'), (-0.0004687402688432485, 27082, 'preview'), (-0.010149062611162663, 14, '/'), (-0.005473508033901453, 5057, 'inline'), (-7.724463648628443e-05, 5179, 'Provider'), (-9.536738616588991e-07, 21239, '.ts'), (-0.0007913556764833629, 760, ' |'), (-0.0005777596961706877, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (0.0, 364, "" '""), (-0.26377320289611816, 16, '1'), (-2.038458114839159e-05, 11, ','), (-0.7006619572639465, 98668, '10'), (-2.3841855067985307e-07, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-9.274052717955783e-05, 73022, '```'), (-5.4238757002167404e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 611, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 1.1515781879425049, 'response_sent_to_client_ts': 1767625222.8491504}}\r\n[2026-01-05 16:00:22] INFO: 10.86.2.252:55732 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n[2026-01-05 16:00:22] Finish: obj=GenerateReqInput(validation_time=2.0687934011220932e-05, received_time=1767625221.3950489, received_time_perf=2480106.067974301, rid='ecd76b0c76fd42298c2fc16ba3c5bfef', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""2\t return [];\n 53\t }\n 54\t\n 55\t // Only handle pure insertions (not replacements)\n 56\t // Replacements are handled by decorations to properly show what's being deleted\n 57\t if (this.action.kind !== 'editInsert') {\n 58\t return [];\n 59\t }\n 60\t\n 61\t const insertPos = toVscodePosition(this.action.position);\n 62\t \n 63\t // Only provide completion if insert position is at or after the cursor\n 64\t // VS Code's inline completion API shows ghost text at/after cursor position\n 65\t if (insertPos.isBefore(position)) {\n 66\t return [];\n 67\t }\n 68\t \n 69\t const item = new vscode.InlineCompletionItem(\n 70\t this.action.text,\n/nothink<|user|>\n\n[2026-01-05 16:00:21 TP0] Prefill batch, #new-seq: 1, #new-token: 6619, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0,\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 
23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 311, 51397, 1851, 6046, 11, 311, 51397, 1851, 3812, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 624, 257, 220, 21, 197, 353, 1096, 4990, 10614, 916, 27971, 594, 30523, 323, 4278, 389, 4287, 5128, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 8967, 25, 2710, 284, 830, 280, 262, 220, 98965, 1572, 262, 220, 98886, 2760, 1554, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102856, 25, 102486, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100372, 21, 100702, 22, 114240, 24, 102269, 19, 100928, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100933, 24, 22, 13, 24, 101478, 124184, 17, 11, 3949, 3009, 76167, 28, 99590, 22, 126189, 17, 13, 21, 101294, 19, 101140, 100461, 24, 11, 9276, 1131, 68, 15, 99367, 6902, 124540, 17, 67, 99590, 24, 9787, 24, 99419, 66, 23, 101663, 19, 95035, 100557, 66, 103388, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11] ... 
[25, 98503, 25, 98360, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102284, 22, 99695, 17, 99951, 16, 23, 101961, 101804, 21, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99241, 15, 13, 24, 101840, 100919, 18, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 20, 13, 21, 102340, 123786, 103319, 11, 9276, 1131, 99951, 21, 23, 99446, 102114, 64, 15, 69, 19, 100461, 102573, 64, 20, 65, 99200, 21, 65, 101729, 24, 24, 5918, 823, 20, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, \n[... 7399 bytes truncated to respect terminal scrollback settings ...]\n",,terminal_output +419,945441,"src/preview/inlineProvider.ts",1774,0,"",typescript,selection_command +420,945681,"src/preview/inlineProvider.ts",1708,0,"",typescript,selection_command +421,945713,"src/preview/inlineProvider.ts",1707,0,"",typescript,selection_command +422,945740,"TERMINAL",0,0,"[2026-01-05 16:00:32] Receive: obj=GenerateReqInput(validation_time=1.7195940017700195e-05, received_time=1767625232.4701402, received_time_perf=2480117.143065515, rid='5f7d06b339784f4a9cb6a02781501f4b', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 
11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101655, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100928, 24, 20, 100067, 99200, 22, 22, 100461, 22, 101961, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 22, 13, 100104, 20, 120547, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 16, 13, 24, 101840, 118901, 101840, 22, 11, 9276, 1131, 100919, 68, 101562, 19, 98729, 88444, 20, 101723, 93437, 69, 21, 101804, 65, 100919, 24, 8315, 18, 69, 18, 65, 17, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 
11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220] ... [220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99146, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 103388, 22, 99951, 101252, 22, 23, 101840, 101252, 16, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99241, 16, 13, 21, 101804, 20, 100702, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 21, 13, 100632, 23, 100461, 24, 100590, 24, 11, 9276, 1131, 100104, 17, 3065, 104340, 66, 101917, 16, 101135, 21, 20, 99243, 103878, 68, 20, 100067, 99366, 66, 24, 4645, 101474, 20, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 
11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, 
bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:00:32 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 594, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +423,945755,"src/preview/inlineProvider.ts",1697,0,"",typescript,selection_command +424,945786,"src/preview/inlineProvider.ts",1674,0,"",typescript,selection_command +425,945817,"src/preview/inlineProvider.ts",1625,0,"",typescript,selection_command +426,946047,"src/preview/inlineProvider.ts",1536,0,"",typescript,selection_command +427,946090,"TERMINAL",0,0,"[2026-01-05 16:00:32 TP0] Prefill batch, #new-seq: 1, #new-token: 6514, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n[2026-01-05 16:00:32] Receive: obj=GenerateReqInput(validation_time=1.385388895869255e-05, received_time=1767625232.816109, received_time_perf=2480117.489034523, rid='ab2ad1b2cafa419293aaa04e4d51bc3d', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 
9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102486, 16, 102626, 18, 110610, 120392, 20, 102088, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 101294, 19, 15, 99064, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 100539, 21, 101804, 21, 109803, 11, 9276, 1131, 100539, 3065, 22, 66, 101294, 23, 101723, 66, 19, 98729, 19, 62153, 101294, 66, 100104, 22, 18, 64, 23, 67, 103825, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916] ... 
[18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 98729, 102573, 19, 110228, 22, 100096, 15, 98729, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 17, 13, 19, 100096, 108157, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98965, 22, 13, 121498, 100539, 20, 20, 99082, 11, 9276, 1131, 20, 69, 22, 67, 100539, 65, 100702, 24, 100928, 19, 69, 19, 64, 24, 7221, 21, 64, 15, 99951, 23, 99082, 100286, 69, 19, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 
220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:00:32] Receive: obj=GenerateReqInput(validation_time=1.3438984751701355e-05, received_time=1767625232.8457003, received_time_perf=2480117.518625439, rid='8527d8b208794cf5af6b29df079a43e9', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 
198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102486, 16, 102626, 18, 110610, 120392, 20, 102088, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 101294, 19, 15, 99064, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 100539, 21, 101804, 21, 109803, 11, 9276, 1131, 100539, 3065, 22, 66, 101294, 23, 101723, 66, 19, 98729, 19, 62153, 101294, 66, 100104, 22, 18, 64, 23, 67, 103825, 
65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916] ... 
[18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 98729, 102573, 19, 110228, 22, 100096, 15, 98729, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 17, 13, 19, 100096, 108157, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98965, 22, 13, 121498, 100539, 20, 20, 99082, 11, 9276, 1131, 20, 69, 22, 67, 100539, 65, 100702, 24, 100928, 19, 69, 19, 64, 24, 7221, 21, 64, 15, 99951, 23, 99082, 100286, 69, 19, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 
220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +428,946210,"src/preview/inlineProvider.ts",1478,0,"",typescript,selection_command +429,946301,"TERMINAL",0,0,"[2026-01-05 16:00:33 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.03, #running-req: 1, #queue-req: 1, \r\n",,terminal_output +430,946374,"TERMINAL",0,0,"[2026-01-05 16:00:33] Receive: obj=GenerateReqInput(validation_time=4.370696842670441e-05, received_time=1767625233.147987, received_time_perf=2480117.820912243, rid='553e9398d337491286d87ab1460af677', 
http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 28964, 15, 60, 49872, 7162, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 5839, 25, 220, 15, 11, 
3950, 10426, 25, 220, 15, 13, 98503, 11, 4193, 2422, 25, 220, 18, 13, 99619, 11, 4193, 4379, 25, 220, 15, 13, 104340, 11, 24335, 4771, 25, 3007, 11, 4081, 63308, 320, 5839, 2687, 1648, 220, 16, 13, 101562, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 35357, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102486, 16, 102626, 18, 110610, 120392, 20, 102088, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 101294, 19, 15, 99064, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 100539, 21, 101804, 21, 109803, 11, 9276, 1131, 100539, 3065, 22, 66, 101294, 23, 101723, 66, 19, 98729, 19, 62153, 101294, 66, 100104, 22, 18, 64, 23, 67, 103825, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 428, 151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 19, 99618, 127031, 100614, 23, 21, 98668, 23, 100928, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 23, 99243, 21, 99869, 20, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 101474, 16, 101130, 18, 21, 99367, 11, 9276, 1131, 101655, 16, 23, 65, 109641, 100809, 23, 68, 19, 65, 19, 65, 23, 8315, 22, 8315, 101140, 22, 68, 5305, 100002, 18, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235] ... 
[11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101175, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 101411, 16, 19, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100919, 20, 100919, 23, 23, 101804, 102807, 24, 122300, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 17, 13, 23, 99317, 110610, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98965, 22, 13, 100933, 24, 100441, 19, 20, 99619, 11, 9276, 1131, 370, 17, 329, 16, 65, 17, 924, 3632, 19, 98729, 100104, 18, 32497, 100590, 68, 19, 67, 102624, 8901, 18, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 
11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +431,946472,"TERMINAL",0,0,"[2026-01-05 16:00:33] Receive: obj=GenerateReqInput(validation_time=1.2704171240329742e-05, received_time=1767625233.23923, received_time_perf=2480117.912155005, rid='d48c6a08c5134a2bb962b15023a14bab', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 
382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 28964, 15, 60, 49872, 7162, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 5839, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 98503, 11, 4193, 2422, 25, 220, 18, 13, 99619, 11, 4193, 4379, 25, 220, 15, 13, 104340, 11, 24335, 4771, 25, 3007, 11, 4081, 63308, 320, 5839, 2687, 1648, 220, 16, 13, 101562, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 
99082, 25, 101917, 25, 101474, 60, 35357, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102486, 16, 102626, 18, 110610, 120392, 20, 102088, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 101294, 19, 15, 99064, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 100539, 21, 101804, 21, 109803, 11, 9276, 1131, 100539, 3065, 22, 66, 101294, 23, 101723, 66, 19, 98729, 19, 62153, 101294, 66, 100104, 22, 18, 64, 23, 67, 103825, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 428, 151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 19, 99618, 127031, 100614, 23, 21, 98668, 23, 100928, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 23, 99243, 21, 99869, 20, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 101474, 16, 101130, 18, 21, 99367, 11, 9276, 1131, 101655, 16, 23, 65, 109641, 100809, 23, 68, 19, 65, 19, 65, 23, 8315, 22, 8315, 101140, 22, 68, 5305, 100002, 18, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235] ... 
[99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101175, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 101411, 16, 19, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100919, 20, 100919, 23, 23, 101804, 102807, 24, 122300, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 17, 13, 23, 99317, 110610, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98965, 22, 13, 100933, 24, 100441, 19, 20, 99619, 11, 9276, 1131, 370, 17, 329, 16, 65, 17, 924, 3632, 19, 98729, 100104, 18, 32497, 100590, 68, 19, 67, 102624, 8901, 18, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 
220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 100702, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 20, 102487, 11, 3950, 10426, 25, 220, 15, 13, 100441, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 16, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +432,946580,"TERMINAL",0,0,"[2026-01-05 16:00:33 TP0] Prefill batch, #new-seq: 2, #new-token: 8192, #cached-token: 8787, token usage: 0.05, #running-req: 1, #queue-req: 2, \r\n",,terminal_output +433,946941,"TERMINAL",0,0,"[2026-01-05 16:00:33 TP0] Prefill batch, #new-seq: 1, #new-token: 4841, #cached-token: 0, token usage: 0.07, #running-req: 2, #queue-req: 2, \r\n",,terminal_output +434,947155,"TERMINAL",0,0,"[2026-01-05 16:00:33 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 597, token usage: 0.07, 
#running-req: 3, #queue-req: 1, \r\n",,terminal_output +435,947429,"TERMINAL",0,0,"[2026-01-05 16:00:34 TP0] Prefill batch, #new-seq: 2, #new-token: 8192, #cached-token: 8788, token usage: 0.08, #running-req: 3, #queue-req: 0, \r\n",,terminal_output +436,947788,"TERMINAL",0,0,"[2026-01-05 16:00:34 TP0] Prefill batch, #new-seq: 1, #new-token: 4851, #cached-token: 0, token usage: 0.10, #running-req: 4, #queue-req: 0, \r\n",,terminal_output +437,948396,"TERMINAL",0,0,"[2026-01-05 16:00:35] Finish: obj=GenerateReqInput(validation_time=1.7195940017700195e-05, received_time=1767625232.4701402, received_time_perf=2480117.143065515, rid='5f7d06b339784f4a9cb6a02781501f4b', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=1.8737278878688812e-05, received_time=1767625221.6955338, received_time_perf=2480106.368459049, rid='292eb81c56134651887e59013c9ba495', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101655, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100928, 24, 20, 100067, 
99200, 22, 22, 100461, 22, 101961, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 22, 13, 100104, 20, 120547, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 16, 13, 24, 101840, 118901, 101840, 22, 11, 9276, 1131, 100919, 68, 101562, 19, 98729, 88444, 20, 101723, 93437, 69, 21, 101804, 65, 100919, 24, 8315, 18, 69, 18, 65, 17, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220] ... 
[220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99146, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 103388, 22, 99951, 101252, 22, 23, 101840, 101252, 16, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99241, 16, 13, 21, 101804, 20, 100702, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 21, 13, 100632, 23, 100461, 24, 100590, 24, 11, 9276, 1131, 100104, 17, 3065, 104340, 66, 101917, 16, 101135, 21, 20, 99243, 103878, 68, 20, 100067, 99366, 66, 24, 4645, 101474, 20, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 
220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '1,10p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98668, 79, 1248, 73022, 151336], 'meta_info': {'id': '5f7d06b339784f4a9cb6a02781501f4b', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 15300, 'weight_version': 
'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.00019536493346095085, 73022, '```'), (0.0, 45937, 'bash'), (-1.311301275563892e-06, 198, '\n'), (-0.27947530150413513, 4616, 'cat'), (-0.03312256187200546, 481, ' -'), (-3.4570634852570947e-06, 77, 'n'), (-1.2397689715726301e-05, 608, ' /'), (-1.0728830375228426e-06, 5117, 'home'), (-2.3841855067985307e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.4662635294371285e-05, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-1.1920928244535389e-07, 79888, '-extension'), (-0.0014559156261384487, 13428, '/src'), (-0.0010387268848717213, 14, '/'), (-0.0068904641084373, 27082, 'preview'), (-0.00781324878334999, 14, '/'), (-0.08585668355226517, 5057, 'inline'), (-0.0006040894077159464, 5179, 'Provider'), (-7.986990567587782e-06, 21239, '.ts'), (-0.003953143022954464, 760, ' |'), (-0.0006859333370812237, 10918, ' sed'), (-1.1920928244535389e-07, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-2.3841855067985307e-07, 364, "" '""), (-0.4712097644805908, 16, '1'), (-8.177422569133341e-05, 11, ','), (-0.7844096422195435, 98668, '10'), (-2.3841855067985307e-07, 79, 'p'), (-4.768370445162873e-07, 1248, ""'\n""), (-0.00015841660206206143, 73022, '```'), (-3.6954811548639555e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 594, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 2.6941070556640625, 'response_sent_to_client_ts': 1767625235.1645339}}\r\n[2026-01-05 16:00:35] Finish: obj=GenerateReqInput(validation_time=1.385388895869255e-05, received_time=1767625232.816109, received_time_perf=2480117.489034523, rid='ab2ad1b2cafa419293aaa04e4d51bc3d', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""time=1.7195940017700195e-05, received_time=1767625232.4701402, received_time_perf=2480117.143065515, rid='5f7d06b339784f4a9cb6a02781501f4b', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198\n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 
518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102486, 16, 102626, 18, 110610, 120392, 20, 102088, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 101294, 19, 15, 99064, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 100539, 21, 101804, 21, 109803, 11, 9276, 1131, 100539, 3065, 22, 66, 101294, 23, 101723, 66, 19, 98729, 19, 62153, 101294, 66, 100104, 22, 18, 64, 23, 67, 103825, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916] ... 
[18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 98729, 102573, 19, 110228, 22, 100096, 15, 98729, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 17, 13, 19, 100096, 108157, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98965, 22, 13, 121498, 100539, 20, 20, 99082, 11, 9276, 1131, 20, 69, 22, 67, 100539, 65, 100702, 24, 100928, 19, 69, 19, 64, 24, 7221, 21, 64, 15, 99951, 23, 99082, 100286, 69, 19, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 
220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '1,20p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98360, 79, 1248, 73022, 151336], 'meta_info': {'id': 'ab2ad1b2cafa419293aaa04e4d51bc3d', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 15304, 'weight_version': 'default', 
'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.0006126672378741205, 73022, '```'), (0.0, 45937, 'bash'), (-2.145764938177308e-06, 198, '\n'), (-0.32063907384872437, 4616, 'cat'), (-0.027948152273893356, 481, ' -'), (-3.933898824470816e-06, 77, 'n'), (-1.645074735279195e-05, 608, ' /'), (-1.1920922133867862e-06, 5117, 'home'), (-1.1920928244535389e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (-1.1920928244535389e-07, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.7881233361549675e-05, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-1.1920928244535389e-07, 79888, '-extension'), (-0.0019983339589089155, 13428, '/src'), (-0.0007993363542482257, 14, '/'), (-0.0023820616770535707, 27082, 'preview'), (-0.011188386939466, 14, '/'), (-0.06134971231222153, 5057, 'inline'), (-0.00019965562387369573, 5179, 'Provider'), (-3.814689989667386e-06, 21239, '.ts'), (-0.00672517716884613, 760, ' |'), (-0.0008127961773425341, 10918, ' sed'), (-2.3841855067985307e-07, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-4.768370445162873e-07, 364, "" '""), (-0.372224360704422, 16, '1'), (-4.076874756719917e-05, 11, ','), (-1.136245846748352, 98360, '20'), (-1.1920928244535389e-07, 79, 'p'), (-3.576278118089249e-07, 1248, ""'\n""), (-0.00013255194062367082, 73022, '```'), (-3.6954811548639555e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 596, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 2.348245143890381, 'response_sent_to_client_ts': 1767625235.166092}}\r\n[2026-01-05 16:00:35] Finish: obj=GenerateReqInput(validation_time=1.2704171240329742e-05, received_time=1767625233.23923, received_time_perf=2480117.912155005, rid='d48c6a08c5134a2bb962b15023a14bab', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""_time=1767625232.816109, received_time_perf=2480117.489034523, rid='ab2ad1b2cafa419293aaa04e4d51bc3d', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79\n... [truncated]\n/nothink<|user|>\n\n[2026-01-05 16:00:33 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.03, #running-req: 1, #queue-req: 1,\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 
11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 28964, 15, 60, 49872, 7162, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 5839, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 98503, 11, 4193, 2422, 25, 220, 18, 13, 99619, 11, 4193, 4379, 25, 220, 15, 13, 104340, 11, 24335, 4771, 25, 3007, 11, 4081, 63308, 320, 5839, 2687, 1648, 220, 16, 13, 101562, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 35357, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102486, 16, 102626, 18, 110610, 120392, 20, 102088, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 101294, 19, 15, 99064, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 100539, 21, 101804, 21, 109803, 11, 9276, 1131, 100539, 3065, 22, 66, 101294, 23, 101723, 66, 19, 98729, 19, 62153, 101294, 66, 100104, 22, 18, 64, 23, 67, 103825, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 428, 151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 19, 99618, 127031, 100614, 23, 21, 98668, 23, 100928, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 23, 99243, 21, 99869, 20, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 101474, 16, 101130, 18, 21, 99367, 11, 9276, 1131, 101655, 16, 23, 65, 109641, 100809, 23, 68, 19, 65, 19, 65, 23, 8315, 22, 8315, 101140, 22, 68, 5305, 100002, 18, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235] ... 
[99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101175, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931,\n[... 24145 bytes truncated to respect terminal scrollback settings ...]\n",,terminal_output +438,948638,"TERMINAL",0,0,"[2026-01-05 16:00:35 TP0] Decode batch, #running-req: 1, #token: 15387, token usage: 0.03, accept len: 3.28, accept rate: 0.82, cuda graph: True, gen throughput (token/s): 26.98, #queue-req: 0, \r\n",,terminal_output +439,948799,"TERMINAL",0,0,"[2026-01-05 16:00:35] Finish: obj=GenerateReqInput(validation_time=1.3438984751701355e-05, received_time=1767625232.8457003, received_time_perf=2480117.518625439, rid='8527d8b208794cf5af6b29df079a43e9', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""time=1.7195940017700195e-05, received_time=1767625232.4701402, received_time_perf=2480117.143065515, rid='5f7d06b339784f4a9cb6a02781501f4b', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198\n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 
518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102486, 16, 102626, 18, 110610, 120392, 20, 102088, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 101294, 19, 15, 99064, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 100539, 21, 101804, 21, 109803, 11, 9276, 1131, 100539, 3065, 22, 66, 101294, 23, 101723, 66, 19, 98729, 19, 62153, 101294, 66, 100104, 22, 18, 64, 23, 67, 103825, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916] ... 
[18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 98729, 102573, 19, 110228, 22, 100096, 15, 98729, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 17, 13, 19, 100096, 108157, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98965, 22, 13, 121498, 100539, 20, 20, 99082, 11, 9276, 1131, 20, 69, 22, 67, 100539, 65, 100702, 24, 100928, 19, 69, 19, 64, 24, 7221, 21, 64, 15, 99951, 23, 99082, 100286, 69, 19, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 
220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': '\n```bash\nsed -i \'69,72c\\\n this.action.text,\\\n new vscode.Range(insertPos, insertPos),\\\n { command: \'""\'""\'crowdPilot.acceptInlineCompletion\'""\'""\', tooltip: \'""\'""\'Accept Crowd Pilot suggestion\'""\'""\' }\\\n );\' /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts && cat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n \'68,78p\'\n```', 'output_ids': [198, 73022, 45937, 198, 31193, 481, 72, 364, 103093, 11, 102650, 66, 5661, 310, 419, 12389, 2788, 
42626, 310, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 701, 5661, 310, 314, 3210, 25, 7127, 14901, 6, 51465, 67, 47, 23891, 28462, 25246, 33030, 14901, 14901, 516, 25373, 25, 7127, 14901, 6, 16629, 97156, 44241, 23710, 14901, 14901, 6, 335, 5661, 286, 6903, 6, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 1009, 8250, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101840, 11, 100928, 79, 1248, 73022, 151336], 'meta_info': {'id': '8527d8b208794cf5af6b29df079a43e9', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 15304, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.0007381098694168031, 73022, '```'), (0.0, 45937, 'bash'), (-1.311301275563892e-06, 198, '\n'), (-1.9252554178237915, 31193, 'sed'), (0.0, 481, ' -'), (-0.0005529781919904053, 72, 'i'), (-0.00327313132584095, 364, "" '""), (-0.9325723052024841, 103093, '69'), (-0.037201136350631714, 11, ','), (-0.6611447334289551, 102650, '72'), (-0.0028928357642143965, 66, 'c'), (-5.495397272170521e-05, 5661, '\\\n'), (-0.2532067596912384, 310, ' '), (-0.042490940541028976, 419, ' this'), (-4.6491513785440475e-06, 12389, '.action'), (-0.00020632999076042324, 2788, '.text'), (-0.2505936324596405, 42626, ',\\\n'), (-8.594620157964528e-05, 310, ' '), (-0.004827036056667566, 501, ' new'), (-2.622600959512056e-06, 55008, ' vscode'), (-6.437280717364047e-06, 24707, '.Range'), (-0.008674204349517822, 60163, '(insert'), (-4.768360213347478e-06, 4859, 'Pos'), (-0.0002598424907773733, 11, ','), (-0.00014578233822248876, 5656, ' insert'), (-6.198863957251888e-06, 4859, 'Pos'), (-0.29597020149230957, 701, '),'), (-6.580135959666222e-05, 5661, '\\\n'), (-0.0011411352315917611, 310, ' '), (-0.03792630508542061, 314, ' {'), (-0.6985958814620972, 3210, ' command'), (-0.00019798702851403505, 25, ':'), (-0.0024584089405834675, 7127, ' \'""'), (-7.152555099310121e-07, 14901, '\'""'), (-2.634490556374658e-05, 6, ""'""), (-0.008285789750516415, 51465, 'crow'), (-0.0023053516633808613, 67, 'd'), (-0.3433332145214081, 47, 'P'), (0.0, 23891, 'ilot'), (-0.3419041037559509, 28462, '.accept'), (-0.08965684473514557, 25246, 'Inline'), (-0.12380354106426239, 33030, 'Completion'), (-4.410734163684538e-06, 14901, '\'""'), (-5.328513361746445e-05, 14901, '\'""'), (-0.019503021612763405, 516, ""',""), (-0.5763992667198181, 25373, ' tooltip'), (0.0, 25, ':'), (-0.012554542161524296, 7127, ' \'""'), (0.0, 14901, '\'""'), (-4.529942543740617e-06, 6, ""'""), (-0.0029743739869445562, 16629, 'Accept'), (-0.4379281997680664, 97156, ' Crowd'), (-0.006219200324267149, 44241, ' Pilot'), (-1.0837546586990356, 23710, ' suggestion'), (-0.2527732849121094, 14901, '\'""'), (-8.22540732769994e-06, 14901, '\'""'), (-0.0005759726045653224, 6, ""'""), (-0.0009369035833515227, 335, ' }'), (-0.0016215997748076916, 5661, '\\\n'), (-0.002866922877728939, 286, ' '), (-6.758938252460212e-05, 6903, ' );'), (-0.10277672857046127, 6, ""'""), (-2.8967437174287625e-05, 608, ' /'), (0.0, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (0.0, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (0.0, 13428, '/src'), (0.0, 14, '/'), (0.0, 27082, 'preview'), (0.0, 
14, '/'), (0.0, 5057, 'inline'), (0.0, 5179, 'Provider'), (0.0, 21239, '.ts'), (-4.6132929128361866e-05, 1009, ' &&'), (-2.622600959512056e-06, 8250, ' cat'), (-1.1920928244535389e-07, 481, ' -'), (0.0, 77, 'n'), (0.0, 608, ' /'), (0.0, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.1920928244535389e-07, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-1.1920928244535389e-07, 13428, '/src'), (0.0, 14, '/'), (0.0, 27082, 'preview'), (-1.1920928244535389e-07, 14, '/'), (0.0, 5057, 'inline'), (0.0, 5179, 'Provider'), (0.0, 21239, '.ts'), (0.0, 760, ' |'), (0.0, 10918, ' sed'), (-3.099436753473128e-06, 481, ' -'), (0.0, 77, 'n'), (0.0, 364, "" '""), (-0.5811353325843811, 101840, '68'), (0.0, 11, ','), (-0.2839989960193634, 100928, '78'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-2.658331868587993e-05, 73022, '```'), (-4.410734163684538e-06, 151336, '<|user|>')], 'completion_tokens': 121, 'cached_tokens': 8787, 'spec_accept_rate': 0.675, 'spec_accept_length': 3.025, 'spec_verify_ct': 40, 'spec_accept_token_num': 81, 'spec_draft_token_num': 120, 'e2e_latency': 2.736922264099121, 'response_sent_to_client_ts': 1767625235.582678}}\r\n",,terminal_output +440,950114,"src/preview/inlineProvider.ts",1477,0,"",typescript,selection_command +441,950360,"src/preview/inlineProvider.ts",1467,0,"",typescript,selection_command +442,950395,"src/preview/inlineProvider.ts",1444,0,"",typescript,selection_command +443,950396,"TERMINAL",0,0,"[2026-01-05 16:00:37] Receive: obj=GenerateReqInput(validation_time=1.8438324332237244e-05, received_time=1767625237.1440525, received_time_perf=2480121.81697772, rid='392b9293504447438df0417d897658b7', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 
510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 19, 99618, 127031, 100614, 23, 21, 98668, 23, 100928, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 23, 99243, 21, 99869, 20, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 101474, 16, 101130, 18, 21, 99367, 11, 9276, 1131, 101655, 16, 23, 65, 109641, 100809, 23, 68, 19, 65, 19, 65, 23, 8315, 22, 8315, 101140, 22, 68, 5305, 100002, 18, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 
220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104] ... [11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 100702, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 20, 102487, 11, 3950, 10426, 25, 220, 15, 13, 100441, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 16, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 100702, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 19, 13, 124260, 21, 102487, 23, 101961, 21, 100096, 101723, 16, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 18, 13, 122463, 101663, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98965, 22, 13, 23, 98360, 24, 98886, 99590, 18, 11, 9276, 1131, 101130, 18, 68, 24, 101294, 23, 67, 100702, 22, 101474, 16, 99869, 21, 67, 103878, 370, 121975, 15, 2577, 21, 102114, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 
99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:00:37 TP0] Prefill batch, #new-seq: 1, 
#new-token: 8192, #cached-token: 610, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +444,950422,"src/preview/inlineProvider.ts",1399,0,"",typescript,selection_command +445,950456,"src/preview/inlineProvider.ts",1307,0,"",typescript,selection_command +446,950489,"src/preview/inlineProvider.ts",1267,0,"",typescript,selection_command +447,950524,"src/preview/inlineProvider.ts",1218,0,"",typescript,selection_command +448,950551,"src/preview/inlineProvider.ts",1183,0,"",typescript,selection_command +449,950585,"src/preview/inlineProvider.ts",1144,0,"",typescript,selection_command +450,950629,"src/preview/inlineProvider.ts",1110,0,"",typescript,selection_command +451,950656,"src/preview/inlineProvider.ts",1102,0,"",typescript,selection_command +452,950680,"TERMINAL",0,0,"[2026-01-05 16:00:37 TP0] Prefill batch, #new-seq: 1, #new-token: 6908, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +453,950683,"src/preview/inlineProvider.ts",1062,0,"",typescript,selection_command +454,950720,"src/preview/inlineProvider.ts",1054,0,"",typescript,selection_command +455,950759,"src/preview/inlineProvider.ts",1053,0,"",typescript,selection_command +456,950989,"src/preview/inlineProvider.ts",1054,0,"",typescript,selection_command +457,951029,"TERMINAL",0,0,"[2026-01-05 16:00:37] Receive: obj=GenerateReqInput(validation_time=1.424318179488182e-05, received_time=1767625237.751304, received_time_perf=2480122.424229079, rid='04dc6ccad16a4931a8572d40933f50a2', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 
66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 102573, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100372, 17, 100614, 24, 20, 102487, 99916, 16, 126189, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 99200, 16, 24, 13, 98729, 18, 101729, 20, 24, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 100067, 18, 13, 23, 101478, 20, 98965, 100919, 11, 9276, 1131, 66, 99200, 64, 102636, 291, 100104, 65, 99590, 69, 16, 67, 23, 102088, 20, 2127, 22, 542, 101474, 69, 102573, 346, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 
220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220] ... [715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101140, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 102088, 23, 101175, 19, 100702, 99241, 18, 22, 99590, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 22, 13, 118836, 100002, 17, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98886, 16, 13, 23, 99317, 24, 102114, 102650, 11, 9276, 1131, 101294, 17, 65, 24, 100104, 108642, 101723, 19, 22, 102088, 23, 2940, 100590, 16, 22, 67, 23, 103205, 101411, 23, 65, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 
98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101294, 11, 102573, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101140, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 21, 100067, 23, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:00:37 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.03, #running-req: 1, #queue-req: 0, \r\n[2026-01-05 16:00:37] Receive: 
obj=GenerateReqInput(validation_time=1.3008713722229004e-05, received_time=1767625237.789524, received_time_perf=2480122.462449077, rid='60d05e389bf44bf797fde814ffb603d4', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 
115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 102573, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100372, 17, 100614, 24, 20, 102487, 99916, 16, 126189, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 99200, 16, 24, 13, 98729, 18, 101729, 20, 24, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 100067, 18, 13, 23, 101478, 20, 98965, 100919, 11, 9276, 1131, 66, 99200, 64, 102636, 291, 100104, 65, 99590, 69, 16, 67, 23, 102088, 20, 2127, 22, 542, 101474, 69, 102573, 346, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220] ... 
[715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101140, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 102088, 23, 101175, 19, 100702, 99241, 18, 22, 99590, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 22, 13, 118836, 100002, 17, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98886, 16, 13, 23, 99317, 24, 102114, 102650, 11, 9276, 1131, 101294, 17, 65, 24, 100104, 108642, 101723, 19, 22, 102088, 23, 2940, 100590, 16, 22, 67, 23, 103205, 101411, 23, 65, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 
98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101294, 11, 102573, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101140, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 21, 100067, 23, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +458,951219,"src/preview/inlineProvider.ts",1062,0,"",typescript,selection_command +459,951317,"TERMINAL",0,0,"[2026-01-05 16:00:38 TP0] Prefill batch, #new-seq: 2, #new-token: 8192, #cached-token: 8787, token usage: 0.05, #running-req: 1, #queue-req: 0, \r\n[2026-01-05 16:00:38] Receive: obj=GenerateReqInput(validation_time=1.3587996363639832e-05, received_time=1767625238.090538, 
received_time_perf=2480122.763463114, rid='36213c184d7f43d58e53167717ca3c15', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 102573, 60, 37228, 25, 2839, 28, 
30989, 27145, 2505, 78775, 3009, 28, 16, 13, 99698, 21, 100809, 20, 102486, 24, 103306, 100702, 19, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 99200, 16, 24, 13, 101474, 18, 22, 101562, 20, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 100067, 19, 13, 122569, 102952, 16, 22, 100632, 11, 9276, 1131, 15, 99695, 21, 66, 100899, 64, 18, 65, 346, 19, 66, 99064, 65, 22, 67, 19, 66, 99916, 66, 24, 99241, 99869, 3065, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11] ... 
[220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101294, 11, 102573, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101140, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 21, 100067, 23, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101140, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 19, 99590, 18, 99243, 126612, 100933, 23, 126334, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 22, 13, 100899, 16, 120911, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98886, 17, 13, 19, 99590, 99241, 24, 100372, 24, 11, 9276, 1131, 100590, 7628, 21, 638, 329, 99317, 64, 101474, 18, 16, 64, 102284, 22, 17, 67, 99698, 24, 100702, 69, 99200, 64, 17, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 
100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +460,951632,"TERMINAL",0,0,"[2026-01-05 16:00:38] Receive: obj=GenerateReqInput(validation_time=1.238333061337471e-05, received_time=1767625238.390056, received_time_perf=2480123.062981132, rid='71658fd1fbc14ccea2d0bcad6b9a79d7', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 
15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 102573, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101140, 19, 102114, 16, 23, 99243, 20, 99243, 21, 100919, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 99200, 16, 24, 13, 102626, 17, 21, 101175, 21, 11, 3949, 3009, 
76167, 28, 99590, 22, 24, 100067, 19, 13, 19, 101411, 101130, 22, 22, 101562, 11, 9276, 1131, 65, 23, 101474, 103878, 64, 98360, 13225, 20, 101723, 102624, 64, 100759, 7628, 17, 69, 20, 101294, 55469, 103825, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11] ... 
[101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 100919, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 17, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 23, 100928, 22, 11, 3950, 10426, 25, 220, 15, 13, 100002, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 100919, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100235, 23, 22, 100809, 21, 100632, 100632, 18, 101663, 18, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 23, 13, 15, 100067, 20, 100919, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98886, 17, 13, 102269, 18, 101562, 18, 114365, 11, 9276, 1131, 100632, 17, 99366, 66, 126382, 67, 22, 69, 102088, 67, 101729, 68, 20, 100557, 21, 102114, 99419, 924, 18, 66, 99082, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 
18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:00:38 TP0] Prefill batch, #new-seq: 1, #new-token: 5811, #cached-token: 0, token usage: 0.07, #running-req: 2, #queue-req: 2, \r\n",,terminal_output +461,951893,"TERMINAL",0,0,"[2026-01-05 16:00:38 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 608, token usage: 0.07, #running-req: 3, #queue-req: 1, \r\n",,terminal_output +462,952181,"TERMINAL",0,0,"[2026-01-05 16:00:38 TP0] Prefill batch, #new-seq: 1, #new-token: 6988, #cached-token: 0, token usage: 0.09, #running-req: 3, #queue-req: 1, 
\r\n",,terminal_output +463,952468,"TERMINAL",0,0,"[2026-01-05 16:00:39 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 610, token usage: 0.10, #running-req: 4, #queue-req: 0, \r\n",,terminal_output +464,952756,"TERMINAL",0,0,"[2026-01-05 16:00:39 TP0] Prefill batch, #new-seq: 1, #new-token: 6949, #cached-token: 0, token usage: 0.12, #running-req: 4, #queue-req: 0, \r\n",,terminal_output +465,953367,"TERMINAL",0,0,"[2026-01-05 16:00:40] Finish: obj=GenerateReqInput(validation_time=1.8438324332237244e-05, received_time=1767625237.1440525, received_time_perf=2480121.81697772, rid='392b9293504447438df0417d897658b7', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=4.370696842670441e-05, received_time=1767625233.147987, received_time_perf=2480117.820912243, rid='553e9398d337491286d87ab1460af677', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, \n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 101474, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 19, 99618, 127031, 100614, 
23, 21, 98668, 23, 100928, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 100840, 24, 13, 23, 99243, 21, 99869, 20, 11, 3949, 3009, 76167, 28, 99590, 22, 101663, 24, 19, 13, 101474, 16, 101130, 18, 21, 99367, 11, 9276, 1131, 101655, 16, 23, 65, 109641, 100809, 23, 68, 19, 65, 19, 65, 23, 8315, 22, 8315, 101140, 22, 68, 5305, 100002, 18, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104] ... 
[11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 100702, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 20, 102487, 11, 3950, 10426, 25, 220, 15, 13, 100441, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 16, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 100702, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 19, 13, 124260, 21, 102487, 23, 101961, 21, 100096, 101723, 16, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 18, 13, 122463, 101663, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98965, 22, 13, 23, 98360, 24, 98886, 99590, 18, 11, 9276, 1131, 101130, 18, 68, 24, 101294, 23, 67, 100702, 22, 101474, 16, 99869, 21, 67, 103878, 370, 121975, 15, 2577, 21, 102114, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 
119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '1,10p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98668, 79, 1248, 73022, 151336], 'meta_info': {'id': '392b9293504447438df0417d897658b7', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 15710, 'weight_version': 
'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-1.3470558769768104e-05, 73022, '```'), (0.0, 45937, 'bash'), (-1.5497195136049413e-06, 198, '\n'), (-0.2871577739715576, 4616, 'cat'), (-0.023313229903578758, 481, ' -'), (-3.4570634852570947e-06, 77, 'n'), (-1.7404405298293568e-05, 608, ' /'), (-1.6689286894688848e-06, 5117, 'home'), (-3.576278118089249e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-3.6954811548639555e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0003817544784396887, 13428, '/src'), (-0.0005046047735959291, 14, '/'), (-0.003353568958118558, 27082, 'preview'), (-0.005003549624234438, 14, '/'), (-0.03135721758008003, 5057, 'inline'), (-0.00018106251081917435, 5179, 'Provider'), (-5.364403477869928e-06, 21239, '.ts'), (-0.001616839086636901, 760, ' |'), (-0.0004843492351938039, 10918, ' sed'), (-1.1920928244535389e-07, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (0.0, 364, "" '""), (-0.4061557948589325, 16, '1'), (-0.00011359999916749075, 11, ','), (-0.4346965253353119, 98668, '10'), (-2.3841855067985307e-07, 79, 'p'), (-2.3841855067985307e-07, 1248, ""'\n""), (-9.274052717955783e-05, 73022, '```'), (-6.318072337307967e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 610, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 2.9980294704437256, 'response_sent_to_client_ts': 1767625240.1421623}}\r\n",,terminal_output +466,953438,"TERMINAL",0,0,"[2026-01-05 16:00:40] Finish: obj=GenerateReqInput(validation_time=1.3008713722229004e-05, received_time=1767625237.789524, received_time_perf=2480122.462449077, rid='60d05e389bf44bf797fde814ffb603d4', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
"";\n 40\t }\n 41\t\n 42\t /**\n 43\t * Provide inline completion items.\n 44\t */\n 45\t provideInlineCompletionItems(\n 46\t document: vscode.TextDocument,\n 47\t position: vscode.Position,\n 48\t context: vscode.InlineCompletionContext,\n 49\t token: vscode.CancellationToken\n 50\t ): vscode.ProviderResult {\n 51\t if (!this.enabled || !this.action) {\n 52\t return [];\n 53\t }\n 54\t\n 55\t // Only handle pure insertions (not replacements)\n 56\t // Replacements are handled by decorations to properly show what's being deleted\n 57\t if (this.action.kind !== 'editInsert') {\n 58\t return [];\n 59\t }\n/nothink<|user|>\n\n[2026-01-05 16:00:37 TP0] Prefill batch, #new-seq: 1, #new-token: 6908, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0,\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 
50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 102573, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100372, 17, 100614, 24, 20, 102487, 99916, 16, 126189, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 99200, 16, 24, 13, 98729, 18, 101729, 20, 24, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 100067, 18, 13, 23, 101478, 20, 98965, 100919, 11, 9276, 1131, 66, 99200, 64, 102636, 291, 100104, 65, 99590, 69, 16, 67, 23, 102088, 20, 2127, 22, 542, 101474, 69, 102573, 346, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220] ... 
[715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101140, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 102088, 23, 101175, 19, 100702, 99241, 18, 22, 99590, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 22, 13, 118836, 100002, 17, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98886, 16, 13, 23, 99317, 24, 102114, 102650, 11, 9276, 1131, 101294, 17, 65, 24, 100104, 108642, 101723, 19, 22, 102088, 23, 2940, 100590, 16, 22, 67, 23, 103205, 101411, 23, 65, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 
98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101294, 11, 102573, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101140, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 21, 100067, 23, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '1,20p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98360, 79, 1248, 73022, 151336], 'meta_info': {'id': 
'60d05e389bf44bf797fde814ffb603d4', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 15789, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.0004618293314706534, 73022, '```'), (0.0, 45937, 'bash'), (-3.099436753473128e-06, 198, '\n'), (-0.36877408623695374, 4616, 'cat'), (-0.055904705077409744, 481, ' -'), (-3.814689989667386e-06, 77, 'n'), (-4.291525328881107e-06, 608, ' /'), (-9.536738616588991e-07, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (-1.1920928244535389e-07, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.7881377516459906e-06, 2899, '/c'), (-1.0728830375228426e-06, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0023888403084129095, 13428, '/src'), (-0.0003190723655279726, 14, '/'), (-0.008064565248787403, 27082, 'preview'), (-0.003324222518131137, 14, '/'), (-0.01986186020076275, 5057, 'inline'), (-3.0278701160568744e-05, 5179, 'Provider'), (-1.7881377516459906e-06, 21239, '.ts'), (-0.008034883998334408, 760, ' |'), (-0.0018831630004569888, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (-7.152555099310121e-07, 364, "" '""), (-0.4521541893482208, 16, '1'), (-6.317892984952778e-05, 11, ','), (-1.021613597869873, 98360, '20'), (-3.576278118089249e-07, 79, 'p'), (-5.960462772236497e-07, 1248, ""'\n""), (-0.000226472009671852, 73022, '```'), (-9.059865078597795e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 8787, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 2.3985447883605957, 'response_sent_to_client_ts': 1767625240.1882312}}\r\n[2026-01-05 16:00:40] Finish: obj=GenerateReqInput(validation_time=1.238333061337471e-05, received_time=1767625238.390056, received_time_perf=2480123.062981132, rid='71658fd1fbc14ccea2d0bcad6b9a79d7', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""n: 8192, #cached-token: 8787, token usage: 0.05, #running-req: 1, #queue-req: 0, \n[2026-01-05 16:00:38] Receive: obj=GenerateReqInput(validation_time=1.3587996363639832e-05, received_time=1767625238.090538, received_time_perf=2480122.763463114, rid='36213c184d7f43d58e53167717ca3c15', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689\n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 
5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 102573, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101140, 19, 102114, 16, 23, 99243, 20, 99243, 21, 100919, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 99200, 16, 24, 13, 102626, 17, 21, 101175, 21, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 100067, 19, 13, 19, 101411, 101130, 22, 22, 101562, 11, 9276, 1131, 65, 23, 101474, 103878, 64, 98360, 13225, 20, 101723, 102624, 64, 100759, 7628, 17, 69, 20, 101294, 55469, 103825, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11] ... 
[101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 100919, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 17, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 23, 100928, 22, 11, 3950, 10426, 25, 220, 15, 13, 100002, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 100919, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100235, 23, 22, 100809, 21, 100632, 100632, 18, 101663, 18, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 23, 13, 15, 100067, 20, 100919, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98886, 17, 13, 102269, 18, 101562, 18, 114365, 11, 9276, 1131, 100632, 17, 99366, 66, 126382, 67, 22, 69, 102088, 67, 101729, 68, 20, 100557, 21, 102114, 99419, 924, 18, 66, 99082, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 
18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '39,59p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101294, 11, 102573, 79, 1248, 73022, 151336], 'meta_info': {'id': '71658fd1fbc14ccea2d0bcad6b9a79d7', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 15751, 'weight_version': 
'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.00014959646796341985, 73022, '```'), (0.0, 45937, 'bash'), (-3.099436753473128e-06, 198, '\n'), (-0.3958318829536438, 4616, 'cat'), (-0.03947201743721962, 481, ' -'), (-5.125986263010418e-06, 77, 'n'), (-5.8412379075889476e-06, 608, ' /'), (-1.1920922133867862e-06, 5117, 'home'), (-1.1920928244535389e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (-1.1920928244535389e-07, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-4.172316494077677e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.003384815761819482, 13428, '/src'), (-0.0004707658663392067, 14, '/'), (-0.003991376142948866, 27082, 'preview'), (-0.00224374164827168, 14, '/'), (-0.014958946965634823, 5057, 'inline'), (-4.0411134250462055e-05, 5179, 'Provider'), (-7.152555099310121e-07, 21239, '.ts'), (-0.004716699477285147, 760, ' |'), (-0.001444011926651001, 10918, ' sed'), (0.0, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-3.576278118089249e-07, 364, "" '""), (-2.7058849334716797, 101294, '39'), (-3.218599158572033e-05, 11, ','), (-0.010497826151549816, 102573, '59'), (-1.1920928244535389e-07, 79, 'p'), (-3.3378546504536644e-06, 1248, ""'\n""), (-0.0009446449112147093, 73022, '```'), (-1.8596476365928538e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 610, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 1.79811429977417, 'response_sent_to_client_ts': 1767625240.1895442}}\r\n[2026-01-05 16:00:40] INFO: 10.86.2.252:50086 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n[2026-01-05 16:00:40] Finish: obj=GenerateReqInput(validation_time=1.424318179488182e-05, received_time=1767625237.751304, received_time_perf=2480122.424229079, rid='04dc6ccad16a4931a8572d40933f50a2', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
"";\n 40\t }\n 41\t\n 42\t /**\n 43\t * Provide inline completion items.\n 44\t */\n 45\t provideInlineCompletionItems(\n 46\t document: vscode.TextDocument,\n 47\t position: vscode.Position,\n 48\t context: vscode.InlineCompletionContext,\n 49\t token: vscode.CancellationToken\n 50\t ): vscode.ProviderResult {\n 51\t if (!this.enabled || !this.action) {\n 52\t return [];\n 53\t }\n 54\t\n 55\t // Only handle pure insertions (not replacements)\n 56\t // Replacements are handled by decorations to properly show what's being deleted\n 57\t if (this.action.kind !== 'editInsert') {\n 58\t return [];\n 59\t }\n/nothink<|user|>\n\n[2026-01-05 16:00:37 TP0] Prefill batch, #new-seq: 1, #new-token: 6908, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0,\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 
50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101917, 25, 102573, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100372, 17, 100614, 24, 20, 102487, 99916, 16, 126189, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 99200, 16, 24, 13, 98729, 18, 101729, 20, 24, 11, 3949, 3009, 76167, 28, 99590, 22, 24, 100067, 18, 13, 23, 101478, 20, 98965, 100919, 11, 9276, 1131, 66, 99200, 64, 102636, 291, 100104, 65, 99590, 69, 16, 67, 23, 102088, 20, 2127, 22, 542, 101474, 69, 102573, 346, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220] ... 
[715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101140, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 102088, 23, 101175, 19, 100702, 99241, 18, 22, 99590, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 22, 13, 118836, 100002, 17, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98886, 16, 13, 23, 99317, 24, 102114, 102650, 11, 9276, 1131, 101294, 17, 65, 24, 100104, 108642, 101723, 19, 22, 102088, 23, 2940, 100590, 16, 22, 67, 23, 103205, 101411, 23, 65, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18\n[... 7370 bytes truncated to respect terminal scrollback settings ...]\n",,terminal_output +467,959127,"src/preview/inlineProvider.ts",1102,0,"",typescript,selection_command +468,959366,"src/preview/inlineProvider.ts",1110,0,"",typescript,selection_command +469,959402,"src/preview/inlineProvider.ts",1144,0,"",typescript,selection_command +470,959402,"TERMINAL",0,0,"[2026-01-05 16:00:46] Receive: obj=GenerateReqInput(validation_time=2.604164183139801e-05, received_time=1767625246.1567805, received_time_perf=2480130.829705822, rid='37e61e0a8120487286ecaa8a601b11e1', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 
5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102486, 25, 100372, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101252, 20, 18, 99241, 102284, 18, 24, 99241, 23, 101723, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 99200, 17, 22, 13, 21, 101135, 15, 99200, 19, 11, 3949, 3009, 76167, 28, 99590, 22, 100809, 16, 17, 13, 124540, 24, 100899, 103093, 18, 11, 9276, 1131, 103205, 2940, 103825, 69, 100096, 69, 100614, 19, 16, 69, 17, 64, 18, 638, 17, 67, 15, 99200, 69, 15, 67, 12517, 17, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 
23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220] ... [11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 100919, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 99619, 23, 121577, 100539, 16, 100702, 22, 101655, 16, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 23, 13, 18, 100067, 100002, 21, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98886, 18, 13, 100539, 17, 101663, 16, 118843, 11, 9276, 1131, 22, 99317, 101729, 6902, 16, 69, 8901, 99367, 66, 72376, 17, 67, 15, 8901, 329, 21, 65, 24, 64, 102626, 67, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 
99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, 
bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:00:46 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 594, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +471,959427,"src/preview/inlineProvider.ts",1183,0,"",typescript,selection_command +472,959476,"src/preview/inlineProvider.ts",1218,0,"",typescript,selection_command +473,959505,"src/preview/inlineProvider.ts",1267,0,"",typescript,selection_command +474,959534,"src/preview/inlineProvider.ts",1307,0,"",typescript,selection_command +475,959566,"src/preview/inlineProvider.ts",1399,0,"",typescript,selection_command +476,959594,"src/preview/inlineProvider.ts",1444,0,"",typescript,selection_command +477,959637,"src/preview/inlineProvider.ts",1467,0,"",typescript,selection_command +478,959668,"src/preview/inlineProvider.ts",1477,0,"",typescript,selection_command +479,959758,"src/preview/inlineProvider.ts",1478,0,"",typescript,selection_command +480,959758,"TERMINAL",0,0,"[2026-01-05 16:00:46 TP0] Prefill batch, #new-seq: 1, #new-token: 6965, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +481,959758,"src/preview/inlineProvider.ts",1536,0,"",typescript,selection_command +482,959759,"src/preview/inlineProvider.ts",1625,0,"",typescript,selection_command +483,959797,"src/preview/inlineProvider.ts",1674,0,"",typescript,selection_command +484,960007,"src/preview/inlineProvider.ts",1625,0,"",typescript,selection_command +485,960115,"TERMINAL",0,0,"[2026-01-05 16:00:46] Receive: obj=GenerateReqInput(validation_time=1.5966128557920456e-05, received_time=1767625246.7885609, received_time_perf=2480131.461486293, rid='20454ff7c6444e82b8fe755cf9521107', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 
624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101729, 25, 99590, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100632, 24, 24, 100632, 101130, 20, 21, 99590, 98503, 23, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 98668, 19, 13, 20, 99951, 100441, 24, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 100809, 23, 24, 13, 99887, 24, 101411, 100461, 19, 11, 9276, 1131, 64, 100104, 68, 20, 64, 99698, 8937, 17, 68, 101723, 68, 20, 65, 19, 65, 100899, 22, 67, 102807, 24, 16, 64, 100928, 2577, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 
100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498] ... [11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101562, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 99618, 19, 126293, 124380, 117933, 99695, 16, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99590, 21, 13, 119953, 22, 99695, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 15, 13, 23, 100104, 22, 100002, 23, 99241, 11, 9276, 1131, 101140, 68, 103595, 68, 15, 64, 23, 98886, 100590, 23, 22, 99869, 21, 757, 5305, 23, 64, 99618, 16, 65, 98965, 68, 16, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 
100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101562, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 21, 102487, 20, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, 
return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:00:46 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 594, token usage: 0.03, #running-req: 1, #queue-req: 0, \r\n[2026-01-05 16:00:46] Receive: obj=GenerateReqInput(validation_time=1.3147015124559402e-05, received_time=1767625246.8264365, received_time_perf=2480131.499361709, rid='92304377b23e4060a5c58aa03bd359a8', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 
279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101729, 25, 99590, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100632, 24, 24, 100632, 101130, 20, 21, 99590, 98503, 23, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 98668, 19, 13, 20, 99951, 100441, 24, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 100809, 23, 24, 13, 99887, 24, 101411, 100461, 19, 11, 9276, 1131, 64, 100104, 68, 20, 64, 99698, 8937, 17, 68, 101723, 68, 20, 65, 19, 65, 100899, 22, 67, 102807, 24, 16, 64, 100928, 2577, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498] ... 
[11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101562, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 99618, 19, 126293, 124380, 117933, 99695, 16, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99590, 21, 13, 119953, 22, 99695, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 15, 13, 23, 100104, 22, 100002, 23, 99241, 11, 9276, 1131, 101140, 68, 103595, 68, 15, 64, 23, 98886, 100590, 23, 22, 99869, 21, 757, 5305, 23, 64, 99618, 16, 65, 98965, 68, 16, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 
220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101562, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 21, 102487, 20, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +486,960158,"src/preview/inlineProvider.ts",1536,0,"",typescript,selection_command +487,960404,"src/preview/inlineProvider.ts",1478,0,"",typescript,selection_command +488,960404,"TERMINAL",0,0,"[2026-01-05 16:00:47 TP0] Prefill batch, #new-seq: 2, #new-token: 8192, #cached-token: 8785, token usage: 0.05, #running-req: 1, #queue-req: 0, \r\n[2026-01-05 16:00:47] Receive: obj=GenerateReqInput(validation_time=1.3404060155153275e-05, received_time=1767625247.1256254, received_time_perf=2480131.798550485, 
rid='9aba3411c8e84717bf65680e029117b1', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 31193, 481, 72, 364, 101130, 11, 104160, 66, 5661, 286, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 62115, 
286, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 14901, 14901, 6, 82, 1660, 11057, 5661, 286, 421, 320, 574, 12389, 36442, 4376, 7127, 14901, 6, 3587, 13771, 14901, 14901, 863, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 5661, 286, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 61534, 286, 3044, 286, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 5661, 286, 442, 30530, 6119, 14901, 14901, 6, 82, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 5661, 286, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 286, 3044, 286, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 7, 5661, 310, 419, 12389, 2788, 42626, 310, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 62115, 286, 6903, 5661, 286, 3044, 286, 470, 508, 1203, 5265, 6, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 1009, 8250, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 102856, 11, 103498, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 262, 220, 103437, 13056, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102650, 7472, 1439, 262, 220, 103388, 88166, 262, 220, 103498, 7472, 470, 508, 1203] ... 
[220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101562, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 21, 102487, 20, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101562, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 20, 102487, 21, 98886, 23, 101130, 102626, 17, 15, 100461, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99590, 21, 13, 100928, 23, 20, 99618, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 16, 13, 101562, 16, 100933, 21, 100104, 18, 11, 9276, 1131, 119621, 102856, 542, 22, 66, 21, 101723, 19, 68, 104160, 65, 23, 1859, 100899, 20, 9787, 101804, 17, 104550, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 
24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +489,960450,"TERMINAL",0,0,"[2026-01-05 16:00:47] Receive: obj=GenerateReqInput(validation_time=1.3515818864107132e-05, received_time=1767625247.186492, received_time_perf=2480131.859417047, rid='c51d3f08ba054d2ba1c4466624cfc589', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 
10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 31193, 481, 72, 364, 101130, 11, 104160, 66, 5661, 286, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 62115, 286, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 14901, 14901, 6, 82, 1660, 11057, 5661, 286, 421, 320, 574, 12389, 36442, 4376, 7127, 14901, 6, 3587, 13771, 14901, 14901, 863, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 5661, 286, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 
61534, 286, 3044, 286, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 5661, 286, 442, 30530, 6119, 14901, 14901, 6, 82, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 5661, 286, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 286, 3044, 286, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 7, 5661, 310, 419, 12389, 2788, 42626, 310, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 62115, 286, 6903, 5661, 286, 3044, 286, 470, 508, 1203, 5265, 6, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 1009, 8250, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 102856, 11, 103498, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 262, 220, 103437, 13056, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102650, 7472, 1439, 262, 220, 103388, 88166, 262, 220, 103498, 7472, 470, 508, 1203] ... 
[220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101562, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 21, 102487, 20, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101562, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 20, 102487, 21, 98886, 23, 101130, 102626, 17, 15, 100461, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99590, 21, 13, 100928, 23, 20, 99618, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 16, 13, 101562, 16, 100933, 21, 100104, 18, 11, 9276, 1131, 119621, 102856, 542, 22, 66, 21, 101723, 19, 68, 104160, 65, 23, 1859, 100899, 20, 9787, 101804, 17, 104550, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 
24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +490,960698,"TERMINAL",0,0,"[2026-01-05 16:00:47 TP0] Prefill batch, #new-seq: 1, #new-token: 5887, #cached-token: 0, token usage: 0.07, #running-req: 2, #queue-req: 2, \r\n",,terminal_output +491,960787,"TERMINAL",0,0,"[2026-01-05 16:00:47] Receive: obj=GenerateReqInput(validation_time=1.3458076864480972e-05, received_time=1767625247.486658, received_time_perf=2480132.159583331, rid='aa3a4f91c33b4e629696b0ee09464308', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 
17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 
4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 262, 220, 103437, 13056, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102650, 7472, 1439, 262, 220, 103388, 88166, 262, 220, 103498, 7472, 470, 508, 1203, 935, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102573, 25, 98965, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 112098, 21, 99695, 101294, 20, 100632, 19, 102269, 16, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99082, 16, 13, 20, 100104, 22, 100557, 11, 3949, 3009, 76167, 28, 99590, 23, 98503, 18, 21, 13, 115937, 101411, 21, 101729, 11, 9276, 1131, 23, 66, 22, 580, 103502, 19, 22, 12502, 104029, 3065, 370, 15, 68, 115547, 17, 66, 20, 823, 99317, 69, 98668, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18] ... 
[102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101655, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 17, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 23, 100928, 20, 11, 3950, 10426, 25, 220, 15, 13, 100002, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101655, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 121910, 99698, 21, 15, 99082, 20, 99082, 18, 99951, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99590, 22, 13, 109641, 21, 99446, 19, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 16, 13, 22, 101663, 117239, 100933, 20, 11, 9276, 1131, 24, 11998, 101135, 16, 16, 66, 23, 68, 23, 101655, 99419, 13225, 101411, 21, 99695, 68, 15, 100104, 114959, 65, 16, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 
17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +492,960995,"TERMINAL",0,0,"[2026-01-05 16:00:47 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 587, token usage: 0.07, #running-req: 3, #queue-req: 2, \r\n",,terminal_output +493,961210,"TERMINAL",0,0,"[2026-01-05 16:00:47 TP0] Prefill batch, #new-seq: 2, #new-token: 8192, #cached-token: 8778, token usage: 0.09, #running-req: 3, #queue-req: 1, \r\n",,terminal_output +494,961601,"TERMINAL",0,0,"[2026-01-05 16:00:48 TP0] Prefill batch, #new-seq: 1, #new-token: 5895, #cached-token: 0, token 
usage: 0.10, #running-req: 4, #queue-req: 1, \r\n",,terminal_output +495,961829,"TERMINAL",0,0,"[2026-01-05 16:00:48 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 584, token usage: 0.10, #running-req: 5, #queue-req: 0, \r\n",,terminal_output +496,962098,"TERMINAL",0,0,"[2026-01-05 16:00:48 TP0] Prefill batch, #new-seq: 1, #new-token: 7456, #cached-token: 0, token usage: 0.12, #running-req: 5, #queue-req: 0, \r\n",,terminal_output +497,962633,"TERMINAL",0,0,"[2026-01-05 16:00:49 TP0] Decode batch, #running-req: 6, #token: 62041, token usage: 0.14, accept len: 3.50, accept rate: 0.88, cuda graph: True, gen throughput (token/s): 29.66, #queue-req: 0, \r\n[2026-01-05 16:00:49] Finish: obj=GenerateReqInput(validation_time=2.604164183139801e-05, received_time=1767625246.1567805, received_time_perf=2480130.829705822, rid='37e61e0a8120487286ecaa8a601b11e1', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=1.238333061337471e-05, received_time=1767625238.390056, received_time_perf=2480123.062981132, rid='71658fd1fbc14ccea2d0bcad6b9a79d7', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, \n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102486, 25, 100372, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101252, 20, 18, 99241, 
102284, 18, 24, 99241, 23, 101723, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 99200, 17, 22, 13, 21, 101135, 15, 99200, 19, 11, 3949, 3009, 76167, 28, 99590, 22, 100809, 16, 17, 13, 124540, 24, 100899, 103093, 18, 11, 9276, 1131, 103205, 2940, 103825, 69, 100096, 69, 100614, 19, 16, 69, 17, 64, 18, 638, 17, 67, 15, 99200, 69, 15, 67, 12517, 17, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220] ... 
[11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 100919, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 99619, 23, 121577, 100539, 16, 100702, 22, 101655, 16, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 23, 13, 18, 100067, 100002, 21, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98886, 18, 13, 100539, 17, 101663, 16, 118843, 11, 9276, 1131, 22, 99317, 101729, 6902, 16, 69, 8901, 99367, 66, 72376, 17, 67, 15, 8901, 329, 21, 65, 24, 64, 102626, 67, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 
11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '1,20p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98360, 79, 1248, 73022, 151336], 'meta_info': {'id': '37e61e0a8120487286ecaa8a601b11e1', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 15751, 'weight_version': 'default', 
'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-8.928377064876258e-05, 73022, '```'), (0.0, 45937, 'bash'), (-8.344646857949556e-07, 198, '\n'), (-0.34776031970977783, 4616, 'cat'), (-0.06648050248622894, 481, ' -'), (-2.3841830625315197e-06, 77, 'n'), (-4.410734163684538e-06, 608, ' /'), (-8.344646857949556e-07, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.7881377516459906e-06, 2899, '/c'), (-3.6954811548639555e-06, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0010100268991664052, 13428, '/src'), (-0.0003406421164982021, 14, '/'), (-0.008069413714110851, 27082, 'preview'), (-0.002560077467933297, 14, '/'), (-0.009723915718495846, 5057, 'inline'), (-3.969590397900902e-05, 5179, 'Provider'), (-2.3841855067985307e-07, 21239, '.ts'), (-0.002760411472991109, 760, ' |'), (-0.0010632582707330585, 10918, ' sed'), (-3.576278118089249e-07, 481, ' -'), (-2.3841855067985307e-07, 77, 'n'), (-2.3841855067985307e-07, 364, "" '""), (-0.828361451625824, 16, '1'), (-0.00036614391137845814, 11, ','), (-0.823595404624939, 98360, '20'), (-2.3841855067985307e-07, 79, 'p'), (-8.344646857949556e-07, 1248, ""'\n""), (-0.0002706876548472792, 73022, '```'), (-3.1709168979432434e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 594, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 3.2209842205047607, 'response_sent_to_client_ts': 1767625249.3779883}}\r\n[2026-01-05 16:00:49] Finish: obj=GenerateReqInput(validation_time=1.3515818864107132e-05, received_time=1767625247.186492, received_time_perf=2480131.859417047, rid='c51d3f08ba054d2ba1c4466624cfc589', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""time=1.5966128557920456e-05, received_time=1767625246.7885609, received_time_perf=2480131.461486293, rid='20454ff7c6444e82b8fe755cf9521107', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198\n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 
518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 31193, 481, 72, 364, 101130, 11, 104160, 66, 5661, 286, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 62115, 286, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 14901, 14901, 6, 82, 1660, 11057, 5661, 286, 421, 320, 574, 12389, 36442, 4376, 7127, 14901, 6, 3587, 13771, 14901, 14901, 863, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 5661, 286, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 61534, 286, 3044, 286, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 5661, 286, 442, 30530, 6119, 14901, 14901, 6, 82, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 5661, 286, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 286, 3044, 286, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 7, 5661, 310, 419, 12389, 2788, 42626, 310, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 62115, 286, 6903, 5661, 286, 3044, 286, 470, 508, 1203, 5265, 6, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 1009, 8250, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 102856, 11, 103498, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 262, 220, 103437, 13056, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102650, 7472, 1439, 262, 220, 103388, 88166, 262, 220, 103498, 7472, 470, 508, 1203] ... 
[220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101562, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 21, 102487, 20, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101562, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 20, 102487, 21, 98886, 23, 101130, 102626, 17, 15, 100461, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99590, 21, 13, 100928, 23, 20, 99618, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 16, 13, 101562, 16, 100933, 21, 100104, 18, 11, 9276, 1131, 119621, 102856, 542, 22, 66, 21, 101723, 19, 68, 104160, 65, 23, 1859, 100899, 20, 9787, 101804, 17, 104550, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 
24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': '\n```bash\ncd /home/franz.srambical/crowd-pilot-extension && npm run build\n```', 'output_ids': [198, 73022, 45937, 198, 4385, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 1009, 46142, 1598, 1936, 198, 73022, 151336], 'meta_info': {'id': 'c51d3f08ba054d2ba1c4466624cfc589', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 15822, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, 
'\n'), (-0.00034540885826572776, 73022, '```'), (0.0, 45937, 'bash'), (-1.1920928244535389e-07, 198, '\n'), (-1.5738190412521362, 4385, 'cd'), (-4.410734163684538e-06, 608, ' /'), (-3.814689989667386e-06, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-0.00022921319759916514, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-5.960462772236497e-07, 79888, '-extension'), (-0.007279656827449799, 1009, ' &&'), (-0.029429949820041656, 46142, ' npm'), (-0.25282901525497437, 1598, ' run'), (-0.238140270113945, 1936, ' build'), (-0.014944971539080143, 198, '\n'), (-0.00018940561858471483, 73022, '```'), (-1.2993727978027891e-05, 151336, '<|user|>')], 'completion_tokens': 26, 'cached_tokens': 8778, 'spec_accept_rate': 0.7083333333333334, 'spec_accept_length': 3.25, 'spec_verify_ct': 8, 'spec_accept_token_num': 17, 'spec_draft_token_num': 24, 'e2e_latency': 2.1913981437683105, 'response_sent_to_client_ts': 1767625249.380941}}\r\n",,terminal_output +498,962716,"TERMINAL",0,0,"[2026-01-05 16:00:49] Finish: obj=GenerateReqInput(validation_time=1.5966128557920456e-05, received_time=1767625246.7885609, received_time_perf=2480131.461486293, rid='20454ff7c6444e82b8fe755cf9521107', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 'ut_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... 
[truncated]\n/nothink<|user|>\n\n[2026-01-05 16:00:46 TP0] Prefill batch, #new-seq: 1, #new-token: 6965, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0,\n/nothink<|assistant|>\n', input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 
21, 12, 100286, 12, 100002, 220, 99082, 25, 101729, 25, 99590, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100632, 24, 24, 100632, 101130, 20, 21, 99590, 98503, 23, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 98668, 19, 13, 20, 99951, 100441, 24, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 100809, 23, 24, 13, 99887, 24, 101411, 100461, 19, 11, 9276, 1131, 64, 100104, 68, 20, 64, 99698, 8937, 17, 68, 101723, 68, 20, 65, 19, 65, 100899, 22, 67, 102807, 24, 16, 64, 100928, 2577, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498] ... 
[11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101562, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 99618, 19, 126293, 124380, 117933, 99695, 16, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99590, 21, 13, 119953, 22, 99695, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 15, 13, 23, 100104, 22, 100002, 23, 99241, 11, 9276, 1131, 101140, 68, 103595, 68, 15, 64, 23, 98886, 100590, 23, 22, 99869, 21, 757, 5305, 23, 64, 99618, 16, 65, 98965, 68, 16, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 
220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101562, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 21, 102487, 20, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '50,70p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99200, 11, 100096, 79, 1248, 73022, 151336], 'meta_info': {'id': '20454ff7c6444e82b8fe755cf9521107', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 15825, 'weight_version': 'default', 
'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-6.174850568640977e-05, 73022, '```'), (0.0, 45937, 'bash'), (-2.3841855067985307e-07, 198, '\n'), (-0.236087366938591, 4616, 'cat'), (-0.0469711497426033, 481, ' -'), (-6.318072337307967e-06, 77, 'n'), (-3.6954811548639555e-06, 608, ' /'), (-1.1920922133867862e-06, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.0728830375228426e-06, 2899, '/c'), (-1.7881377516459906e-06, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0003518439189065248, 13428, '/src'), (-0.0001454247540095821, 14, '/'), (-0.0013653251808136702, 27082, 'preview'), (-0.0010031197452917695, 14, '/'), (-0.011957867071032524, 5057, 'inline'), (-0.00011216964776394889, 5179, 'Provider'), (-2.3841855067985307e-07, 21239, '.ts'), (-0.0023092764895409346, 760, ' |'), (-0.0007565735140815377, 10918, ' sed'), (-2.3841855067985307e-07, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-1.1920928244535389e-07, 364, "" '""), (-1.7636879682540894, 99200, '50'), (-4.732496745418757e-05, 11, ','), (-0.03743920475244522, 100096, '70'), (0.0, 79, 'p'), (-1.4305104514278355e-06, 1248, ""'\n""), (-0.00015841660206206143, 73022, '```'), (-4.541770613286644e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 594, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 2.6487295627593994, 'response_sent_to_client_ts': 1767625249.4375381}}\r\n[2026-01-05 16:00:49] Finish: obj=GenerateReqInput(validation_time=1.3458076864480972e-05, received_time=1767625247.486658, received_time_perf=2480132.159583331, rid='aa3a4f91c33b4e629696b0ee09464308', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""n: 8192, #cached-token: 8785, token usage: 0.05, #running-req: 1, #queue-req: 0, \n[2026-01-05 16:00:47] Receive: obj=GenerateReqInput(validation_time=1.3404060155153275e-05, received_time=1767625247.1256254, received_time_perf=2480131.798550485, rid='9aba3411c8e84717bf65680e029117b1', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 468\n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 
5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 262, 220, 103437, 13056, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102650, 7472, 1439, 262, 220, 103388, 88166, 262, 220, 103498, 7472, 470, 508, 1203, 935, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102573, 25, 98965, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 112098, 21, 99695, 101294, 20, 100632, 19, 102269, 16, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99082, 16, 13, 20, 100104, 22, 100557, 11, 3949, 3009, 76167, 28, 99590, 23, 98503, 18, 21, 13, 115937, 101411, 21, 101729, 11, 9276, 1131, 23, 66, 22, 580, 103502, 19, 22, 12502, 104029, 3065, 370, 15, 68, 115547, 17, 66, 20, 823, 99317, 69, 98668, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18] ... 
[102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101655, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 17, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 23, 100928, 20, 11, 3950, 10426, 25, 220, 15, 13, 100002, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101655, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 121910, 99698, 21, 15, 99082, 20, 99082, 18, 99951, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99590, 22, 13, 109641, 21, 99446, 19, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 16, 13, 22, 101663, 117239, 100933, 20, 11, 9276, 1131, 24, 11998, 101135, 16, 16, 66, 23, 68, 23, 101655, 99419, 13225, 101411, 21, 99695, 68, 15, 100104, 114959, 65, 16, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 
17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '39,59p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101294, 11, 102573, 79, 1248, 73022, 151336], 'meta_info': {'id': 'aa3a4f91c33b4e629696b0ee09464308', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 16232, 'weight_version': 
'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.0001817776501411572, 73022, '```'), (0.0, 45937, 'bash'), (-1.1920928244535389e-07, 198, '\n'), (-0.1514628827571869, 4616, 'cat'), (-0.03308311849832535, 481, ' -'), (-7.033323527139146e-06, 77, 'n'), (-3.3378546504536644e-06, 608, ' /'), (-8.344646857949556e-07, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-3.576278118089249e-07, 2899, '/c'), (-1.1920928244535389e-07, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.00013481661153491586, 13428, '/src'), (-0.00011884459672728553, 14, '/'), (-0.0011374439345672727, 27082, 'preview'), (-0.0002053765201708302, 14, '/'), (-0.0020008322317153215, 5057, 'inline'), (-0.00013374387344811112, 5179, 'Provider'), (-1.1920928244535389e-07, 21239, '.ts'), (-0.0027580340392887592, 760, ' |'), (-0.0005466635921038687, 10918, ' sed'), (-1.1920928244535389e-07, 481, ' -'), (-3.576278118089249e-07, 77, 'n'), (-1.1920928244535389e-07, 364, "" '""), (-0.8546361327171326, 101294, '39'), (-3.6954811548639555e-06, 11, ','), (-0.0014206803170964122, 102573, '59'), (-1.1920928244535389e-07, 79, 'p'), (-1.5497195136049413e-06, 1248, ""'\n""), (-0.00032360086333937943, 73022, '```'), (-1.5497195136049413e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 584, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 1.950791358947754, 'response_sent_to_client_ts': 1767625249.4394398}}\r\n[2026-01-05 16:00:49] INFO: 10.86.2.252:39352 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n[2026-01-05 16:00:49] Finish: obj=GenerateReqInput(validation_time=1.3147015124559402e-05, received_time=1767625246.8264365, received_time_perf=2480131.499361709, rid='92304377b23e4060a5c58aa03bd359a8', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
'ut_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... [truncated]\n/nothink<|user|>\n\n[2026-01-05 16:00:46 TP0] Prefill batch, #new-seq: 1, #new-token: 6965, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0,\n/nothink<|assistant|>\n', input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 
11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 101729, 25, 99590, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100632, 24, 24, 100632, 101130, 20, 21, 99590, 98503, 23, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 98668, 19, 13, 20, 99951, 100441, 24, 23, 11, 3949, 3009, 76167, 28, 99590, 22, 100809, 23, 24, 13, 99887, 24, 101411, 100461, 19, 11, 9276, 1131, 64, 100104, 68, 20, 64, 99698, 8937, 17, 68, 101723, 68, 20, 65, 19, 65, 100899, 22, 67, 102807, 24, 16, 64, 100928, 2577, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498] ... 
[11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101562, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 99618, 19, 126293, 124380, 117933, 99695, 16, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, \n[... 24078 bytes truncated to respect terminal scrollback settings ...]\n",,terminal_output +499,963305,"src/preview/inlineProvider.ts",1486,0,"",typescript,selection_command +500,963553,"src/preview/inlineProvider.ts",1489,0,"",typescript,selection_command +501,963584,"src/preview/inlineProvider.ts",1494,0,"",typescript,selection_command +502,963607,"TERMINAL",0,0,"[2026-01-05 16:00:50] Receive: obj=GenerateReqInput(validation_time=3.2924115657806396e-05, received_time=1767625250.3389523, received_time_perf=2480135.011877227, rid='04e40ac708874f4f95f168d70885590f', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 
97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102573, 25, 98965, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101474, 23, 18, 98965, 100899, 23, 100590, 16, 100919, 16, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99082, 16, 13, 23, 99064, 115937, 11, 3949, 3009, 76167, 28, 99590, 23, 98503, 18, 21, 13, 99200, 18, 115925, 101294, 11, 9276, 1131, 17, 7221, 329, 21, 69, 103992, 68, 102487, 19, 24, 64, 24, 65, 924, 21, 100104, 88444, 19, 69, 21, 99317, 21, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 
18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11] ... [101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101655, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 20, 101252, 22, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100372, 11, 671, 27084, 12, 2958, 25, 220, 17, 11, 671, 4584, 12, 2958, 25, 220, 17, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101655, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 18, 100461, 99695, 22, 101840, 21, 19, 100933, 100614, 22, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99590, 22, 13, 100933, 
21, 101411, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 17, 13, 121743, 101729, 18, 18, 100557, 11, 9276, 1131, 5305, 18, 64, 19, 69, 104327, 66, 100702, 65, 19, 68, 21, 100104, 21, 102487, 65, 15, 2127, 100614, 19, 21, 102088, 100562, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 22, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, 
lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:00:50 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 594, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +503,963688,"src/preview/inlineProvider.ts",1501,0,"",typescript,selection_command +504,963892,"src/preview/inlineProvider.ts",1506,0,"",typescript,selection_command +505,963892,"TERMINAL",0,0,"[2026-01-05 16:00:50 TP0] Prefill batch, #new-seq: 1, #new-token: 7202, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n[2026-01-05 16:00:50] Receive: obj=GenerateReqInput(validation_time=1.3010110706090927e-05, received_time=1767625250.638445, received_time_perf=2480135.311369908, rid='f52c79ab9e744fdebd92f72546178f18', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 
32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 31193, 481, 72, 364, 17, 11, 104160, 66, 5661, 474, 314, 5586, 11, 311, 51397, 1851, 3812, 335, 504, 7127, 14901, 6, 1725, 9239, 14901, 14901, 6967, 5661, 5661, 3663, 5661, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 13, 5661, 353, 1096, 4990, 10614, 916, 27971, 14901, 14901, 6, 82, 30523, 323, 4278, 389, 4287, 5128, 13, 5661, 639, 5661, 1533, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 94714, 262, 869, 1917, 25, 5586, 760, 845, 284, 845, 55409, 262, 869, 8967, 25, 2710, 284, 830, 55409, 5661, 262, 4165, 5661, 257, 353, 2573, 279, 1482, 1917, 311, 3037, 438, 7381, 9750, 13, 5661, 257, 639, 5661, 262, 738, 2512, 15294, 25, 5586, 1648, 737, 94714, 286, 419, 12389, 284, 1917, 55409, 286, 442, 30174, 30530, 6119, 311, 312, 65012, 7381, 3459, 908, 5661, 286, 55008, 33739, 7769, 4062, 32604, 14901, 6, 8866, 12389, 29307, 50, 3799, 28455, 14901, 14901, 4667, 5661, 262, 335, 5661, 5661, 262, 4165, 5661, 257, 353, 12017, 279, 1482, 1917, 13, 5661, 257, 639, 5661, 262, 2797, 2512, 4555, 737, 94714, 286, 419, 12389, 284, 845, 55409, 262, 335, 5661, 5661, 262, 4165, 5661, 257, 353, 2126, 279, 1482, 1917, 13, 5661, 257, 639, 5661, 262, 633, 2512, 4555, 5586, 760, 845, 94714, 286, 470, 419, 12389, 55409, 262, 335, 5661, 5661, 262, 4165, 5661, 257, 353, 18535, 476, 11151, 279, 9106, 13, 5661, 257, 639, 5661, 262, 738, 5462, 87027, 25, 2710, 1648, 737, 94714, 286, 419, 22086, 284, 8967, 55409, 262, 335, 5661, 5661, 262, 4165, 5661, 257, 353, 39350, 7381, 9750, 3589, 13, 5661, 257, 639, 5661, 262, 3410, 25246, 33030, 4353, 7, 5661, 286, 2197, 25, 55008, 1979, 7524, 42626, 286, 2309, 25, 55008, 21900, 42626, 286, 2266, 25, 55008, 5337, 1056, 33030, 1972, 42626, 286, 3950, 25, 55008, 727, 23860, 5661, 262, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 94714, 286, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 5661, 286, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 62115, 286, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 14901, 14901, 6, 82, 1660, 11057, 5661, 286, 421, 320, 574, 12389, 36442, 4376, 7127, 14901, 6, 3587, 13771, 14901, 14901, 863, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 5661, 286, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 61534, 286, 3044, 286, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 5661] ... 
[220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 22, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99200, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 18, 13, 100104, 17, 19, 98965, 20, 101411, 22, 99695, 21, 101294, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 15, 13, 100702, 23, 101804, 17, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 20, 13, 15, 98965, 23, 102114, 99241, 22, 11, 9276, 1131, 100590, 68, 99698, 580, 100096, 23, 23, 103498, 69, 19, 69, 101804, 69, 112283, 67, 100096, 23, 23, 101130, 100067, 69, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 
24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +506,963952,"TERMINAL",0,0,"[2026-01-05 16:00:50] Receive: obj=GenerateReqInput(validation_time=1.3813842087984085e-05, received_time=1767625250.717782, received_time_perf=2480135.390707171, rid='f1cabe87d85b41a4baa6abded2a2f6f7', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 
10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 31193, 481, 72, 364, 17, 11, 104160, 66, 5661, 474, 314, 5586, 11, 311, 51397, 1851, 3812, 335, 504, 7127, 14901, 6, 1725, 9239, 14901, 14901, 6967, 5661, 5661, 3663, 5661, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 13, 5661, 353, 1096, 4990, 10614, 916, 27971, 14901, 14901, 6, 82, 30523, 323, 4278, 389, 4287, 5128, 13, 5661, 639, 5661, 1533, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 
33030, 1234, 5179, 94714, 262, 869, 1917, 25, 5586, 760, 845, 284, 845, 55409, 262, 869, 8967, 25, 2710, 284, 830, 55409, 5661, 262, 4165, 5661, 257, 353, 2573, 279, 1482, 1917, 311, 3037, 438, 7381, 9750, 13, 5661, 257, 639, 5661, 262, 738, 2512, 15294, 25, 5586, 1648, 737, 94714, 286, 419, 12389, 284, 1917, 55409, 286, 442, 30174, 30530, 6119, 311, 312, 65012, 7381, 3459, 908, 5661, 286, 55008, 33739, 7769, 4062, 32604, 14901, 6, 8866, 12389, 29307, 50, 3799, 28455, 14901, 14901, 4667, 5661, 262, 335, 5661, 5661, 262, 4165, 5661, 257, 353, 12017, 279, 1482, 1917, 13, 5661, 257, 639, 5661, 262, 2797, 2512, 4555, 737, 94714, 286, 419, 12389, 284, 845, 55409, 262, 335, 5661, 5661, 262, 4165, 5661, 257, 353, 2126, 279, 1482, 1917, 13, 5661, 257, 639, 5661, 262, 633, 2512, 4555, 5586, 760, 845, 94714, 286, 470, 419, 12389, 55409, 262, 335, 5661, 5661, 262, 4165, 5661, 257, 353, 18535, 476, 11151, 279, 9106, 13, 5661, 257, 639, 5661, 262, 738, 5462, 87027, 25, 2710, 1648, 737, 94714, 286, 419, 22086, 284, 8967, 55409, 262, 335, 5661, 5661, 262, 4165, 5661, 257, 353, 39350, 7381, 9750, 3589, 13, 5661, 257, 639, 5661, 262, 3410, 25246, 33030, 4353, 7, 5661, 286, 2197, 25, 55008, 1979, 7524, 42626, 286, 2309, 25, 55008, 21900, 42626, 286, 2266, 25, 55008, 5337, 1056, 33030, 1972, 42626, 286, 3950, 25, 55008, 727, 23860, 5661, 262, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 94714, 286, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 5661, 286, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 62115, 286, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 14901, 14901, 6, 82, 1660, 11057, 5661, 286, 421, 320, 574, 12389, 36442, 4376, 7127, 14901, 6, 3587, 13771, 14901, 14901, 863, 94714, 310, 470, 39158, 5661, 286, 335, 5661, 5661, 286, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 61534, 286, 3044, 286, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 5661] ... 
[220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 22, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99200, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 18, 13, 100104, 17, 19, 98965, 20, 101411, 22, 99695, 21, 101294, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 15, 13, 100702, 23, 101804, 17, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 20, 13, 15, 98965, 23, 102114, 99241, 22, 11, 9276, 1131, 100590, 68, 99698, 580, 100096, 23, 23, 103498, 69, 19, 69, 101804, 69, 112283, 67, 100096, 23, 23, 101130, 100067, 69, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 
24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +507,964182,"TERMINAL",0,0,"[2026-01-05 16:00:50 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 591, token usage: 0.04, #running-req: 1, #queue-req: 1, \r\n",,terminal_output +508,964251,"TERMINAL",0,0,"[2026-01-05 16:00:51] Receive: obj=GenerateReqInput(validation_time=1.9887927919626236e-05, received_time=1767625251.016948, received_time_perf=2480135.689873066, rid='83cea96cd63f4aeabe20c1f6674491c3', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 
10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 262, 220, 100702, 2760, 456, 262, 220, 101135, 1572, 262, 220, 100235, 2760, 1554, 262, 220, 100632, 9356, 353, 18535, 476, 11151, 279, 9106, 624, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 
419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102573, 25, 102501, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 101723, 17, 124080, 126182, 19, 99367, 121416, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 98729, 17, 13, 124898, 22, 101478, 11, 3949, 3009, 76167, 28, 99590, 23, 125744, 21, 13, 122589, 103093, 16, 101478, 11, 9276, 1131, 99618, 24, 20, 100104, 100539, 22, 924, 101135, 22, 15, 69, 99695, 18, 100632, 18, 103388, 13225, 100933, 65, 15, 6066, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11] ... 
[220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99200, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 125214, 17, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99200, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 122559, 15, 98965, 15, 100096, 99618, 24, 100614, 17, 22, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 15, 13, 21, 100919, 19, 100461, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 20, 13, 18, 98965, 100632, 24, 100067, 23, 11, 9276, 1131, 69, 102501, 66, 102626, 370, 24, 68, 22, 101723, 69, 450, 8937, 103825, 69, 22, 99446, 101562, 16, 100928, 69, 99243, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 
24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 22, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +509,964365,"src/preview/inlineProvider.ts",1564,0,"",typescript,selection_command +510,964459,"TERMINAL",0,0,"[2026-01-05 16:00:51 TP0] Prefill batch, #new-seq: 2, #new-token: 8192, #cached-token: 8782, token usage: 0.05, #running-req: 1, #queue-req: 1, \r\n",,terminal_output +511,964649,"TERMINAL",0,0,"[2026-01-05 16:00:51] Receive: obj=GenerateReqInput(validation_time=1.367693766951561e-05, received_time=1767625251.3951821, received_time_perf=2480136.068107274, rid='2e05b230279241f69e98701830ea1c6c', 
http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 262, 220, 100702, 2760, 456, 262, 220, 101135, 1572, 262, 220, 100235, 2760, 1554, 262, 220, 100632, 9356, 353, 18535, 476, 11151, 279, 9106, 624, 262, 220, 101140, 9356, 735, 
262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102573, 25, 102501, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 101723, 17, 124080, 126182, 19, 99367, 121416, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 98729, 17, 13, 124898, 22, 101478, 11, 3949, 3009, 76167, 28, 99590, 23, 125744, 21, 13, 122589, 103093, 16, 101478, 11, 9276, 1131, 99618, 24, 20, 100104, 100539, 22, 924, 101135, 22, 15, 69, 99695, 18, 100632, 18, 103388, 13225, 100933, 65, 15, 6066, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11] ... 
[220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 22, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99200, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 102573, 16, 11, 3950, 10426, 25, 220, 15, 13, 100590, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 16, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102624, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 24, 101252, 102626, 17, 102626, 16, 102487, 17, 21, 99619, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 16, 13, 15, 99317, 24, 100933, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 20, 13, 101840, 24, 23, 103388, 100539, 21, 11, 9276, 1131, 104127, 72376, 102487, 4385, 103302, 69, 19, 5918, 8228, 98360, 66, 16, 69, 101478, 22, 19, 101474, 16, 66, 18, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 
100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102624, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 17, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 23, 100928, 17, 11, 3950, 10426, 25, 220, 15, 13, 100002, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 16, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +512,964845,"TERMINAL",0,0,"[2026-01-05 16:00:51 TP0] Prefill batch, #new-seq: 1, #new-token: 6221, #cached-token: 0, token usage: 0.07, #running-req: 2, #queue-req: 2, \r\n",,terminal_output +513,965120,"TERMINAL",0,0,"[2026-01-05 16:00:51 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 586, token usage: 0.07, #running-req: 3, #queue-req: 1, \r\n",,terminal_output +514,965390,"TERMINAL",0,0,"[2026-01-05 16:00:52 TP0] Prefill batch, #new-seq: 2, #new-token: 8192, #cached-token: 8777, token usage: 
0.09, #running-req: 3, #queue-req: 0, \r\n",,terminal_output +515,965492,"src/preview/inlineProvider.ts",1544,0,"",typescript,selection_command +516,965796,"TERMINAL",0,0,"[2026-01-05 16:00:52 TP0] Prefill batch, #new-seq: 1, #new-token: 6956, #cached-token: 0, token usage: 0.10, #running-req: 4, #queue-req: 0, \r\n[2026-01-05 16:00:52] Receive: obj=GenerateReqInput(validation_time=1.2907199561595917e-05, received_time=1767625252.5227456, received_time_perf=2480137.195670741, rid='a919a67d421b4f648ec27fa0a4bd1081', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 
894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 262, 220, 100702, 2760, 456, 262, 220, 101135, 1572, 262, 220, 100235, 2760, 1554, 262, 220, 100632, 9356, 353, 18535, 476, 11151, 279, 9106, 624, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102573, 25, 102501, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 101723, 17, 124080, 126182, 19, 99367, 121416, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 98729, 17, 13, 124898, 22, 101478, 11, 3949, 3009, 76167, 28, 99590, 23, 125744, 21, 13, 122589, 103093, 16, 101478, 11, 9276, 1131, 99618, 24, 20, 100104, 100539, 22, 924, 101135, 22, 15, 69, 99695, 18, 100632, 18, 103388, 13225, 100933, 65, 15, 6066, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11] ... 
[104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102624, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 17, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 23, 100928, 17, 11, 3950, 10426, 25, 220, 15, 13, 100002, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 16, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102624, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100632, 22, 103093, 18, 22, 101478, 101804, 16, 101917, 16, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 16, 13, 101294, 20, 126334, 16, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 21, 13, 100539, 23, 110733, 99951, 19, 11, 9276, 1131, 17, 68, 100002, 65, 112596, 99951, 24, 99590, 16, 69, 103093, 68, 101663, 22, 15, 99243, 99064, 12502, 16, 66, 21, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 
119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +517,966057,"TERMINAL",0,0,"[2026-01-05 16:00:52 TP0] Prefill batch, #new-seq: 1, #new-token: 686, #cached-token: 16709, token usage: 0.11, #running-req: 5, #queue-req: 0, \r\n",,terminal_output +518,966447,"TERMINAL",0,0,"[2026-01-05 16:00:53] Finish: obj=GenerateReqInput(validation_time=1.9887927919626236e-05, received_time=1767625251.016948, received_time_perf=2480135.689873066, rid='83cea96cd63f4aeabe20c1f6674491c3', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that 
interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""n: 7202, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \n[2026-01-05 16:00:50] Receive: obj=GenerateReqInput(validation_time=1.3010110706090927e-05, received_time=1767625250.638445, received_time_perf=2480135.311369908, rid='f52c79ab9e744fdebd92f72546178f18', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 7\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 262, 220, 100702, 2760, 456, 262, 220, 101135, 1572, 262, 220, 100235, 2760, 1554, 262, 220, 100632, 9356, 353, 18535, 476, 11151, 279, 9106, 624, 262, 220, 101140, 9356, 
735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102573, 25, 102501, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 101723, 17, 124080, 126182, 19, 99367, 121416, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 98729, 17, 13, 124898, 22, 101478, 11, 3949, 3009, 76167, 28, 99590, 23, 125744, 21, 13, 122589, 103093, 16, 101478, 11, 9276, 1131, 99618, 24, 20, 100104, 100539, 22, 924, 101135, 22, 15, 69, 99695, 18, 100632, 18, 103388, 13225, 100933, 65, 15, 6066, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11] ... 
[220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99200, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 125214, 17, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99200, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 122559, 15, 98965, 15, 100096, 99618, 24, 100614, 17, 22, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 15, 13, 21, 100919, 19, 100461, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 20, 13, 18, 98965, 100632, 24, 100067, 23, 11, 9276, 1131, 69, 102501, 66, 102626, 370, 24, 68, 22, 101723, 69, 450, 8937, 103825, 69, 22, 99446, 101562, 16, 100928, 69, 99243, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 
24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 22, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '50,70p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99200, 11, 100096, 79, 1248, 73022, 151336], 'meta_info': {'id': '83cea96cd63f4aeabe20c1f6674491c3', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 15989, 'weight_version': 'default', 
'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-1.6212332411669195e-05, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.16167668998241425, 4616, 'cat'), (-0.01948992721736431, 481, ' -'), (-8.344646857949556e-07, 77, 'n'), (-2.861018856492592e-06, 608, ' /'), (-1.1920928244535389e-07, 5117, 'home'), (-1.1920928244535389e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.4305104514278355e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.00025149996508844197, 13428, '/src'), (-9.30981186684221e-05, 14, '/'), (-0.00011622230522334576, 27082, 'preview'), (-0.00010382589971413836, 14, '/'), (-8.642300235806033e-05, 5057, 'inline'), (-0.00015937011630740017, 5179, 'Provider'), (0.0, 21239, '.ts'), (-3.182837463100441e-05, 760, ' |'), (-1.1920858014491387e-05, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (0.0, 364, "" '""), (-0.7144208550453186, 99200, '50'), (-3.576278118089249e-07, 11, ','), (-0.01935991644859314, 100096, '70'), (-1.1920928244535389e-07, 79, 'p'), (-7.152555099310121e-07, 1248, ""'\n""), (-3.802703940891661e-05, 73022, '```'), (-1.549708758830093e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 586, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 2.0866518020629883, 'response_sent_to_client_ts': 1767625253.1038458}}\r\n[2026-01-05 16:00:53] Finish: obj=GenerateReqInput(validation_time=1.367693766951561e-05, received_time=1767625251.3951821, received_time_perf=2480136.068107274, rid='2e05b230279241f69e98701830ea1c6c', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""ed_time=1767625251.016948, received_time_perf=2480135.689873066, rid='83cea96cd63f4aeabe20c1f6674491c3', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689,\n... [truncated]\n/nothink<|user|>\n\n[2026-01-05 16:00:51 TP0] Prefill batch, #new-seq: 2, #new-token: 8192, #cached-token: 8782, token usage: 0.05, #running-req: 1, #queue-req: 1,\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 
11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 262, 220, 100702, 2760, 456, 262, 220, 101135, 1572, 262, 220, 100235, 2760, 1554, 262, 220, 100632, 9356, 353, 18535, 476, 11151, 279, 9106, 624, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102573, 25, 102501, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 101723, 17, 124080, 126182, 19, 99367, 121416, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 98729, 17, 13, 124898, 22, 101478, 11, 3949, 3009, 76167, 28, 99590, 23, 125744, 21, 13, 122589, 103093, 16, 101478, 11, 9276, 1131, 99618, 24, 20, 100104, 100539, 22, 924, 101135, 22, 15, 69, 99695, 18, 100632, 18, 103388, 13225, 100933, 65, 15, 6066, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11] ... 
[220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 22, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99200, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 102573, 16, 11, 3950, 10426, 25, 220, 15, 13, 100590, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 16, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102624, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 24, 101252, 102626, 17, 102626, 16, 102487, 17, 21, 99619, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 16, 13, 15, 99317, 24, 100933, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 20, 13, 101840, 24, 23, 103388, 100539, 21, 11, 9276, 1131, 104127, 72376, 102487, 4385, 103302, 69, 19, 5918, 8228, 98360, 66, 16, 69, 101478, 22, 19, 101474, 16, 66, 18, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 
100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102624, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 17, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 23, 100928, 17, 11, 3950, 10426, 25, 220, 15, 13, 100002, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 16, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '50,70p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99200, 11, 100096, 79, 1248, 73022, 151336], 'meta_info': {'id': '2e05b230279241f69e98701830ea1c6c', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 16714, 'weight_version': 'default', 
'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-3.433168603805825e-05, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.28457126021385193, 4616, 'cat'), (-0.019499631598591805, 481, ' -'), (-7.152555099310121e-07, 77, 'n'), (-4.410734163684538e-06, 608, ' /'), (-1.1920928244535389e-07, 5117, 'home'), (-1.1920928244535389e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-2.9802276912960224e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.000710592488758266, 13428, '/src'), (-0.00016044282529037446, 14, '/'), (-0.00023445718397852033, 27082, 'preview'), (-0.00014280252798926085, 14, '/'), (-0.00015198028995655477, 5057, 'inline'), (-0.000271521887043491, 5179, 'Provider'), (0.0, 21239, '.ts'), (-5.4596363042946905e-05, 760, ' |'), (-2.002696055569686e-05, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (0.0, 364, "" '""), (-0.8420470952987671, 99200, '50'), (-3.576278118089249e-07, 11, ','), (-0.03227386623620987, 100096, '70'), (-1.1920928244535389e-07, 79, 'p'), (-8.344646857949556e-07, 1248, ""'\n""), (-5.4238757002167404e-05, 73022, '```'), (-1.311301275563892e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 8777, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 1.7085752487182617, 'response_sent_to_client_ts': 1767625253.1068041}}\r\n[2026-01-05 16:00:53] Finish: obj=GenerateReqInput(validation_time=3.2924115657806396e-05, received_time=1767625250.3389523, received_time_perf=2480135.011877227, rid='04e40ac708874f4f95f168d70885590f', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""n: 5887, #cached-token: 0, token usage: 0.07, #running-req: 2, #queue-req: 2, \n[2026-01-05 16:00:47] Receive: obj=GenerateReqInput(validation_time=1.3458076864480972e-05, received_time=1767625247.486658, received_time_perf=2480132.159583331, rid='aa3a4f91c33b4e629696b0ee09464308', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 7\n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 
5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102573, 25, 98965, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101474, 23, 18, 98965, 100899, 23, 100590, 16, 100919, 16, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99082, 16, 13, 23, 99064, 115937, 11, 3949, 3009, 76167, 28, 99590, 23, 98503, 18, 21, 13, 99200, 18, 115925, 101294, 11, 9276, 1131, 17, 7221, 329, 21, 69, 103992, 68, 102487, 19, 24, 64, 24, 65, 924, 21, 100104, 88444, 19, 69, 21, 99317, 21, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11] ... 
[101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 1\n[... 
57833 bytes truncated to respect terminal scrollback settings ...]\n",,terminal_output +519,968284,"src/preview/inlineProvider.ts",1633,0,"",typescript,selection_command +520,968522,"src/preview/inlineProvider.ts",1682,0,"",typescript,selection_command +521,968545,"src/preview/inlineProvider.ts",1705,0,"",typescript,selection_command +522,968625,"src/preview/inlineProvider.ts",1707,0,"",typescript,selection_command +523,968626,"TERMINAL",0,0,"[2026-01-05 16:00:55] Receive: obj=GenerateReqInput(validation_time=1.7288606613874435e-05, received_time=1767625255.3143358, received_time_perf=2480139.987260831, rid='36680dc083a04324bcf0b369a1975d53', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 
13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99082, 25, 102573, 25, 102501, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 101723, 17, 124080, 126182, 19, 99367, 121416, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 98729, 17, 13, 124898, 22, 101478, 11, 3949, 3009, 76167, 28, 99590, 23, 125744, 21, 13, 122589, 103093, 16, 101478, 11, 9276, 1131, 99618, 24, 20, 100104, 100539, 22, 924, 101135, 22, 15, 69, 99695, 18, 100632, 18, 103388, 13225, 100933, 65, 15, 6066, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220] ... 
[11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102501, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 21, 101804, 21, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 98668, 11, 671, 27084, 12, 2958, 25, 220, 19, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102501, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 124484, 22, 98729, 101804, 21, 121743, 102573, 16, 22, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 17, 13, 20, 99241, 22, 100461, 21, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 22, 13, 122414, 21, 100096, 22, 102340, 11, 9276, 1131, 64, 24, 98729, 64, 102952, 67, 19, 99146, 65, 19, 69, 21, 100933, 757, 99951, 3632, 15, 64, 19, 8937, 108479, 16, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 
99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:00:55 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +524,968626,"src/preview/inlineProvider.ts",1716,0,"",typescript,selection_command +525,968652,"src/preview/inlineProvider.ts",1781,0,"",typescript,selection_command +526,968896,"TERMINAL",0,0,"[2026-01-05 16:00:55 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output 
+527,968897,"src/preview/inlineProvider.ts",1791,0,"",typescript,selection_command +528,968970,"TERMINAL",0,0,"[2026-01-05 16:00:55] Receive: obj=GenerateReqInput(validation_time=1.802109181880951e-05, received_time=1767625255.6550884, received_time_perf=2480140.328013802, rid='8606ff779db1473482dfad328b289d13', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 
2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101294, 11, 102573, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99082, 28964, 15, 60, 49872, 7162, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 5839, 25, 220, 122876, 99317, 11, 3950, 10426, 25, 220, 15, 13, 100441, 11, 4193, 2422, 25, 220, 18, 13, 99082, 11, 4193, 4379, 25, 220, 15, 13, 102626, 11, 24335, 4771, 25, 3007, 11, 4081, 63308, 320, 5839, 2687, 1648, 220, 123564, 13, 100104, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99082, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 103093, 18, 100096, 18, 99619, 19, 122414, 100096, 24, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99146, 20, 13, 100235, 22, 100562, 23, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 15, 13, 15, 99064, 15, 99366, 102269, 17, 11, 9276, 1131, 823, 329, 1371, 99590, 68, 122250, 99698, 20, 18, 2577, 88714, 102636, 68, 21, 66, 18, 64, 101840, 21, 18, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076] ... 
[11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99200, 11, 100096, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101130, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 99869, 23, 99618, 101478, 16, 100919, 22, 101723, 18, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 20, 13, 18, 99367, 18, 100235, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 24, 13, 101663, 22, 112896, 23, 100557, 11, 9276, 1131, 100632, 21, 99695, 7628, 100562, 18, 64, 100590, 18, 99590, 8901, 69, 15, 65, 100632, 24, 64, 98729, 22, 20, 67, 102721, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 
17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:00:55] Receive: obj=GenerateReqInput(validation_time=1.7448794096708298e-05, received_time=1767625255.6950512, received_time_perf=2480140.367976516, rid='4bd5319939db4afdb8f68dcdb5a5e2d9', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 
374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101294, 11, 102573, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 
262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99082, 28964, 15, 60, 49872, 7162, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 5839, 25, 220, 122876, 99317, 11, 3950, 10426, 25, 220, 15, 13, 100441, 11, 4193, 2422, 25, 220, 18, 13, 99082, 11, 4193, 4379, 25, 220, 15, 13, 102626, 11, 24335, 4771, 25, 3007, 11, 4081, 63308, 320, 5839, 2687, 1648, 220, 123564, 13, 100104, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99082, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 103093, 18, 100096, 18, 99619, 19, 122414, 100096, 24, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99146, 20, 13, 100235, 22, 100562, 23, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 15, 13, 15, 99064, 15, 99366, 102269, 17, 11, 9276, 1131, 823, 329, 1371, 99590, 68, 122250, 99698, 20, 18, 2577, 88714, 102636, 68, 21, 66, 18, 64, 101840, 21, 18, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076] ... 
[11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99200, 11, 100096, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101130, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 99869, 23, 99618, 101478, 16, 100919, 22, 101723, 18, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 20, 13, 18, 99367, 18, 100235, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 24, 13, 101663, 22, 112896, 23, 100557, 11, 9276, 1131, 100632, 21, 99695, 7628, 100562, 18, 64, 100590, 18, 99590, 8901, 69, 15, 65, 100632, 24, 64, 98729, 22, 20, 67, 102721, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 
17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +529,969203,"TERMINAL",0,0,"[2026-01-05 16:00:55 TP0] Prefill batch, #new-seq: 1, #new-token: 874, #cached-token: 0, token usage: 0.04, #running-req: 0, #queue-req: 2, \r\n",,terminal_output +530,969256,"TERMINAL",0,0,"[2026-01-05 16:00:56] Receive: obj=GenerateReqInput(validation_time=1.8575694411993027e-05, received_time=1767625255.981489, 
received_time_perf=2480140.654414143, rid='5eaed925376f41a28ffed2f566328111', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99082, 28964, 15, 60, 49872, 7162, 
11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 5839, 25, 220, 122876, 99317, 11, 3950, 10426, 25, 220, 15, 13, 100441, 11, 4193, 2422, 25, 220, 18, 13, 99082, 11, 4193, 4379, 25, 220, 15, 13, 102626, 11, 24335, 4771, 25, 3007, 11, 4081, 63308, 320, 5839, 2687, 1648, 220, 123564, 13, 100104, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99082, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 103093, 18, 100096, 18, 99619, 19, 122414, 100096, 24, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99146, 20, 13, 100235, 22, 100562, 23, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 15, 13, 15, 99064, 15, 99366, 102269, 17, 11, 9276, 1131, 823, 329, 1371, 99590, 68, 122250, 99698, 20, 18, 2577, 88714, 102636, 68, 21, 66, 18, 64, 101840, 21, 18, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919] ... 
[99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101130, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101130, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 99695, 17, 110610, 127020, 23, 99695, 101804, 16, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 20, 13, 21, 101130, 100562, 23, 19, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 15, 13, 18, 99869, 15, 99366, 99695, 17, 11, 9276, 1131, 23, 99618, 21, 542, 102114, 24, 1999, 122463, 18, 100933, 17, 2940, 329, 18, 99869, 65, 99869, 24, 67, 99366, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 
11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +531,969333,"TERMINAL",0,0,"[2026-01-05 16:00:56 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 616, token usage: 0.04, #running-req: 1, #queue-req: 2, \r\n",,terminal_output +532,969454,"src/preview/inlineProvider.ts",1871,0,"",typescript,selection_command +533,969635,"TERMINAL",0,0,"[2026-01-05 16:00:56 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 0, token usage: 0.06, #running-req: 1, #queue-req: 2, \r\n",,terminal_output 
+534,969636,"src/preview/inlineProvider.ts",1956,0,"",typescript,selection_command +535,970142,"src/preview/inlineProvider.ts",2000,0,"",typescript,selection_command +536,970200,"TERMINAL",0,0,"[2026-01-05 16:00:56] Receive: obj=GenerateReqInput(validation_time=1.625390723347664e-05, received_time=1767625256.4842694, received_time_perf=2480141.157194542, rid='cb35a26e9ff04922aa63f1d08693553e', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 
279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 28964, 15, 60, 49872, 7162, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 5839, 25, 220, 122876, 101723, 11, 3950, 10426, 25, 220, 15, 13, 100441, 11, 4193, 2422, 25, 220, 18, 13, 99951, 11, 4193, 4379, 25, 220, 15, 13, 104160, 11, 24335, 4771, 25, 3007, 11, 4081, 63308, 320, 5839, 2687, 1648, 220, 112596, 13, 99869, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 20, 100557, 101663, 21, 101840, 18, 99618, 22, 107609, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99146, 22, 13, 100002, 17, 23, 100235, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 16, 13, 22, 99446, 22, 99618, 101917, 19, 11, 9276, 1131, 66, 16, 64, 23, 65, 15, 55469, 124047, 101655, 22, 67, 23, 98886, 66, 22, 66, 100235, 17, 101411, 66, 99619, 19, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11] ... 
[102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101130, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 103498, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100590, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 17, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101917, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102284, 22, 101917, 24, 101723, 16, 99887, 99064, 17, 22, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 20, 13, 101663, 16, 100933, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 15, 13, 101411, 19, 19, 99367, 121498, 11, 9276, 1131, 20, 12502, 291, 24, 99446, 101140, 21, 69, 102340, 64, 99869, 542, 291, 17, 69, 20, 101478, 18, 99869, 111659, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 
99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:00:56 TP0] Prefill batch, #new-seq: 2, #new-token: 609, #cached-token: 16999, token usage: 0.08, #running-req: 1, #queue-req: 2, \r\n[2026-01-05 16:00:56 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 592, token usage: 0.08, #running-req: 3, #queue-req: 1, \r\n[2026-01-05 16:00:56] Receive: obj=GenerateReqInput(validation_time=2.836296334862709e-05, received_time=1767625256.7851188, received_time_perf=2480141.458043971, rid='e2f101bef48e4e8f9f20bf3597ce9014', http_worker_ipc=None, text=None, 
input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 28964, 15, 60, 49872, 7162, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 5839, 25, 220, 122876, 101723, 11, 3950, 10426, 25, 220, 15, 
13, 100441, 11, 4193, 2422, 25, 220, 18, 13, 99951, 11, 4193, 4379, 25, 220, 15, 13, 104160, 11, 24335, 4771, 25, 3007, 11, 4081, 63308, 320, 5839, 2687, 1648, 220, 112596, 13, 99869, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 20, 100557, 101663, 21, 101840, 18, 99618, 22, 107609, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99146, 22, 13, 100002, 17, 23, 100235, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 16, 13, 22, 99446, 22, 99618, 101917, 19, 11, 9276, 1131, 66, 16, 64, 23, 65, 15, 55469, 124047, 101655, 22, 67, 23, 98886, 66, 22, 66, 100235, 17, 101411, 66, 99619, 19, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11] ... 
[220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101130, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 103498, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100590, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 17, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101917, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102284, 22, 101917, 24, 101723, 16, 99887, 99064, 17, 22, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 20, 13, 101663, 16, 100933, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 15, 13, 101411, 19, 19, 99367, 121498, 11, 9276, 1131, 20, 12502, 291, 24, 99446, 101140, 21, 69, 102340, 64, 99869, 542, 291, 17, 69, 20, 101478, 18, 99869, 111659, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 
21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101917, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100539, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 17, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +537,970435,"TERMINAL",0,0,"[2026-01-05 16:00:57 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 0, token usage: 0.09, #running-req: 3, #queue-req: 2, \r\n",,terminal_output +538,970435,"src/preview/inlineProvider.ts",2023,0,"",typescript,selection_command +539,970436,"src/preview/inlineProvider.ts",2032,0,"",typescript,selection_command +540,970436,"src/preview/inlineProvider.ts",2042,0,"",typescript,selection_command +541,970443,"src/preview/inlineProvider.ts",2096,0,"",typescript,selection_command 
+542,970443,"TERMINAL",0,0,"[2026-01-05 16:00:57] Receive: obj=GenerateReqInput(validation_time=1.8977094441652298e-05, received_time=1767625257.1717012, received_time_perf=2480141.844626481, rid='0a5bb7d7ee7a426996bd6df51d3f65de', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 
24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 21, 99446, 23, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100002, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 99243, 98729, 22, 15, 99419, 100067, 23, 100614, 21, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99146, 22, 13, 100919, 17, 102807, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 17, 13, 100002, 20, 100928, 23, 117786, 11, 9276, 1131, 102487, 22, 64, 18, 70783, 67, 101294, 17, 19, 65, 23, 1999, 23, 64, 21, 67, 103498, 65, 18, 6902, 101723, 638, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24] ... 
[11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101917, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100539, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 17, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101917, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 21, 99446, 18, 100067, 22, 99619, 101135, 22, 101478, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 21, 13, 100933, 19, 99916, 24, 19, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 16, 13, 123006, 98729, 19, 20, 101961, 11, 9276, 1131, 7221, 100235, 64, 99916, 68, 24, 542, 100590, 24, 99241, 5305, 103302, 69, 16, 67, 100562, 21, 24, 100235, 102721, 68, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 
119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +543,970502,"src/preview/inlineProvider.ts",2126,0,"",typescript,selection_command +544,970503,"src/preview/inlineProvider.ts",2177,0,"",typescript,selection_command +545,970542,"src/preview/inlineProvider.ts",2187,0,"",typescript,selection_command +546,970594,"src/preview/inlineProvider.ts",2197,0,"",typescript,selection_command +547,970612,"src/preview/inlineProvider.ts",2216,0,"",typescript,selection_command +548,970717,"src/preview/inlineProvider.ts",2218,0,"",typescript,selection_command 
+549,970718,"TERMINAL",0,0,"[2026-01-05 16:00:57 TP0] Prefill batch, #new-seq: 1, #new-token: 833, #cached-token: 0, token usage: 0.11, #running-req: 3, #queue-req: 3, \r\n",,terminal_output +550,970718,"src/preview/inlineProvider.ts",2225,0,"",typescript,selection_command +551,970718,"src/preview/inlineProvider.ts",2235,0,"",typescript,selection_command +552,970738,"src/preview/inlineProvider.ts",2316,0,"",typescript,selection_command +553,970764,"TERMINAL",0,0,"[2026-01-05 16:00:57 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.12, #running-req: 4, #queue-req: 2, \r\n",,terminal_output +554,971019,"TERMINAL",0,0,"[2026-01-05 16:00:57] Receive: obj=GenerateReqInput(validation_time=1.762108877301216e-05, received_time=1767625257.7477417, received_time_perf=2480142.420666854, rid='7f58e1168c6c46d7894e2925b915278c', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 
15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99146, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 101478, 16, 24, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99146, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100539, 23, 102626, 18, 99698, 16, 115760, 100614, 18, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99241, 16, 13, 101294, 20, 100590, 23, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 21, 13, 100539, 22, 103205, 19, 122559, 11, 9276, 1131, 37012, 102269, 65, 15, 66, 102269, 6902, 19, 99241, 101663, 66, 17, 8315, 99317, 4645, 18, 66, 20, 13225, 823, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220] ... 
[16, 11, 9276, 1131, 15, 64, 20, 6066, 22, 67, 22, 2127, 22, 64, 19, 99916, 100809, 21, 8937, 21, 2940, 102624, 67, 18, 69, 101411, 450, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 103595, 11, 104340, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103595, 
7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 262, 220, 103437, 13056, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102650, 7472, 1439, 262, 220, 103388, 88166, 262, 220, 103498, 7472, 470, 508, 1203, 935, 262, 220, 100899, 2760, 456, 262, 220, 102269, 1572, 262, 220, 102114, 2760, 1554, 262, 220, 100928, 9356, 353, 4248, 421, 279, 1482, 1917, 594, 2309, 374, 3143, 279, 2661, 8127, 2309, 624, 262, 220, 102626, 9356, 353, 12193, 311, 8252, 421, 582, 1184, 264, 32619, 20396, 624, 262, 220, 99695, 9356, 735, 262, 220, 104340, 2760, 374, 2512, 51621, 14533, 27583, 2460, 25, 1372, 1648, 2710, 341, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 100702, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 98965, 11, 671, 27084, 12, 2958, 25, 220, 18, 11, 671, 4584, 12, 2958, 25, 220, 18, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 20, 102487, 11, 3950, 10426, 25, 220, 15, 13, 98886, 11, 671, 27084, 12, 2958, 25, 220, 19, 11, 671, 4584, 12, 2958, 25, 220, 17, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:00:57 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 0, token usage: 0.13, #running-req: 4, #queue-req: 3, \r\n",,terminal_output +555,971056,"TERMINAL",0,0,"[2026-01-05 16:00:57] Receive: obj=GenerateReqInput(validation_time=1.865113154053688e-05, received_time=1767625257.7856193, received_time_perf=2480142.458544488, rid='12f06717356b4bdda4a7a08b80024d6b', http_worker_ipc=None, text=None, input_ids=[151331, 
151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99146, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 101478, 16, 24, 11, 671, 32762, 34668, 25, 220, 15, 11, 
3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99146, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100539, 23, 102626, 18, 99698, 16, 115760, 100614, 18, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99241, 16, 13, 101294, 20, 100590, 23, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 21, 13, 100539, 22, 103205, 19, 122559, 11, 9276, 1131, 37012, 102269, 65, 15, 66, 102269, 6902, 19, 99241, 101663, 66, 17, 8315, 99317, 4645, 18, 66, 20, 13225, 823, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220] ... 
[16, 11, 9276, 1131, 15, 64, 20, 6066, 22, 67, 22, 2127, 22, 64, 19, 99916, 100809, 21, 8937, 21, 2940, 102624, 67, 18, 69, 101411, 450, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 103595, 11, 104340, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103595, 
7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 262, 220, 103437, 13056, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102650, 7472, 1439, 262, 220, 103388, 88166, 262, 220, 103498, 7472, 470, 508, 1203, 935, 262, 220, 100899, 2760, 456, 262, 220, 102269, 1572, 262, 220, 102114, 2760, 1554, 262, 220, 100928, 9356, 353, 4248, 421, 279, 1482, 1917, 594, 2309, 374, 3143, 279, 2661, 8127, 2309, 624, 262, 220, 102626, 9356, 353, 12193, 311, 8252, 421, 582, 1184, 264, 32619, 20396, 624, 262, 220, 99695, 9356, 735, 262, 220, 104340, 2760, 374, 2512, 51621, 14533, 27583, 2460, 25, 1372, 1648, 2710, 341, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 100702, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 98965, 11, 671, 27084, 12, 2958, 25, 220, 18, 11, 671, 4584, 12, 2958, 25, 220, 18, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 20, 102487, 11, 3950, 10426, 25, 220, 15, 13, 98886, 11, 671, 27084, 12, 2958, 25, 220, 19, 11, 671, 4584, 12, 2958, 25, 220, 17, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +556,971403,"TERMINAL",0,0,"[2026-01-05 16:00:58 TP0] Prefill batch, #new-seq: 2, #new-token: 1784, #cached-token: 16979, token usage: 0.15, #running-req: 4, #queue-req: 3, \r\n",,terminal_output +557,971553,"TERMINAL",0,0,"[2026-01-05 16:00:58 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 600, token usage: 0.15, #running-req: 6, #queue-req: 2, \r\n",,terminal_output 
+558,971896,"TERMINAL",0,0,"[2026-01-05 16:00:58 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 0, token usage: 0.17, #running-req: 6, #queue-req: 2, \r\n",,terminal_output +559,972281,"TERMINAL",0,0,"[2026-01-05 16:00:58 TP0] Prefill batch, #new-seq: 1, #new-token: 983, #cached-token: 0, token usage: 0.19, #running-req: 6, #queue-req: 2, \r\n",,terminal_output +560,972282,"TERMINAL",0,0,"[2026-01-05 16:00:58 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.19, #running-req: 7, #queue-req: 1, \r\n",,terminal_output +561,972509,"TERMINAL",0,0,"[2026-01-05 16:00:59 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 0, token usage: 0.21, #running-req: 7, #queue-req: 1, \r\n",,terminal_output +562,972831,"TERMINAL",0,0,"[2026-01-05 16:00:59 TP0] Prefill batch, #new-seq: 2, #new-token: 1107, #cached-token: 16979, token usage: 0.23, #running-req: 7, #queue-req: 0, \r\n",,terminal_output +563,973397,"TERMINAL",0,0,"[2026-01-05 16:01:00] Finish: obj=GenerateReqInput(validation_time=1.625390723347664e-05, received_time=1767625256.4842694, received_time_perf=2480141.157194542, rid='cb35a26e9ff04922aa63f1d08693553e', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""n: 874, #cached-token: 0, token usage: 0.04, #running-req: 0, #queue-req: 2, \n[2026-01-05 16:00:56] Receive: obj=GenerateReqInput(validation_time=1.8575694411993027e-05, received_time=1767625255.981489, received_time_perf=2480140.654414143, rid='5eaed925376f41a28ffed2f566328111', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 28964, 15, 60, 49872, 7162, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 5839, 25, 220, 122876, 
101723, 11, 3950, 10426, 25, 220, 15, 13, 100441, 11, 4193, 2422, 25, 220, 18, 13, 99951, 11, 4193, 4379, 25, 220, 15, 13, 104160, 11, 24335, 4771, 25, 3007, 11, 4081, 63308, 320, 5839, 2687, 1648, 220, 112596, 13, 99869, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 20, 100557, 101663, 21, 101840, 18, 99618, 22, 107609, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99146, 22, 13, 100002, 17, 23, 100235, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 16, 13, 22, 99446, 22, 99618, 101917, 19, 11, 9276, 1131, 66, 16, 64, 23, 65, 15, 55469, 124047, 101655, 22, 67, 23, 98886, 66, 22, 66, 100235, 17, 101411, 66, 99619, 19, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11] ... 
[102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101130, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 103498, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100590, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 17, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101917, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102284, 22, 101917, 24, 101723, 16, 99887, 99064, 17, 22, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 20, 13, 101663, 16, 100933, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 15, 13, 101411, 19, 19, 99367, 121498, 11, 9276, 1131, 20, 12502, 291, 24, 99446, 101140, 21, 69, 102340, 64, 99869, 542, 291, 17, 69, 20, 101478, 18, 99869, 111659, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 
99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '70,90p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 100096, 11, 100067, 79, 1248, 73022, 151336], 'meta_info': {'id': 'cb35a26e9ff04922aa63f1d08693553e', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 17833, 'weight_version': 
'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.00040880427695810795, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.07440541684627533, 4616, 'cat'), (-0.0027617192827165127, 481, ' -'), (-8.344646857949556e-07, 77, 'n'), (-1.0847986231965479e-05, 608, ' /'), (-8.344646857949556e-07, 5117, 'home'), (-3.576278118089249e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-7.986990567587782e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-1.1920928244535389e-07, 79888, '-extension'), (-0.0003721021639648825, 13428, '/src'), (-0.0001896439935080707, 14, '/'), (-6.663577369181439e-05, 27082, 'preview'), (-0.00044252615771256387, 14, '/'), (-0.0016232660273090005, 5057, 'inline'), (-0.0010696887038648129, 5179, 'Provider'), (-1.1920928244535389e-07, 21239, '.ts'), (-1.3112935448589269e-05, 760, ' |'), (-5.602820692729438e-06, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (-1.1920928244535389e-07, 364, "" '""), (-1.3265107870101929, 100096, '70'), (-1.1920928244535389e-07, 11, ','), (-0.007651781663298607, 100067, '90'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-9.274052717955783e-05, 73022, '```'), (-2.622600959512056e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 596, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 3.6286673545837402, 'response_sent_to_client_ts': 1767625260.1130424}}\r\n[2026-01-05 16:01:00] Finish: obj=GenerateReqInput(validation_time=2.836296334862709e-05, received_time=1767625256.7851188, received_time_perf=2480141.458043971, rid='e2f101bef48e4e8f9f20bf3597ce9014', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""ed_time=1767625255.981489, received_time_perf=2480140.654414143, rid='5eaed925376f41a28ffed2f566328111', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79\n... [truncated]\n/nothink<|user|>\n\n[2026-01-05 16:00:56 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 0, token usage: 0.06, #running-req: 1, #queue-req: 2,\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 
11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 28964, 15, 60, 49872, 7162, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 5839, 25, 220, 122876, 101723, 11, 3950, 10426, 25, 220, 15, 13, 100441, 11, 4193, 2422, 25, 220, 18, 13, 99951, 11, 4193, 4379, 25, 220, 15, 13, 104160, 11, 24335, 4771, 25, 3007, 11, 4081, 63308, 320, 5839, 2687, 1648, 220, 112596, 13, 99869, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 20, 100557, 101663, 21, 101840, 18, 99618, 22, 107609, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99146, 22, 13, 100002, 17, 23, 100235, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 16, 13, 22, 99446, 22, 99618, 101917, 19, 11, 9276, 1131, 66, 16, 64, 23, 65, 15, 55469, 124047, 101655, 22, 67, 23, 98886, 66, 22, 66, 100235, 17, 101411, 66, 99619, 19, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11] ... 
[220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101130, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 103498, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100590, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 17, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101917, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102284, 22, 101917, 24, 101723, 16, 99887, 99064, 17, 22, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 20, 13, 101663, 16, 100933, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 15, 13, 101411, 19, 19, 99367, 121498, 11, 9276, 1131, 20, 12502, 291, 24, 99446, 101140, 21, 69, 102340, 64, 99869, 542, 291, 17, 69, 20, 101478, 18, 99869, 111659, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 
21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101917, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100539, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 17, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '69,89p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 103093, 11, 103502, 79, 1248, 73022, 151336], 'meta_info': {'id': 'e2f101bef48e4e8f9f20bf3597ce9014', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 17910, 'weight_version': 'default', 
'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.0006513857515528798, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.12956152856349945, 4616, 'cat'), (-0.003948274999856949, 481, ' -'), (-1.0728830375228426e-06, 77, 'n'), (-1.1324817933200393e-05, 608, ' /'), (-1.4305104514278355e-06, 5117, 'home'), (-2.3841855067985307e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-7.748573807475623e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-2.3841855067985307e-07, 79888, '-extension'), (-0.0003121604095213115, 13428, '/src'), (-0.00017426878912374377, 14, '/'), (-5.781483559985645e-05, 27082, 'preview'), (-0.00031013446277938783, 14, '/'), (-0.0015705404803156853, 5057, 'inline'), (-0.0009575072908774018, 5179, 'Provider'), (-2.3841855067985307e-07, 21239, '.ts'), (-1.3112935448589269e-05, 760, ' |'), (-5.722029527532868e-06, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (-1.1920928244535389e-07, 364, "" '""), (-1.4591822624206543, 103093, '69'), (-1.1920928244535389e-07, 11, ','), (-0.02043585292994976, 103502, '89'), (-1.1920928244535389e-07, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-4.541770613286644e-05, 73022, '```'), (-1.5497195136049413e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 16979, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 3.355325698852539, 'response_sent_to_client_ts': 1767625260.1410277}}\r\n[2026-01-05 16:01:00] Finish: obj=GenerateReqInput(validation_time=1.8977094441652298e-05, received_time=1767625257.1717012, received_time_perf=2480141.844626481, rid='0a5bb7d7ee7a426996bd6df51d3f65de', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""time=1.625390723347664e-05, received_time=1767625256.4842694, received_time_perf=2480141.157194542, rid='cb35a26e9ff04922aa63f1d08693553e', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 
518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 21, 99446, 23, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100002, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99419, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 99243, 98729, 22, 15, 99419, 100067, 23, 100614, 21, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99146, 22, 13, 100919, 17, 102807, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 17, 13, 100002, 20, 100928, 23, 117786, 11, 9276, 1131, 102487, 22, 64, 18, 70783, 67, 101294, 17, 19, 65, 23, 1999, 23, 64, 21, 67, 103498, 65, 18, 6902, 101723, 638, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24] ... 
[11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101917, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100539, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 17, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101917, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 21, 99446, 18, 100067, 22, 99619, 101135, 22, 101478, 19,\n[... 
40676 bytes truncated to respect terminal scrollback settings ...]\n",,terminal_output +564,974922,"src/preview/inlineProvider.ts",2550,0,"",typescript,selection_command +565,975072,"TERMINAL",0,0,"[2026-01-05 16:01:01] Receive: obj=GenerateReqInput(validation_time=2.0470935851335526e-05, received_time=1767625261.7597175, received_time_perf=2480146.432642564, rid='4d01699eb29d40a8aa4e21992a277cfd', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 
315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99146, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 103388, 22, 99951, 101252, 22, 23, 101840, 101252, 16, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99241, 16, 13, 21, 101804, 20, 100702, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 21, 13, 100632, 23, 100461, 24, 100590, 24, 11, 9276, 1131, 100104, 17, 3065, 104340, 66, 101917, 16, 101135, 21, 20, 99243, 103878, 68, 20, 100067, 99366, 66, 24, 4645, 101474, 20, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104] ... 
[8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 262, 220, 103437, 13056, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102650, 7472, 1439, 262, 220, 103388, 88166, 262, 220, 103498, 7472, 470, 508, 1203, 935, 262, 220, 100899, 2760, 456, 262, 220, 102269, 1572, 262, 220, 102114, 2760, 1554, 262, 220, 100928, 9356, 353, 4248, 421, 279, 1482, 1917, 594, 2309, 374, 3143, 279, 2661, 8127, 2309, 624, 262, 220, 102626, 9356, 353, 12193, 311, 8252, 421, 582, 1184, 264, 32619, 20396, 624, 262, 220, 99695, 9356, 735, 262, 220, 104340, 2760, 374, 2512, 51621, 14533, 27583, 2460, 25, 1372, 1648, 2710, 341, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 100702, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 98965, 11, 671, 27084, 12, 2958, 25, 220, 18, 11, 671, 4584, 12, 2958, 25, 220, 18, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 20, 102487, 11, 3950, 10426, 25, 220, 15, 13, 98886, 11, 671, 27084, 12, 2958, 25, 220, 19, 11, 671, 4584, 12, 2958, 25, 220, 17, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102269, 17, 108479, 23, 102114, 122559, 122250, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 22, 13, 22, 101655, 22, 102340, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 17, 13, 125603, 101478, 21, 102284, 19, 11, 9276, 1131, 22, 69, 101729, 68, 114495, 23, 66, 21, 66, 101562, 67, 100928, 24, 19, 68, 100104, 17, 20, 65, 24, 99082, 99951, 23, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 
102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:01:01 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 597, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +566,975201,"TERMINAL",0,0,"[2026-01-05 16:01:01] Receive: obj=GenerateReqInput(validation_time=1.3776123523712158e-05, received_time=1767625261.952238, received_time_perf=2480146.625163275, rid='f163bbaff0d241a9b63200bd8bd29d17', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 
151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 98729, 102573, 19, 110228, 22, 100096, 15, 98729, 20, 68, 12, 100002, 11, 3949, 3009, 28, 
123853, 102269, 17, 20, 99619, 17, 13, 19, 100096, 108157, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98965, 22, 13, 121498, 100539, 20, 20, 99082, 11, 9276, 1131, 20, 69, 22, 67, 100539, 65, 100702, 24, 100928, 19, 69, 19, 64, 24, 7221, 21, 64, 15, 99951, 23, 99082, 100286, 69, 19, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11] ... 
[220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 102114, 11, 103205, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 102114, 2760, 1554, 262, 220, 100928, 9356, 353, 4248, 421, 279, 1482, 1917, 594, 2309, 374, 3143, 279, 2661, 8127, 2309, 624, 262, 220, 102626, 9356, 353, 12193, 311, 8252, 421, 582, 1184, 264, 32619, 20396, 624, 262, 220, 99695, 9356, 735, 262, 220, 104340, 2760, 374, 2512, 51621, 14533, 27583, 2460, 25, 1372, 1648, 2710, 341, 262, 220, 104160, 7472, 421, 1505, 574, 12389, 8, 341, 262, 220, 104127, 13056, 470, 895, 280, 262, 220, 104029, 7472, 456, 262, 220, 102284, 88166, 262, 220, 102807, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 13771, 863, 341, 262, 220, 103878, 13056, 470, 4149, 14562, 1394, 12389, 6187, 58, 15, 60, 481, 8127, 2460, 8, 2651, 220, 16, 280, 262, 220, 101252, 7472, 456, 262, 220, 103502, 88166, 262, 220, 100067, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 23044, 863, 341, 262, 220, 104327, 13056, 470, 8127, 2460, 2604, 419, 12389, 30029, 4962, 58, 15, 60, 481, 220, 16, 1009, 715, 262, 220, 103825, 197, 4293, 8127, 2460, 2651, 419, 12389, 30029, 5073, 58, 15, 60, 488, 220, 16, 280, 262, 220, 103946, 7472, 456, 262, 220, 103992, 88166, 262, 220, 101804, 7472, 470, 895, 280, 262, 220, 102487, 2760, 456, 262, 220, 103205, 197, 532, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100286, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100590, 22, 100614, 18, 101729, 20, 117055, 101130, 17, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99916, 16, 13, 100899, 24, 22, 99419, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 21, 13, 19, 101175, 21, 101961, 101917, 19, 11, 9276, 1131, 19, 67, 15, 99317, 100809, 3065, 100104, 67, 99698, 64, 23, 5305, 19, 68, 17, 98729, 103825, 64, 99951, 22, 66, 6902, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 
11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +567,975315,"TERMINAL",0,0,"[2026-01-05 16:01:02 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 1, \r\n",,terminal_output +568,975648,"TERMINAL",0,0,"[2026-01-05 16:01:02 TP0] Prefill batch, #new-seq: 1, #new-token: 477, #cached-token: 0, token usage: 0.04, #running-req: 0, #queue-req: 1, \r\n",,terminal_output 
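The Receive/Finish entries captured above are SGLang GenerateReqInput dumps: each one carries the serialized prompt, the sampling_params block (temperature=0.7, top_p=0.8, top_k=20, return_logprob=True), and later the per-token output_token_logprobs. As a minimal sketch only, assuming a host, port, and model id that are NOT taken from this recording, a client could produce an equivalent request against the OpenAI-compatible completions endpoint that also appears in these logs:

```typescript
// Hedged illustration: endpoint URL and model id below are placeholders.
// Only the sampling values (temperature 0.7, top_p 0.8) and the logprob
// request mirror what the logged GenerateReqInput objects show.
async function requestNextCommand(prompt: string): Promise<string> {
  const response = await fetch('http://hai001:30000/v1/chat/completions', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      model: 'crowd-pilot-sft',                 // hypothetical model name
      messages: [{ role: 'user', content: prompt }],
      temperature: 0.7,
      top_p: 0.8,
      logprobs: true,                           // mirrors return_logprob=True in the log
    }),
  });
  const data = await response.json();
  // The logged completions are single fenced ```bash blocks such as
  // `cat -n .../inlineProvider.ts | sed -n '70,90p'`.
  return data.choices[0].message.content;
}
```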
+569,975729,"TERMINAL",0,0,"[2026-01-05 16:01:02 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.04, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +570,976010,"TERMINAL",0,0,"[2026-01-05 16:01:02 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 0, token usage: 0.06, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +571,976371,"TERMINAL",0,0,"[2026-01-05 16:01:03 TP0] Prefill batch, #new-seq: 1, #new-token: 741, #cached-token: 0, token usage: 0.08, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +572,976746,"TERMINAL",0,0,"[2026-01-05 16:01:03 TP0] Decode batch, #running-req: 2, #token: 34656, token usage: 0.08, accept len: 3.25, accept rate: 0.81, cuda graph: True, gen throughput (token/s): 41.81, #queue-req: 0, \r\n[2026-01-05 16:01:03] Finish: obj=GenerateReqInput(validation_time=2.0470935851335526e-05, received_time=1767625261.7597175, received_time_perf=2480146.432642564, rid='4d01699eb29d40a8aa4e21992a277cfd', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=1.762108877301216e-05, received_time=1767625257.7477417, received_time_perf=2480142.420666854, rid='7f58e1168c6c46d7894e2925b915278c', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99146, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 103388, 22, 99951, 
101252, 22, 23, 101840, 101252, 16, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99241, 16, 13, 21, 101804, 20, 100702, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 121570, 21, 13, 100632, 23, 100461, 24, 100590, 24, 11, 9276, 1131, 100104, 17, 3065, 104340, 66, 101917, 16, 101135, 21, 20, 99243, 103878, 68, 20, 100067, 99366, 66, 24, 4645, 101474, 20, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104] ... 
[8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 262, 220, 103437, 13056, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102650, 7472, 1439, 262, 220, 103388, 88166, 262, 220, 103498, 7472, 470, 508, 1203, 935, 262, 220, 100899, 2760, 456, 262, 220, 102269, 1572, 262, 220, 102114, 2760, 1554, 262, 220, 100928, 9356, 353, 4248, 421, 279, 1482, 1917, 594, 2309, 374, 3143, 279, 2661, 8127, 2309, 624, 262, 220, 102626, 9356, 353, 12193, 311, 8252, 421, 582, 1184, 264, 32619, 20396, 624, 262, 220, 99695, 9356, 735, 262, 220, 104340, 2760, 374, 2512, 51621, 14533, 27583, 2460, 25, 1372, 1648, 2710, 341, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 100702, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 98965, 11, 671, 27084, 12, 2958, 25, 220, 18, 11, 671, 4584, 12, 2958, 25, 220, 18, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 20, 102487, 11, 3950, 10426, 25, 220, 15, 13, 98886, 11, 671, 27084, 12, 2958, 25, 220, 19, 11, 671, 4584, 12, 2958, 25, 220, 17, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102269, 17, 108479, 23, 102114, 122559, 122250, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 22, 13, 22, 101655, 22, 102340, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 17, 13, 125603, 101478, 21, 102284, 19, 11, 9276, 1131, 22, 69, 101729, 68, 114495, 23, 66, 21, 66, 101562, 67, 100928, 24, 19, 68, 100104, 17, 20, 65, 24, 99082, 99951, 23, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 
102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '70,90p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 100096, 11, 100067, 79, 1248, 73022, 151336], 'meta_info': {'id': '4d01699eb29d40a8aa4e21992a277cfd', 'finish_reason': {'type': 
'stop', 'matched': 151336}, 'prompt_tokens': 17458, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.0005704921204596758, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.13662512600421906, 4616, 'cat'), (-0.0027625514194369316, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-7.271740287251305e-06, 608, ' /'), (-2.3841855067985307e-07, 5117, 'home'), (-4.768370445162873e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.7523612768854946e-05, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-5.960462772236497e-07, 79888, '-extension'), (-0.0003307510633021593, 13428, '/src'), (-0.0008032671175897121, 14, '/'), (-0.0006699701189063489, 27082, 'preview'), (-0.0025311834178864956, 14, '/'), (-0.0021550068631768227, 5057, 'inline'), (-2.7894584491150454e-05, 5179, 'Provider'), (-1.1920928244535389e-07, 21239, '.ts'), (-7.629365427419543e-06, 760, ' |'), (-4.291525328881107e-06, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (0.0, 364, "" '""), (-0.8869099617004395, 100096, '70'), (-2.3841855067985307e-07, 11, ','), (-0.0008541273418813944, 100067, '90'), (0.0, 79, 'p'), (0.0, 1248, ""'\n""), (-3.1709168979432434e-05, 73022, '```'), (-6.318072337307967e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 597, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 1.747077226638794, 'response_sent_to_client_ts': 1767625263.5068653}}\r\n[2026-01-05 16:01:03] Finish: obj=GenerateReqInput(validation_time=1.3776123523712158e-05, received_time=1767625261.952238, received_time_perf=2480146.625163275, rid='f163bbaff0d241a9b63200bd8bd29d17', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""time=2.0470935851335526e-05, received_time=1767625261.7597175, received_time_perf=2480146.432642564, rid='4d01699eb29d40a8aa4e21992a277cfd', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198\n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 
518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 98729, 102573, 19, 110228, 22, 100096, 15, 98729, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 17, 13, 19, 100096, 108157, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98965, 22, 13, 121498, 100539, 20, 20, 99082, 11, 9276, 1131, 20, 69, 22, 67, 100539, 65, 100702, 24, 100928, 19, 69, 19, 64, 24, 7221, 21, 64, 15, 99951, 23, 99082, 100286, 69, 19, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11] ... 
[220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 102114, 11, 103205, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 102114, 2760, 1554, 262, 220, 100928, 9356, 353, 4248, 421, 279, 1482, 1917, 594, 2309, 374, 3143, 279, 2661, 8127, 2309, 624, 262, 220, 102626, 9356, 353, 12193, 311, 8252, 421, 582, 1184, 264, 32619, 20396, 624, 262, 220, 99695, 9356, 735, 262, 220, 104340, 2760, 374, 2512, 51621, 14533, 27583, 2460, 25, 1372, 1648, 2710, 341, 262, 220, 104160, 7472, 421, 1505, 574, 12389, 8, 341, 262, 220, 104127, 13056, 470, 895, 280, 262, 220, 104029, 7472, 456, 262, 220, 102284, 88166, 262, 220, 102807, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 13771, 863, 341, 262, 220, 103878, 13056, 470, 4149, 14562, 1394, 12389, 6187, 58, 15, 60, 481, 8127, 2460, 8, 2651, 220, 16, 280, 262, 220, 101252, 7472, 456, 262, 220, 103502, 88166, 262, 220, 100067, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 23044, 863, 341, 262, 220, 104327, 13056, 470, 8127, 2460, 2604, 419, 12389, 30029, 4962, 58, 15, 60, 481, 220, 16, 1009, 715, 262, 220, 103825, 197, 4293, 8127, 2460, 2651, 419, 12389, 30029, 5073, 58, 15, 60, 488, 220, 16, 280, 262, 220, 103946, 7472, 456, 262, 220, 103992, 88166, 262, 220, 101804, 7472, 470, 895, 280, 262, 220, 102487, 2760, 456, 262, 220, 103205, 197, 532, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100286, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100590, 22, 100614, 18, 101729, 20, 117055, 101130, 17, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99916, 16, 13, 100899, 24, 22, 99419, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 21, 13, 19, 101175, 21, 101961, 101917, 19, 11, 9276, 1131, 19, 67, 15, 99317, 100809, 3065, 100104, 67, 99698, 64, 23, 5305, 19, 68, 17, 98729, 103825, 64, 99951, 22, 66, 6902, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 
11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '1,20p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98360, 79, 1248, 73022, 151336], 'meta_info': {'id': 'f163bbaff0d241a9b63200bd8bd29d17', 
'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 17721, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.00014494798961095512, 73022, '```'), (0.0, 45937, 'bash'), (-1.1920928244535389e-07, 198, '\n'), (-0.38872841000556946, 4616, 'cat'), (-0.03949848562479019, 481, ' -'), (-1.5497195136049413e-06, 77, 'n'), (-3.8742269680369645e-05, 608, ' /'), (-8.344646857949556e-07, 5117, 'home'), (-3.576278118089249e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.9192511899746023e-05, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-1.1920922133867862e-06, 79888, '-extension'), (-0.004192610736936331, 13428, '/src'), (-0.0018543682526797056, 14, '/'), (-0.00042798896902240813, 27082, 'preview'), (-0.033181145787239075, 14, '/'), (-0.13647714257240295, 5057, 'inline'), (-0.003271111287176609, 5179, 'Provider'), (-1.8000440832111053e-05, 21239, '.ts'), (-0.013740594498813152, 760, ' |'), (-0.0007906410028226674, 10918, ' sed'), (-1.1920928244535389e-07, 481, ' -'), (-4.768370445162873e-07, 77, 'n'), (-2.50339189733495e-06, 364, "" '""), (-0.2190331667661667, 16, '1'), (-9.464769391342998e-05, 11, ','), (-0.7957606315612793, 98360, '20'), (0.0, 79, 'p'), (-2.3841855067985307e-07, 1248, ""'\n""), (-0.00013255194062367082, 73022, '```'), (-7.629365427419543e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 596, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 1.5711877346038818, 'response_sent_to_client_ts': 1767625263.5234883}}\r\n[2026-01-05 16:01:03] INFO: 10.86.2.252:47296 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +573,979919,"src/preview/inlineProvider.ts",365,0,"",typescript,selection_command +574,979994,"TERMINAL",0,0,"[2026-01-05 16:01:06] Receive: obj=GenerateReqInput(validation_time=2.1317042410373688e-05, received_time=1767625266.7275326, received_time_perf=2480151.400457786, rid='44efeca9d740494db180adcb4ded5c78', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 
2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101175, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 101411, 16, 19, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100919, 20, 100919, 23, 23, 101804, 102807, 24, 122300, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 17, 13, 23, 99317, 110610, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98965, 22, 13, 100933, 24, 100441, 19, 20, 99619, 11, 9276, 1131, 370, 17, 329, 16, 65, 17, 924, 3632, 19, 98729, 100104, 18, 32497, 100590, 68, 19, 67, 102624, 8901, 18, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 
220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220] ... [16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100286, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101140, 22, 21, 98886, 100235, 17, 101140, 16, 118611, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99916, 16, 13, 101804, 17, 99619, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 21, 13, 21, 99446, 114491, 99951, 20, 11, 9276, 1131, 69, 114491, 6066, 2649, 15, 67, 99590, 16, 64, 24, 65, 21, 101175, 98503, 8937, 23, 8937, 100104, 67, 99419, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 
102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, 
lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:01:06 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 597, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +575,980261,"TERMINAL",0,0,"[2026-01-05 16:01:06] Receive: obj=GenerateReqInput(validation_time=1.8245074898004532e-05, received_time=1767625266.94816, received_time_perf=2480151.6210853, rid='859328be96ed4361944bb7f8a19838eb', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 
481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101140, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 102088, 23, 101175, 19, 100702, 99241, 18, 22, 99590, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 22, 13, 118836, 100002, 17, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98886, 16, 13, 23, 99317, 24, 102114, 102650, 11, 9276, 1131, 101294, 17, 65, 24, 100104, 108642, 101723, 19, 22, 102088, 23, 2940, 100590, 16, 22, 67, 23, 103205, 101411, 23, 65, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104] ... 
[220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98360, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 311, 51397, 1851, 3812, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 624, 257, 220, 21, 197, 353, 1096, 4990, 10614, 916, 27971, 594, 30523, 323, 4278, 389, 4287, 5128, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 8967, 25, 2710, 284, 830, 280, 262, 220, 98965, 1572, 262, 220, 98886, 2760, 1554, 262, 220, 99366, 9356, 353, 2573, 279, 1482, 1917, 311, 3037, 438, 7381, 9750, 624, 262, 220, 99367, 9356, 735, 262, 220, 99082, 2760, 738, 2512, 15294, 25, 5586, 1648, 737, 341, 262, 220, 99317, 7472, 419, 12389, 284, 1917, 280, 262, 220, 99419, 7472, 442, 30174, 30530, 6119, 311, 312, 65012, 7381, 3459, 908, 198, 262, 220, 99243, 7472, 55008, 33739, 7769, 4062, 492, 8866, 12389, 29307, 50, 3799, 28455, 1157, 262, 220, 98729, 2760, 456, 262, 220, 98360, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100539, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 118901, 100096, 19, 99590, 16, 100441, 22, 100632, 23, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99916, 21, 13, 22, 99951, 20, 101175, 21, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99082, 16, 13, 102259, 100461, 22, 100928, 21, 11, 9276, 1131, 101723, 823, 22733, 24, 67, 22, 99698, 101474, 19, 1999, 105818, 329, 7221, 19, 9784, 20, 66, 100928, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 
102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +576,980279,"TERMINAL",0,0,"[2026-01-05 16:01:07 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 1, \r\n",,terminal_output +577,980616,"TERMINAL",0,0,"[2026-01-05 16:01:07 TP0] Prefill batch, #new-seq: 1, #new-token: 735, #cached-token: 0, token usage: 0.04, #running-req: 0, #queue-req: 1, \r\n",,terminal_output +578,980702,"TERMINAL",0,0,"[2026-01-05 16:01:07 TP0] 
Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.04, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +579,980999,"TERMINAL",0,0,"[2026-01-05 16:01:07 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 0, token usage: 0.06, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +580,981344,"TERMINAL",0,0,"[2026-01-05 16:01:08 TP0] Prefill batch, #new-seq: 1, #new-token: 272, #cached-token: 0, token usage: 0.08, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +581,981622,"TERMINAL",0,0,"[2026-01-05 16:01:08] Finish: obj=GenerateReqInput(validation_time=2.1317042410373688e-05, received_time=1767625266.7275326, received_time_perf=2480151.400457786, rid='44efeca9d740494db180adcb4ded5c78', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=1.3776123523712158e-05, received_time=1767625261.952238, received_time_perf=2480146.625163275, rid='f163bbaff0d241a9b63200bd8bd29d17', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101175, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 
220, 101411, 16, 19, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100919, 20, 100919, 23, 23, 101804, 102807, 24, 122300, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 17, 13, 23, 99317, 110610, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98965, 22, 13, 100933, 24, 100441, 19, 20, 99619, 11, 9276, 1131, 370, 17, 329, 16, 65, 17, 924, 3632, 19, 98729, 100104, 18, 32497, 100590, 68, 19, 67, 102624, 8901, 18, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220] ... 
[16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100286, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101140, 22, 21, 98886, 100235, 17, 101140, 16, 118611, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99916, 16, 13, 101804, 17, 99619, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 21, 13, 21, 99446, 114491, 99951, 20, 11, 9276, 1131, 69, 114491, 6066, 2649, 15, 67, 99590, 16, 64, 24, 65, 21, 101175, 98503, 8937, 23, 8937, 100104, 67, 99419, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 
24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '1,20p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98360, 79, 1248, 73022, 151336], 'meta_info': {'id': '44efeca9d740494db180adcb4ded5c78', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 17716, 'weight_version': 'default', 
'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.00017045476124621928, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.3125125765800476, 4616, 'cat'), (-0.03313270956277847, 481, ' -'), (-7.152555099310121e-07, 77, 'n'), (-4.541770613286644e-05, 608, ' /'), (-1.7881377516459906e-06, 5117, 'home'), (-4.768370445162873e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-3.504691630951129e-05, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-1.311301275563892e-06, 79888, '-extension'), (-0.003941981587558985, 13428, '/src'), (-0.0033566581550985575, 14, '/'), (-0.000867467257194221, 27082, 'preview'), (-0.028594333678483963, 14, '/'), (-0.08831595629453659, 5057, 'inline'), (-0.0021362120751291513, 5179, 'Provider'), (-0.00011383838864276186, 21239, '.ts'), (-0.011578385718166828, 760, ' |'), (-0.0004094000905752182, 10918, ' sed'), (0.0, 481, ' -'), (-1.9073468138230965e-06, 77, 'n'), (-8.344646857949556e-07, 364, "" '""), (-0.32637307047843933, 16, '1'), (-0.00011657988943625242, 11, ','), (-0.7011761665344238, 98360, '20'), (0.0, 79, 'p'), (-2.3841855067985307e-07, 1248, ""'\n""), (-0.0002706876548472792, 73022, '```'), (-9.059865078597795e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 597, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 1.6598446369171143, 'response_sent_to_client_ts': 1767625268.3876095}}\r\n[2026-01-05 16:01:08] Finish: obj=GenerateReqInput(validation_time=1.8245074898004532e-05, received_time=1767625266.94816, received_time_perf=2480151.6210853, rid='859328be96ed4361944bb7f8a19838eb', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""time=2.1317042410373688e-05, received_time=1767625266.7275326, received_time_perf=2480151.400457786, rid='44efeca9d740494db180adcb4ded5c78', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198\n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 
518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101140, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 102088, 23, 101175, 19, 100702, 99241, 18, 22, 99590, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 22, 13, 118836, 100002, 17, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98886, 16, 13, 23, 99317, 24, 102114, 102650, 11, 9276, 1131, 101294, 17, 65, 24, 100104, 108642, 101723, 19, 22, 102088, 23, 2940, 100590, 16, 22, 67, 23, 103205, 101411, 23, 65, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104] ... 
[220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98360, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 311, 51397, 1851, 3812, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 624, 257, 220, 21, 197, 353, 1096, 4990, 10614, 916, 27971, 594, 30523, 323, 4278, 389, 4287, 5128, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 8967, 25, 2710, 284, 830, 280, 262, 220, 98965, 1572, 262, 220, 98886, 2760, 1554, 262, 220, 99366, 9356, 353, 2573, 279, 1482, 1917, 311, 3037, 438, 7381, 9750, 624, 262, 220, 99367, 9356, 735, 262, 220, 99082, 2760, 738, 2512, 15294, 25, 5586, 1648, 737, 341, 262, 220, 99317, 7472, 419, 12389, 284, 1917, 280, 262, 220, 99419, 7472, 442, 30174, 30530, 6119, 311, 312, 65012, 7381, 3459, 908, 198, 262, 220, 99243, 7472, 55008, 33739, 7769, 4062, 492, 8866, 12389, 29307, 50, 3799, 28455, 1157, 262, 220, 98729, 2760, 456, 262, 220, 98360, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100539, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 118901, 100096, 19, 99590, 16, 100441, 22, 100632, 23, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99916, 21, 13, 22, 99951, 20, 101175, 21, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99082, 16, 13, 102259, 100461, 22, 100928, 21, 11, 9276, 1131, 101723, 823, 22733, 24, 67, 22, 99698, 101474, 19, 1999, 105818, 329, 7221, 19, 9784, 20, 66, 100928, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 
102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '20,40p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 98360, 11, 99698, 79, 1248, 73022, 151336], 'meta_info': {'id': '859328be96ed4361944bb7f8a19838eb', 'finish_reason': {'type': 
'stop', 'matched': 151336}, 'prompt_tokens': 17252, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-9.393251093570143e-05, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.14402125775814056, 4616, 'cat'), (-0.0194924995303154, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-2.9802276912960224e-06, 608, ' /'), (-1.1920928244535389e-07, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-2.145764938177308e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-1.1920928244535389e-07, 79888, '-extension'), (-0.00048780461656861007, 13428, '/src'), (-0.00017009719158522785, 14, '/'), (-0.00027724236133508384, 27082, 'preview'), (-0.026717329397797585, 14, '/'), (-0.011861507780849934, 5057, 'inline'), (-5.507317473529838e-05, 5179, 'Provider'), (-5.960462772236497e-07, 21239, '.ts'), (-0.003941862843930721, 760, ' |'), (-5.94836674281396e-05, 10918, ' sed'), (0.0, 481, ' -'), (-3.576278118089249e-07, 77, 'n'), (-1.1920922133867862e-06, 364, "" '""), (-1.32662832736969, 98360, '20'), (-1.1920928244535389e-07, 11, ','), (-0.0008818790083751082, 99698, '40'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-3.802703940891661e-05, 73022, '```'), (-3.802703940891661e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 596, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 1.4393746852874756, 'response_sent_to_client_ts': 1767625268.3893452}}\r\n[2026-01-05 16:01:08] INFO: 10.86.2.252:48994 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +582,983964,"src/preview/inlineProvider.ts",843,0,"",typescript,selection_command +583,984047,"TERMINAL",0,0,"[2026-01-05 16:01:10] Receive: obj=GenerateReqInput(validation_time=3.364216536283493e-05, received_time=1767625270.7982323, received_time_perf=2480155.471157497, rid='55c877a8b0a94f88b90dc4a148497fa7', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 
4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101294, 11, 102573, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101140, 28964, 15, 
60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 21, 100067, 23, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101140, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 19, 99590, 18, 99243, 126612, 100933, 23, 126334, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 22, 13, 100899, 16, 120911, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98886, 17, 13, 19, 99590, 99241, 24, 100372, 24, 11, 9276, 1131, 100590, 7628, 21, 638, 329, 99317, 64, 101474, 18, 16, 64, 102284, 22, 17, 67, 99698, 24, 100702, 69, 99200, 64, 17, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720] ... [11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100539, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 99590, 99200, 22, 100933, 24, 103919, 100461, 18, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99916, 21, 13, 24, 100933, 99317, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99082, 16, 13, 21, 99146, 100562, 20, 18, 11, 9276, 1131, 102284, 24, 18, 99869, 1371, 102487, 291, 19, 100632, 98729, 19, 19, 6066, 22, 69, 23, 64, 100759, 100919, 3065, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 
120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 220, 99446, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, 
return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:01:10 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 847, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +584,984225,"TERMINAL",0,0,"[2026-01-05 16:01:11] Receive: obj=GenerateReqInput(validation_time=1.3979151844978333e-05, received_time=1767625270.993827, received_time_perf=2480155.666752416, rid='531263b451d14a80af5ea9dde4a9483f', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 
50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 100919, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 17, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 23, 100928, 22, 11, 3950, 10426, 25, 220, 15, 13, 100002, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 100919, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100235, 23, 22, 100809, 21, 100632, 100632, 18, 101663, 18, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 23, 13, 15, 100067, 20, 100919, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98886, 17, 13, 102269, 18, 101562, 18, 114365, 11, 9276, 1131, 100632, 17, 99366, 66, 126382, 67, 22, 69, 102088, 67, 101729, 68, 20, 100557, 21, 102114, 99419, 924, 18, 66, 99082, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916] ... 
[99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 220, 99446, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 98360, 11, 99698, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 98360, 1572, 262, 220, 99146, 2760, 1554, 262, 220, 99241, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99619, 9356, 735, 262, 220, 99590, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99446, 7472, 419, 12389, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 2126, 279, 1482, 1917, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 633, 2512, 4555, 5586, 760, 845, 341, 262, 220, 101175, 7472, 470, 419, 12389, 280, 262, 220, 100702, 2760, 456, 262, 220, 101135, 1572, 262, 220, 100235, 2760, 1554, 262, 220, 100632, 9356, 353, 18535, 476, 11151, 279, 9106, 624, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 98668, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 18, 13, 100632, 19, 122250, 20, 100632, 99869, 18, 101474, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99951, 15, 13, 22, 101663, 126173, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99082, 20, 13, 101655, 16, 123006, 101474, 22, 11, 9276, 1131, 101130, 66, 23, 102114, 64, 23, 65, 15, 64, 103992, 69, 101252, 65, 100067, 7628, 19, 64, 121919, 101474, 22, 3632, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 
22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +585,984328,"TERMINAL",0,0,"[2026-01-05 16:01:11 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 1, \r\n",,terminal_output +586,984674,"TERMINAL",0,0,"[2026-01-05 16:01:11 TP0] Prefill batch, #new-seq: 1, #new-token: 20, #cached-token: 0, token usage: 0.04, #running-req: 0, #queue-req: 1, \r\n",,terminal_output +587,984757,"TERMINAL",0,0,"[2026-01-05 
16:01:11 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.04, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +588,985031,"TERMINAL",0,0,"[2026-01-05 16:01:11 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 0, token usage: 0.06, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +589,985368,"TERMINAL",0,0,"[2026-01-05 16:01:12 TP0] Prefill batch, #new-seq: 1, #new-token: 141, #cached-token: 0, token usage: 0.07, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +590,985643,"TERMINAL",0,0,"[2026-01-05 16:01:12] Finish: obj=GenerateReqInput(validation_time=1.3979151844978333e-05, received_time=1767625270.993827, received_time_perf=2480155.666752416, rid='531263b451d14a80af5ea9dde4a9483f', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=3.364216536283493e-05, received_time=1767625270.7982323, received_time_perf=2480155.471157497, rid='55c877a8b0a94f88b90dc4a148497fa7', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 100919, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 17, 11, 671, 931, 34668, 25, 
220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 23, 100928, 22, 11, 3950, 10426, 25, 220, 15, 13, 100002, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 100919, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100235, 23, 22, 100809, 21, 100632, 100632, 18, 101663, 18, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 23, 13, 15, 100067, 20, 100919, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98886, 17, 13, 102269, 18, 101562, 18, 114365, 11, 9276, 1131, 100632, 17, 99366, 66, 126382, 67, 22, 69, 102088, 67, 101729, 68, 20, 100557, 21, 102114, 99419, 924, 18, 66, 99082, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916] ... 
[99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 220, 99446, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 98360, 11, 99698, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 98360, 1572, 262, 220, 99146, 2760, 1554, 262, 220, 99241, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99619, 9356, 735, 262, 220, 99590, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99446, 7472, 419, 12389, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 2126, 279, 1482, 1917, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 633, 2512, 4555, 5586, 760, 845, 341, 262, 220, 101175, 7472, 470, 419, 12389, 280, 262, 220, 100702, 2760, 456, 262, 220, 101135, 1572, 262, 220, 100235, 2760, 1554, 262, 220, 100632, 9356, 353, 18535, 476, 11151, 279, 9106, 624, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 98668, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 18, 13, 100632, 19, 122250, 20, 100632, 99869, 18, 101474, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99951, 15, 13, 22, 101663, 126173, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99082, 20, 13, 101655, 16, 123006, 101474, 22, 11, 9276, 1131, 101130, 66, 23, 102114, 64, 23, 65, 15, 64, 103992, 69, 101252, 65, 100067, 7628, 19, 64, 121919, 101474, 22, 3632, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 
22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '40,60p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99698, 11, 99618, 79, 1248, 73022, 151336], 'meta_info': {'id': '531263b451d14a80af5ea9dde4a9483f', 'finish_reason': 
{'type': 'stop', 'matched': 151336}, 'prompt_tokens': 17121, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.000516757951118052, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.05545008182525635, 4616, 'cat'), (-0.006719138007611036, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-2.264974000354414e-06, 608, ' /'), (-2.3841855067985307e-07, 5117, 'home'), (-1.1920928244535389e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-2.0265558760002023e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0011398254428058863, 13428, '/src'), (-0.00011038171214750037, 14, '/'), (-3.325883881188929e-05, 27082, 'preview'), (-0.048190485686063766, 14, '/'), (-0.0023522109258919954, 5057, 'inline'), (-9.179073458653875e-06, 5179, 'Provider'), (-2.3841855067985307e-07, 21239, '.ts'), (-0.003941744100302458, 760, ' |'), (-2.253030106658116e-05, 10918, ' sed'), (-1.1920928244535389e-07, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (0.0, 364, "" '""), (-0.015634581446647644, 99698, '40'), (-1.1920928244535389e-07, 11, ','), (-0.0001774868869688362, 99618, '60'), (0.0, 79, 'p'), (0.0, 1248, ""'\n""), (-4.541770613286644e-05, 73022, '```'), (-4.410734163684538e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 596, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 1.4196479320526123, 'response_sent_to_client_ts': 1767625272.413555}}\r\n[2026-01-05 16:01:12] INFO: 10.86.2.252:49018 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n[2026-01-05 16:01:12] Finish: obj=GenerateReqInput(validation_time=3.364216536283493e-05, received_time=1767625270.7982323, received_time_perf=2480155.471157497, rid='55c877a8b0a94f88b90dc4a148497fa7', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""time=1.8245074898004532e-05, received_time=1767625266.94816, received_time_perf=2480151.6210853, rid='859328be96ed4361944bb7f8a19838eb', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 25\n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 
518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 101294, 11, 102573, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101140, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 21, 100067, 23, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101140, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 19, 99590, 18, 99243, 126612, 100933, 23, 126334, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 22, 13, 100899, 16, 120911, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98886, 17, 13, 19, 99590, 99241, 24, 100372, 24, 11, 9276, 1131, 100590, 7628, 21, 638, 329, 99317, 64, 101474, 18, 16, 64, 102284, 22, 17, 67, 99698, 24, 100702, 69, 99200, 64, 17, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720] ... 
[11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100539, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 99590, 99200, 22, 100933, 24, 103919, 100461, 18, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99916, 21, 13, 24, 100933, 99317, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99082, 16, 13, 21, 99146, 100562, 20, 18, 11, 9276, 1131, 102284, 24, 18, 99869, 1371, 102487, 291, 19, 100632, 98729, 19, 19, 6066, 22, 69, 23, 64, 100759, 100919, 3065, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 
220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 220, 99446, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '20,40p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 98360, 11, 99698, 79, 1248, 73022, 151336], 'meta_info': {'id': '55c877a8b0a94f88b90dc4a148497fa7', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 17251, 'weight_version': 
'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.001593392575159669, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.414919912815094, 4616, 'cat'), (-0.016339901834726334, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-2.145764938177308e-06, 608, ' /'), (-1.1920928244535389e-07, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.1920928244535389e-07, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0004010588163509965, 13428, '/src'), (-9.965400386136025e-05, 14, '/'), (-1.8954096958623268e-05, 27082, 'preview'), (-0.024990277364850044, 14, '/'), (-0.0009806115413084626, 5057, 'inline'), (-9.059865078597795e-06, 5179, 'Provider'), (0.0, 21239, '.ts'), (-0.004709105938673019, 760, ' |'), (-0.0006043276516720653, 10918, ' sed'), (0.0, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-1.0728830375228426e-06, 364, "" '""), (-0.6897028684616089, 98360, '20'), (0.0, 11, ','), (-0.0008127961773425341, 99698, '40'), (0.0, 79, 'p'), (-2.3841855067985307e-07, 1248, ""'\n""), (-1.2993727978027891e-05, 73022, '```'), (-0.0005530973430722952, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 847, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 1.6319427490234375, 'response_sent_to_client_ts': 1767625272.4302359}}\r\n",,terminal_output +591,992042,"src/preview/inlineProvider.ts",1307,0,"",typescript,selection_command +592,992129,"TERMINAL",0,0,"[2026-01-05 16:01:18] Receive: obj=GenerateReqInput(validation_time=2.431776374578476e-05, received_time=1767625278.8454468, received_time_perf=2480163.518372539, rid='c557802b228c4f688d86e799a7b26506', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 
12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 100919, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 99619, 23, 121577, 100539, 16, 100702, 22, 101655, 16, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 23, 13, 18, 100067, 100002, 21, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98886, 18, 13, 100539, 17, 101663, 16, 118843, 11, 9276, 1131, 22, 99317, 101729, 6902, 16, 69, 8901, 99367, 66, 72376, 17, 67, 15, 8901, 329, 21, 65, 24, 64, 102626, 67, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 
100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220] ... [220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 98965, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101294, 22, 24, 99082, 126382, 101474, 22, 23, 100702, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99951, 15, 13, 100809, 18, 23, 99951, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99082, 20, 13, 101478, 21, 100899, 17, 19, 99317, 11, 9276, 1131, 20, 100557, 99916, 18, 65, 100461, 16, 67, 99367, 64, 99695, 2577, 20, 12502, 24, 87358, 19, 64, 24, 100933, 18, 69, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 
11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, 
custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:01:18 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 597, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +593,992322,"TERMINAL",0,0,"[2026-01-05 16:01:19] Receive: obj=GenerateReqInput(validation_time=1.567276194691658e-05, received_time=1767625279.071269, received_time_perf=2480163.744194509, rid='2efdaea57db74d0a81dcd2e3b7e93310', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 
26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101562, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 21, 102487, 20, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101562, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 20, 102487, 21, 98886, 23, 101130, 102626, 17, 15, 100461, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99590, 21, 13, 100928, 23, 20, 99618, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 16, 13, 101562, 16, 100933, 21, 100104, 18, 11, 9276, 1131, 119621, 102856, 542, 22, 66, 21, 101723, 19, 68, 104160, 65, 23, 1859, 100899, 20, 9787, 101804, 17, 104550, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618] ... 
[11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99698, 11, 99618, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 99243, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 19, 100557, 102114, 21, 101140, 19, 20, 100928, 101655, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99951, 23, 13, 23, 100461, 19, 101562, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99317, 18, 13, 20, 99243, 101140, 17, 20, 101294, 11, 9276, 1131, 66, 101130, 22, 99695, 17, 65, 123886, 66, 19, 69, 21, 101252, 67, 102807, 68, 22, 100809, 64, 22, 65, 99916, 20, 100539, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 
100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +594,992383,"TERMINAL",0,0,"[2026-01-05 16:01:19 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 1, \r\n",,terminal_output +595,992779,"TERMINAL",0,0,"[2026-01-05 16:01:19 TP0] Prefill batch, #new-seq: 2, #new-token: 8192, #cached-token: 596, token usage: 0.04, #running-req: 0, #queue-req: 0, 
\r\n",,terminal_output +596,993054,"TERMINAL",0,0,"[2026-01-05 16:01:19 TP0] Prefill batch, #new-seq: 1, #new-token: 8112, #cached-token: 0, token usage: 0.06, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +597,993436,"TERMINAL",0,0,"[2026-01-05 16:01:20] Finish: obj=GenerateReqInput(validation_time=1.567276194691658e-05, received_time=1767625279.071269, received_time_perf=2480163.744194509, rid='2efdaea57db74d0a81dcd2e3b7e93310', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=2.431776374578476e-05, received_time=1767625278.8454468, received_time_perf=2480163.518372539, rid='c557802b228c4f688d86e799a7b26506', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101562, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 
220, 21, 102487, 20, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101562, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 20, 102487, 21, 98886, 23, 101130, 102626, 17, 15, 100461, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99590, 21, 13, 100928, 23, 20, 99618, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 16, 13, 101562, 16, 100933, 21, 100104, 18, 11, 9276, 1131, 119621, 102856, 542, 22, 66, 21, 101723, 19, 68, 104160, 65, 23, 1859, 100899, 20, 9787, 101804, 17, 104550, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618] ... 
[11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99698, 11, 99618, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 99243, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 19, 100557, 102114, 21, 101140, 19, 20, 100928, 101655, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99951, 23, 13, 23, 100461, 19, 101562, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99317, 18, 13, 20, 99243, 101140, 17, 20, 101294, 11, 9276, 1131, 66, 101130, 22, 99695, 17, 65, 123886, 66, 19, 69, 21, 101252, 67, 102807, 68, 22, 100809, 64, 22, 65, 99916, 20, 100539, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 
100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': '\n```bash\ngit status\n```', 'output_ids': [198, 73022, 45937, 198, 12875, 2639, 198, 73022, 151336], 'meta_info': {'id': '2efdaea57db74d0a81dcd2e3b7e93310', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 16725, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), 
(-0.0007631250191479921, 73022, '```'), (0.0, 45937, 'bash'), (-1.1920928244535389e-07, 198, '\n'), (-2.220095634460449, 12875, 'git'), (-0.3629239797592163, 2639, ' status'), (-0.0003695997002068907, 198, '\n'), (-0.0004632591735571623, 73022, '```'), (-1.0728830375228426e-06, 151336, '<|user|>')], 'completion_tokens': 9, 'cached_tokens': 596, 'spec_accept_rate': 0.6666666666666666, 'spec_accept_length': 3.0, 'spec_verify_ct': 3, 'spec_accept_token_num': 6, 'spec_draft_token_num': 9, 'e2e_latency': 1.143883466720581, 'response_sent_to_client_ts': 1767625280.2152147}}\r\n[2026-01-05 16:01:20] INFO: 10.86.2.252:60882 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +598,993582,"TERMINAL",0,0,"[2026-01-05 16:01:20] Finish: obj=GenerateReqInput(validation_time=2.431776374578476e-05, received_time=1767625278.8454468, received_time_perf=2480163.518372539, rid='c557802b228c4f688d86e799a7b26506', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=1.3979151844978333e-05, received_time=1767625270.993827, received_time_perf=2480155.666752416, rid='531263b451d14a80af5ea9dde4a9483f', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 100919, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 99619, 23, 121577, 100539, 
16, 100702, 22, 101655, 16, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99619, 23, 13, 18, 100067, 100002, 21, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98886, 18, 13, 100539, 17, 101663, 16, 118843, 11, 9276, 1131, 22, 99317, 101729, 6902, 16, 69, 8901, 99367, 66, 72376, 17, 67, 15, 8901, 329, 21, 65, 24, 64, 102626, 67, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220] ... 
[220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 98965, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101294, 22, 24, 99082, 126382, 101474, 22, 23, 100702, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99951, 15, 13, 100809, 18, 23, 99951, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99082, 20, 13, 101478, 21, 100899, 17, 19, 99317, 11, 9276, 1131, 20, 100557, 99916, 18, 65, 100461, 16, 67, 99367, 64, 99695, 2577, 20, 12502, 24, 87358, 19, 64, 24, 100933, 18, 69, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 
24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '40,60p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99698, 11, 99618, 79, 1248, 73022, 151336], 'meta_info': {'id': 'c557802b228c4f688d86e799a7b26506', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 17156, 'weight_version': 
'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.0004552758182398975, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.09097492694854736, 4616, 'cat'), (-0.016331106424331665, 481, ' -'), (-9.536738616588991e-07, 77, 'n'), (-4.291525328881107e-06, 608, ' /'), (-7.152555099310121e-07, 5117, 'home'), (-1.1920928244535389e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-4.529942543740617e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0016411182004958391, 13428, '/src'), (-0.00014232576359063387, 14, '/'), (-0.001141730579547584, 27082, 'preview'), (-0.016544530168175697, 14, '/'), (-0.008131257258355618, 5057, 'inline'), (-1.9311717551317997e-05, 5179, 'Provider'), (-1.0728830375228426e-06, 21239, '.ts'), (-0.0009608419495634735, 760, ' |'), (-2.9205850296420977e-05, 10918, ' sed'), (-1.1920928244535389e-07, 481, ' -'), (-3.576278118089249e-07, 77, 'n'), (0.0, 364, "" '""), (-0.006628076080232859, 99698, '40'), (0.0, 11, ','), (-6.711257447022945e-05, 99618, '60'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-2.658331868587993e-05, 73022, '```'), (-9.059865078597795e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 597, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 1.4850621223449707, 'response_sent_to_client_ts': 1767625280.3305638}}\r\n",,terminal_output +599,1006130,"src/preview/inlineProvider.ts",0,0,"",typescript,selection_command +600,1006484,"TERMINAL",0,0,"[2026-01-05 16:01:33] Receive: obj=GenerateReqInput(validation_time=2.109212800860405e-05, received_time=1767625293.1593273, received_time_perf=2480177.832252711, rid='32b52390a8184b9f82bc21f5636e709e', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 
1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101655, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 17, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 23, 100928, 20, 11, 3950, 10426, 25, 220, 15, 13, 100002, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101655, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 121910, 99698, 21, 15, 99082, 20, 99082, 18, 99951, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99590, 22, 13, 109641, 21, 99446, 19, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 16, 13, 22, 101663, 117239, 100933, 20, 11, 9276, 1131, 24, 11998, 101135, 16, 16, 66, 23, 68, 23, 101655, 99419, 13225, 101411, 21, 99695, 68, 15, 100104, 114959, 65, 16, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 
18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24] ... [220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 98729, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101917, 22, 99951, 21, 98729, 19, 103093, 16, 101411, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99951, 24, 13, 100372, 16, 99916, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99317, 18, 13, 22, 101723, 98729, 19, 99200, 24, 11, 9276, 1131, 17, 823, 3235, 12502, 102486, 1999, 103498, 67, 15, 64, 104340, 67, 4385, 17, 68, 18, 65, 22, 68, 24, 100702, 98668, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 
120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98965, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 311, 51397, 1851, 3812, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 624, 257, 220, 21, 197, 353, 1096, 4990, 10614, 916, 27971, 594, 30523, 323, 4278, 389, 4287, 5128, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 8967, 25, 2710, 284, 830, 280, 262, 220, 98965, 1572, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, 
conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:01:33 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +601,1006756,"TERMINAL",0,0,"[2026-01-05 16:01:33 TP0] Prefill batch, #new-seq: 1, #new-token: 8037, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +602,1007080,"TERMINAL",0,0,"[2026-01-05 16:01:33 TP0] Decode batch, #running-req: 1, #token: 16836, token usage: 0.04, accept len: 3.27, accept rate: 0.82, cuda graph: True, gen throughput (token/s): 7.21, #queue-req: 0, \r\n",,terminal_output +603,1007220,"TERMINAL",0,0,"[2026-01-05 16:01:33] Finish: obj=GenerateReqInput(validation_time=2.109212800860405e-05, received_time=1767625293.1593273, received_time_perf=2480177.832252711, rid='32b52390a8184b9f82bc21f5636e709e', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... "" 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, \n... 
[truncated]\n/nothink<|assistant|>\n\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '1,11p'\n```<|user|>\n\n 1\timport * as vscode from 'vscode';\n 2\timport { Action, toVscodePosition } from './types';\n 3\t\n 4\t/**\n 5\t * Provides inline completion items (ghost text) for code edit actions.\n 6\t * This takes priority over Cursor's hints and works on empty lines.\n 7\t */\n 8\texport class CrowdPilotInlineProvider implements vscode.InlineCompletionItemProvider {\n 9\t private action: Action | null = null;\n 10\t private enabled: boolean = true;\n 11\t\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 
5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101655, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 17, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 23, 100928, 20, 11, 3950, 10426, 25, 220, 15, 13, 100002, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101655, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 121910, 99698, 21, 15, 99082, 20, 99082, 18, 99951, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99590, 22, 13, 109641, 21, 99446, 19, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 16, 13, 22, 101663, 117239, 100933, 20, 11, 9276, 1131, 24, 11998, 101135, 16, 16, 66, 23, 68, 23, 101655, 99419, 13225, 101411, 21, 99695, 68, 15, 100104, 114959, 65, 16, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24] ... 
[220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 98729, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101917, 22, 99951, 21, 98729, 19, 103093, 16, 101411, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99951, 24, 13, 100372, 16, 99916, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99317, 18, 13, 22, 101723, 98729, 19, 99200, 24, 11, 9276, 1131, 17, 823, 3235, 12502, 102486, 1999, 103498, 67, 15, 64, 104340, 67, 4385, 17, 68, 18, 65, 22, 68, 24, 100702, 98668, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 
24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98965, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 311, 51397, 1851, 3812, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 624, 257, 220, 21, 197, 353, 1096, 4990, 10614, 916, 27971, 594, 30523, 323, 4278, 389, 4287, 5128, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 8967, 25, 2710, 284, 830, 280, 262, 220, 98965, 1572, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/types.ts | sed -n '1,20p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 20343, 21239, 760, 10918, 481, 77, 364, 16, 11, 98360, 79, 1248, 73022, 151336], 'meta_info': {'id': '32b52390a8184b9f82bc21f5636e709e', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 16825, 
'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.004423238802701235, 73022, '```'), (0.0, 45937, 'bash'), (-3.576278118089249e-07, 198, '\n'), (-0.2900518774986267, 4616, 'cat'), (-0.06669653952121735, 481, ' -'), (-7.152555099310121e-07, 77, 'n'), (-5.4238757002167404e-05, 608, ' /'), (-4.0531076592742465e-06, 5117, 'home'), (-1.1920928244535389e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.597391747054644e-05, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-1.1920928244535389e-07, 79888, '-extension'), (-0.03955841809511185, 13428, '/src'), (-0.0005129451747052372, 14, '/'), (-1.9073468138230965e-06, 27082, 'preview'), (-0.2159118801355362, 20343, '/types'), (-5.960462772236497e-07, 21239, '.ts'), (-0.29455918073654175, 760, ' |'), (-0.0001934579631779343, 10918, ' sed'), (-1.1920928244535389e-07, 481, ' -'), (0.0, 77, 'n'), (0.0, 364, "" '""), (-5.018585216021165e-05, 16, '1'), (0.0, 11, ','), (-1.404752492904663, 98360, '20'), (0.0, 79, 'p'), (0.0, 1248, ""'\n""), (-5.4238757002167404e-05, 73022, '```'), (-8.344646857949556e-07, 151336, '<|user|>')], 'completion_tokens': 38, 'cached_tokens': 596, 'spec_accept_rate': 0.7222222222222222, 'spec_accept_length': 3.1666666666666665, 'spec_verify_ct': 12, 'spec_accept_token_num': 26, 'spec_draft_token_num': 36, 'e2e_latency': 0.8336737155914307, 'response_sent_to_client_ts': 1767625293.993063}}\r\n[2026-01-05 16:01:33] INFO: 10.86.2.252:36406 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +604,1013339,"src/preview/inlineProvider.ts",1307,0,"",typescript,selection_keyboard +605,1013631,"TERMINAL",0,0,"[2026-01-05 16:01:40] Receive: obj=GenerateReqInput(validation_time=2.313358709216118e-05, received_time=1767625300.3688622, received_time_perf=2480185.041787454, rid='9318bf69a0304b099034b846fbe3e214', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 
4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99200, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 18, 13, 100104, 17, 19, 98965, 20, 101411, 22, 99695, 21, 101294, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 15, 13, 100702, 23, 101804, 17, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 20, 13, 15, 98965, 23, 102114, 99241, 22, 11, 9276, 1131, 100590, 68, 99698, 580, 100096, 23, 23, 103498, 69, 19, 69, 101804, 69, 112283, 67, 100096, 23, 23, 101130, 100067, 69, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 
119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220] ... [19, 197, 1747, 257, 220, 20, 197, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 624, 257, 220, 21, 197, 353, 1096, 4990, 10614, 916, 27971, 594, 30523, 323, 4278, 389, 4287, 5128, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 8967, 25, 2710, 284, 830, 280, 262, 220, 98965, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100702, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 110610, 124212, 103919, 23, 99618, 99698, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 100104, 18, 13, 121743, 18, 99951, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99419, 22, 13, 23, 101175, 99446, 17, 22, 98965, 11, 9276, 1131, 101175, 65, 20, 99619, 100067, 64, 23, 99243, 19, 65, 24, 69, 104160, 8901, 99146, 69, 101917, 18, 21, 68, 100096, 24, 68, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 
103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99698, 11, 99618, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, 
bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:01:40 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +606,1013710,"src/preview/inlineProvider.ts",2872,0,"",typescript,selection_keyboard +607,1013914,"TERMINAL",0,0,"[2026-01-05 16:01:40 TP0] Prefill batch, #new-seq: 1, #new-token: 7668, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +608,1013970,"TERMINAL",0,0,"[2026-01-05 16:01:40] Receive: obj=GenerateReqInput(validation_time=1.5550758689641953e-05, received_time=1767625300.740026, received_time_perf=2480185.412951678, rid='abac470d9a0c44cfa661c0d230234c88', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 
315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99200, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 125214, 17, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99200, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 122559, 15, 98965, 15, 100096, 99618, 24, 100614, 17, 22, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 15, 13, 21, 100919, 19, 100461, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 20, 13, 18, 98965, 100632, 24, 100067, 23, 11, 9276, 1131, 69, 102501, 66, 102626, 370, 24, 68, 22, 101723, 69, 450, 8937, 103825, 69, 22, 99446, 101562, 16, 100928, 69, 99243, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11] ... 
[99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 99698, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 18, 99366, 100235, 23, 100096, 24, 122250, 112891, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 15, 13, 100632, 23, 102807, 17, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99243, 20, 13, 100590, 16, 100928, 22, 100461, 19, 11, 9276, 1131, 24, 100557, 23, 13225, 103093, 64, 15, 99064, 19, 65, 100614, 24, 100441, 19, 65, 23, 101562, 92482, 18, 68, 124399, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 
100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 103502, 11, 100809, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103502, 88166, 262, 220, 100067, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 23044, 863, 341, 262, 220, 104327, 13056, 470, 8127, 2460, 2604, 419, 12389, 30029, 4962, 58, 15, 60, 481, 220, 16, 1009, 715, 262, 220, 103825, 197, 4293, 8127, 2460, 2651, 419, 12389, 30029, 5073, 58, 15, 60, 488, 220, 16, 280, 262, 220, 103946, 7472, 456, 262, 220, 103992, 88166, 262, 220, 101804, 7472, 470, 895, 280, 262, 220, 102487, 2760, 456, 262, 220, 103205, 197, 532, 262, 220, 101663, 1572, 262, 220, 100809, 1572, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +609,1014251,"TERMINAL",0,0,"[2026-01-05 16:01:41 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 597, token usage: 0.04, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +610,1014519,"TERMINAL",0,0,"[2026-01-05 16:01:41 TP0] Prefill batch, #new-seq: 1, #new-token: 7808, #cached-token: 0, token usage: 0.05, 
#running-req: 1, #queue-req: 0, \r\n",,terminal_output +611,1015110,"TERMINAL",0,0,"[2026-01-05 16:01:41] Finish: obj=GenerateReqInput(validation_time=2.313358709216118e-05, received_time=1767625300.3688622, received_time_perf=2480185.041787454, rid='9318bf69a0304b099034b846fbe3e214', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""out>/nothink<|assistant|>\n\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '40,60p'\n```<|user|>\n\n 40\t }\n 41\t\n 42\t /**\n 43\t * Provide inline completion items.\n 44\t */\n 45\t provideInlineCompletionItems(\n 46\t document: vscode.TextDocument,\n 47\t position: vscode.Position,\n 48\t context: vscode.InlineCompletionContext,\n 49\t token: vscode.CancellationToken\n 50\t ): vscode.ProviderResult {\n 51\t if (!this.enabled || !this.action) {\n 52\t return [];\n 53\t }\n 54\t\n 55\t // Only handle pure insertions (not replacements)\n 56\t // Replacements are handled by decorations to properly show what's being deleted\n 57\t if (this.action.kind !== 'editInsert') {\n 58\t return [];\n 59\t }\n 60\t\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 
4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99200, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 18, 13, 100104, 17, 19, 98965, 20, 101411, 22, 99695, 21, 101294, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 15, 13, 100702, 23, 101804, 17, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 20, 13, 15, 98965, 23, 102114, 99241, 22, 11, 9276, 1131, 100590, 68, 99698, 580, 100096, 23, 23, 103498, 69, 19, 69, 101804, 69, 112283, 67, 100096, 23, 23, 101130, 100067, 69, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 
11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220] ... [19, 197, 1747, 257, 220, 20, 197, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 624, 257, 220, 21, 197, 353, 1096, 4990, 10614, 916, 27971, 594, 30523, 323, 4278, 389, 4287, 5128, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 8967, 25, 2710, 284, 830, 280, 262, 220, 98965, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100702, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 110610, 124212, 103919, 23, 99618, 99698, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 100104, 18, 13, 121743, 18, 99951, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99419, 22, 13, 23, 101175, 99446, 17, 22, 98965, 11, 9276, 1131, 101175, 65, 20, 99619, 100067, 64, 23, 99243, 19, 65, 24, 69, 104160, 8901, 99146, 69, 101917, 18, 21, 68, 100096, 24, 68, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 
11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99698, 11, 99618, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, 
lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '60,80p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99618, 11, 99695, 79, 1248, 73022, 151336], 'meta_info': {'id': '9318bf69a0304b099034b846fbe3e214', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 16456, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.0006571040721610188, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.15095417201519012, 4616, 'cat'), (-0.013674980960786343, 481, ' -'), (-4.768370445162873e-07, 77, 'n'), (-7.390948667307384e-06, 608, ' /'), (-4.768370445162873e-07, 5117, 'home'), (-5.960462772236497e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (-1.1920928244535389e-07, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-6.556489552167477e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.004754191264510155, 13428, '/src'), (-0.000328367663314566, 14, '/'), (-3.8980677345534787e-05, 27082, 'preview'), (-0.03732965141534805, 14, '/'), (-0.0013996100751683116, 5057, 'inline'), (-7.986990567587782e-06, 5179, 'Provider'), (-2.0265558760002023e-06, 21239, '.ts'), (-0.0006084974738769233, 760, ' |'), (-1.7165990357170813e-05, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (-7.152555099310121e-07, 364, "" '""), (-0.15252754092216492, 99618, '60'), (-2.3841855067985307e-07, 11, ','), (-0.001742750871926546, 99695, '80'), (0.0, 79, 'p'), (0.0, 1248, ""'\n""), (-9.274052717955783e-05, 73022, '```'), (-1.0847986231965479e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 596, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 1.4747653007507324, 'response_sent_to_client_ts': 1767625301.8437102}}\r\n[2026-01-05 16:01:41] Finish: obj=GenerateReqInput(validation_time=1.5550758689641953e-05, received_time=1767625300.740026, received_time_perf=2480185.412951678, rid='abac470d9a0c44cfa661c0d230234c88', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... 
tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""0, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... [truncated]\n/nothink<|assistant|>\n\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '89,99p'\n```<|user|>\n\n 89\t \n 90\t if (this.action.kind === 'editReplace') {\n 91\t return cursorLine >= this.action.range.start[0] - 1 && \n 92\t cursorLine <= this.action.range.end[0] + 1;\n 93\t }\n 94\t \n 95\t return false;\n 96\t }\n 97\t}\n 98\t\n 99\t\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 
50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99200, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 125214, 17, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99200, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 122559, 15, 98965, 15, 100096, 99618, 24, 100614, 17, 22, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 15, 13, 21, 100919, 19, 100461, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 20, 13, 18, 98965, 100632, 24, 100067, 23, 11, 9276, 1131, 69, 102501, 66, 102626, 370, 24, 68, 22, 101723, 69, 450, 8937, 103825, 69, 22, 99446, 101562, 16, 100928, 69, 99243, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 
220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11] ... [99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 99698, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 18, 99366, 100235, 23, 100096, 24, 122250, 112891, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 15, 13, 100632, 23, 102807, 17, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99243, 20, 13, 100590, 16, 100928, 22, 100461, 19, 11, 9276, 1131, 24, 100557, 23, 13225, 103093, 64, 15, 99064, 19, 65, 100614, 24, 100441, 19, 65, 23, 101562, 92482, 18, 68, 124399, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 
220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 103502, 11, 100809, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103502, 88166, 262, 220, 100067, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 23044, 863, 341, 262, 220, 104327, 13056, 470, 8127, 2460, 2604, 419, 12389, 30029, 4962, 58, 15, 60, 481, 220, 16, 1009, 715, 262, 220, 103825, 197, 4293, 8127, 2460, 2651, 419, 12389, 30029, 5073, 58, 15, 60, 488, 220, 16, 280, 262, 220, 103946, 7472, 456, 262, 220, 103992, 88166, 262, 220, 101804, 7472, 470, 895, 280, 262, 220, 102487, 2760, 456, 262, 220, 103205, 197, 532, 262, 220, 101663, 1572, 262, 220, 100809, 1572, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '70,90p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 
5057, 5179, 21239, 760, 10918, 481, 77, 364, 100096, 11, 100067, 79, 1248, 73022, 151336], 'meta_info': {'id': 'abac470d9a0c44cfa661c0d230234c88', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 16597, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.0004086851258762181, 73022, '```'), (0.0, 45937, 'bash'), (-1.1920928244535389e-07, 198, '\n'), (-0.2821725308895111, 4616, 'cat'), (-0.03942709416151047, 481, ' -'), (-3.576278118089249e-07, 77, 'n'), (-2.3483953555114567e-05, 608, ' /'), (-1.4305104514278355e-06, 5117, 'home'), (-4.768370445162873e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (-7.152555099310121e-07, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-7.271740287251305e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-1.1920928244535389e-07, 79888, '-extension'), (-0.006770290434360504, 13428, '/src'), (-0.00027807659353129566, 14, '/'), (-3.683499380713329e-05, 27082, 'preview'), (-0.1262858808040619, 14, '/'), (-0.013931412249803543, 5057, 'inline'), (-0.0002826052950695157, 5179, 'Provider'), (-2.9444261599564925e-05, 21239, '.ts'), (-0.047376204282045364, 760, ' |'), (-0.0008061258122324944, 10918, ' sed'), (-1.1920928244535389e-07, 481, ' -'), (-3.099436753473128e-06, 77, 'n'), (-3.2186455882765586e-06, 364, "" '""), (-3.160205364227295, 100096, '70'), (-1.1920922133867862e-06, 11, ','), (-0.05677402392029762, 100067, '90'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-3.802703940891661e-05, 73022, '```'), (-1.549708758830093e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 597, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 1.1226513385772705, 'response_sent_to_client_ts': 1767625301.8627295}}\r\n[2026-01-05 16:01:41] INFO: 10.86.2.252:57542 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +612,1016811,"TERMINAL",0,0,"[2026-01-05 16:01:43] Receive: obj=GenerateReqInput(validation_time=1.754704862833023e-05, received_time=1767625303.5172083, received_time_perf=2480188.190133441, rid='be2b1993d5d344058bdfc26b9f3c87e4', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 
686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99200, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 102573, 16, 11, 3950, 10426, 25, 220, 15, 13, 100590, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 16, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102624, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 24, 101252, 102626, 17, 102626, 16, 102487, 17, 21, 99619, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 16, 13, 15, 99317, 24, 100933, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 20, 13, 101840, 24, 23, 103388, 100539, 21, 11, 9276, 1131, 104127, 72376, 102487, 4385, 103302, 69, 19, 5918, 8228, 98360, 66, 16, 69, 101478, 22, 19, 101474, 16, 66, 18, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 
21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24] ... [220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 103502, 11, 100809, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103502, 88166, 262, 220, 100067, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 23044, 863, 341, 262, 220, 104327, 13056, 470, 8127, 2460, 2604, 419, 12389, 30029, 4962, 58, 15, 60, 481, 220, 16, 1009, 715, 262, 220, 103825, 197, 4293, 8127, 2460, 2651, 419, 12389, 30029, 5073, 58, 15, 60, 488, 220, 16, 280, 262, 220, 103946, 7472, 456, 262, 220, 103992, 88166, 262, 220, 101804, 7472, 470, 895, 280, 262, 220, 102487, 2760, 456, 262, 220, 103205, 197, 532, 262, 220, 101663, 1572, 262, 220, 100809, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 99698, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 22, 101478, 23, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 99698, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101130, 20, 100372, 
20, 23, 101840, 102487, 19, 122414, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 15, 13, 22, 99698, 15, 99916, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99243, 20, 13, 19, 98886, 101804, 16, 21, 100928, 11, 9276, 1131, 370, 580, 19, 100096, 67, 24, 64, 15, 66, 101723, 66, 3632, 101478, 16, 66, 15, 67, 112596, 99619, 19, 66, 101252, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 22, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, 
return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:01:43 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 617, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +613,1017078,"TERMINAL",0,0,"[2026-01-05 16:01:43 TP0] Prefill batch, #new-seq: 1, #new-token: 7790, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +614,1017496,"TERMINAL",0,0,"[2026-01-05 16:01:44] Receive: obj=GenerateReqInput(validation_time=1.3797078281641006e-05, received_time=1767625304.250817, received_time_perf=2480188.923742123, rid='8828355b61614a8ea1780fa57f41276b', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 
50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102624, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 17, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 23, 100928, 17, 11, 3950, 10426, 25, 220, 15, 13, 100002, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 16, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102624, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100632, 22, 103093, 18, 22, 101478, 101804, 16, 101917, 16, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 16, 13, 101294, 20, 126334, 16, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 21, 13, 100539, 23, 110733, 99951, 19, 11, 9276, 1131, 17, 68, 100002, 65, 112596, 99951, 24, 99590, 16, 69, 103093, 68, 101663, 22, 15, 99243, 99064, 12502, 16, 66, 21, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21] ... 
[11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 22, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 102088, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100899, 19, 100096, 19, 102807, 17, 23, 100702, 15, 99619, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 18, 13, 20, 99419, 117509, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99243, 23, 13, 114146, 117055, 101723, 16, 11, 9276, 1131, 1371, 17, 65, 99887, 18, 67, 20, 67, 101135, 19, 100002, 23, 65, 2940, 66, 99916, 65, 24, 69, 18, 66, 103878, 68, 19, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 
220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:01:44 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.04, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +615,1017524,"src/preview/inlineProvider.ts",0,0,"",typescript,selection_command +616,1017790,"TERMINAL",0,0,"[2026-01-05 16:01:44 TP0] Prefill batch, #new-seq: 1, #new-token: 7847, #cached-token: 0, token usage: 0.06, #running-req: 1, #queue-req: 0, \r\n[2026-01-05 16:01:44] Receive: obj=GenerateReqInput(validation_time=1.3173092156648636e-05, 
received_time=1767625304.5537455, received_time_perf=2480189.22667055, rid='819d945a6fe54018a4370e3fbc763726', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 
2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99200, 11, 100096, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101130, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 99869, 23, 99618, 101478, 16, 100919, 22, 101723, 18, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 20, 13, 18, 99367, 18, 100235, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 24, 13, 101663, 22, 112896, 23, 100557, 11, 9276, 1131, 100632, 21, 99695, 7628, 100562, 18, 64, 100590, 18, 99590, 8901, 69, 15, 65, 100632, 24, 64, 98729, 22, 20, 67, 102721, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632] ... 
[11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 101723, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101140, 24, 100096, 22, 23, 99869, 126293, 99457, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 19, 13, 104836, 23, 99419, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99243, 23, 13, 24, 99619, 22, 101961, 108714, 11, 9276, 1131, 101252, 17, 23, 100235, 20, 65, 21, 99317, 99367, 64, 23, 12502, 120392, 15, 3632, 102486, 69, 19, 98886, 102269, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 
11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98965, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 311, 51397, 1851, 3812, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 624, 257, 220, 21, 197, 353, 1096, 4990, 10614, 916, 27971, 594, 30523, 323, 4278, 389, 4287, 5128, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 8967, 25, 2710, 284, 830, 280, 262, 220, 98965, 1572, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +617,1018128,"TERMINAL",0,0,"[2026-01-05 16:01:44 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 616, token usage: 0.07, #running-req: 2, #queue-req: 0, \r\n",,terminal_output +618,1018395,"TERMINAL",0,0,"[2026-01-05 16:01:45 TP0] Prefill batch, #new-seq: 1, #new-token: 7275, #cached-token: 0, token usage: 0.09, #running-req: 2, #queue-req: 0, \r\n",,terminal_output +619,1018793,"TERMINAL",0,0,"[2026-01-05 16:01:45] Finish: 
obj=GenerateReqInput(validation_time=1.754704862833023e-05, received_time=1767625303.5172083, received_time_perf=2480188.190133441, rid='be2b1993d5d344058bdfc26b9f3c87e4', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""n: 7668, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \n[2026-01-05 16:01:40] Receive: obj=GenerateReqInput(validation_time=1.5550758689641953e-05, received_time=1767625300.740026, received_time_perf=2480185.412951678, rid='abac470d9a0c44cfa661c0d230234c88', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 7\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 99200, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 
220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 102573, 16, 11, 3950, 10426, 25, 220, 15, 13, 100590, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 16, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102624, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 24, 101252, 102626, 17, 102626, 16, 102487, 17, 21, 99619, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 16, 13, 15, 99317, 24, 100933, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 20, 13, 101840, 24, 23, 103388, 100539, 21, 11, 9276, 1131, 104127, 72376, 102487, 4385, 103302, 69, 19, 5918, 8228, 98360, 66, 16, 69, 101478, 22, 19, 101474, 16, 66, 18, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24] ... 
[220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 103502, 11, 100809, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103502, 88166, 262, 220, 100067, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 23044, 863, 341, 262, 220, 104327, 13056, 470, 8127, 2460, 2604, 419, 12389, 30029, 4962, 58, 15, 60, 481, 220, 16, 1009, 715, 262, 220, 103825, 197, 4293, 8127, 2460, 2651, 419, 12389, 30029, 5073, 58, 15, 60, 488, 220, 16, 280, 262, 220, 103946, 7472, 456, 262, 220, 103992, 88166, 262, 220, 101804, 7472, 470, 895, 280, 262, 220, 102487, 2760, 456, 262, 220, 103205, 197, 532, 262, 220, 101663, 1572, 262, 220, 100809, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 99698, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 22, 101478, 23, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 99698, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101130, 20, 100372, 20, 23, 101840, 102487, 19, 122414, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 15, 13, 22, 99698, 15, 99916, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99243, 20, 13, 19, 98886, 101804, 16, 21, 100928, 11, 9276, 1131, 370, 580, 19, 100096, 67, 24, 64, 15, 66, 101723, 66, 3632, 101478, 16, 66, 15, 67, 112596, 99619, 19, 66, 101252, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 
98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 22, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '85,95p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 102284, 11, 101804, 79, 1248, 73022, 151336], 'meta_info': {'id': 'be2b1993d5d344058bdfc26b9f3c87e4', 'finish_reason': {'type': 'stop', 'matched': 151336}, 
'prompt_tokens': 16599, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.00041214076918549836, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.26350781321525574, 4616, 'cat'), (-0.03308727219700813, 481, ' -'), (-9.536738616588991e-07, 77, 'n'), (-1.764281842042692e-05, 608, ' /'), (-5.960462772236497e-07, 5117, 'home'), (-1.1920928244535389e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-7.986990567587782e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.001957050058990717, 13428, '/src'), (-0.0003177614707965404, 14, '/'), (-3.9934315282152966e-05, 27082, 'preview'), (-0.03654980659484863, 14, '/'), (-0.011643783189356327, 5057, 'inline'), (-0.00011586471373448148, 5179, 'Provider'), (-2.264974000354414e-06, 21239, '.ts'), (-0.001660279231145978, 760, ' |'), (-0.0003195490571670234, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (-2.264974000354414e-06, 364, "" '""), (-2.7560067176818848, 102284, '85'), (-9.417489309271332e-06, 11, ','), (-0.39524129033088684, 101804, '95'), (-2.3841855067985307e-07, 79, 'p'), (-2.3841855067985307e-07, 1248, ""'\n""), (-0.00032360086333937943, 73022, '```'), (-4.541770613286644e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 617, 'spec_accept_rate': 0.8484848484848485, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 28, 'spec_draft_token_num': 33, 'e2e_latency': 2.0588061809539795, 'response_sent_to_client_ts': 1767625305.57607}}\r\n",,terminal_output +620,1018942,"TERMINAL",0,0,"[2026-01-05 16:01:45 TP0] Decode batch, #running-req: 2, #token: 16673, token usage: 0.04, accept len: 3.38, accept rate: 0.85, cuda graph: True, gen throughput (token/s): 18.57, #queue-req: 0, \r\n[2026-01-05 16:01:45] Finish: obj=GenerateReqInput(validation_time=1.3173092156648636e-05, received_time=1767625304.5537455, received_time_perf=2480189.22667055, rid='819d945a6fe54018a4370e3fbc763726', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... "", 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... 
[truncated]\n/nothink<|assistant|>\n\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '1,11p'\n```<|user|>\n\n 1\timport * as vscode from 'vscode';\n 2\timport { Action, toVscodePosition } from './types';\n 3\t\n 4\t/**\n 5\t * Provides inline completion items (ghost text) for code edit actions.\n 6\t * This takes priority over Cursor's hints and works on empty lines.\n 7\t */\n 8\texport class CrowdPilotInlineProvider implements vscode.InlineCompletionItemProvider {\n 9\t private action: Action | null = null;\n 10\t private enabled: boolean = true;\n 11\t\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 
5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99200, 11, 100096, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101130, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 99869, 23, 99618, 101478, 16, 100919, 22, 101723, 18, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 20, 13, 18, 99367, 18, 100235, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 24, 13, 101663, 22, 112896, 23, 100557, 11, 9276, 1131, 100632, 21, 99695, 7628, 100562, 18, 64, 100590, 18, 99590, 8901, 69, 15, 65, 100632, 24, 64, 98729, 22, 20, 67, 102721, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632] ... 
[11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 101723, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101140, 24, 100096, 22, 23, 99869, 126293, 99457, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 19, 13, 104836, 23, 99419, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99243, 23, 13, 24, 99619, 22, 101961, 108714, 11, 9276, 1131, 101252, 17, 23, 100235, 20, 65, 21, 99317, 99367, 64, 23, 12502, 120392, 15, 3632, 102486, 69, 19, 98886, 102269, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 
11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98965, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 311, 51397, 1851, 3812, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 624, 257, 220, 21, 197, 353, 1096, 4990, 10614, 916, 27971, 594, 30523, 323, 4278, 389, 4287, 5128, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 8967, 25, 2710, 284, 830, 280, 262, 220, 98965, 1572, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '20,40p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 98360, 11, 99698, 79, 1248, 73022, 151336], 'meta_info': {'id': '819d945a6fe54018a4370e3fbc763726', 'finish_reason': {'type': 'stop', 'matched': 151336}, 
'prompt_tokens': 16083, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.0008677055011503398, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.32262998819351196, 4616, 'cat'), (-0.009594046510756016, 481, ' -'), (-2.3841855067985307e-07, 77, 'n'), (-2.0265558760002023e-06, 608, ' /'), (-1.1920928244535389e-07, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-3.576278118089249e-07, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.002314390614628792, 13428, '/src'), (-0.0001234931987710297, 14, '/'), (-4.768370445162873e-07, 27082, 'preview'), (-0.05620834603905678, 14, '/'), (-9.905801562126726e-05, 5057, 'inline'), (-1.6689286894688848e-06, 5179, 'Provider'), (-2.3841855067985307e-07, 21239, '.ts'), (-0.0005629861843772233, 760, ' |'), (-0.00012110930401831865, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (-1.1920922133867862e-06, 364, "" '""), (-1.8440324068069458, 98360, '20'), (0.0, 11, ','), (-0.004230597522109747, 99698, '40'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-0.00032360086333937943, 73022, '```'), (-0.0004625442670658231, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 616, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 1.154979944229126, 'response_sent_to_client_ts': 1767625305.7087812}}\r\n[2026-01-05 16:01:45] INFO: 10.86.2.252:57560 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n[2026-01-05 16:01:45] Finish: obj=GenerateReqInput(validation_time=1.3797078281641006e-05, received_time=1767625304.250817, received_time_perf=2480188.923742123, rid='8828355b61614a8ea1780fa57f41276b', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""time=1.754704862833023e-05, received_time=1767625303.5172083, received_time_perf=2480188.190133441, rid='be2b1993d5d344058bdfc26b9f3c87e4', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 
518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102624, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 17, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 23, 100928, 17, 11, 3950, 10426, 25, 220, 15, 13, 100002, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 16, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102624, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100632, 22, 103093, 18, 22, 101478, 101804, 16, 101917, 16, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 16, 13, 101294, 20, 126334, 16, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 21, 13, 100539, 23, 110733, 99951, 19, 11, 9276, 1131, 17, 68, 100002, 65, 112596, 99951, 24, 99590, 16, 69, 103093, 68, 101663, 22, 15, 99243, 99064, 12502, 16, 66, 21, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21] ... 
[11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 22, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 102088, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100899, 19, 100096, 19, 102807, 17, 23, 100702, 15, 99619, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 18, 13, 20, 99419, 117509, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99243, 23, 13, 114146, 117055, 101723, 16, 11, 9276, 1131, 1371, 17, 65, 99887, 18, 67, 20, 67, 101135, 19, 100002, 23, 65, 2940, 66, 99916, 65, 24, 69, 18, 66, 103878, 68, 19, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 
220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '1,11p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98965, 79, 1248, 73022, 151336], 'meta_info': {'id': '8828355b61614a8ea1780fa57f41276b', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 16635, 'weight_version': 'default', 
'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.00011860620725201443, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.14766158163547516, 4616, 'cat'), (-0.00959392823278904, 481, ' -'), (-3.576278118089249e-07, 77, 'n'), (-5.960446742392378e-06, 608, ' /'), (-3.576278118089249e-07, 5117, 'home'), (-3.576278118089249e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-5.364403477869928e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0011653067776933312, 13428, '/src'), (-0.00013374387344811112, 14, '/'), (-1.4781842764932662e-05, 27082, 'preview'), (-0.026634685695171356, 14, '/'), (-0.0014002051902934909, 5057, 'inline'), (-2.002696055569686e-05, 5179, 'Provider'), (-2.50339189733495e-06, 21239, '.ts'), (-0.001039679627865553, 760, ' |'), (-3.3854863431770355e-05, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (-2.264974000354414e-06, 364, "" '""), (-0.8258570432662964, 16, '1'), (-3.6954811548639555e-06, 11, ','), (-2.0446128845214844, 98965, '11'), (0.0, 79, 'p'), (-3.576278118089249e-07, 1248, ""'\n""), (-0.000226472009671852, 73022, '```'), (-9.059865078597795e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 596, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 1.4771232604980469, 'response_sent_to_client_ts': 1767625305.7279983}}\r\n",,terminal_output +621,1021329,"src/preview/inlineProvider.ts",843,0,"",typescript,selection_command +622,1021458,"TERMINAL",0,0,"[2026-01-05 16:01:48] Receive: obj=GenerateReqInput(validation_time=1.792190596461296e-05, received_time=1767625308.1749015, received_time_perf=2480192.847826695, rid='5acc829bd348499593f602bf34720ddc', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 
16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101130, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 99869, 23, 99618, 101478, 16, 100919, 22, 101723, 18, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 20, 13, 18, 99367, 18, 100235, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 24, 13, 101663, 22, 112896, 23, 100557, 11, 9276, 1131, 100632, 21, 99695, 7628, 100562, 18, 64, 100590, 18, 99590, 8901, 69, 15, 65, 100632, 24, 64, 98729, 22, 20, 67, 102721, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 
11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220] ... [220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98965, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 311, 51397, 1851, 3812, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 624, 257, 220, 21, 197, 353, 1096, 4990, 10614, 916, 27971, 594, 30523, 323, 4278, 389, 4287, 5128, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 8967, 25, 2710, 284, 830, 280, 262, 220, 98965, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 101723, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 100928, 19, 22, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100539, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 101723, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 18, 99419, 99064, 24, 118611, 101478, 19, 102807, 18, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 19, 13, 101130, 18, 22, 100461, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99243, 24, 13, 99241, 21, 21, 100096, 101130, 11, 9276, 1131, 23, 98729, 67, 24, 100461, 64, 21, 1859, 20, 99698, 99243, 64, 19, 101140, 15, 68, 18, 69, 8901, 102269, 18, 22, 99916, 516, 
1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 22, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, 
priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:01:48 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +623,1021631,"TERMINAL",0,0,"[2026-01-05 16:01:48] Receive: obj=GenerateReqInput(validation_time=1.805601641535759e-05, received_time=1767625308.3579001, received_time_perf=2480193.030825248, rid='f6504a1c797b464ebfc0ea7786725361', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 
82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101130, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101130, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 99695, 17, 110610, 127020, 23, 99695, 101804, 16, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 20, 13, 21, 101130, 100562, 23, 19, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 15, 13, 18, 99869, 15, 99366, 99695, 17, 11, 9276, 1131, 23, 99618, 21, 542, 102114, 24, 1999, 122463, 18, 100933, 17, 2940, 329, 18, 99869, 65, 99869, 24, 67, 99366, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220] ... 
[220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 22, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 98360, 11, 99698, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 98360, 1572, 262, 220, 99146, 2760, 1554, 262, 220, 99241, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99619, 9356, 735, 262, 220, 99590, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99446, 7472, 419, 12389, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 2126, 279, 1482, 1917, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 633, 2512, 4555, 5586, 760, 845, 341, 262, 220, 101175, 7472, 470, 419, 12389, 280, 262, 220, 100702, 2760, 456, 262, 220, 101135, 1572, 262, 220, 100235, 2760, 1554, 262, 220, 100632, 9356, 353, 18535, 476, 11151, 279, 9106, 624, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100933, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102626, 17, 114146, 20, 102487, 101562, 16, 100104, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 23, 13, 127031, 100067, 16, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98729, 17, 13, 23, 101655, 23, 99916, 21, 101804, 11, 9276, 1131, 20, 4475, 23, 100104, 8937, 18, 100933, 19, 100809, 102573, 18, 69, 99618, 17, 13225, 101135, 22, 98360, 631, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 
220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +624,1021703,"TERMINAL",0,0,"[2026-01-05 16:01:48 TP0] Prefill batch, #new-seq: 1, #new-token: 7670, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 1, \r\n",,terminal_output +625,1022206,"TERMINAL",0,0,"[2026-01-05 16:01:48 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 597, token usage: 0.04, #running-req: 1, #queue-req: 0, \r\n",,terminal_output 
+626,1022392,"TERMINAL",0,0,"[2026-01-05 16:01:49 TP0] Prefill batch, #new-seq: 1, #new-token: 7863, #cached-token: 0, token usage: 0.05, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +627,1023002,"TERMINAL",0,0,"[2026-01-05 16:01:49] Finish: obj=GenerateReqInput(validation_time=1.792190596461296e-05, received_time=1767625308.1749015, received_time_perf=2480192.847826695, rid='5acc829bd348499593f602bf34720ddc', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""n: 7847, #cached-token: 0, token usage: 0.06, #running-req: 1, #queue-req: 0, \n[2026-01-05 16:01:44] Receive: obj=GenerateReqInput(validation_time=1.3173092156648636e-05, received_time=1767625304.5537455, received_time_perf=2480189.22667055, rid='819d945a6fe54018a4370e3fbc763726', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 7\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101130, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 99869, 23, 99618, 
101478, 16, 100919, 22, 101723, 18, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 20, 13, 18, 99367, 18, 100235, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99366, 24, 13, 101663, 22, 112896, 23, 100557, 11, 9276, 1131, 100632, 21, 99695, 7628, 100562, 18, 64, 100590, 18, 99590, 8901, 69, 15, 65, 100632, 24, 64, 98729, 22, 20, 67, 102721, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220] ... 
[220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98965, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 311, 51397, 1851, 3812, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 36428, 7381, 9750, 3589, 320, 38446, 1467, 8, 369, 2038, 4499, 6168, 624, 257, 220, 21, 197, 353, 1096, 4990, 10614, 916, 27971, 594, 30523, 323, 4278, 389, 4287, 5128, 624, 257, 220, 22, 197, 735, 257, 220, 23, 59028, 536, 97156, 47, 23891, 25246, 5179, 5169, 55008, 5337, 1056, 33030, 1234, 5179, 341, 257, 220, 24, 2760, 869, 1917, 25, 5586, 760, 845, 284, 845, 280, 262, 220, 98668, 2760, 869, 8967, 25, 2710, 284, 830, 280, 262, 220, 98965, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 101723, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 100928, 19, 22, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100539, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 101723, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 18, 99419, 99064, 24, 118611, 101478, 19, 102807, 18, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 19, 13, 101130, 18, 22, 100461, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99243, 24, 13, 99241, 21, 21, 100096, 101130, 11, 9276, 1131, 23, 98729, 67, 24, 100461, 64, 21, 1859, 20, 99698, 99243, 64, 19, 101140, 15, 68, 18, 69, 8901, 102269, 18, 22, 99916, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 
100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 22, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '11,31p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 98965, 11, 100557, 79, 1248, 73022, 151336], 'meta_info': {'id': '5acc829bd348499593f602bf34720ddc', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 16458, 
'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.00026842328952625394, 73022, '```'), (0.0, 45937, 'bash'), (-1.1920928244535389e-07, 198, '\n'), (-0.13580693304538727, 4616, 'cat'), (-0.011463252827525139, 481, ' -'), (-2.3841855067985307e-07, 77, 'n'), (-6.9141146923357155e-06, 608, ' /'), (-7.152555099310121e-07, 5117, 'home'), (-1.1920928244535389e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.168244216387393e-05, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-1.1920928244535389e-07, 79888, '-extension'), (-0.002785732736811042, 13428, '/src'), (-0.0002388668799540028, 14, '/'), (-6.687417771900073e-05, 27082, 'preview'), (-0.010386225767433643, 14, '/'), (-0.0028291221242398024, 5057, 'inline'), (-4.6491513785440475e-06, 5179, 'Provider'), (-4.768370445162873e-07, 21239, '.ts'), (-0.0001722425949992612, 760, ' |'), (-5.483612312673358e-06, 10918, ' sed'), (0.0, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-1.311301275563892e-06, 364, "" '""), (-1.13767409324646, 98965, '11'), (-8.22540732769994e-06, 11, ','), (-0.10549712926149368, 100557, '31'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-0.00011085849109804258, 73022, '```'), (-6.318072337307967e-06, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 596, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 1.460261583328247, 'response_sent_to_client_ts': 1767625309.6353614}}\r\n[2026-01-05 16:01:49] Finish: obj=GenerateReqInput(validation_time=1.805601641535759e-05, received_time=1767625308.3579001, received_time_perf=2480193.030825248, rid='f6504a1c797b464ebfc0ea7786725361', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""time=1.792190596461296e-05, received_time=1767625308.1749015, received_time_perf=2480192.847826695, rid='5acc829bd348499593f602bf34720ddc', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 
518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101130, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101130, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 99695, 17, 110610, 127020, 23, 99695, 101804, 16, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 20, 13, 21, 101130, 100562, 23, 19, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 15, 13, 18, 99869, 15, 99366, 99695, 17, 11, 9276, 1131, 23, 99618, 21, 542, 102114, 24, 1999, 122463, 18, 100933, 17, 2940, 329, 18, 99869, 65, 99869, 24, 67, 99366, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220] ... 
[220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 22, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 98360, 11, 99698, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 98360, 1572, 262, 220, 99146, 2760, 1554, 262, 220, 99241, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99619, 9356, 735, 262, 220, 99590, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99446, 7472, 419, 12389, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 2126, 279, 1482, 1917, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 633, 2512, 4555, 5586, 760, 845, 341, 262, 220, 101175, 7472, 470, 419, 12389, 280, 262, 220, 100702, 2760, 456, 262, 220, 101135, 1572, 262, 220, 100235, 2760, 1554, 262, 220, 100632, 9356, 353, 18535, 476, 11151, 279, 9106, 624, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100933, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102626, 17, 114146, 20, 102487, 101562, 16, 100104, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 23, 13, 127031, 100067, 16, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98729, 17, 13, 23, 101655, 23, 99916, 21, 101804, 11, 9276, 1131, 20, 4475, 23, 100104, 8937, 18, 100933, 19, 100809, 102573, 18, 69, 99618, 17, 13225, 101135, 22, 98360, 631, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 
220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '60,80p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99618, 11, 99695, 79, 1248, 73022, 151336], 'meta_info': {'id': 'f6504a1c797b464ebfc0ea7786725361', 
'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 16652, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-2.682172998902388e-05, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.03538084775209427, 4616, 'cat'), (-0.00562252476811409, 481, ' -'), (0.0, 77, 'n'), (-9.536738616588991e-07, 608, ' /'), (0.0, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (-1.1920928244535389e-07, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.0728830375228426e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0004755319678224623, 13428, '/src'), (-7.73638384998776e-05, 14, '/'), (-2.9802276912960224e-06, 27082, 'preview'), (-0.004221219569444656, 14, '/'), (-0.0003937899600714445, 5057, 'inline'), (-2.0265558760002023e-06, 5179, 'Provider'), (-1.1920928244535389e-07, 21239, '.ts'), (-1.9192511899746023e-05, 760, ' |'), (-2.3841855067985307e-07, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (-2.3841855067985307e-07, 364, "" '""), (-0.7214875221252441, 99618, '60'), (0.0, 11, ','), (-0.009016748517751694, 99695, '80'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-2.2172682292875834e-05, 73022, '```'), (-0.00011085849109804258, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 597, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 1.2773919105529785, 'response_sent_to_client_ts': 1767625309.636909}}\r\n[2026-01-05 16:01:49] INFO: 10.86.2.252:56308 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +628,1035660,"src/preview/inlineProvider.ts",2088,0,"",typescript,selection_command +629,1035738,"TERMINAL",0,0,"[2026-01-05 16:02:02] Receive: obj=GenerateReqInput(validation_time=1.840014010667801e-05, received_time=1767625322.4272351, received_time_perf=2480207.100160476, rid='41c12f0b31bc4d43b363491c9332791b', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 
23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101130, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 103498, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100590, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 17, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101917, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102284, 22, 101917, 24, 101723, 16, 99887, 99064, 17, 22, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 20, 13, 101663, 16, 100933, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 15, 13, 101411, 19, 19, 99367, 121498, 11, 9276, 1131, 20, 12502, 291, 24, 99446, 101140, 21, 69, 102340, 64, 99869, 542, 291, 17, 69, 20, 101478, 18, 99869, 111659, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 
126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800] ... [220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100933, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 99695, 20, 99618, 16, 21, 102340, 20, 100235, 100899, 24, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 23, 13, 100235, 22, 107578, 16, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98729, 18, 13, 15, 99064, 23, 99446, 99590, 23, 11, 9276, 1131, 69, 118173, 19, 64, 16, 66, 102626, 22, 65, 101562, 19, 3065, 8315, 15, 12502, 22, 100928, 21, 102650, 20, 100632, 16, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 
11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, 
bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:02:02 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 618, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +630,1036008,"TERMINAL",0,0,"[2026-01-05 16:02:02] Receive: obj=GenerateReqInput(validation_time=2.57883220911026e-05, received_time=1767625322.6903903, received_time_perf=2480207.363315676, rid='51506f6e918943e8a1f695b90bc11417', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 
32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100614, 11, 671, 27084, 12, 2958, 25, 220, 18, 11, 671, 4584, 12, 2958, 25, 220, 17, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 103205, 100096, 24, 101723, 19, 116768, 99241, 24, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 22, 13, 125763, 100096, 16, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 16, 13, 23, 101723, 21, 99916, 100933, 16, 11, 9276, 1131, 15, 64, 20, 6066, 22, 67, 22, 2127, 22, 64, 19, 99916, 100809, 21, 8937, 21, 2940, 102624, 67, 18, 69, 101411, 450, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21] ... 
[11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99618, 11, 99695, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 262, 220, 103437, 13056, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102650, 7472, 1439, 262, 220, 103388, 88166, 262, 220, 103498, 7472, 470, 508, 1203, 935, 262, 220, 100899, 2760, 456, 262, 220, 102269, 1572, 262, 220, 102114, 2760, 1554, 262, 220, 100928, 9356, 353, 4248, 421, 279, 1482, 1917, 594, 2309, 374, 3143, 279, 2661, 8127, 2309, 624, 262, 220, 102626, 9356, 353, 12193, 311, 8252, 421, 582, 1184, 264, 32619, 20396, 624, 262, 220, 99695, 9356, 735, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100772, 25, 100772, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 99698, 15, 99367, 121570, 101478, 22, 99695, 16, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101175, 17, 13, 19, 99951, 122406, 16, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98360, 22, 13, 99457, 107271, 101655, 21, 11, 9276, 1131, 102340, 66, 98886, 69, 15, 65, 100557, 8901, 19, 67, 102088, 65, 100632, 18, 101474, 16, 66, 24, 100702, 99951, 24, 16, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 
21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +631,1036084,"TERMINAL",0,0,"[2026-01-05 16:02:02 TP0] Prefill batch, #new-seq: 1, #new-token: 7768, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 1, \r\n",,terminal_output +632,1036428,"TERMINAL",0,0,"[2026-01-05 16:02:03 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.04, #running-req: 1, #queue-req: 0, \r\n",,terminal_output 
+633,1036827,"TERMINAL",0,0,"[2026-01-05 16:02:03 TP0] Prefill batch, #new-seq: 1, #new-token: 7314, #cached-token: 0, token usage: 0.06, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +634,1037246,"TERMINAL",0,0,"[2026-01-05 16:02:03] Finish: obj=GenerateReqInput(validation_time=1.840014010667801e-05, received_time=1767625322.4272351, received_time_perf=2480207.100160476, rid='41c12f0b31bc4d43b363491c9332791b', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=1.805601641535759e-05, received_time=1767625308.3579001, received_time_perf=2480193.030825248, rid='f6504a1c797b464ebfc0ea7786725361', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101130, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 
220, 23, 103498, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100590, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 17, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 101917, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102284, 22, 101917, 24, 101723, 16, 99887, 99064, 17, 22, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 20, 13, 101663, 16, 100933, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 15, 13, 101411, 19, 19, 99367, 121498, 11, 9276, 1131, 20, 12502, 291, 24, 99446, 101140, 21, 69, 102340, 64, 99869, 542, 291, 17, 69, 20, 101478, 18, 99869, 111659, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800] ... 
[220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100933, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 99695, 20, 99618, 16, 21, 102340, 20, 100235, 100899, 24, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 23, 13, 100235, 22, 107578, 16, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98729, 18, 13, 15, 99064, 23, 99446, 99590, 23, 11, 9276, 1131, 69, 118173, 19, 64, 16, 66, 102626, 22, 65, 101562, 19, 3065, 8315, 15, 12502, 22, 100928, 21, 102650, 20, 100632, 16, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 
119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '40,60p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99698, 11, 99618, 79, 1248, 73022, 151336], 'meta_info': {'id': '41c12f0b31bc4d43b363491c9332791b', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 16578, 'weight_version': 
'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-7.354942499659956e-05, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.03571850806474686, 4616, 'cat'), (-0.004706020932644606, 481, ' -'), (0.0, 77, 'n'), (-1.4305104514278355e-06, 608, ' /'), (0.0, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (-1.1920928244535389e-07, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.0728830375228426e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0002361257211305201, 13428, '/src'), (-5.686121585313231e-05, 14, '/'), (-5.960462772236497e-07, 27082, 'preview'), (-0.0020350953564047813, 14, '/'), (-0.00046659549116156995, 5057, 'inline'), (-1.1920928244535389e-07, 5179, 'Provider'), (-2.3841855067985307e-07, 21239, '.ts'), (-1.6569954823353328e-05, 760, ' |'), (-2.3841855067985307e-07, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (-2.3841855067985307e-07, 364, "" '""), (-0.6205666661262512, 99698, '40'), (0.0, 11, ','), (-0.0005629861843772233, 99618, '60'), (0.0, 79, 'p'), (0.0, 1248, ""'\n""), (-0.00013255194062367082, 73022, '```'), (-2.658331868587993e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 618, 'spec_accept_rate': 0.8787878787878788, 'spec_accept_length': 3.6363636363636362, 'spec_verify_ct': 11, 'spec_accept_token_num': 29, 'spec_draft_token_num': 33, 'e2e_latency': 1.4268991947174072, 'response_sent_to_client_ts': 1767625323.854199}}\r\n[2026-01-05 16:02:03] Finish: obj=GenerateReqInput(validation_time=2.57883220911026e-05, received_time=1767625322.6903903, received_time_perf=2480207.363315676, rid='51506f6e918943e8a1f695b90bc11417', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""time=1.840014010667801e-05, received_time=1767625322.4272351, received_time_perf=2480207.100160476, rid='41c12f0b31bc4d43b363491c9332791b', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 
518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100614, 11, 671, 27084, 12, 2958, 25, 220, 18, 11, 671, 4584, 12, 2958, 25, 220, 17, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 103205, 100096, 24, 101723, 19, 116768, 99241, 24, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 22, 13, 125763, 100096, 16, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 16, 13, 23, 101723, 21, 99916, 100933, 16, 11, 9276, 1131, 15, 64, 20, 6066, 22, 67, 22, 2127, 22, 64, 19, 99916, 100809, 21, 8937, 21, 2940, 102624, 67, 18, 69, 101411, 450, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21] ... 
[11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99618, 11, 99695, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 262, 220, 103437, 13056, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102650, 7472, 1439, 262, 220, 103388, 88166, 262, 220, 103498, 7472, 470, 508, 1203, 935, 262, 220, 100899, 2760, 456, 262, 220, 102269, 1572, 262, 220, 102114, 2760, 1554, 262, 220, 100928, 9356, 353, 4248, 421, 279, 1482, 1917, 594, 2309, 374, 3143, 279, 2661, 8127, 2309, 624, 262, 220, 102626, 9356, 353, 12193, 311, 8252, 421, 582, 1184, 264, 32619, 20396, 624, 262, 220, 99695, 9356, 735, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100772, 25, 100772, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 99698, 15, 99367, 121570, 101478, 22, 99695, 16, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101175, 17, 13, 19, 99951, 122406, 16, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98360, 22, 13, 99457, 107271, 101655, 21, 11, 9276, 1131, 102340, 66, 98886, 69, 15, 65, 100557, 8901, 19, 67, 102088, 65, 100632, 18, 101474, 16, 66, 24, 100702, 99951, 24, 16, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 
21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '40,60p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99698, 11, 99618, 79, 1248, 73022, 151336], 'meta_info': {'id': 
'51506f6e918943e8a1f695b90bc11417', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 16102, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-7.986990567587782e-06, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.08477313071489334, 4616, 'cat'), (-0.011451349593698978, 481, ' -'), (-2.3841855067985307e-07, 77, 'n'), (-2.264974000354414e-06, 608, ' /'), (-3.576272320060525e-06, 5117, 'home'), (-4.768370445162873e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.7881377516459906e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0005673944251611829, 13428, '/src'), (-9.154854342341423e-05, 14, '/'), (-6.794906312279636e-06, 27082, 'preview'), (-0.005876525770872831, 14, '/'), (-0.0008051729528233409, 5057, 'inline'), (-3.4570634852570947e-06, 5179, 'Provider'), (-1.1920928244535389e-07, 21239, '.ts'), (-0.00013493580627255142, 760, ' |'), (-4.410734163684538e-06, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (-1.5497195136049413e-06, 364, "" '""), (-1.8285045623779297, 99698, '40'), (0.0, 11, ','), (-0.002751733176410198, 99618, '60'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-6.48477507638745e-05, 73022, '```'), (-5.4238757002167404e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 596, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 1.18052339553833, 'response_sent_to_client_ts': 1767625323.8709702}}\r\n[2026-01-05 16:02:03] INFO: 10.86.2.252:44922 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +635,1039146,"src/preview/inlineProvider.ts",2872,0,"",typescript,selection_keyboard +636,1039436,"TERMINAL",0,0,"[2026-01-05 16:02:06] Receive: obj=GenerateReqInput(validation_time=1.718662679195404e-05, received_time=1767625326.1756895, received_time_perf=2480210.848614713, rid='9b054756ffe04e9c93042bf6cc7d5fb7', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 
11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 103595, 11, 104340, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 262, 220, 103437, 13056, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102650, 7472, 1439, 262, 220, 103388, 88166, 262, 220, 103498, 7472, 470, 508, 1203, 935, 262, 220, 100899, 2760, 456, 262, 220, 102269, 1572, 262, 220, 102114, 2760, 1554, 262, 220, 100928, 9356, 353, 4248, 421, 279, 1482, 1917, 594, 2309, 374, 3143, 279, 2661, 8127, 2309, 624, 262, 220, 102626, 9356, 353, 12193, 311, 8252, 421, 582, 1184, 264, 32619, 20396, 624, 262, 220, 99695, 9356, 735, 262, 220, 104340, 2760, 374, 2512, 51621, 14533, 27583, 2460, 25, 1372, 1648, 2710, 341, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 
100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 100702, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 98965, 11, 671, 27084, 12, 2958, 25, 220, 18, 11, 671, 4584, 12, 2958, 25, 220, 18, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 20, 102487, 11, 3950, 10426, 25, 220, 15, 13, 98886, 11, 671, 27084, 12, 2958, 25, 220, 19, 11, 671, 4584, 12, 2958, 25, 220, 17, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102269] ... [23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100772, 25, 100772, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 20, 100928, 23, 101175, 124618, 104550, 99916, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101175, 17, 13, 21, 100067, 18, 100067, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98360, 22, 13, 100632, 18, 125255, 21, 102269, 11, 9276, 1131, 20, 99082, 100539, 69, 21, 68, 24, 99243, 24, 102088, 68, 23, 64, 16, 69, 21, 101804, 65, 100067, 8901, 114365, 99419, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 
100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 103502, 11, 100809, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103502, 88166, 262, 220, 100067, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 23044, 863, 341, 262, 220, 104327, 13056, 470, 8127, 2460, 2604, 419, 12389, 30029, 4962, 58, 15, 60, 481, 220, 16, 1009, 715, 262, 220, 103825, 197, 4293, 8127, 2460, 2651, 419, 12389, 30029, 5073, 58, 15, 60, 488, 220, 16, 280, 262, 220, 103946, 7472, 456, 262, 220, 103992, 88166, 262, 220, 101804, 7472, 470, 895, 280, 262, 220, 102487, 2760, 456, 262, 220, 103205, 197, 532, 262, 220, 101663, 1572, 262, 220, 100809, 1572, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, 
return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:02:06 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 616, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +637,1039714,"TERMINAL",0,0,"[2026-01-05 16:02:06 TP0] Prefill batch, #new-seq: 1, #new-token: 7358, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +638,1040215,"TERMINAL",0,0,"[2026-01-05 16:02:06] Finish: obj=GenerateReqInput(validation_time=1.718662679195404e-05, received_time=1767625326.1756895, received_time_perf=2480210.848614713, rid='9b054756ffe04e9c93042bf6cc7d5fb7', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... "", 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, \n... 
[truncated]\n/nothink<|assistant|>\n\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '89,99p'\n```<|user|>\n\n 89\t \n 90\t if (this.action.kind === 'editReplace') {\n 91\t return cursorLine >= this.action.range.start[0] - 1 && \n 92\t cursorLine <= this.action.range.end[0] + 1;\n 93\t }\n 94\t \n 95\t return false;\n 96\t }\n 97\t}\n 98\t\n 99\t\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 
279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 103595, 11, 104340, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 262, 220, 103437, 13056, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102650, 7472, 1439, 262, 220, 103388, 88166, 262, 220, 103498, 7472, 470, 508, 1203, 935, 262, 220, 100899, 2760, 456, 262, 220, 102269, 1572, 262, 220, 102114, 2760, 1554, 262, 220, 100928, 9356, 353, 4248, 421, 279, 1482, 1917, 594, 2309, 374, 3143, 279, 2661, 8127, 2309, 624, 262, 220, 102626, 9356, 353, 12193, 311, 8252, 421, 582, 1184, 264, 32619, 20396, 624, 262, 220, 99695, 9356, 735, 262, 220, 104340, 2760, 374, 2512, 51621, 14533, 27583, 2460, 25, 1372, 1648, 2710, 341, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 100702, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 98965, 11, 671, 27084, 12, 2958, 25, 220, 18, 11, 671, 4584, 12, 2958, 25, 220, 18, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 20, 102487, 11, 3950, 10426, 25, 220, 15, 13, 98886, 11, 671, 27084, 12, 2958, 25, 220, 19, 11, 671, 4584, 12, 2958, 25, 220, 17, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102269] ... 
[23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100772, 25, 100772, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 20, 100928, 23, 101175, 124618, 104550, 99916, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101175, 17, 13, 21, 100067, 18, 100067, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98360, 22, 13, 100632, 18, 125255, 21, 102269, 11, 9276, 1131, 20, 99082, 100539, 69, 21, 68, 24, 99243, 24, 102088, 68, 23, 64, 16, 69, 21, 101804, 65, 100067, 8901, 114365, 99419, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 
99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 103502, 11, 100809, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103502, 88166, 262, 220, 100067, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 23044, 863, 341, 262, 220, 104327, 13056, 470, 8127, 2460, 2604, 419, 12389, 30029, 4962, 58, 15, 60, 481, 220, 16, 1009, 715, 262, 220, 103825, 197, 4293, 8127, 2460, 2651, 419, 12389, 30029, 5073, 58, 15, 60, 488, 220, 16, 280, 262, 220, 103946, 7472, 456, 262, 220, 103992, 88166, 262, 220, 101804, 7472, 470, 895, 280, 262, 220, 102487, 2760, 456, 262, 220, 103205, 197, 532, 262, 220, 101663, 1572, 262, 220, 100809, 1572, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/inlineProvider.ts | sed -n '1,11p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 16, 11, 98965, 79, 1248, 73022, 151336], 'meta_info': {'id': '9b054756ffe04e9c93042bf6cc7d5fb7', 'finish_reason': {'type': 'stop', 'matched': 
151336}, 'prompt_tokens': 16166, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-0.0006239851354621351, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.37098875641822815, 4616, 'cat'), (-0.008034883998334408, 481, ' -'), (-4.768370445162873e-07, 77, 'n'), (-1.7881377516459906e-06, 608, ' /'), (0.0, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-7.152555099310121e-07, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.004717292729765177, 13428, '/src'), (-5.793403761344962e-05, 14, '/'), (-1.311301275563892e-06, 27082, 'preview'), (-0.028104985132813454, 14, '/'), (-0.001950387260876596, 5057, 'inline'), (-6.079655122448457e-06, 5179, 'Provider'), (-4.768370445162873e-07, 21239, '.ts'), (-0.003944118972867727, 760, ' |'), (-0.0005038899253122509, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (0.0, 364, "" '""), (-0.09812229126691818, 16, '1'), (-4.172316494077677e-06, 11, ','), (-0.502482533454895, 98965, '11'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-0.0002708068350329995, 73022, '```'), (-0.00015841660206206143, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 616, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 0.8084208965301514, 'response_sent_to_client_ts': 1767625326.984174}}\r\n[2026-01-05 16:02:06] INFO: 10.86.2.252:44924 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +639,1042168,"src/preview/quickPick.ts",0,0,"",typescript,tab +640,1042578,"TERMINAL",0,0,"[2026-01-05 16:02:09] Receive: obj=GenerateReqInput(validation_time=1.559872180223465e-05, received_time=1767625329.1980634, received_time_perf=2480213.870988599, rid='dd014158e91d4c15bf15323b9c17d185', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 
330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102269, 17, 108479, 23, 102114, 122559, 122250, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 22, 13, 22, 101655, 22, 102340, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 17, 13, 125603, 101478, 21, 102284, 19, 11, 9276, 1131, 22, 69, 101729, 68, 114495, 23, 66, 21, 66, 101562, 67, 100928, 24, 19, 68, 100104, 17, 20, 65, 24, 99082, 99951, 23, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 
104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11] ... [220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 103502, 11, 100809, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103502, 88166, 262, 220, 100067, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 23044, 863, 341, 262, 220, 104327, 13056, 470, 8127, 2460, 2604, 419, 12389, 30029, 4962, 58, 15, 60, 481, 220, 16, 1009, 715, 262, 220, 103825, 197, 4293, 8127, 2460, 2651, 419, 12389, 30029, 5073, 58, 15, 60, 488, 220, 16, 280, 262, 220, 103946, 7472, 456, 262, 220, 103992, 88166, 262, 220, 101804, 7472, 470, 895, 280, 262, 220, 102487, 2760, 456, 262, 220, 103205, 197, 532, 262, 220, 101663, 1572, 262, 220, 100809, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100772, 25, 100539, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 99243, 101478, 17, 21, 102626, 122414, 99698, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101175, 21, 13, 115381, 101840, 24, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99146, 15, 13, 23, 100933, 21, 99367, 22, 99366, 11, 9276, 1131, 24, 65, 100002, 19, 100899, 21, 27874, 100590, 68, 24, 66, 24, 99064, 101961, 13225, 21, 638, 22, 67, 20, 10793, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 
11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 27669, 36761, 21239, 760, 10918, 481, 77, 364, 16, 11, 98965, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 56390, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 5714, 315, 279, 3974, 3735, 16216, 624, 257, 220, 21, 197, 735, 257, 220, 22, 59028, 943, 17231, 36761, 2077, 284, 364, 10325, 6, 760, 364, 80672, 6, 760, 845, 280, 257, 220, 23, 1572, 257, 220, 24, 197, 1747, 262, 220, 98668, 197, 353, 6928, 264, 3974, 3735, 13220, 369, 279, 15268, 1917, 624, 262, 220, 98965, 197, 353, 12193, 979, 15010, 374, 10730, 323, 47286, 646, 944, 387, 6839, 624, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, 
bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:02:09 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 597, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +641,1042892,"TERMINAL",0,0,"[2026-01-05 16:02:09 TP0] Prefill batch, #new-seq: 1, #new-token: 7809, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +642,1043912,"TERMINAL",0,0,"[2026-01-05 16:02:09 TP0] Decode batch, #running-req: 1, #token: 16609, token usage: 0.04, accept len: 3.29, accept rate: 0.82, cuda graph: True, gen throughput (token/s): 8.56, #queue-req: 0, \r\n",,terminal_output +643,1043915,"TERMINAL",0,0,"[2026-01-05 16:02:10] Finish: obj=GenerateReqInput(validation_time=1.559872180223465e-05, received_time=1767625329.1980634, received_time_perf=2480213.870988599, rid='dd014158e91d4c15bf15323b9c17d185', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... 
[truncated]\n/nothink<|assistant|>\n\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/quickPick.ts | sed -n '1,11p'\n```<|user|>\n\n 1\timport * as vscode from 'vscode';\n 2\timport { Action, truncate } from './types';\n 3\t\n 4\t/**\n 5\t * Result of the quick pick interaction.\n 6\t */\n 7\texport type QuickPickResult = 'accept' | 'dismiss' | null;\n 8\t\n 9\t/**\n 10\t * Show a quick pick modal for the pending action.\n 11\t * Used when terminal is focused and decorations can't be shown.\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 
1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 98503, 25, 102486, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102269, 17, 108479, 23, 102114, 122559, 122250, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99446, 22, 13, 22, 101655, 22, 102340, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99367, 17, 13, 125603, 101478, 21, 102284, 19, 11, 9276, 1131, 22, 69, 101729, 68, 114495, 23, 66, 21, 66, 101562, 67, 100928, 24, 19, 68, 100104, 17, 20, 65, 24, 99082, 99951, 23, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11] ... 
[220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 103502, 11, 100809, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 103502, 88166, 262, 220, 100067, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 23044, 863, 341, 262, 220, 104327, 13056, 470, 8127, 2460, 2604, 419, 12389, 30029, 4962, 58, 15, 60, 481, 220, 16, 1009, 715, 262, 220, 103825, 197, 4293, 8127, 2460, 2651, 419, 12389, 30029, 5073, 58, 15, 60, 488, 220, 16, 280, 262, 220, 103946, 7472, 456, 262, 220, 103992, 88166, 262, 220, 101804, 7472, 470, 895, 280, 262, 220, 102487, 2760, 456, 262, 220, 103205, 197, 532, 262, 220, 101663, 1572, 262, 220, 100809, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100772, 25, 100539, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 99243, 101478, 17, 21, 102626, 122414, 99698, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101175, 21, 13, 115381, 101840, 24, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99146, 15, 13, 23, 100933, 21, 99367, 22, 99366, 11, 9276, 1131, 24, 65, 100002, 19, 100899, 21, 27874, 100590, 68, 24, 66, 24, 99064, 101961, 13225, 21, 638, 22, 67, 20, 10793, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 
220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 27669, 36761, 21239, 760, 10918, 481, 77, 364, 16, 11, 98965, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 56390, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 5714, 315, 279, 3974, 3735, 16216, 624, 257, 220, 21, 197, 735, 257, 220, 22, 59028, 943, 17231, 36761, 2077, 284, 364, 10325, 6, 760, 364, 80672, 6, 760, 845, 280, 257, 220, 23, 1572, 257, 220, 24, 197, 1747, 262, 220, 98668, 197, 353, 6928, 264, 3974, 3735, 13220, 369, 279, 15268, 1917, 624, 262, 220, 98965, 197, 353, 12193, 979, 15010, 374, 10730, 323, 47286, 646, 944, 387, 6839, 624, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/quickPick.ts | sed -n '11,31p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 27669, 36761, 21239, 760, 10918, 481, 77, 364, 98965, 11, 100557, 79, 1248, 73022, 151336], 'meta_info': {'id': 'dd014158e91d4c15bf15323b9c17d185', 
'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 16598, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-2.1576648578047752e-05, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.007194326724857092, 4616, 'cat'), (-0.0007901645149104297, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-2.3841855067985307e-07, 608, ' /'), (0.0, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.1920928244535389e-07, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-7.986990567587782e-06, 13428, '/src'), (-0.00019441144831944257, 14, '/'), (-0.0005617947317659855, 27082, 'preview'), (-0.012153265066444874, 14, '/'), (-0.003929513972252607, 27669, 'quick'), (0.0, 36761, 'Pick'), (-7.152555099310121e-07, 21239, '.ts'), (-2.2172682292875834e-05, 760, ' |'), (0.0, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (-1.1920928244535389e-07, 364, "" '""), (-0.5003625750541687, 98965, '11'), (-5.602820692729438e-06, 11, ','), (-0.3245135247707367, 100557, '31'), (0.0, 79, 'p'), (0.0, 1248, ""'\n""), (-0.0005529781919904053, 73022, '```'), (-1.0967194612021558e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 597, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 0.818202018737793, 'response_sent_to_client_ts': 1767625330.0163214}}\r\n[2026-01-05 16:02:10] INFO: 10.86.2.252:52838 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +644,1044233,"src/preview/decorations.ts",0,0,"import * as vscode from 'vscode';\n\n/**\n * Theme colors for consistent styling across light/dark modes.\n */\nexport const COLORS = {\n // Code changes - use VS Code's built-in diff colors\n deletion: {\n background: new vscode.ThemeColor('diffEditor.removedTextBackground'),\n border: new vscode.ThemeColor('diffEditor.removedTextBorder'),\n },\n insertion: {\n background: new vscode.ThemeColor('diffEditor.insertedTextBackground'),\n border: new vscode.ThemeColor('diffEditor.insertedTextBorder'),\n foreground: new vscode.ThemeColor('editor.foreground'),\n },\n\n // Meta actions - use editor widget colors for consistency\n metaAction: {\n foreground: new vscode.ThemeColor('editorWidget.foreground'),\n background: new vscode.ThemeColor('editorWidget.background'),\n border: new vscode.ThemeColor('editorWidget.border'),\n },\n\n // Action-specific accents\n terminal: new vscode.ThemeColor('terminal.ansiGreen'),\n fileSwitch: new vscode.ThemeColor('textLink.foreground'),\n cursorMove: new vscode.ThemeColor('editorCursor.foreground'),\n};\n\n/**\n * Pool of reusable decoration types to avoid memory leaks and flickering.\n * Created once, reused for all previews.\n */\nexport class DecorationPool {\n private types: Map = new Map();\n\n constructor() {\n // Deletion highlight for entire ranges (editDelete)\n this.types.set('deletion', vscode.window.createTextEditorDecorationType({\n backgroundColor: COLORS.deletion.background,\n borderColor: COLORS.deletion.border,\n borderStyle: 'solid',\n borderWidth: '1px',\n textDecoration: 'line-through',\n }));\n\n // Character-level deletion highlight (for diffs in editReplace)\n this.types.set('deletion-char', vscode.window.createTextEditorDecorationType({\n backgroundColor: 
COLORS.deletion.background,\n textDecoration: 'line-through',\n // No border - cleaner for individual character highlights\n }));\n\n // Insertion block - shows new text with green highlight on next line\n // contentText is set per-decoration via renderOptions\n this.types.set('insertion-block', vscode.window.createTextEditorDecorationType({\n // Base styles - content set via DecorationOptions.renderOptions\n }));\n\n // Inline insertion - shows new text right after deleted text (same line)\n // contentText is set per-decoration via renderOptions\n this.types.set('insertion-inline', vscode.window.createTextEditorDecorationType({\n // Base styles - content set via DecorationOptions.renderOptions\n }));\n\n // Meta-action indicator (terminal, file switch, cursor move)\n // Note: contentText is set per-decoration via renderOptions\n this.types.set('meta-indicator', vscode.window.createTextEditorDecorationType({\n // Base styles - specific content/colors set via DecorationOptions.renderOptions\n }));\n\n // Terminal command indicator\n this.types.set('terminal-indicator', vscode.window.createTextEditorDecorationType({\n // Styles applied via renderOptions for flexibility\n }));\n\n // File switch indicator\n this.types.set('file-indicator', vscode.window.createTextEditorDecorationType({\n // Styles applied via renderOptions for flexibility\n }));\n\n // Cursor move indicator\n this.types.set('cursor-indicator', vscode.window.createTextEditorDecorationType({\n // Styles applied via renderOptions for flexibility\n }));\n }\n\n /**\n * Get a decoration type by key.\n */\n get(typeKey: string): vscode.TextEditorDecorationType | undefined {\n return this.types.get(typeKey);\n }\n\n /**\n * Apply decorations to an editor.\n */\n setDecorations(\n editor: vscode.TextEditor,\n typeKey: string,\n options: vscode.DecorationOptions[]\n ): void {\n const type = this.types.get(typeKey);\n if (type) {\n editor.setDecorations(type, options);\n }\n }\n\n /**\n * Clear all decorations from the active editor.\n */\n clearAll(editor?: vscode.TextEditor): void {\n const targetEditor = editor ?? vscode.window.activeTextEditor;\n if (targetEditor) {\n for (const type of this.types.values()) {\n targetEditor.setDecorations(type, []);\n }\n }\n }\n\n /**\n * Clear a specific decoration type.\n */\n clear(editor: vscode.TextEditor, typeKey: string): void {\n const type = this.types.get(typeKey);\n if (type) {\n editor.setDecorations(type, []);\n }\n }\n\n /**\n * Dispose all decoration types. 
Call on extension deactivation.\n */\n dispose(): void {\n for (const type of this.types.values()) {\n type.dispose();\n }\n this.types.clear();\n }\n}\n\n\n",typescript,tab +645,1044646,"TERMINAL",0,0,"[2026-01-05 16:02:11] Receive: obj=GenerateReqInput(validation_time=1.5701167285442352e-05, received_time=1767625331.2627752, received_time_perf=2480215.935700431, rid='76aa3495e2bd45429cfed7a56ac83cbd', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 
825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 262, 220, 98360, 1572, 262, 220, 99146, 2760, 1554, 262, 220, 99241, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99619, 9356, 735, 262, 220, 99590, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99446, 7472, 419, 12389, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 2126, 279, 1482, 1917, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 633, 2512, 4555, 5586, 760, 845, 341, 262, 220, 101175, 7472, 470, 419, 12389, 280, 262, 220, 100702, 2760, 456, 262, 220, 101135, 1572, 262, 220, 100235, 2760, 1554, 262, 220, 100632, 9356, 353, 18535, 476, 11151, 279, 9106, 624, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 98668, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 18, 13, 100632, 19, 122250, 20, 100632, 99869, 18, 101474, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99951, 15, 13, 22, 101663, 126173, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99082, 20, 13, 101655, 16, 123006, 101474, 22, 11, 9276, 1131, 101130, 66, 23, 102114, 64, 23, 65, 15, 64, 103992, 69, 101252, 65, 100067, 7628, 19, 64, 121919, 101474, 22, 3632, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220] ... 
[340, 262, 220, 99698, 7472, 419, 23163, 980, 492, 450, 52279, 516, 55008, 19227, 2520, 1178, 9406, 19394, 929, 2262, 262, 220, 102340, 13056, 12688, 25, 86102, 2285, 52279, 11711, 345, 262, 220, 101961, 13056, 35647, 25, 86102, 2285, 52279, 22159, 345, 262, 220, 102088, 13056, 3886, 2323, 25, 364, 30901, 751, 262, 220, 101723, 13056, 43850, 25, 364, 16, 1767, 751, 262, 220, 100461, 13056, 91174, 25, 364, 1056, 42503, 751, 262, 220, 101562, 7472, 17804, 262, 220, 101655, 1572, 262, 220, 100933, 7472, 442, 15621, 11586, 35884, 11162, 320, 1958, 86227, 304, 4499, 23044, 340, 262, 220, 101474, 7472, 419, 23163, 980, 492, 450, 52279, 42294, 516, 55008, 19227, 2520, 1178, 9406, 19394, 929, 2262, 262, 220, 99200, 13056, 12688, 25, 86102, 2285, 52279, 11711, 345, 262, 220, 102624, 13056, 91174, 25, 364, 1056, 42503, 751, 262, 220, 102501, 13056, 442, 2308, 3886, 481, 31746, 369, 3842, 3668, 21265, 198, 262, 220, 102721, 7472, 17804, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 17082, 290, 2504, 481, 4933, 501, 1467, 448, 6176, 11162, 389, 1790, 1555, 198, 262, 220, 101917, 7472, 442, 2213, 1178, 374, 738, 817, 30149, 4566, 3141, 3798, 198, 262, 220, 102486, 7472, 419, 23163, 980, 492, 4208, 290, 9421, 516, 55008, 19227, 2520, 1178, 9406, 19394, 929, 2262, 262, 220, 101729, 13056, 442, 5351, 9219, 481, 2213, 738, 4566, 63561, 3798, 8739, 3798, 198, 262, 220, 102573, 7472, 17804, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 442, 53873, 35747, 481, 4933, 501, 1467, 1290, 1283, 11057, 1467, 320, 23995, 1555, 340, 262, 220, 103319, 7472, 442, 2213, 1178, 374, 738, 817, 30149, 4566, 3141, 3798, 198, 262, 220, 103302, 7472, 419, 23163, 980, 492, 4208, 290, 23568, 516, 55008, 19227, 2520, 1178, 9406, 19394, 929, 2262, 262, 220, 102636, 13056, 442, 5351, 9219, 481, 2213, 738, 4566, 63561, 3798, 8739, 3798, 198, 262, 220, 101411, 7472, 17804, 262, 220, 101478, 1572, 262, 220, 102952, 7472, 442, 15807, 24983, 20396, 320, 36143, 11, 1034, 3398, 11, 8127, 3271, 340, 262, 220, 101840, 7472, 442, 7036, 25, 2213, 1178, 374, 738, 817, 30149, 4566, 3141, 3798, 198, 262, 220, 103093, 7472, 419, 23163, 980, 492, 5490, 17750, 13246, 516, 55008, 19227, 2520, 1178, 9406, 19394, 929, 2262, 262, 220, 100096, 13056, 442, 5351, 9219, 481, 3151, 2213, 70752, 738, 4566, 63561, 3798, 8739, 3798, 198, 262, 220, 103437, 7472, 17804, 262, 220, 102650, 1572, 262, 220, 103388, 7472, 442, 33925, 3210, 20396, 198, 262, 220, 103498, 7472, 419, 23163, 980, 492, 36143, 17750, 13246, 516, 55008, 19227, 2520, 1178, 9406, 19394, 929, 2262, 262, 220, 100899, 13056, 442, 37176, 9248, 4566, 3141, 3798, 369, 24108, 198, 262, 220, 102269, 7472, 17804, 262, 220, 102114, 1572, 262, 220, 100928, 7472, 442, 2887, 3398, 20396, 198, 262, 220, 102626, 7472, 419, 23163, 980, 492, 1192, 17750, 13246, 516, 55008, 19227, 2520, 1178, 9406, 19394, 929, 2262, 262, 220, 99695, 13056, 442, 37176, 9248, 4566, 3141, 3798, 369, 24108, 198, 262, 220, 104340, 7472, 17804, 262, 220, 104160, 1572, 262, 220, 104127, 7472, 442, 27971, 3271, 20396, 198, 262, 220, 104029, 7472, 419, 23163, 980, 492, 17414, 17750, 13246, 516, 55008, 19227, 2520, 1178, 9406, 19394, 929, 2262, 262, 220, 102284, 13056, 442, 37176, 9248, 4566, 3141, 3798, 369, 24108, 198, 262, 220, 102807, 7472, 17804, 262, 220, 103878, 2760, 456, 262, 220, 101252, 1572, 262, 220, 103502, 2760, 1554, 262, 220, 100067, 9356, 353, 2126, 264, 20187, 943, 553, 1376, 624, 262, 220, 104327, 9356, 735, 262, 220, 103825, 2760, 633, 5808, 1592, 25, 914, 1648, 55008, 1979, 9406, 19394, 929, 760, 5614, 341, 
262, 220, 103946, 7472, 470, 419, 23163, 670, 5808, 1592, 317, 262, 220, 103992, 2760, 456, 262, 220, 101804, 1572, 262, 220, 102487, 2760, 1554, 262, 220, 103205, 9356, 353, 20509, 47286, 311, 458, 6440, 624, 262, 220, 101663, 9356, 735, 262, 220, 100809, 2760, 738, 35053, 804, 1006, 256, 220, 99457, 7472, 6440, 25, 55008, 1979, 9406, 345, 256, 220, 107609, 7472, 943, 1592, 25, 914, 345, 256, 220, 109871, 7472, 2606, 25, 55008, 22383, 7614, 3798, 19499, 256, 220, 110248, 2760, 16497, 737, 341, 256, 220, 109803, 7472, 733, 943, 284, 419, 23163, 670, 5808, 1592, 317, 256, 220, 108345, 7472, 421, 320, 1313, 8, 341, 256, 220, 109626, 13056, 6440, 980, 35053, 804, 5808, 11, 2606, 317, 256, 220, 110733, 7472, 456, 256, 220, 108479, 2760, 456, 256, 220, 110610, 1572, 256, 220, 104550, 2760, 1554, 256, 220, 111659, 9356, 353, 12017, 678, 47286, 504, 279, 4541, 6440, 624, 256, 220, 110800, 9356, 735, 256, 220, 114240, 2760, 2797, 2403, 56581, 4820, 55008, 1979, 9406, 1648, 737, 341, 256, 220, 114365, 7472, 733, 2169, 9406, 284, 6440, 9411, 55008, 19227, 13607, 1178, 9406, 280, 256, 220, 111508, 7472, 421, 320, 5657, 9406, 8, 341, 256, 220, 114495, 13056, 369, 320, 1024, 943, 315, 419, 23163, 10651, 2140, 341, 256, 220, 114959, 20789, 2169, 9406, 980, 35053, 804, 5808, 11, 41957, 256, 220, 112891, 13056, 456, 256, 220, 112114, 7472, 456, 256, 220, 103005, 2760, 456, 256, 220, 116045, 1572, 256, 220, 115760, 2760, 1554, 256, 220, 108714, 9356, 353, 12017, 264, 3151, 20187, 943, 624, 256, 220, 115878, 9356, 735, 256, 220, 109641, 2760, 2797, 56581, 25, 55008, 1979, 9406, 11, 943, 1592, 25, 914, 1648, 737, 341, 256, 220, 114062, 7472, 733, 943, 284, 419, 23163, 670, 5808, 1592, 317, 256, 220, 115925, 7472, 421, 320, 1313, 8, 341, 256, 220, 109295, 13056, 6440, 980, 35053, 804, 5808, 11, 41957, 256, 220, 117305, 7472, 456, 256, 220, 106464, 2760, 456, 256, 220, 118901, 1572, 256, 220, 118843, 2760, 1554, 256, 220, 117055, 9356, 353, 16455, 678, 20187, 4494, 13, 7143, 389, 8892, 409, 22941, 624, 256, 220, 119338, 9356, 735, 256, 220, 112840, 2760, 27299, 4555, 737, 341, 256, 220, 116996, 7472, 369, 320, 1024, 943, 315, 419, 23163, 10651, 2140, 341, 256, 220, 118558, 13056, 943, 30894, 543, 256, 220, 115547, 7472, 456, 256, 220, 117933, 7472, 419, 23163, 7426, 543, 256, 220, 108157, 2760, 456, 256, 220, 122804, 197, 532, 256, 220, 122866, 1572, 256, 220, 121498, 1572, 256, 220, 118836, 1572, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:02:11 
TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 586, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +646,1044859,"TERMINAL",0,0,"[2026-01-05 16:02:11 TP0] Prefill batch, #new-seq: 1, #new-token: 6010, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +647,1045247,"TERMINAL",0,0,"[2026-01-05 16:02:12] Finish: obj=GenerateReqInput(validation_time=1.5701167285442352e-05, received_time=1767625331.2627752, received_time_perf=2480215.935700431, rid='76aa3495e2bd45429cfed7a56ac83cbd', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 'rAll(editor?: vscode.TextEditor): void {\n 114\t const targetEditor = editor ?? vscode.window.activeTextEditor;\n 115\t if (targetEditor) {\n 116\t for (const type of this.types.values()) {\n 117\t targetEditor.setDecorations(type, []);\n 118\t }\n 119\t }\n 120\t }\n 121\t\n 122\t /**\n 123\t * Clear a specific decoration type.\n 124\t */\n 125\t clear(editor: vscode.TextEditor, typeKey: string): void {\n 126\t const type = this.types.get(typeKey);\n 127\t if (type) {\n 128\t editor.setDecorations(type, []);\n 129\t }\n 130\t }\n 131\t\n 132\t /**\n 133\t * Dispose all decoration types. 
Call on extension deactivation.\n 134\t */\n 135\t dispose(): void {\n 136\t for (const type of this.types.values()) {\n 137\t type.dispose();\n 138\t }\n 139\t this.types.clear();\n 140\t }\n 141\t}\n 142\t\n 143\t\n 144\t\n/nothink<|assistant|>\n', input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 
11288, 13, 151336, 198, 8049, 411, 397, 262, 220, 98360, 1572, 262, 220, 99146, 2760, 1554, 262, 220, 99241, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99619, 9356, 735, 262, 220, 99590, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99446, 7472, 419, 12389, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 2126, 279, 1482, 1917, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 633, 2512, 4555, 5586, 760, 845, 341, 262, 220, 101175, 7472, 470, 419, 12389, 280, 262, 220, 100702, 2760, 456, 262, 220, 101135, 1572, 262, 220, 100235, 2760, 1554, 262, 220, 100632, 9356, 353, 18535, 476, 11151, 279, 9106, 624, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 98668, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 18, 13, 100632, 19, 122250, 20, 100632, 99869, 18, 101474, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99951, 15, 13, 22, 101663, 126173, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99082, 20, 13, 101655, 16, 123006, 101474, 22, 11, 9276, 1131, 101130, 66, 23, 102114, 64, 23, 65, 15, 64, 103992, 69, 101252, 65, 100067, 7628, 19, 64, 121919, 101474, 22, 3632, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220] ... 
[340, 262, 220, 99698, 7472, 419, 23163, 980, 492, 450, 52279, 516, 55008, 19227, 2520, 1178, 9406, 19394, 929, 2262, 262, 220, 102340, 13056, 12688, 25, 86102, 2285, 52279, 11711, 345, 262, 220, 101961, 13056, 35647, 25, 86102, 2285, 52279, 22159, 345, 262, 220, 102088, 13056, 3886, 2323, 25, 364, 30901, 751, 262, 220, 101723, 13056, 43850, 25, 364, 16, 1767, 751, 262, 220, 100461, 13056, 91174, 25, 364, 1056, 42503, 751, 262, 220, 101562, 7472, 17804, 262, 220, 101655, 1572, 262, 220, 100933, 7472, 442, 15621, 11586, 35884, 11162, 320, 1958, 86227, 304, 4499, 23044, 340, 262, 220, 101474, 7472, 419, 23163, 980, 492, 450, 52279, 42294, 516, 55008, 19227, 2520, 1178, 9406, 19394, 929, 2262, 262, 220, 99200, 13056, 12688, 25, 86102, 2285, 52279, 11711, 345, 262, 220, 102624, 13056, 91174, 25, 364, 1056, 42503, 751, 262, 220, 102501, 13056, 442, 2308, 3886, 481, 31746, 369, 3842, 3668, 21265, 198, 262, 220, 102721, 7472, 17804, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 17082, 290, 2504, 481, 4933, 501, 1467, 448, 6176, 11162, 389, 1790, 1555, 198, 262, 220, 101917, 7472, 442, 2213, 1178, 374, 738, 817, 30149, 4566, 3141, 3798, 198, 262, 220, 102486, 7472, 419, 23163, 980, 492, 4208, 290, 9421, 516, 55008, 19227, 2520, 1178, 9406, 19394, 929, 2262, 262, 220, 101729, 13056, 442, 5351, 9219, 481, 2213, 738, 4566, 63561, 3798, 8739, 3798, 198, 262, 220, 102573, 7472, 17804, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 442, 53873, 35747, 481, 4933, 501, 1467, 1290, 1283, 11057, 1467, 320, 23995, 1555, 340, 262, 220, 103319, 7472, 442, 2213, 1178, 374, 738, 817, 30149, 4566, 3141, 3798, 198, 262, 220, 103302, 7472, 419, 23163, 980, 492, 4208, 290, 23568, 516, 55008, 19227, 2520, 1178, 9406, 19394, 929, 2262, 262, 220, 102636, 13056, 442, 5351, 9219, 481, 2213, 738, 4566, 63561, 3798, 8739, 3798, 198, 262, 220, 101411, 7472, 17804, 262, 220, 101478, 1572, 262, 220, 102952, 7472, 442, 15807, 24983, 20396, 320, 36143, 11, 1034, 3398, 11, 8127, 3271, 340, 262, 220, 101840, 7472, 442, 7036, 25, 2213, 1178, 374, 738, 817, 30149, 4566, 3141, 3798, 198, 262, 220, 103093, 7472, 419, 23163, 980, 492, 5490, 17750, 13246, 516, 55008, 19227, 2520, 1178, 9406, 19394, 929, 2262, 262, 220, 100096, 13056, 442, 5351, 9219, 481, 3151, 2213, 70752, 738, 4566, 63561, 3798, 8739, 3798, 198, 262, 220, 103437, 7472, 17804, 262, 220, 102650, 1572, 262, 220, 103388, 7472, 442, 33925, 3210, 20396, 198, 262, 220, 103498, 7472, 419, 23163, 980, 492, 36143, 17750, 13246, 516, 55008, 19227, 2520, 1178, 9406, 19394, 929, 2262, 262, 220, 100899, 13056, 442, 37176, 9248, 4566, 3141, 3798, 369, 24108, 198, 262, 220, 102269, 7472, 17804, 262, 220, 102114, 1572, 262, 220, 100928, 7472, 442, 2887, 3398, 20396, 198, 262, 220, 102626, 7472, 419, 23163, 980, 492, 1192, 17750, 13246, 516, 55008, 19227, 2520, 1178, 9406, 19394, 929, 2262, 262, 220, 99695, 13056, 442, 37176, 9248, 4566, 3141, 3798, 369, 24108, 198, 262, 220, 104340, 7472, 17804, 262, 220, 104160, 1572, 262, 220, 104127, 7472, 442, 27971, 3271, 20396, 198, 262, 220, 104029, 7472, 419, 23163, 980, 492, 17414, 17750, 13246, 516, 55008, 19227, 2520, 1178, 9406, 19394, 929, 2262, 262, 220, 102284, 13056, 442, 37176, 9248, 4566, 3141, 3798, 369, 24108, 198, 262, 220, 102807, 7472, 17804, 262, 220, 103878, 2760, 456, 262, 220, 101252, 1572, 262, 220, 103502, 2760, 1554, 262, 220, 100067, 9356, 353, 2126, 264, 20187, 943, 553, 1376, 624, 262, 220, 104327, 9356, 735, 262, 220, 103825, 2760, 633, 5808, 1592, 25, 914, 1648, 55008, 1979, 9406, 19394, 929, 760, 5614, 341, 
262, 220, 103946, 7472, 470, 419, 23163, 670, 5808, 1592, 317, 262, 220, 103992, 2760, 456, 262, 220, 101804, 1572, 262, 220, 102487, 2760, 1554, 262, 220, 103205, 9356, 353, 20509, 47286, 311, 458, 6440, 624, 262, 220, 101663, 9356, 735, 262, 220, 100809, 2760, 738, 35053, 804, 1006, 256, 220, 99457, 7472, 6440, 25, 55008, 1979, 9406, 345, 256, 220, 107609, 7472, 943, 1592, 25, 914, 345, 256, 220, 109871, 7472, 2606, 25, 55008, 22383, 7614, 3798, 19499, 256, 220, 110248, 2760, 16497, 737, 341, 256, 220, 109803, 7472, 733, 943, 284, 419, 23163, 670, 5808, 1592, 317, 256, 220, 108345, 7472, 421, 320, 1313, 8, 341, 256, 220, 109626, 13056, 6440, 980, 35053, 804, 5808, 11, 2606, 317, 256, 220, 110733, 7472, 456, 256, 220, 108479, 2760, 456, 256, 220, 110610, 1572, 256, 220, 104550, 2760, 1554, 256, 220, 111659, 9356, 353, 12017, 678, 47286, 504, 279, 4541, 6440, 624, 256, 220, 110800, 9356, 735, 256, 220, 114240, 2760, 2797, 2403, 56581, 4820, 55008, 1979, 9406, 1648, 737, 341, 256, 220, 114365, 7472, 733, 2169, 9406, 284, 6440, 9411, 55008, 19227, 13607, 1178, 9406, 280, 256, 220, 111508, 7472, 421, 320, 5657, 9406, 8, 341, 256, 220, 114495, 13056, 369, 320, 1024, 943, 315, 419, 23163, 10651, 2140, 341, 256, 220, 114959, 20789, 2169, 9406, 980, 35053, 804, 5808, 11, 41957, 256, 220, 112891, 13056, 456, 256, 220, 112114, 7472, 456, 256, 220, 103005, 2760, 456, 256, 220, 116045, 1572, 256, 220, 115760, 2760, 1554, 256, 220, 108714, 9356, 353, 12017, 264, 3151, 20187, 943, 624, 256, 220, 115878, 9356, 735, 256, 220, 109641, 2760, 2797, 56581, 25, 55008, 1979, 9406, 11, 943, 1592, 25, 914, 1648, 737, 341, 256, 220, 114062, 7472, 733, 943, 284, 419, 23163, 670, 5808, 1592, 317, 256, 220, 115925, 7472, 421, 320, 1313, 8, 341, 256, 220, 109295, 13056, 6440, 980, 35053, 804, 5808, 11, 41957, 256, 220, 117305, 7472, 456, 256, 220, 106464, 2760, 456, 256, 220, 118901, 1572, 256, 220, 118843, 2760, 1554, 256, 220, 117055, 9356, 353, 16455, 678, 20187, 4494, 13, 7143, 389, 8892, 409, 22941, 624, 256, 220, 119338, 9356, 735, 256, 220, 112840, 2760, 27299, 4555, 737, 341, 256, 220, 116996, 7472, 369, 320, 1024, 943, 315, 419, 23163, 10651, 2140, 341, 256, 220, 118558, 13056, 943, 30894, 543, 256, 220, 115547, 7472, 456, 256, 220, 117933, 7472, 419, 23163, 7426, 543, 256, 220, 108157, 2760, 456, 256, 220, 122804, 197, 532, 256, 220, 122866, 1572, 256, 220, 121498, 1572, 256, 220, 118836, 1572, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': 
""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/decorations.ts | sed -n '1,11p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 16, 11, 98965, 79, 1248, 73022, 151336], 'meta_info': {'id': '76aa3495e2bd45429cfed7a56ac83cbd', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 14788, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-2.455681169521995e-05, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.02668575756251812, 4616, 'cat'), (-0.005620746873319149, 481, ' -'), (0.0, 77, 'n'), (-1.4305104514278355e-06, 608, ' /'), (-1.1920928244535389e-07, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-3.576278118089249e-07, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.00040391870425082743, 13428, '/src'), (-0.0014604389434680343, 14, '/'), (-0.0005656072753481567, 27082, 'preview'), (-0.38959401845932007, 22490, '/de'), (-0.039408642798662186, 6005, 'cor'), (-0.0014074668288230896, 804, 'ations'), (-0.00033849707688204944, 21239, '.ts'), (-0.00027366707217879593, 760, ' |'), (-6.460934673668817e-05, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (-3.576278118089249e-07, 364, "" '""), (-1.9046835899353027, 16, '1'), (-4.768370445162873e-07, 11, ','), (-1.6508008241653442, 98965, '11'), (-1.1920928244535389e-07, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-0.00032360086333937943, 73022, '```'), (-1.8596476365928538e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 586, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 0.7526454925537109, 'response_sent_to_client_ts': 1767625332.0154748}}\r\n[2026-01-05 16:02:12] INFO: 10.86.2.252:52842 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +648,1137943,"src/preview/decorations.ts",107,0,"",typescript,selection_command +649,1138017,"TERMINAL",0,0,"[2026-01-05 16:03:44] Receive: obj=GenerateReqInput(validation_time=2.4206005036830902e-05, received_time=1767625424.7314637, received_time_perf=2480309.404389386, rid='47e2b38e4e2e4469b50258b7a5638a09', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 
1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 98965, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101294, 22, 24, 99082, 126382, 101474, 22, 23, 100702, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99951, 15, 13, 100809, 18, 23, 99951, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99082, 20, 13, 101478, 21, 100899, 17, 19, 99317, 11, 9276, 1131, 20, 100557, 99916, 18, 65, 100461, 16, 67, 99367, 64, 99695, 2577, 20, 12502, 24, 87358, 19, 64, 24, 100933, 18, 69, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 
11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104] ... [220, 110248, 2760, 16497, 737, 341, 256, 220, 109803, 7472, 733, 943, 284, 419, 23163, 670, 5808, 1592, 317, 256, 220, 108345, 7472, 421, 320, 1313, 8, 341, 256, 220, 109626, 13056, 6440, 980, 35053, 804, 5808, 11, 2606, 317, 256, 220, 110733, 7472, 456, 256, 220, 108479, 2760, 456, 256, 220, 110610, 1572, 256, 220, 104550, 2760, 1554, 256, 220, 111659, 9356, 353, 12017, 678, 47286, 504, 279, 4541, 6440, 624, 256, 220, 110800, 9356, 735, 256, 220, 114240, 2760, 2797, 2403, 56581, 4820, 55008, 1979, 9406, 1648, 737, 341, 256, 220, 114365, 7472, 733, 2169, 9406, 284, 6440, 9411, 55008, 19227, 13607, 1178, 9406, 280, 256, 220, 111508, 7472, 421, 320, 5657, 9406, 8, 341, 256, 220, 114495, 13056, 369, 320, 1024, 943, 315, 419, 23163, 10651, 2140, 341, 256, 220, 114959, 20789, 2169, 9406, 980, 35053, 804, 5808, 11, 41957, 256, 220, 112891, 13056, 456, 256, 220, 112114, 7472, 456, 256, 220, 103005, 2760, 456, 256, 220, 116045, 1572, 256, 220, 115760, 2760, 1554, 256, 220, 108714, 9356, 353, 12017, 264, 3151, 20187, 943, 624, 256, 220, 115878, 9356, 735, 256, 220, 109641, 2760, 2797, 56581, 25, 55008, 1979, 9406, 11, 943, 1592, 25, 914, 1648, 737, 341, 256, 220, 114062, 7472, 733, 943, 284, 419, 23163, 670, 5808, 1592, 317, 256, 220, 115925, 7472, 421, 320, 1313, 8, 341, 256, 220, 109295, 13056, 6440, 980, 35053, 804, 5808, 11, 41957, 256, 220, 117305, 7472, 456, 256, 220, 106464, 2760, 456, 256, 220, 118901, 1572, 256, 220, 118843, 2760, 1554, 256, 220, 117055, 9356, 353, 16455, 678, 20187, 4494, 13, 7143, 389, 8892, 409, 22941, 624, 256, 220, 119338, 9356, 735, 256, 220, 112840, 2760, 27299, 4555, 737, 341, 256, 220, 116996, 7472, 369, 320, 1024, 943, 315, 419, 23163, 10651, 2140, 341, 256, 220, 118558, 13056, 943, 30894, 543, 256, 220, 115547, 7472, 456, 256, 220, 117933, 7472, 419, 23163, 7426, 543, 256, 220, 108157, 2760, 456, 256, 220, 122804, 197, 532, 256, 220, 122866, 1572, 256, 220, 121498, 1572, 256, 220, 118836, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100772, 25, 98965, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 20, 100096, 114495, 22, 99869, 20, 101723, 122406, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 100702, 16, 13, 99916, 17, 22, 100899, 17, 11, 3949, 3009, 76167, 28, 
99590, 23, 15, 99146, 20, 13, 24, 100235, 106748, 19, 100557, 11, 9276, 1131, 102269, 5305, 101135, 24, 20, 68, 17, 8937, 100461, 19, 100104, 9787, 291, 22, 64, 101917, 580, 104127, 89998, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 
'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:03:44 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 594, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +650,1138236,"TERMINAL",0,0,"[2026-01-05 16:03:45] Receive: obj=GenerateReqInput(validation_time=1.9561033695936203e-05, received_time=1767625424.973887, received_time_perf=2480309.646812319, rid='179e31123a5d4e389000aa7139d80bda', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 
10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 99243, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 19, 100557, 102114, 21, 101140, 19, 20, 100928, 101655, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99951, 23, 13, 23, 100461, 19, 101562, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99317, 18, 13, 20, 99243, 101140, 17, 20, 101294, 11, 9276, 1131, 66, 101130, 22, 99695, 17, 65, 123886, 66, 19, 69, 21, 101252, 67, 102807, 68, 22, 100809, 64, 22, 65, 99916, 20, 100539, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11] ... 
[23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 16, 11, 99317, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 1572, 257, 220, 18, 197, 1747, 257, 220, 19, 197, 353, 16428, 7986, 369, 12959, 41095, 3941, 3100, 3446, 838, 19740, 624, 257, 220, 20, 197, 735, 257, 220, 21, 59028, 733, 86102, 284, 341, 257, 220, 22, 2760, 442, 6119, 4344, 481, 990, 30530, 6119, 594, 5798, 3419, 3638, 7986, 198, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100441, 25, 101723, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 125603, 103306, 99200, 18, 101840, 18, 15, 100067, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101961, 19, 13, 22, 100557, 101562, 18, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99064, 24, 13, 99698, 19, 100919, 24, 100919, 21, 11, 9276, 1131, 101655, 68, 17, 65, 100919, 68, 19, 68, 17, 68, 19, 101562, 24, 65, 99200, 17, 101729, 65, 22, 64, 101917, 18, 23, 64, 100614, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 
102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +651,1138315,"TERMINAL",0,0,"[2026-01-05 16:03:45 TP0] Prefill batch, #new-seq: 1, #new-token: 5845, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 1, \r\n",,terminal_output +652,1138516,"TERMINAL",0,0,"[2026-01-05 16:03:45 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.03, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +653,1138952,"TERMINAL",0,0,"[2026-01-05 16:03:45 
TP0] Prefill batch, #new-seq: 1, #new-token: 5830, #cached-token: 0, token usage: 0.05, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +654,1139330,"TERMINAL",0,0,"[2026-01-05 16:03:46] Finish: obj=GenerateReqInput(validation_time=2.4206005036830902e-05, received_time=1767625424.7314637, received_time_perf=2480309.404389386, rid='47e2b38e4e2e4469b50258b7a5638a09', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=1.5701167285442352e-05, received_time=1767625331.2627752, received_time_perf=2480215.935700431, rid='76aa3495e2bd45429cfed7a56ac83cbd', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 98965, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101294, 22, 24, 99082, 
126382, 101474, 22, 23, 100702, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99951, 15, 13, 100809, 18, 23, 99951, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99082, 20, 13, 101478, 21, 100899, 17, 19, 99317, 11, 9276, 1131, 20, 100557, 99916, 18, 65, 100461, 16, 67, 99367, 64, 99695, 2577, 20, 12502, 24, 87358, 19, 64, 24, 100933, 18, 69, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104] ... 
[220, 110248, 2760, 16497, 737, 341, 256, 220, 109803, 7472, 733, 943, 284, 419, 23163, 670, 5808, 1592, 317, 256, 220, 108345, 7472, 421, 320, 1313, 8, 341, 256, 220, 109626, 13056, 6440, 980, 35053, 804, 5808, 11, 2606, 317, 256, 220, 110733, 7472, 456, 256, 220, 108479, 2760, 456, 256, 220, 110610, 1572, 256, 220, 104550, 2760, 1554, 256, 220, 111659, 9356, 353, 12017, 678, 47286, 504, 279, 4541, 6440, 624, 256, 220, 110800, 9356, 735, 256, 220, 114240, 2760, 2797, 2403, 56581, 4820, 55008, 1979, 9406, 1648, 737, 341, 256, 220, 114365, 7472, 733, 2169, 9406, 284, 6440, 9411, 55008, 19227, 13607, 1178, 9406, 280, 256, 220, 111508, 7472, 421, 320, 5657, 9406, 8, 341, 256, 220, 114495, 13056, 369, 320, 1024, 943, 315, 419, 23163, 10651, 2140, 341, 256, 220, 114959, 20789, 2169, 9406, 980, 35053, 804, 5808, 11, 41957, 256, 220, 112891, 13056, 456, 256, 220, 112114, 7472, 456, 256, 220, 103005, 2760, 456, 256, 220, 116045, 1572, 256, 220, 115760, 2760, 1554, 256, 220, 108714, 9356, 353, 12017, 264, 3151, 20187, 943, 624, 256, 220, 115878, 9356, 735, 256, 220, 109641, 2760, 2797, 56581, 25, 55008, 1979, 9406, 11, 943, 1592, 25, 914, 1648, 737, 341, 256, 220, 114062, 7472, 733, 943, 284, 419, 23163, 670, 5808, 1592, 317, 256, 220, 115925, 7472, 421, 320, 1313, 8, 341, 256, 220, 109295, 13056, 6440, 980, 35053, 804, 5808, 11, 41957, 256, 220, 117305, 7472, 456, 256, 220, 106464, 2760, 456, 256, 220, 118901, 1572, 256, 220, 118843, 2760, 1554, 256, 220, 117055, 9356, 353, 16455, 678, 20187, 4494, 13, 7143, 389, 8892, 409, 22941, 624, 256, 220, 119338, 9356, 735, 256, 220, 112840, 2760, 27299, 4555, 737, 341, 256, 220, 116996, 7472, 369, 320, 1024, 943, 315, 419, 23163, 10651, 2140, 341, 256, 220, 118558, 13056, 943, 30894, 543, 256, 220, 115547, 7472, 456, 256, 220, 117933, 7472, 419, 23163, 7426, 543, 256, 220, 108157, 2760, 456, 256, 220, 122804, 197, 532, 256, 220, 122866, 1572, 256, 220, 121498, 1572, 256, 220, 118836, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100772, 25, 98965, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 20, 100096, 114495, 22, 99869, 20, 101723, 122406, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 100702, 16, 13, 99916, 17, 22, 100899, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99146, 20, 13, 24, 100235, 106748, 19, 100557, 11, 9276, 1131, 102269, 5305, 101135, 24, 20, 68, 17, 8937, 100461, 19, 100104, 9787, 291, 22, 64, 101917, 580, 104127, 89998, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 
220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/decorations.ts | sed -n '145,165p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 117721, 11, 116768, 79, 1248, 73022, 
151336], 'meta_info': {'id': '47e2b38e4e2e4469b50258b7a5638a09', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 14631, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-1.2040065485052764e-05, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.0526372566819191, 4616, 'cat'), (-0.003937350586056709, 481, ' -'), (0.0, 77, 'n'), (-9.536738616588991e-07, 608, ' /'), (0.0, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (-1.1920928244535389e-07, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.0728830375228426e-06, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0003363520372658968, 13428, '/src'), (-0.0011829291470348835, 14, '/'), (-0.0012635351158678532, 27082, 'preview'), (-0.3105050027370453, 22490, '/de'), (-0.039404746145009995, 6005, 'cor'), (-0.0016972911544144154, 804, 'ations'), (-0.0006952252588234842, 21239, '.ts'), (-0.00015925093612167984, 760, ' |'), (-1.0132738680113107e-05, 10918, ' sed'), (-1.1920928244535389e-07, 481, ' -'), (0.0, 77, 'n'), (-3.576278118089249e-07, 364, "" '""), (-3.0037879943847656, 117721, '145'), (-0.0011329191038385034, 11, ','), (-0.1594594568014145, 116768, '165'), (-1.1920928244535389e-07, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-0.00032360086333937943, 73022, '```'), (-6.48477507638745e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 594, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 1.3084492683410645, 'response_sent_to_client_ts': 1767625426.0400193}}\r\n[2026-01-05 16:03:46] Finish: obj=GenerateReqInput(validation_time=1.9561033695936203e-05, received_time=1767625424.973887, received_time_perf=2480309.646812319, rid='179e31123a5d4e389000aa7139d80bda', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""time=2.4206005036830902e-05, received_time=1767625424.7314637, received_time_perf=2480309.404389386, rid='47e2b38e4e2e4469b50258b7a5638a09', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198\n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 
518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 99243, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 19, 100557, 102114, 21, 101140, 19, 20, 100928, 101655, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99951, 23, 13, 23, 100461, 19, 101562, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99317, 18, 13, 20, 99243, 101140, 17, 20, 101294, 11, 9276, 1131, 66, 101130, 22, 99695, 17, 65, 123886, 66, 19, 69, 21, 101252, 67, 102807, 68, 22, 100809, 64, 22, 65, 99916, 20, 100539, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11] ... 
[23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 16, 11, 99317, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 1572, 257, 220, 18, 197, 1747, 257, 220, 19, 197, 353, 16428, 7986, 369, 12959, 41095, 3941, 3100, 3446, 838, 19740, 624, 257, 220, 20, 197, 735, 257, 220, 21, 59028, 733, 86102, 284, 341, 257, 220, 22, 2760, 442, 6119, 4344, 481, 990, 30530, 6119, 594, 5798, 3419, 3638, 7986, 198, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100441, 25, 101723, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 125603, 103306, 99200, 18, 101840, 18, 15, 100067, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101961, 19, 13, 22, 100557, 101562, 18, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99064, 24, 13, 99698, 19, 100919, 24, 100919, 21, 11, 9276, 1131, 101655, 68, 17, 65, 100919, 68, 19, 68, 17, 68, 19, 101562, 24, 65, 99200, 17, 101729, 65, 22, 64, 101917, 18, 23, 64, 100614, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 
102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/decorations.ts | sed -n '17,32p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 99419, 11, 101175, 79, 1248, 73022, 151336], 'meta_info': {'id': '179e31123a5d4e389000aa7139d80bda', 'finish_reason': {'type': 
'stop', 'matched': 151336}, 'prompt_tokens': 14618, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-1.8954096958623268e-05, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.1043323203921318, 4616, 'cat'), (-0.004726072307676077, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-1.764281842042692e-05, 608, ' /'), (-5.960462772236497e-07, 5117, 'home'), (-1.1920928244535389e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (-1.1920928244535389e-07, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-2.13382354559144e-05, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-5.960462772236497e-07, 79888, '-extension'), (-0.005720553454011679, 13428, '/src'), (-0.0015330478781834245, 14, '/'), (-0.033314790576696396, 27082, 'preview'), (-0.12740851938724518, 22490, '/de'), (-0.0007929041748866439, 6005, 'cor'), (-0.0005743046058341861, 804, 'ations'), (-1.6689160474925302e-05, 21239, '.ts'), (-0.000553335587028414, 760, ' |'), (-1.2278481335670222e-05, 10918, ' sed'), (0.0, 481, ' -'), (-1.311301275563892e-06, 77, 'n'), (-4.768370445162873e-07, 364, "" '""), (-0.6718662977218628, 99419, '17'), (-1.6689286894688848e-06, 11, ','), (-0.6657909750938416, 101175, '32'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-0.00032360086333937943, 73022, '```'), (-6.48477507638745e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 596, 'spec_accept_rate': 0.6923076923076923, 'spec_accept_length': 3.076923076923077, 'spec_verify_ct': 13, 'spec_accept_token_num': 27, 'spec_draft_token_num': 39, 'e2e_latency': 1.0819220542907715, 'response_sent_to_client_ts': 1767625426.0558782}}\r\n[2026-01-05 16:03:46] INFO: 10.86.2.252:47580 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +655,1162840,"src/preview/decorations.ts",892,0,"",typescript,selection_command +656,1162928,"TERMINAL",0,0,"[2026-01-05 16:04:09] Receive: obj=GenerateReqInput(validation_time=1.950608566403389e-05, received_time=1767625449.6404488, received_time_perf=2480334.313374401, rid='2c9447c290094572acd70e81f4dbb0a1', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 
1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 98729, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101917, 22, 99951, 21, 98729, 19, 103093, 16, 101411, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99951, 24, 13, 100372, 16, 99916, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99317, 18, 13, 22, 101723, 98729, 19, 99200, 24, 11, 9276, 1131, 17, 823, 3235, 12502, 102486, 1999, 103498, 67, 15, 64, 104340, 67, 4385, 17, 68, 18, 65, 22, 68, 24, 100702, 98668, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 
100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104] ... [11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100441, 25, 100461, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101804, 21, 110248, 100632, 24, 102573, 18, 21, 98360, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101961, 19, 13, 103205, 18, 101252, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99064, 24, 13, 21, 101562, 23, 98886, 18, 98729, 11, 9276, 1131, 126612, 68, 18, 98965, 99619, 64, 20, 67, 19, 68, 100919, 24, 99752, 5305, 22, 99366, 24, 67, 99695, 65, 3235, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 
11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, 
custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:04:09 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +657,1163112,"TERMINAL",0,0,"[2026-01-05 16:04:09] Receive: obj=GenerateReqInput(validation_time=1.5333760529756546e-05, received_time=1767625449.871383, received_time_perf=2480334.544308378, rid='2264f31127e840e081a02f37abc640c9', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 
364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100702, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 110610, 124212, 103919, 23, 99618, 99698, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 100104, 18, 13, 121743, 18, 99951, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99419, 22, 13, 23, 101175, 99446, 17, 22, 98965, 11, 9276, 1131, 101175, 65, 20, 99619, 100067, 64, 23, 99243, 19, 65, 24, 69, 104160, 8901, 99146, 69, 101917, 18, 21, 68, 100096, 24, 68, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220] ... 
[126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 99367, 11, 101135, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 262, 220, 99419, 1572, 262, 220, 99243, 2760, 442, 15807, 6168, 481, 990, 6440, 9083, 7986, 369, 28040, 198, 262, 220, 98729, 2760, 8822, 2512, 25, 341, 262, 220, 98360, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 54205, 1951, 3299, 262, 220, 99146, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 11711, 3299, 262, 220, 99241, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 22159, 3299, 262, 220, 99619, 2760, 1153, 262, 220, 99590, 1572, 262, 220, 99446, 2760, 442, 5586, 18872, 58068, 198, 262, 220, 99916, 2760, 15010, 25, 501, 55008, 69337, 1636, 492, 36143, 13, 51726, 19539, 3299, 262, 220, 99951, 2760, 1034, 16819, 25, 501, 55008, 69337, 1636, 492, 1318, 3939, 54205, 1951, 3299, 262, 220, 99869, 2760, 8127, 9855, 25, 501, 55008, 69337, 1636, 492, 8866, 14533, 54205, 1951, 3299, 262, 220, 100104, 197, 2440, 262, 220, 99064, 1572, 262, 220, 100557, 197, 1747, 262, 220, 101175, 197, 353, 22667, 315, 61139, 20187, 4494, 311, 5648, 4938, 36505, 323, 28248, 4671, 624, 262, 220, 100702, 197, 353, 4290, 3055, 11, 68244, 369, 678, 55770, 624, 262, 220, 101135, 197, 735, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 100614, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 24, 99200, 99618, 23, 20, 101478, 99698, 18, 100919, 24, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101723, 24, 13, 21, 99698, 19, 100933, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 18, 19, 13, 18, 99366, 101140, 19, 99698, 16, 11, 9276, 1131, 17, 66, 24, 101723, 22, 66, 124484, 100614, 19, 102486, 17, 93322, 100096, 68, 104340, 69, 19, 1999, 65, 15, 64, 16, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 
220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +658,1163254,"TERMINAL",0,0,"[2026-01-05 16:04:09 TP0] Prefill batch, #new-seq: 1, #new-token: 5827, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 1, \r\n",,terminal_output +659,1163554,"TERMINAL",0,0,"[2026-01-05 16:04:10 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.03, #running-req: 1, #queue-req: 0, 
\r\n",,terminal_output +660,1163984,"TERMINAL",0,0,"[2026-01-05 16:04:10 TP0] Prefill batch, #new-seq: 1, #new-token: 5932, #cached-token: 0, token usage: 0.05, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +661,1164086,"TERMINAL",0,0,"[2026-01-05 16:04:10 TP0] Decode batch, #running-req: 2, #token: 28785, token usage: 0.06, accept len: 3.29, accept rate: 0.82, cuda graph: True, gen throughput (token/s): 1.58, #queue-req: 0, \r\n",,terminal_output +662,1164252,"TERMINAL",0,0,"[2026-01-05 16:04:10] Finish: obj=GenerateReqInput(validation_time=1.950608566403389e-05, received_time=1767625449.6404488, received_time_perf=2480334.313374401, rid='2c9447c290094572acd70e81f4dbb0a1', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=1.9561033695936203e-05, received_time=1767625424.973887, received_time_perf=2480309.646812319, rid='179e31123a5d4e389000aa7139d80bda', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 98729, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101917, 22, 99951, 21, 
98729, 19, 103093, 16, 101411, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99951, 24, 13, 100372, 16, 99916, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99317, 18, 13, 22, 101723, 98729, 19, 99200, 24, 11, 9276, 1131, 17, 823, 3235, 12502, 102486, 1999, 103498, 67, 15, 64, 104340, 67, 4385, 17, 68, 18, 65, 22, 68, 24, 100702, 98668, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104] ... 
[11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100441, 25, 100461, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101804, 21, 110248, 100632, 24, 102573, 18, 21, 98360, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101961, 19, 13, 103205, 18, 101252, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99064, 24, 13, 21, 101562, 23, 98886, 18, 98729, 11, 9276, 1131, 126612, 68, 18, 98965, 99619, 64, 20, 67, 19, 68, 100919, 24, 99752, 5305, 22, 99366, 24, 67, 99695, 65, 3235, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 
24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/decorations.ts | sed -n '17,30p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 99419, 11, 99064, 79, 1248, 73022, 151336], 'meta_info': {'id': '2c9447c290094572acd70e81f4dbb0a1', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 14615, 'weight_version': 'default', 
'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-6.079655122448457e-06, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.0923873633146286, 4616, 'cat'), (-0.004716936498880386, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-1.2040065485052764e-05, 608, ' /'), (-1.0728830375228426e-06, 5117, 'home'), (-1.1920928244535389e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.1205610462639015e-05, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-3.576278118089249e-07, 79888, '-extension'), (-0.0047800554893910885, 13428, '/src'), (-0.000931663322262466, 14, '/'), (-0.006831622216850519, 27082, 'preview'), (-0.1083030104637146, 22490, '/de'), (-0.00562845217064023, 6005, 'cor'), (-0.000568228424526751, 804, 'ations'), (-4.2199197196168825e-05, 21239, '.ts'), (-0.0005539313424378633, 760, ' |'), (-2.0265373677830212e-05, 10918, ' sed'), (0.0, 481, ' -'), (-6.318072337307967e-06, 77, 'n'), (-3.576278118089249e-07, 364, "" '""), (-0.6673319339752197, 99419, '17'), (-7.152555099310121e-07, 11, ','), (-1.2901575565338135, 99064, '30'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-0.00032360086333937943, 73022, '```'), (-2.2172682292875834e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 596, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 1.311964988708496, 'response_sent_to_client_ts': 1767625450.9526744}}\r\n[2026-01-05 16:04:10] Finish: obj=GenerateReqInput(validation_time=1.5333760529756546e-05, received_time=1767625449.871383, received_time_perf=2480334.544308378, rid='2264f31127e840e081a02f37abc640c9', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""time=1.950608566403389e-05, received_time=1767625449.6404488, received_time_perf=2480334.313374401, rid='2c9447c290094572acd70e81f4dbb0a1', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198,\n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 
518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100702, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 110610, 124212, 103919, 23, 99618, 99698, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 100104, 18, 13, 121743, 18, 99951, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99419, 22, 13, 23, 101175, 99446, 17, 22, 98965, 11, 9276, 1131, 101175, 65, 20, 99619, 100067, 64, 23, 99243, 19, 65, 24, 69, 104160, 8901, 99146, 69, 101917, 18, 21, 68, 100096, 24, 68, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220] ... 
[126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 99367, 11, 101135, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 262, 220, 99419, 1572, 262, 220, 99243, 2760, 442, 15807, 6168, 481, 990, 6440, 9083, 7986, 369, 28040, 198, 262, 220, 98729, 2760, 8822, 2512, 25, 341, 262, 220, 98360, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 54205, 1951, 3299, 262, 220, 99146, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 11711, 3299, 262, 220, 99241, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 22159, 3299, 262, 220, 99619, 2760, 1153, 262, 220, 99590, 1572, 262, 220, 99446, 2760, 442, 5586, 18872, 58068, 198, 262, 220, 99916, 2760, 15010, 25, 501, 55008, 69337, 1636, 492, 36143, 13, 51726, 19539, 3299, 262, 220, 99951, 2760, 1034, 16819, 25, 501, 55008, 69337, 1636, 492, 1318, 3939, 54205, 1951, 3299, 262, 220, 99869, 2760, 8127, 9855, 25, 501, 55008, 69337, 1636, 492, 8866, 14533, 54205, 1951, 3299, 262, 220, 100104, 197, 2440, 262, 220, 99064, 1572, 262, 220, 100557, 197, 1747, 262, 220, 101175, 197, 353, 22667, 315, 61139, 20187, 4494, 311, 5648, 4938, 36505, 323, 28248, 4671, 624, 262, 220, 100702, 197, 353, 4290, 3055, 11, 68244, 369, 678, 55770, 624, 262, 220, 101135, 197, 735, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 100614, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 24, 99200, 99618, 23, 20, 101478, 99698, 18, 100919, 24, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101723, 24, 13, 21, 99698, 19, 100933, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 18, 19, 13, 18, 99366, 101140, 19, 99698, 16, 11, 9276, 1131, 17, 66, 24, 101723, 22, 66, 124484, 100614, 19, 102486, 17, 93322, 100096, 68, 104340, 69, 19, 1999, 65, 15, 64, 16, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 
220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/decorations.ts | sed -n '34,54p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 101135, 11, 102856, 79, 1248, 73022, 151336], 'meta_info': 
{'id': '2264f31127e840e081a02f37abc640c9', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 14720, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-5.483612312673358e-06, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.02768450789153576, 4616, 'cat'), (-0.0032943999394774437, 481, ' -'), (0.0, 77, 'n'), (-1.5497195136049413e-06, 608, ' /'), (0.0, 5117, 'home'), (-3.576278118089249e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-1.1920922133867862e-06, 2899, '/c'), (0.0, 651, 'row'), (-1.1920928244535389e-07, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (-0.0013771107187494636, 13428, '/src'), (-0.00029297350556589663, 14, '/'), (-0.0011648305226117373, 27082, 'preview'), (-0.03163433447480202, 22490, '/de'), (-0.001353777595795691, 6005, 'cor'), (-0.0005629861843772233, 804, 'ations'), (-8.4638240878121e-06, 21239, '.ts'), (-4.410734163684538e-06, 760, ' |'), (-8.344646857949556e-07, 10918, ' sed'), (0.0, 481, ' -'), (-7.152555099310121e-07, 77, 'n'), (0.0, 364, "" '""), (-1.4593664407730103, 101135, '34'), (-7.152555099310121e-07, 11, ','), (-0.032682958990335464, 102856, '54'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-0.00018952481332235038, 73022, '```'), (-6.48477507638745e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 596, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 1.0811972618103027, 'response_sent_to_client_ts': 1767625450.954449}}\r\n[2026-01-05 16:04:10] INFO: 10.86.2.252:36648 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +663,1174680,"src/preview/decorations.ts",885,0,"",typescript,selection_command +664,1174912,"src/preview/decorations.ts",823,0,"",typescript,selection_command +665,1174950,"src/preview/decorations.ts",753,0,"",typescript,selection_command +666,1174967,"src/preview/decorations.ts",683,0,"",typescript,selection_command +667,1174985,"TERMINAL",0,0,"[2026-01-05 16:04:21] Receive: obj=GenerateReqInput(validation_time=2.786284312605858e-05, received_time=1767625461.7107995, received_time_perf=2480346.383725308, rid='a507c12c856b4b2a9277a2111640e6b0', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 
45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99698, 11, 99618, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99698, 2760, 456, 262, 220, 102340, 1572, 262, 220, 101961, 2760, 1554, 262, 220, 102088, 9356, 353, 39350, 7381, 9750, 3589, 624, 262, 220, 101723, 9356, 735, 262, 220, 100461, 2760, 3410, 25246, 33030, 4353, 1006, 262, 220, 101562, 7472, 2197, 25, 55008, 1979, 7524, 345, 262, 220, 101655, 7472, 2309, 25, 55008, 21900, 345, 262, 220, 100933, 7472, 2266, 25, 55008, 5337, 1056, 33030, 1972, 345, 262, 220, 101474, 7472, 3950, 25, 55008, 727, 23860, 198, 262, 220, 99200, 2760, 16497, 55008, 36025, 2077, 27, 11557, 1851, 5337, 1056, 33030, 852, 760, 55008, 5337, 1056, 33030, 1234, 20205, 341, 262, 220, 102624, 7472, 421, 1505, 574, 22086, 1369, 753, 574, 12389, 8, 341, 262, 220, 102501, 13056, 470, 5907, 262, 220, 102721, 7472, 456, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 8277, 3705, 10521, 5656, 908, 320, 1921, 52703, 340, 262, 220, 101917, 7472, 442, 1032, 63082, 525, 17583, 553, 47286, 311, 10272, 1473, 1128, 594, 1660, 11057, 198, 262, 220, 102486, 7472, 421, 320, 574, 12389, 36442, 4376, 364, 3587, 
13771, 863, 341, 262, 220, 101729, 13056, 470, 5907, 262, 220, 102573, 7472, 456, 262, 220, 99618, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 99698, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 18, 99366, 100235, 23, 100096, 24, 122250, 112891, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 15, 13, 100632, 23, 102807, 17, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99243, 20, 13, 100590, 16, 100928, 22, 100461, 19, 11, 9276, 1131, 24, 100557, 23, 13225, 103093, 64, 15, 99064, 19, 65, 100614, 24, 100441, 19, 65, 23, 101562, 92482, 18, 68, 124399, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11] ... [220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 100614, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 20, 100702, 101140, 21, 100002, 17, 24, 100899, 101411, 19, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101723, 24, 13, 23, 103437, 100919, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 18, 19, 13, 20, 101723, 123786, 18, 100928, 11, 9276, 1131, 99241, 21, 19, 69, 18, 98965, 99951, 68, 23, 99698, 68, 
100562, 16, 64, 100772, 69, 101140, 13674, 21, 99698, 66, 24, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 
'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:04:21 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 616, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +668,1175013,"src/preview/decorations.ts",665,0,"",typescript,selection_command +669,1175052,"src/preview/decorations.ts",602,0,"",typescript,selection_command +670,1175084,"src/preview/decorations.ts",601,0,"",typescript,selection_command +671,1175104,"src/preview/decorations.ts",594,0,"",typescript,selection_command +672,1175136,"src/preview/decorations.ts",530,0,"",typescript,selection_command +673,1175180,"src/preview/decorations.ts",458,0,"",typescript,selection_command +674,1175209,"src/preview/decorations.ts",378,0,"",typescript,selection_command +675,1175244,"src/preview/decorations.ts",361,0,"",typescript,selection_command +676,1175268,"TERMINAL",0,0,"[2026-01-05 16:04:22 TP0] Prefill batch, #new-seq: 1, #new-token: 5914, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +677,1175268,"src/preview/decorations.ts",354,0,"",typescript,selection_command +678,1175312,"src/preview/decorations.ts",283,0,"",typescript,selection_command +679,1175416,"src/preview/decorations.ts",204,0,"",typescript,selection_command +680,1175586,"src/preview/decorations.ts",188,0,"",typescript,selection_command +681,1175626,"TERMINAL",0,0,"[2026-01-05 16:04:22] Receive: obj=GenerateReqInput(validation_time=2.025766298174858e-05, received_time=1767625462.3443887, received_time_perf=2480347.017314343, rid='6cde1cb097664b9490a841f012ea501b', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 
320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 262, 220, 103502, 88166, 262, 220, 100067, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 23044, 863, 341, 262, 220, 104327, 13056, 470, 8127, 2460, 2604, 419, 12389, 30029, 4962, 58, 15, 60, 481, 220, 16, 1009, 715, 262, 220, 103825, 197, 4293, 8127, 2460, 2651, 419, 12389, 30029, 5073, 58, 15, 60, 488, 220, 16, 280, 262, 220, 103946, 7472, 456, 262, 220, 103992, 88166, 262, 220, 101804, 7472, 470, 895, 280, 262, 220, 102487, 2760, 456, 262, 220, 103205, 197, 532, 262, 220, 101663, 1572, 262, 220, 100809, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 99698, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 22, 101478, 23, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 99698, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101130, 20, 100372, 20, 23, 101840, 102487, 19, 122414, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 15, 13, 22, 99698, 15, 99916, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99243, 20, 13, 19, 98886, 101804, 16, 21, 100928, 11, 9276, 1131, 370, 580, 19, 100096, 67, 24, 64, 15, 66, 101723, 66, 3632, 101478, 16, 66, 15, 67, 112596, 99619, 19, 66, 101252, 516, 1758, 
40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220] ... [99869, 19, 18, 98886, 99618, 20, 23, 101729, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 16, 13, 22, 98668, 22, 100809, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 21, 13, 100919, 18, 22, 99446, 123786, 11, 9276, 1131, 64, 99200, 22, 66, 98886, 66, 23, 101917, 65, 19, 65, 17, 64, 24, 99951, 22, 64, 110234, 126293, 15, 68, 21, 65, 15, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 
101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 18, 11, 99619, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 18, 197, 1747, 257, 220, 19, 197, 353, 16428, 7986, 369, 12959, 41095, 3941, 3100, 3446, 838, 19740, 624, 257, 220, 20, 197, 735, 257, 220, 21, 59028, 733, 86102, 284, 341, 257, 220, 22, 2760, 442, 6119, 4344, 481, 990, 30530, 6119, 594, 5798, 3419, 3638, 7986, 198, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 262, 220, 99419, 1572, 262, 220, 99243, 2760, 442, 15807, 6168, 481, 990, 6440, 9083, 7986, 369, 28040, 198, 262, 220, 98729, 2760, 8822, 2512, 25, 341, 262, 220, 98360, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 54205, 1951, 3299, 262, 220, 99146, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 11711, 3299, 262, 220, 99241, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 22159, 3299, 262, 220, 99619, 2760, 1153, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99241, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 102573, 16, 19, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, 
return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:04:22 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 586, token usage: 0.03, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +682,1175691,"TERMINAL",0,0,"[2026-01-05 16:04:22] Receive: obj=GenerateReqInput(validation_time=1.8868129700422287e-05, received_time=1767625462.445087, received_time_perf=2480347.118012943, rid='ff14c03a8c8547198a78897aa7e54725', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 
62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 262, 220, 103502, 88166, 262, 220, 100067, 7472, 421, 320, 574, 12389, 36442, 2049, 364, 3587, 23044, 863, 341, 262, 220, 104327, 13056, 470, 8127, 2460, 2604, 419, 12389, 30029, 4962, 58, 15, 60, 481, 220, 16, 1009, 715, 262, 220, 103825, 197, 4293, 8127, 2460, 2651, 419, 12389, 30029, 5073, 58, 15, 60, 488, 220, 16, 280, 262, 220, 103946, 7472, 456, 262, 220, 103992, 88166, 262, 220, 101804, 7472, 470, 895, 280, 262, 220, 102487, 2760, 456, 262, 220, 103205, 197, 532, 262, 220, 101663, 1572, 262, 220, 100809, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 99698, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 22, 101478, 23, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 99698, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101130, 20, 100372, 20, 23, 101840, 102487, 19, 122414, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 15, 13, 22, 99698, 15, 99916, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99243, 20, 13, 19, 98886, 101804, 16, 21, 100928, 11, 9276, 1131, 370, 580, 19, 100096, 67, 24, 64, 15, 66, 101723, 66, 3632, 101478, 16, 66, 15, 67, 112596, 99619, 19, 66, 101252, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220] ... 
[99869, 19, 18, 98886, 99618, 20, 23, 101729, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 16, 13, 22, 98668, 22, 100809, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 21, 13, 100919, 18, 22, 99446, 123786, 11, 9276, 1131, 64, 99200, 22, 66, 98886, 66, 23, 101917, 65, 19, 65, 17, 64, 24, 99951, 22, 64, 110234, 126293, 15, 68, 21, 65, 15, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 
77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 18, 11, 99619, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 18, 197, 1747, 257, 220, 19, 197, 353, 16428, 7986, 369, 12959, 41095, 3941, 3100, 3446, 838, 19740, 624, 257, 220, 20, 197, 735, 257, 220, 21, 59028, 733, 86102, 284, 341, 257, 220, 22, 2760, 442, 6119, 4344, 481, 990, 30530, 6119, 594, 5798, 3419, 3638, 7986, 198, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 262, 220, 99419, 1572, 262, 220, 99243, 2760, 442, 15807, 6168, 481, 990, 6440, 9083, 7986, 369, 28040, 198, 262, 220, 98729, 2760, 8822, 2512, 25, 341, 262, 220, 98360, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 54205, 1951, 3299, 262, 220, 99146, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 11711, 3299, 262, 220, 99241, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 22159, 3299, 262, 220, 99619, 2760, 1153, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99241, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 102573, 16, 19, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +683,1175727,"src/preview/decorations.ts",131,0,"",typescript,selection_command +684,1175882,"TERMINAL",0,0,"[2026-01-05 16:04:22 TP0] Prefill batch, #new-seq: 2, #new-token: 8192, #cached-token: 8777, token usage: 0.05, #running-req: 1, #queue-req: 0, \r\n",,terminal_output +685,1175953,"src/preview/decorations.ts",135,0,"",typescript,selection_command +686,1176032,"TERMINAL",0,0,"[2026-01-05 16:04:22] Receive: 
obj=GenerateReqInput(validation_time=1.695193350315094e-05, received_time=1767625462.7443173, received_time_perf=2480347.41724249, rid='2b6a841afab34b4eab205ad9af04a5a7', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 
115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 102088, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100899, 19, 100096, 19, 102807, 17, 23, 100702, 15, 99619, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 18, 13, 20, 99419, 117509, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99243, 23, 13, 114146, 117055, 101723, 16, 11, 9276, 1131, 1371, 17, 65, 99887, 18, 67, 20, 67, 101135, 19, 100002, 23, 65, 2940, 66, 99916, 65, 24, 69, 18, 66, 103878, 68, 19, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104] ... 
[760, 10918, 481, 77, 364, 18, 11, 99619, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 18, 197, 1747, 257, 220, 19, 197, 353, 16428, 7986, 369, 12959, 41095, 3941, 3100, 3446, 838, 19740, 624, 257, 220, 20, 197, 735, 257, 220, 21, 59028, 733, 86102, 284, 341, 257, 220, 22, 2760, 442, 6119, 4344, 481, 990, 30530, 6119, 594, 5798, 3419, 3638, 7986, 198, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 262, 220, 99419, 1572, 262, 220, 99243, 2760, 442, 15807, 6168, 481, 990, 6440, 9083, 7986, 369, 28040, 198, 262, 220, 98729, 2760, 8822, 2512, 25, 341, 262, 220, 98360, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 54205, 1951, 3299, 262, 220, 99146, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 11711, 3299, 262, 220, 99241, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 22159, 3299, 262, 220, 99619, 2760, 1153, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99241, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 102573, 16, 19, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99241, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 15, 99446, 22, 101478, 100104, 23, 127031, 23, 101729, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 17, 13, 101135, 19, 100919, 23, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 22, 13, 15, 99419, 18, 99367, 101135, 18, 11, 9276, 1131, 21, 66, 450, 16, 7221, 100614, 22, 101478, 19, 65, 24, 101474, 15, 64, 23, 102340, 69, 15, 98886, 12502, 99200, 16, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 
99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:04:22] Receive: obj=GenerateReqInput(validation_time=2.6139896363019943e-05, received_time=1767625462.787606, received_time_perf=2480347.460531284, rid='7d554078b2bc4544afdafaeef3c343a8', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 
9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 102088, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100899, 19, 100096, 19, 102807, 17, 23, 100702, 15, 99619, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 18, 13, 20, 99419, 117509, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99243, 23, 13, 114146, 117055, 101723, 16, 11, 9276, 1131, 1371, 17, 65, 99887, 18, 
67, 20, 67, 101135, 19, 100002, 23, 65, 2940, 66, 99916, 65, 24, 69, 18, 66, 103878, 68, 19, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104] ... 
[760, 10918, 481, 77, 364, 18, 11, 99619, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 18, 197, 1747, 257, 220, 19, 197, 353, 16428, 7986, 369, 12959, 41095, 3941, 3100, 3446, 838, 19740, 624, 257, 220, 20, 197, 735, 257, 220, 21, 59028, 733, 86102, 284, 341, 257, 220, 22, 2760, 442, 6119, 4344, 481, 990, 30530, 6119, 594, 5798, 3419, 3638, 7986, 198, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 262, 220, 99419, 1572, 262, 220, 99243, 2760, 442, 15807, 6168, 481, 990, 6440, 9083, 7986, 369, 28040, 198, 262, 220, 98729, 2760, 8822, 2512, 25, 341, 262, 220, 98360, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 54205, 1951, 3299, 262, 220, 99146, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 11711, 3299, 262, 220, 99241, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 22159, 3299, 262, 220, 99619, 2760, 1153, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99241, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 102573, 16, 19, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99241, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 15, 99446, 22, 101478, 100104, 23, 127031, 23, 101729, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 17, 13, 101135, 19, 100919, 23, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 22, 13, 15, 99419, 18, 99367, 101135, 18, 11, 9276, 1131, 21, 66, 450, 16, 7221, 100614, 22, 101478, 19, 65, 24, 101474, 15, 64, 23, 102340, 69, 15, 98886, 12502, 99200, 16, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 
99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +687,1176090,"src/preview/decorations.ts",138,0,"",typescript,selection_command +688,1176216,"src/preview/decorations.ts",143,0,"",typescript,selection_command +689,1176216,"TERMINAL",0,0,"[2026-01-05 16:04:23 TP0] Prefill batch, #new-seq: 1, #new-token: 3865, #cached-token: 0, token usage: 0.06, #running-req: 2, #queue-req: 2, \r\n",,terminal_output +690,1176318,"TERMINAL",0,0,"[2026-01-05 16:04:23] Receive: 
obj=GenerateReqInput(validation_time=1.8508173525333405e-05, received_time=1767625463.0569155, received_time_perf=2480347.729840839, rid='8dcdacbef9234d20b3f7849998e62127', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 
115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 101723, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101140, 24, 100096, 22, 23, 99869, 126293, 99457, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 19, 13, 104836, 23, 99419, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99243, 23, 13, 24, 99619, 22, 101961, 108714, 11, 9276, 1131, 101252, 17, 23, 100235, 20, 65, 21, 99317, 99367, 64, 23, 12502, 120392, 15, 3632, 102486, 69, 19, 98886, 102269, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18] ... 
[122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99241, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 17, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 23, 102114, 22, 11, 3950, 10426, 25, 220, 15, 13, 100002, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99241, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 21, 101804, 98729, 18, 108642, 125255, 100614, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 17, 13, 22, 101723, 18, 99419, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 22, 13, 19, 99419, 99590, 17, 101474, 11, 9276, 1131, 17, 65, 21, 64, 23, 102340, 2577, 370, 101135, 65, 19, 68, 370, 120547, 329, 24, 2577, 100590, 64, 20, 64, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 
100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +691,1176386,"src/preview/decorations.ts",151,0,"",typescript,selection_command +692,1176417,"TERMINAL",0,0,"[2026-01-05 16:04:23] Receive: obj=GenerateReqInput(validation_time=1.7113983631134033e-05, received_time=1767625463.120095, received_time_perf=2480347.793020331, rid='416c4be941ad400a85b80fec34b1055f', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 
3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 101723, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101140, 24, 100096, 22, 23, 99869, 126293, 99457, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 19, 13, 104836, 23, 99419, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99243, 23, 13, 24, 99619, 22, 101961, 108714, 11, 9276, 
1131, 101252, 17, 23, 100235, 20, 65, 21, 99317, 99367, 64, 23, 12502, 120392, 15, 3632, 102486, 69, 19, 98886, 102269, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18] ... 
[11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99241, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 17, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 23, 102114, 22, 11, 3950, 10426, 25, 220, 15, 13, 100002, 11, 671, 27084, 12, 2958, 25, 220, 16, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99241, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 21, 101804, 98729, 18, 108642, 125255, 100614, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 17, 13, 22, 101723, 18, 99419, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 22, 13, 19, 99419, 99590, 17, 101474, 11, 9276, 1131, 17, 65, 21, 64, 23, 102340, 2577, 370, 101135, 65, 19, 68, 370, 120547, 329, 24, 2577, 100590, 64, 20, 64, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 
11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99619, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 100919, 21, 20, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100539, 11, 671, 27084, 12, 2958, 25, 220, 17, 11, 671, 4584, 12, 2958, 25, 220, 17, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:04:23 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.06, #running-req: 3, #queue-req: 3, \r\n",,terminal_output +693,1176532,"src/preview/decorations.ts",153,0,"",typescript,selection_command +694,1176600,"TERMINAL",0,0,"[2026-01-05 16:04:23] Receive: obj=GenerateReqInput(validation_time=1.8497928977012634e-05, received_time=1767625463.3573108, received_time_perf=2480348.030236059, rid='30dec00403a64f2998f8fdf39567346c', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 
2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 98360, 11, 99698, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 
262, 220, 98360, 1572, 262, 220, 99146, 2760, 1554, 262, 220, 99241, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99619, 9356, 735, 262, 220, 99590, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 99446, 7472, 419, 12389, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 2126, 279, 1482, 1917, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 633, 2512, 4555, 5586, 760, 845, 341, 262, 220, 101175, 7472, 470, 419, 12389, 280, 262, 220, 100702, 2760, 456, 262, 220, 101135, 1572, 262, 220, 100235, 2760, 1554, 262, 220, 100632, 9356, 353, 18535, 476, 11151, 279, 9106, 624, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100933, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102626, 17, 114146, 20, 102487, 101562, 16, 100104, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 23, 13, 127031, 100067, 16, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98729, 17, 13, 23, 101655, 23, 99916, 21, 101804, 11, 9276, 1131, 20, 4475, 23, 100104, 8937, 18, 100933, 19, 100809, 102573, 18, 69, 99618, 17, 13225, 101135, 22, 98360, 631, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866] ... 
[103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99619, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 98965, 101294, 23, 100632, 18, 114240, 99698, 18, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 18, 13, 103005, 100614, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 22, 13, 102626, 18, 15, 98360, 18, 100557, 11, 9276, 1131, 19, 99317, 66, 19, 1371, 24, 102340, 329, 102259, 64, 102284, 65, 99695, 75918, 101135, 65, 108345, 20, 69, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 
220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +695,1176729,"TERMINAL",0,0,"[2026-01-05 16:04:23] Receive: obj=GenerateReqInput(validation_time=1.826370134949684e-05, received_time=1767625463.4202561, received_time_perf=2480348.09318141, rid='e2858243b6234b198bd487ee8693c4a1', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 
15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 98360, 11, 99698, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 98360, 1572, 262, 220, 99146, 2760, 1554, 262, 220, 99241, 9356, 353, 12017, 279, 1482, 1917, 624, 262, 220, 99619, 9356, 735, 262, 220, 99590, 2760, 2797, 2512, 4555, 737, 341, 262, 220, 
99446, 7472, 419, 12389, 284, 845, 280, 262, 220, 99916, 2760, 456, 262, 220, 99951, 1572, 262, 220, 99869, 2760, 1554, 262, 220, 100104, 9356, 353, 2126, 279, 1482, 1917, 624, 262, 220, 99064, 9356, 735, 262, 220, 100557, 2760, 633, 2512, 4555, 5586, 760, 845, 341, 262, 220, 101175, 7472, 470, 419, 12389, 280, 262, 220, 100702, 2760, 456, 262, 220, 101135, 1572, 262, 220, 100235, 2760, 1554, 262, 220, 100632, 9356, 353, 18535, 476, 11151, 279, 9106, 624, 262, 220, 101140, 9356, 735, 262, 220, 100919, 2760, 738, 5462, 87027, 25, 2710, 1648, 737, 341, 262, 220, 101294, 7472, 419, 22086, 284, 8967, 280, 262, 220, 99698, 2760, 456, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100933, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102626, 17, 114146, 20, 102487, 101562, 16, 100104, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 23, 13, 127031, 100067, 16, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98729, 17, 13, 23, 101655, 23, 99916, 21, 101804, 11, 9276, 1131, 20, 4475, 23, 100104, 8937, 18, 100933, 19, 100809, 102573, 18, 69, 99618, 17, 13225, 101135, 22, 98360, 631, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866] ... 
[103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99619, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 98965, 101294, 23, 100632, 18, 114240, 99698, 18, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 18, 13, 103005, 100614, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 22, 13, 102626, 18, 15, 98360, 18, 100557, 11, 9276, 1131, 19, 99317, 66, 19, 1371, 24, 102340, 329, 102259, 64, 102284, 65, 99695, 75918, 101135, 65, 108345, 20, 69, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 
220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:04:23 TP0] Prefill batch, #new-seq: 2, #new-token: 8192, #cached-token: 8787, token usage: 0.08, #running-req: 3, #queue-req: 4, \r\n",,terminal_output +696,1176833,"src/preview/decorations.ts",152,0,"",typescript,selection_command +697,1176909,"TERMINAL",0,0,"[2026-01-05 16:04:23] Receive: obj=GenerateReqInput(validation_time=1.7043203115463257e-05, received_time=1767625463.6564815, received_time_perf=2480348.329407107, rid='d83af7f73baf4a3a8315eb8491c16ac0', http_worker_ipc=None, text=None, 
input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100933, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102626, 17, 114146, 20, 102487, 101562, 16, 100104, 21, 68, 12, 
100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 23, 13, 127031, 100067, 16, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98729, 17, 13, 23, 101655, 23, 99916, 21, 101804, 11, 9276, 1131, 20, 4475, 23, 100104, 8937, 18, 100933, 19, 100809, 102573, 18, 69, 99618, 17, 13225, 101135, 22, 98360, 631, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916] ... 
[11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99619, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 101474, 102626, 17, 23, 103205, 100096, 16, 99916, 18, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 18, 13, 100235, 22, 120979, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 23, 13, 15, 99064, 99619, 21, 100002, 24, 11, 9276, 1131, 99064, 8168, 98503, 19, 100441, 64, 102636, 69, 100104, 24, 23, 69, 23, 79930, 101294, 20, 102952, 18, 101562, 66, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 
119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +698,1176952,"src/preview/decorations.ts",151,0,"",typescript,selection_command +699,1177018,"TERMINAL",0,0,"[2026-01-05 16:04:23 TP0] Prefill batch, #new-seq: 1, #new-token: 3711, #cached-token: 0, token usage: 0.10, #running-req: 4, #queue-req: 5, \r\n",,terminal_output +700,1177116,"src/preview/decorations.ts",150,0,"",typescript,selection_command +701,1177192,"src/preview/decorations.ts",150,1," ",typescript,selection_command +702,1177213,"TERMINAL",0,0,"[2026-01-05 16:04:23] Receive: 
obj=GenerateReqInput(validation_time=1.8671154975891113e-05, received_time=1767625463.9565697, received_time_perf=2480348.629494966, rid='3fb8d7f906f0496c8e16ed550e4f222d', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 
115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100933, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 99695, 20, 99618, 16, 21, 102340, 20, 100235, 100899, 24, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 23, 13, 100235, 22, 107578, 16, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98729, 18, 13, 15, 99064, 23, 99446, 99590, 23, 11, 9276, 1131, 69, 118173, 19, 64, 16, 66, 102626, 22, 65, 101562, 19, 3065, 8315, 15, 12502, 22, 100928, 21, 102650, 20, 100632, 16, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104] ... 
[122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99619, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100096, 19, 111782, 18, 98965, 20, 101562, 18, 99446, 22, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 18, 13, 101411, 21, 100933, 16, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 23, 13, 18, 100104, 99698, 22, 110733, 11, 9276, 1131, 67, 104127, 2577, 22, 69, 103388, 65, 2577, 19, 64, 18, 64, 23, 100557, 20, 3065, 23, 101474, 16, 66, 99317, 580, 15, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 
220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99619, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 101140, 16, 16, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 98668, 11, 671, 27084, 12, 2958, 25, 220, 19, 11, 671, 4584, 12, 2958, 25, 220, 20, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:04:23 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.10, #running-req: 5, #queue-req: 4, \r\n",,terminal_output +703,1177278,"TERMINAL",0,0,"[2026-01-05 16:04:24] Receive: obj=GenerateReqInput(validation_time=1.7288140952587128e-05, received_time=1767625463.992918, received_time_perf=2480348.665843321, rid='d992e3cba0d84bb98eb7d781cf987129', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 
11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100286, 25, 100933, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 99695, 20, 99618, 16, 21, 102340, 20, 100235, 100899, 24, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 99064, 23, 13, 100235, 22, 107578, 16, 11, 3949, 3009, 
76167, 28, 99590, 23, 15, 98729, 18, 13, 15, 99064, 23, 99446, 99590, 23, 11, 9276, 1131, 69, 118173, 19, 64, 16, 66, 102626, 22, 65, 101562, 19, 3065, 8315, 15, 12502, 22, 100928, 21, 102650, 20, 100632, 16, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104] ... 
[122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99619, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100096, 19, 111782, 18, 98965, 20, 101562, 18, 99446, 22, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 18, 13, 101411, 21, 100933, 16, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 23, 13, 18, 100104, 99698, 22, 110733, 11, 9276, 1131, 67, 104127, 2577, 22, 69, 103388, 65, 2577, 19, 64, 18, 64, 23, 100557, 20, 3065, 23, 101474, 16, 66, 99317, 580, 15, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 
220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99619, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 101140, 16, 16, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 98668, 11, 671, 27084, 12, 2958, 25, 220, 19, 11, 671, 4584, 12, 2958, 25, 220, 20, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +704,1177469,"src/preview/decorations.ts",150,2," -",typescript,selection_command +705,1177470,"src/preview/decorations.ts",150,2,"",typescript,content +706,1177538,"TERMINAL",0,0,"[2026-01-05 16:04:24 TP0] Prefill batch, #new-seq: 2, #new-token: 8192, #cached-token: 8787, token usage: 0.11, #running-req: 5, #queue-req: 5, \r\n[2026-01-05 16:04:24] Receive: obj=GenerateReqInput(validation_time=1.7628073692321777e-05, received_time=1767625464.2848723, received_time_perf=2480348.957797431, 
rid='69850d292de2473285119ab8a17e9fac', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 
14, 5057, 5179, 21239, 760, 10918, 481, 77, 364, 99618, 11, 99695, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 733, 5656, 4859, 284, 311, 51397, 1851, 3812, 1394, 12389, 6187, 317, 262, 220, 103319, 88166, 262, 220, 103302, 7472, 442, 8277, 3410, 9750, 421, 5656, 2309, 374, 518, 476, 1283, 279, 8127, 198, 262, 220, 102636, 7472, 442, 30530, 6119, 594, 7381, 9750, 5333, 4933, 19828, 1467, 518, 14, 10689, 8127, 2309, 198, 262, 220, 101411, 7472, 421, 320, 4208, 4859, 2079, 10222, 12464, 593, 341, 262, 220, 101478, 13056, 470, 5907, 262, 220, 102952, 7472, 456, 262, 220, 101840, 88166, 262, 220, 103093, 7472, 733, 1509, 284, 501, 55008, 5337, 1056, 33030, 1234, 1006, 262, 220, 100096, 13056, 419, 12389, 2788, 345, 262, 220, 103437, 13056, 501, 55008, 24707, 60163, 4859, 11, 5656, 4859, 340, 262, 220, 102650, 7472, 1439, 262, 220, 103388, 88166, 262, 220, 103498, 7472, 470, 508, 1203, 935, 262, 220, 100899, 2760, 456, 262, 220, 102269, 1572, 262, 220, 102114, 2760, 1554, 262, 220, 100928, 9356, 353, 4248, 421, 279, 1482, 1917, 594, 2309, 374, 3143, 279, 2661, 8127, 2309, 624, 262, 220, 102626, 9356, 353, 12193, 311, 8252, 421, 582, 1184, 264, 32619, 20396, 624, 262, 220, 99695, 9356, 735, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100772, 25, 100772, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 23, 99698, 15, 99367, 121570, 101478, 22, 99695, 16, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101175, 17, 13, 19, 99951, 122406, 16, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98360, 22, 13, 99457, 107271, 101655, 21, 11, 9276, 1131, 102340, 66, 98886, 69, 15, 65, 100557, 8901, 19, 67, 102088, 65, 100632, 18, 101474, 16, 66, 24, 100702, 99951, 24, 16, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16] ... 
[122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99619, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 101140, 16, 16, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 98668, 11, 671, 27084, 12, 2958, 25, 220, 19, 11, 671, 4584, 12, 2958, 25, 220, 20, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99619, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102807, 22, 111508, 101474, 22, 101729, 24, 111659, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 18, 13, 101804, 21, 101917, 24, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 23, 13, 21, 100104, 101474, 19, 24, 101478, 11, 9276, 1131, 18, 10793, 23, 67, 22, 69, 100067, 21, 69, 100590, 24, 21, 66, 23, 68, 99317, 291, 117239, 68, 19, 69, 123564, 67, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 
220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +707,1177834,"TERMINAL",0,0,"[2026-01-05 16:04:24] Receive: obj=GenerateReqInput(validation_time=1.8524937331676483e-05, received_time=1767625464.585279, received_time_perf=2480349.2582043, rid='1e55d3cd357943c6b568be0d8e8cad64', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 
382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100772, 25, 100772, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 20, 100928, 23, 101175, 124618, 104550, 99916, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101175, 17, 13, 21, 100067, 18, 100067, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 98360, 22, 13, 100632, 18, 125255, 21, 102269, 11, 9276, 1131, 20, 99082, 100539, 69, 21, 68, 24, 99243, 24, 102088, 68, 23, 64, 16, 69, 
21, 101804, 65, 100067, 8901, 114365, 99419, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916] ... 
[108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 31193, 481, 72, 364, 22, 11, 22, 66, 5661, 262, 442, 6119, 4344, 990, 30530, 6119, 14901, 14901, 6, 82, 5798, 3419, 3638, 7986, 6, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 1009, 8250, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 16, 11, 99419, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 1572, 257, 220, 18, 197, 1747, 257, 220, 19, 197, 353, 16428, 7986, 369, 12959, 41095, 3941, 3100, 3446, 838, 19740, 624, 257, 220, 20, 197, 735, 257, 220, 21, 59028, 733, 86102, 284, 341, 257, 220, 22, 2760, 442, 6119, 4344, 990, 30530, 6119, 594, 5798, 3419, 3638, 7986, 198, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 262, 220, 99419, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99590, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 17, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 23, 100928, 22, 11, 3950, 10426, 25, 220, 15, 13, 98965, 11, 671, 27084, 12, 2958, 25, 220, 20, 11, 671, 4584, 12, 2958, 25, 220, 20, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99590, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102269, 17, 99695, 22, 100632, 24, 126173, 124898, 22, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 19, 13, 99869, 19, 23, 102650, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 23, 13, 101804, 22, 102626, 22, 19, 100557, 11, 9276, 1131, 21, 101663, 99200, 67, 100104, 17, 450, 99590, 22, 18, 99869, 20, 98965, 24, 370, 23, 64, 99419, 68, 24, 22130, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 
16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:04:24 TP0] Prefill batch, #new-seq: 1, #new-token: 3944, #cached-token: 0, token usage: 0.13, #running-req: 6, #queue-req: 6, \r\n",,terminal_output +708,1178027,"TERMINAL",0,0,"[2026-01-05 16:04:24 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 616, token usage: 0.13, #running-req: 7, #queue-req: 6, \r\n",,terminal_output +709,1178292,"src/preview/decorations.ts",150,0,":",typescript,content 
+710,1178293,"src/preview/decorations.ts",151,0,"",typescript,selection_keyboard +711,1178311,"TERMINAL",0,0,"[2026-01-05 16:04:25 TP0] Prefill batch, #new-seq: 2, #new-token: 8192, #cached-token: 8807, token usage: 0.15, #running-req: 7, #queue-req: 5, \r\n",,terminal_output +712,1178596,"TERMINAL",0,0,"[2026-01-05 16:04:25] Receive: obj=GenerateReqInput(validation_time=1.694587990641594e-05, received_time=1767625465.3237166, received_time_perf=2480349.996641925, rid='2bbf51cbbe6b4c1593f1bba643d49971', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 
82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 14, 27669, 36761, 21239, 760, 10918, 481, 77, 364, 16, 11, 98965, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 21864, 314, 5586, 11, 56390, 335, 504, 2756, 9239, 1010, 257, 220, 18, 1572, 257, 220, 19, 197, 1747, 257, 220, 20, 197, 353, 5714, 315, 279, 3974, 3735, 16216, 624, 257, 220, 21, 197, 735, 257, 220, 22, 59028, 943, 17231, 36761, 2077, 284, 364, 10325, 6, 760, 364, 80672, 6, 760, 845, 280, 257, 220, 23, 1572, 257, 220, 24, 197, 1747, 262, 220, 98668, 197, 353, 6928, 264, 3974, 3735, 13220, 369, 279, 15268, 1917, 624, 262, 220, 98965, 197, 353, 12193, 979, 15010, 374, 10730, 323, 47286, 646, 944, 387, 6839, 624, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100772, 25, 100614, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 101130, 24, 23, 102650, 105818, 99241, 18, 19, 101411, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101175, 24, 13, 100759, 100539, 18, 19, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99146, 18, 13, 23, 100096, 24, 101252, 20, 100809, 11, 9276, 1131, 631, 15, 99367, 118285, 68, 104327, 67, 19, 66, 99082, 13225, 122876, 99619, 65, 24, 66, 99419, 67, 120580, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11] ... 
[2505, 78775, 3009, 28, 16, 13, 102284, 17, 101474, 18, 22, 100702, 124047, 21, 100933, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 19, 13, 101729, 20, 99951, 24, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 24, 13, 99446, 23, 119621, 18, 11, 9276, 1131, 16, 68, 101130, 67, 18, 4385, 100235, 22, 24, 102088, 66, 21, 65, 20, 101840, 1371, 15, 67, 23, 68, 23, 34285, 102636, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 220, 17, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 
151350, 151351, 198, 73022, 45937, 198, 31193, 481, 72, 364, 22, 11, 22, 66, 5661, 262, 442, 6119, 4344, 25, 990, 30530, 6119, 14901, 14901, 6, 82, 5798, 3419, 3638, 7986, 6, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 1009, 8250, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 16, 11, 99419, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 1572, 257, 220, 18, 197, 1747, 257, 220, 19, 197, 353, 16428, 7986, 369, 12959, 41095, 3941, 3100, 3446, 838, 19740, 624, 257, 220, 20, 197, 735, 257, 220, 21, 59028, 733, 86102, 284, 341, 257, 220, 22, 2760, 442, 6119, 4344, 25, 990, 30530, 6119, 594, 5798, 3419, 3638, 7986, 198, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 262, 220, 99419, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99446, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 17, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 23, 99695, 22, 11, 3950, 10426, 25, 220, 15, 13, 99082, 11, 671, 27084, 12, 2958, 25, 220, 22, 11, 671, 4584, 12, 2958, 25, 220, 20, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +713,1178753,"TERMINAL",0,0,"[2026-01-05 16:04:25 TP0] Prefill batch, #new-seq: 1, #new-token: 3691, #cached-token: 0, token usage: 0.16, #running-req: 8, #queue-req: 6, \r\n",,terminal_output +714,1178825,"TERMINAL",0,0,"[2026-01-05 16:04:25 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.16, #running-req: 9, #queue-req: 5, \r\n",,terminal_output +715,1179103,"TERMINAL",0,0,"[2026-01-05 16:04:25 TP0] Prefill 
batch, #new-seq: 1, #new-token: 6444, #cached-token: 0, token usage: 0.18, #running-req: 9, #queue-req: 5, \r\n",,terminal_output +716,1179358,"src/preview/decorations.ts",150,0,"",typescript,selection_command +717,1179382,"TERMINAL",0,0,"[2026-01-05 16:04:26 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 610, token usage: 0.19, #running-req: 10, #queue-req: 4, \r\n",,terminal_output +718,1179669,"TERMINAL",0,0,"[2026-01-05 16:04:26] Receive: obj=GenerateReqInput(validation_time=1.7925165593624115e-05, received_time=1767625466.3876247, received_time_perf=2480351.06054991, rid='a8f744f5368d402391b7491690bd8b66', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 
32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 198, 73022, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 1572, 257, 220, 18, 197, 1747, 257, 220, 19, 197, 353, 16428, 7986, 369, 12959, 41095, 3941, 3100, 3446, 838, 19740, 624, 257, 220, 20, 197, 735, 257, 220, 21, 59028, 733, 86102, 284, 341, 257, 220, 22, 2760, 442, 6119, 4344, 481, 990, 30530, 6119, 594, 5798, 3419, 3638, 7986, 198, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 262, 220, 99419, 1572, 262, 220, 99243, 2760, 442, 15807, 6168, 481, 990, 6440, 9083, 7986, 369, 28040, 198, 262, 220, 98729, 2760, 8822, 2512, 25, 341, 262, 220, 98360, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 54205, 1951, 3299, 262, 220, 99146, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 11711, 3299, 262, 220, 99241, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 22159, 3299, 262, 220, 99619, 2760, 1153, 262, 220, 99590, 1572, 262, 220, 99446, 2760, 442, 5586, 18872, 58068, 198, 262, 220, 99916, 2760, 15010, 25, 501, 55008, 69337, 1636, 492, 36143, 13, 51726, 19539, 3299, 262, 220, 99951, 2760, 1034, 16819, 25, 501, 55008, 69337, 1636, 492, 1318, 3939, 54205, 1951, 3299, 262, 220, 99869, 2760, 8127, 9855, 25, 501, 55008, 69337, 1636, 492, 8866, 14533, 54205, 1951, 3299, 262, 220, 100104, 197, 2440, 262, 220, 99064, 1572, 262, 220, 100557, 197, 1747, 262, 220, 101175, 197, 353, 22667, 315, 61139, 20187, 4494, 311, 5648, 4938, 36505, 323, 28248, 4671, 624, 262, 220, 100702, 197, 353, 4290, 3055, 11, 68244, 369, 678, 55770, 624, 262, 220, 101135, 197, 735, 262, 220, 100235, 59028, 536, 63561, 10546, 341, 262, 220, 100632, 2760, 869, 4494, 25, 5027, 4947, 11, 55008, 1979, 9406, 19394, 929, 29, 284, 501, 5027, 543, 262, 220, 101140, 1572, 262, 220, 100919, 2760] ... 
[55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 1572, 257, 220, 18, 197, 1747, 257, 220, 19, 197, 353, 16428, 7986, 369, 12959, 41095, 3941, 3100, 3446, 838, 19740, 624, 257, 220, 20, 197, 735, 257, 220, 21, 59028, 733, 86102, 284, 341, 257, 220, 22, 2760, 442, 6119, 4344, 25, 990, 30530, 6119, 594, 5798, 3419, 3638, 7986, 198, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 262, 220, 99419, 1572, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99446, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 17, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 23, 99695, 22, 11, 3950, 10426, 25, 220, 15, 13, 99082, 11, 671, 27084, 12, 2958, 25, 220, 22, 11, 671, 4584, 12, 2958, 25, 220, 20, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99446, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 103093, 19, 101729, 22, 24, 100067, 21, 102340, 102573, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 20, 13, 18, 99619, 22, 99317, 21, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 24, 13, 100809, 21, 21, 102340, 24, 99446, 11, 9276, 1131, 17, 6066, 69, 102624, 7221, 1371, 21, 65, 19, 66, 121743, 18, 69, 16, 65, 4645, 21, 102088, 67, 19, 100809, 103437, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 
220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99916, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 21, 98668, 11, 3950, 10426, 25, 220, 15, 13, 98729, 11, 671, 27084, 12, 2958, 25, 220, 98668, 11, 671, 4584, 12, 2958, 25, 220, 19, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:04:26 TP0] Prefill batch, #new-seq: 2, #new-token: 8192, #cached-token: 8801, token usage: 0.21, #running-req: 10, #queue-req: 4, \r\n",,terminal_output +719,1180021,"TERMINAL",0,0,"[2026-01-05 16:04:26 TP0] Prefill batch, #new-seq: 1, #new-token: 4831, #cached-token: 0, token usage: 0.22, #running-req: 11, #queue-req: 4, \r\n",,terminal_output +720,1180230,"TERMINAL",0,0,"[2026-01-05 16:04:27 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 616, token 
usage: 0.22, #running-req: 12, #queue-req: 3, \r\n",,terminal_output +721,1180646,"TERMINAL",0,0,"[2026-01-05 16:04:27 TP0] Prefill batch, #new-seq: 1, #new-token: 6506, #cached-token: 0, token usage: 0.24, #running-req: 12, #queue-req: 3, \r\n",,terminal_output +722,1180800,"TERMINAL",0,0,"[2026-01-05 16:04:27 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 594, token usage: 0.26, #running-req: 13, #queue-req: 2, \r\n",,terminal_output +723,1180859,"src/preview/decorations.ts",201,0,"",typescript,selection_command +724,1181107,"src/preview/decorations.ts",222,0,"",typescript,selection_command +725,1181126,"src/preview/decorations.ts",301,0,"",typescript,selection_command +726,1181129,"TERMINAL",0,0,"[2026-01-05 16:04:27 TP0] Prefill batch, #new-seq: 1, #new-token: 6527, #cached-token: 0, token usage: 0.27, #running-req: 13, #queue-req: 2, \r\n[2026-01-05 16:04:27] Receive: obj=GenerateReqInput(validation_time=1.606810837984085e-05, received_time=1767625467.887396, received_time_perf=2480352.560321199, rid='a442a1a622254deca2e0636336f48e31', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 
50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100772, 25, 98965, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 20, 100096, 114495, 22, 99869, 20, 101723, 122406, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 100702, 16, 13, 99916, 17, 22, 100899, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99146, 20, 13, 24, 100235, 106748, 19, 100557, 11, 9276, 1131, 102269, 5305, 101135, 24, 20, 68, 17, 8937, 100461, 19, 100104, 9787, 291, 22, 64, 101917, 580, 104127, 89998, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18] ... 
[11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99916, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 21, 98668, 11, 3950, 10426, 25, 220, 15, 13, 98729, 11, 671, 27084, 12, 2958, 25, 220, 98668, 11, 671, 4584, 12, 2958, 25, 220, 19, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99916, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102626, 17, 20, 99317, 101130, 24, 100632, 17, 19, 98965, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 21, 13, 100919, 22, 21, 99590, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 100235, 16, 13, 15, 99618, 20, 101474, 104327, 11, 9276, 1131, 64, 23, 69, 22, 101723, 69, 20, 100632, 23, 67, 99698, 17, 101294, 16, 65, 22, 101474, 121416, 15, 8937, 23, 65, 101478, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 
11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +727,1181164,"src/preview/decorations.ts",358,0,"",typescript,selection_command +728,1181202,"src/preview/decorations.ts",375,0,"",typescript,selection_command +729,1181227,"src/preview/decorations.ts",396,0,"",typescript,selection_command +730,1181265,"src/preview/decorations.ts",476,0,"",typescript,selection_command +731,1181308,"src/preview/decorations.ts",548,0,"",typescript,selection_command +732,1181333,"src/preview/decorations.ts",598,0,"",typescript,selection_command +733,1181349,"TERMINAL",0,0,"[2026-01-05 
16:04:28 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 608, token usage: 0.29, #running-req: 14, #queue-req: 2, \r\n",,terminal_output +734,1181361,"src/preview/decorations.ts",600,0,"",typescript,selection_command +735,1181416,"src/preview/decorations.ts",620,0,"",typescript,selection_command +736,1181640,"TERMINAL",0,0,"[2026-01-05 16:04:28 TP0] Prefill batch, #new-seq: 1, #new-token: 6053, #cached-token: 0, token usage: 0.31, #running-req: 14, #queue-req: 2, \r\n[2026-01-05 16:04:28] Receive: obj=GenerateReqInput(validation_time=1.9622035324573517e-05, received_time=1767625468.3917484, received_time_perf=2480353.064673517, rid='a2e9d2d0130f4bea915b311eb95776c9', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 
3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100772, 25, 98965, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 20, 100096, 114495, 22, 99869, 20, 101723, 122406, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 100702, 16, 13, 99916, 17, 22, 100899, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99146, 20, 13, 24, 100235, 106748, 19, 100557, 11, 9276, 1131, 102269, 5305, 101135, 24, 20, 68, 17, 8937, 100461, 19, 100104, 9787, 291, 22, 64, 101917, 580, 104127, 89998, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18] ... 
[100002, 220, 99317, 25, 100590, 25, 99951, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 101411, 17, 22, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 99951, 11, 671, 27084, 12, 2958, 25, 220, 99366, 11, 671, 4584, 12, 2958, 25, 220, 17, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99951, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 99618, 21, 23, 98668, 23, 101140, 101663, 19, 100562, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 22, 13, 101252, 22, 101294, 21, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 100235, 17, 13, 20, 99618, 18, 99146, 99887, 11, 9276, 1131, 64, 101723, 17, 64, 16, 64, 21, 99241, 99446, 19, 450, 924, 17, 68, 100539, 18, 21, 100702, 21, 69, 100933, 68, 100557, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 22, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99869, 28964, 15, 60, 42344, 
483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 99618, 23, 11, 3950, 10426, 25, 220, 15, 13, 100104, 11, 671, 27084, 12, 2958, 25, 220, 99367, 11, 671, 4584, 12, 2958, 25, 220, 17, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 23, 11, 99869, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 262, 220, 99419, 1572, 262, 220, 99243, 2760, 442, 15807, 6168, 481, 990, 6440, 9083, 7986, 369, 28040, 198, 262, 220, 98729, 2760, 8822, 2512, 25, 341, 262, 220, 98360, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 54205, 1951, 3299, 262, 220, 99146, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 11711, 3299, 262, 220, 99241, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 22159, 3299, 262, 220, 99619, 2760, 1153, 262, 220, 99590, 1572, 262, 220, 99446, 2760, 442, 5586, 18872, 58068, 198, 262, 220, 99916, 2760, 15010, 25, 501, 55008, 69337, 1636, 492, 36143, 13, 51726, 19539, 3299, 262, 220, 99951, 2760, 1034, 16819, 25, 501, 55008, 69337, 1636, 492, 1318, 3939, 54205, 1951, 3299, 262, 220, 99869, 2760, 8127, 9855, 25, 501, 55008, 69337, 1636, 492, 8866, 14533, 54205, 1951, 3299, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +737,1181733,"src/preview/decorations.ts",680,0,"",typescript,selection_command +738,1181734,"TERMINAL",0,0,"[2026-01-05 16:04:28] Receive: obj=GenerateReqInput(validation_time=1.6491860151290894e-05, received_time=1767625468.445901, received_time_perf=2480353.118826097, rid='72ac8201a8e44c27b4b7d4d4511eb9f7', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 
2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100772, 25, 98965, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 20, 100096, 114495, 22, 99869, 20, 101723, 122406, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 
17, 20, 100702, 16, 13, 99916, 17, 22, 100899, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99146, 20, 13, 24, 100235, 106748, 19, 100557, 11, 9276, 1131, 102269, 5305, 101135, 24, 20, 68, 17, 8937, 100461, 19, 100104, 9787, 291, 22, 64, 101917, 580, 104127, 89998, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18] ... 
[100002, 220, 99317, 25, 100590, 25, 99951, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 101411, 17, 22, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 99951, 11, 671, 27084, 12, 2958, 25, 220, 99366, 11, 671, 4584, 12, 2958, 25, 220, 17, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99951, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 99618, 21, 23, 98668, 23, 101140, 101663, 19, 100562, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 22, 13, 101252, 22, 101294, 21, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 100235, 17, 13, 20, 99618, 18, 99146, 99887, 11, 9276, 1131, 64, 101723, 17, 64, 16, 64, 21, 99241, 99446, 19, 450, 924, 17, 68, 100539, 18, 21, 100702, 21, 69, 100933, 68, 100557, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 22, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99869, 28964, 15, 60, 42344, 
483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 99618, 23, 11, 3950, 10426, 25, 220, 15, 13, 100104, 11, 671, 27084, 12, 2958, 25, 220, 99367, 11, 671, 4584, 12, 2958, 25, 220, 17, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 23, 11, 99869, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 262, 220, 99419, 1572, 262, 220, 99243, 2760, 442, 15807, 6168, 481, 990, 6440, 9083, 7986, 369, 28040, 198, 262, 220, 98729, 2760, 8822, 2512, 25, 341, 262, 220, 98360, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 54205, 1951, 3299, 262, 220, 99146, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 11711, 3299, 262, 220, 99241, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 22159, 3299, 262, 220, 99619, 2760, 1153, 262, 220, 99590, 1572, 262, 220, 99446, 2760, 442, 5586, 18872, 58068, 198, 262, 220, 99916, 2760, 15010, 25, 501, 55008, 69337, 1636, 492, 36143, 13, 51726, 19539, 3299, 262, 220, 99951, 2760, 1034, 16819, 25, 501, 55008, 69337, 1636, 492, 1318, 3939, 54205, 1951, 3299, 262, 220, 99869, 2760, 8127, 9855, 25, 501, 55008, 69337, 1636, 492, 8866, 14533, 54205, 1951, 3299, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +739,1181895,"src/preview/decorations.ts",620,0,"",typescript,selection_command +740,1181895,"TERMINAL",0,0,"[2026-01-05 16:04:28 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 607, token usage: 0.32, #running-req: 15, #queue-req: 3, \r\n",,terminal_output +741,1182136,"TERMINAL",0,0,"[2026-01-05 16:04:28] Receive: 
obj=GenerateReqInput(validation_time=1.6496051102876663e-05, received_time=1767625468.7627804, received_time_perf=2480353.435705738, rid='1ad04a779eb041c5bb0c0289b775c014', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 
115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100772, 25, 98965, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 20, 100096, 114495, 22, 99869, 20, 101723, 122406, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 100702, 16, 13, 99916, 17, 22, 100899, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99146, 20, 13, 24, 100235, 106748, 19, 100557, 11, 9276, 1131, 102269, 5305, 101135, 24, 20, 68, 17, 8937, 100461, 19, 100104, 9787, 291, 22, 64, 101917, 580, 104127, 89998, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18] ... 
[123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99869, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 21, 101474, 122541, 15, 99082, 117305, 100562, 24, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 23, 13, 19, 100461, 100067, 16, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 100235, 18, 13, 112891, 23, 99916, 100614, 22, 11, 9276, 1131, 102650, 580, 23, 98360, 16, 64, 23, 68, 101723, 66, 99951, 65, 19, 65, 22, 67, 19, 67, 100461, 16, 16, 3065, 24, 69, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 
11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +742,1182276,"TERMINAL",0,0,"[2026-01-05 16:04:28 TP0] Prefill batch, #new-seq: 1, #new-token: 5974, #cached-token: 0, token usage: 0.34, #running-req: 15, #queue-req: 4, \r\n",,terminal_output +743,1182277,"src/preview/decorations.ts",620,1," ",typescript,selection_command +744,1182365,"TERMINAL",0,0,"[2026-01-05 16:04:29] Receive: obj=GenerateReqInput(validation_time=1.701992005109787e-05, received_time=1767625469.0628467, received_time_perf=2480353.735771911, rid='9bb26660843e45b3bdad459fbb8c186a', 
http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100772, 25, 98965, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 20, 100096, 114495, 22, 99869, 20, 
101723, 122406, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 100702, 16, 13, 99916, 17, 22, 100899, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99146, 20, 13, 24, 100235, 106748, 19, 100557, 11, 9276, 1131, 102269, 5305, 101135, 24, 20, 68, 17, 8937, 100461, 19, 100104, 9787, 291, 22, 64, 101917, 580, 104127, 89998, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18] ... 
[101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99869, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 99618, 22, 11, 3950, 10426, 25, 220, 15, 13, 101175, 11, 671, 27084, 12, 2958, 25, 220, 99082, 11, 671, 4584, 12, 2958, 25, 220, 18, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99869, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 21, 101474, 99618, 20, 104550, 99869, 22, 101478, 21, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 23, 13, 102269, 17, 22, 99695, 19, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 100235, 18, 13, 19, 100235, 22, 100002, 22, 100919, 11, 9276, 1131, 16, 329, 100590, 64, 102114, 24, 3065, 100590, 16, 66, 20, 6066, 15, 66, 15, 99869, 24, 65, 22, 100899, 66, 15, 99367, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 
18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +745,1182426,"src/preview/decorations.ts",620,2," -",typescript,selection_command +746,1182426,"TERMINAL",0,0,"[2026-01-05 16:04:29 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.35, #running-req: 16, #queue-req: 4, \r\n",,terminal_output +747,1182590,"src/preview/decorations.ts",620,2,"",typescript,content +748,1182634,"TERMINAL",0,0,"[2026-01-05 16:04:29] Receive: obj=GenerateReqInput(validation_time=1.6462989151477814e-05, received_time=1767625469.363332, 
received_time_perf=2480354.03625728, rid='3cd61527b6a24892b89f3299869a3cb6', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 1572, 257, 220, 18, 
197, 1747, 257, 220, 19, 197, 353, 16428, 7986, 369, 12959, 41095, 3941, 3100, 3446, 838, 19740, 624, 257, 220, 20, 197, 735, 257, 220, 21, 59028, 733, 86102, 284, 341, 257, 220, 22, 2760, 442, 6119, 4344, 481, 990, 30530, 6119, 594, 5798, 3419, 3638, 7986, 198, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100441, 25, 101723, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 125603, 103306, 99200, 18, 101840, 18, 15, 100067, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101961, 19, 13, 22, 100557, 101562, 18, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99064, 24, 13, 99698, 19, 100919, 24, 100919, 21, 11, 9276, 1131, 101655, 68, 17, 65, 100919, 68, 19, 68, 17, 68, 19, 101562, 24, 65, 99200, 17, 101729, 65, 22, 64, 101917, 18, 23, 64, 100614, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632] ... 
[11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99869, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 102573, 22, 19, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 101135, 11, 671, 27084, 12, 2958, 25, 220, 99082, 11, 671, 4584, 12, 2958, 25, 220, 19, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 100104, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100096, 16, 100809, 17, 98503, 20, 110610, 100928, 22, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 24, 13, 100539, 17, 23, 101562, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 100235, 18, 13, 22, 100235, 102114, 16, 24, 98965, 11, 9276, 1131, 24, 6066, 99916, 21, 99618, 23, 102088, 68, 100461, 65, 18, 8937, 329, 100461, 24, 69, 6066, 23, 66, 122541, 64, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 
100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 100104, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 20, 102487, 11, 3950, 10426, 25, 220, 15, 13, 100235, 11, 671, 27084, 12, 2958, 25, 220, 99317, 11, 671, 4584, 12, 2958, 25, 220, 19, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +749,1182879,"TERMINAL",0,0,"[2026-01-05 16:04:29] Receive: obj=GenerateReqInput(validation_time=1.6320031136274338e-05, received_time=1767625469.455209, received_time_perf=2480354.128134135, rid='0ff35a81ee184fa89873b1ba360acb3f', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 
10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 257, 220, 16, 21864, 353, 438, 55008, 504, 364, 11557, 1851, 1010, 257, 220, 17, 1572, 257, 220, 18, 197, 1747, 257, 220, 19, 197, 353, 16428, 7986, 369, 12959, 41095, 3941, 3100, 3446, 838, 19740, 624, 257, 220, 20, 197, 735, 257, 220, 21, 59028, 733, 86102, 284, 341, 257, 220, 22, 2760, 442, 6119, 4344, 481, 990, 30530, 6119, 594, 5798, 3419, 3638, 7986, 198, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 
13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100441, 25, 101723, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 125603, 103306, 99200, 18, 101840, 18, 15, 100067, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101961, 19, 13, 22, 100557, 101562, 18, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 99064, 24, 13, 99698, 19, 100919, 24, 100919, 21, 11, 9276, 1131, 101655, 68, 17, 65, 100919, 68, 19, 68, 17, 68, 19, 101562, 24, 65, 99200, 17, 101729, 65, 22, 64, 101917, 18, 23, 64, 100614, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632] ... 
[11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99869, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 102573, 22, 19, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 101135, 11, 671, 27084, 12, 2958, 25, 220, 99082, 11, 671, 4584, 12, 2958, 25, 220, 19, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 100104, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 100096, 16, 100809, 17, 98503, 20, 110610, 100928, 22, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 24, 13, 100539, 17, 23, 101562, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 100235, 18, 13, 22, 100235, 102114, 16, 24, 98965, 11, 9276, 1131, 24, 6066, 99916, 21, 99618, 23, 102088, 68, 100461, 65, 18, 8937, 329, 100461, 24, 69, 6066, 23, 66, 122541, 64, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 
100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 100104, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 20, 102487, 11, 3950, 10426, 25, 220, 15, 13, 100235, 11, 671, 27084, 12, 2958, 25, 220, 99317, 11, 671, 4584, 12, 2958, 25, 220, 19, 345, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:04:29 TP0] Prefill batch, #new-seq: 2, #new-token: 8192, #cached-token: 8787, token usage: 0.37, #running-req: 16, #queue-req: 5, \r\n",,terminal_output +750,1183007,"src/preview/decorations.ts",620,0,":",typescript,content +751,1183007,"src/preview/decorations.ts",621,0,"",typescript,selection_keyboard +752,1183376,"TERMINAL",0,0,"[2026-01-05 16:04:29] Receive: obj=GenerateReqInput(validation_time=1.6357749700546265e-05, received_time=1767625469.7563312, received_time_perf=2480354.429256748, 
rid='23831f20b2f34c83b5f607934ddd911e', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 
22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 99367, 11, 101135, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 262, 220, 99419, 1572, 262, 220, 99243, 2760, 442, 15807, 6168, 481, 990, 6440, 9083, 7986, 369, 28040, 198, 262, 220, 98729, 2760, 8822, 2512, 25, 341, 262, 220, 98360, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 54205, 1951, 3299, 262, 220, 99146, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 11711, 3299, 262, 220, 99241, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 22159, 3299, 262, 220, 99619, 2760, 1153, 262, 220, 99590, 1572, 262, 220, 99446, 2760, 442, 5586, 18872, 58068, 198, 262, 220, 99916, 2760, 15010, 25, 501, 55008, 69337, 1636, 492, 36143, 13, 51726, 19539, 3299, 262, 220, 99951, 2760, 1034, 16819, 25, 501, 55008, 69337, 1636, 492, 1318, 3939, 54205, 1951, 3299, 262, 220, 99869, 2760, 8127, 9855, 25, 501, 55008, 69337, 1636, 492, 8866, 14533, 54205, 1951, 3299, 262, 220, 100104, 197, 2440, 262, 220, 99064, 1572, 262, 220, 100557, 197, 1747, 262, 220, 101175, 197, 353, 22667, 315, 61139, 20187, 4494, 311, 5648, 4938, 36505, 323, 28248, 4671, 624, 262, 220, 100702, 197, 353, 4290, 3055, 11, 68244, 369, 678, 55770, 624, 262, 220, 101135, 197, 735, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 100614, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 24, 99200, 99618, 23, 20, 101478, 99698, 18, 100919, 24, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101723, 24, 13, 21, 99698, 19, 100933, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 18, 19, 13, 18, 99366, 101140, 19, 99698, 16, 11, 9276, 1131, 17, 66, 24, 101723, 22, 66, 124484, 100614, 19, 102486, 17, 93322, 100096, 68, 104340, 69, 19, 1999, 65, 15, 64, 16, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11] ... 
[31193, 481, 72, 364, 99243, 11, 99243, 66, 5661, 262, 442, 15807, 6168, 990, 6440, 9083, 7986, 369, 28040, 6, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 1009, 8250, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 23, 11, 99869, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 262, 220, 99419, 1572, 262, 220, 99243, 2760, 442, 15807, 6168, 990, 6440, 9083, 7986, 369, 28040, 198, 262, 220, 98729, 2760, 8822, 2512, 25, 341, 262, 220, 98360, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 54205, 1951, 3299, 262, 220, 99146, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 11711, 3299, 262, 220, 99241, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 22159, 3299, 262, 220, 99619, 2760, 1153, 262, 220, 99590, 1572, 262, 220, 99446, 2760, 442, 5586, 18872, 58068, 198, 262, 220, 99916, 2760, 15010, 25, 501, 55008, 69337, 1636, 492, 36143, 13, 51726, 19539, 3299, 262, 220, 99951, 2760, 1034, 16819, 25, 501, 55008, 69337, 1636, 492, 1318, 3939, 54205, 1951, 3299, 262, 220, 99869, 2760, 8127, 9855, 25, 501, 55008, 69337, 1636, 492, 8866, 14533, 54205, 1951, 3299, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 100104, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 21, 101562, 100104, 23, 24, 99082, 122463, 100928, 16, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 24, 13, 100632, 18, 100702, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 100235, 19, 13, 100441, 21, 99446, 22, 99869, 11, 9276, 1131, 18, 4385, 21, 99082, 99951, 65, 21, 64, 99590, 23, 103825, 65, 103502, 69, 18, 100104, 101663, 21, 24, 64, 18, 7221, 21, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 
11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:04:29 TP0] Prefill batch, #new-seq: 4, #new-token: 8192, #cached-token: 41913, token usage: 0.38, #running-req: 17, #queue-req: 3, \r\n",,terminal_output +753,1183490,"TERMINAL",0,0,"[2026-01-05 16:04:30] Receive: obj=GenerateReqInput(validation_time=1.6520265489816666e-05, received_time=1767625470.0576787, received_time_perf=2480354.730603724, 
rid='099aa2c2395642f4b7617fe644a3623c', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 100614, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 
16, 13, 20, 100702, 101140, 21, 100002, 17, 24, 100899, 101411, 19, 21, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101723, 24, 13, 23, 103437, 100919, 18, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 18, 19, 13, 20, 101723, 123786, 18, 100928, 11, 9276, 1131, 99241, 21, 19, 69, 18, 98965, 99951, 68, 23, 99698, 68, 100562, 16, 64, 100772, 69, 101140, 13674, 21, 99698, 66, 24, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104] ... 
[198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 100104, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 21, 101175, 98503, 18, 114240, 21, 99951, 19, 100702, 23, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 24, 13, 100461, 20, 124618, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 100235, 19, 13, 109295, 119338, 112840, 11, 9276, 1131, 15, 542, 100235, 64, 104340, 2127, 126382, 3632, 23, 101663, 103388, 65, 16, 4645, 107110, 96652, 18, 69, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 
220, 117290, 20, 11, 220, 100759, 345, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 31193, 481, 72, 364, 99243, 11, 99243, 66, 5661, 262, 442, 15807, 6168, 25, 990, 6440, 9083, 7986, 369, 28040, 6, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 1009, 8250, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 23, 11, 99869, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 262, 220, 99419, 1572, 262, 220, 99243, 2760, 442, 15807, 6168, 25, 990, 6440, 9083, 7986, 369, 28040, 198, 262, 220, 98729, 2760, 8822, 2512, 25, 341, 262, 220, 98360, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 54205, 1951, 3299, 262, 220, 99146, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 11711, 3299, 262, 220, 99241, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 22159, 3299, 262, 220, 99619, 2760, 1153, 262, 220, 99590, 1572, 262, 220, 99446, 2760, 442, 5586, 18872, 58068, 198, 262, 220, 99916, 2760, 15010, 25, 501, 55008, 69337, 1636, 492, 36143, 13, 51726, 19539, 3299, 262, 220, 99951, 2760, 1034, 16819, 25, 501, 55008, 69337, 1636, 492, 1318, 3939, 54205, 1951, 3299, 262, 220, 99869, 2760, 8127, 9855, 25, 501, 55008, 69337, 1636, 492, 8866, 14533, 54205, 1951, 3299, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +754,1183491,"TERMINAL",0,0,"[2026-01-05 16:04:30 TP0] Prefill batch, #new-seq: 1, #new-token: 1446, #cached-token: 0, token usage: 0.39, #running-req: 20, #queue-req: 4, \r\n",,terminal_output +755,1183600,"TERMINAL",0,0,"[2026-01-05 16:04:30 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 584, token usage: 0.39, 
#running-req: 21, #queue-req: 3, \r\n",,terminal_output +756,1183665,"src/preview/decorations.ts",620,0,"",typescript,selection_command +757,1183914,"TERMINAL",0,0,"[2026-01-05 16:04:30 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 0, token usage: 0.41, #running-req: 21, #queue-req: 3, \r\n",,terminal_output +758,1184025,"TERMINAL",0,0,"[2026-01-05 16:04:30] Receive: obj=GenerateReqInput(validation_time=1.575099304318428e-05, received_time=1767625470.6944628, received_time_perf=2480355.367388082, rid='c9b17f475c4b4bbfb97451292ae074d7', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 
82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99146, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100928, 21, 99869, 19, 18, 98886, 99618, 20, 23, 101729, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 16, 13, 22, 98668, 22, 100809, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 21, 13, 100919, 18, 22, 99446, 123786, 11, 9276, 1131, 64, 99200, 22, 66, 98886, 66, 23, 101917, 65, 19, 65, 17, 64, 24, 99951, 22, 64, 110234, 126293, 15, 68, 21, 65, 15, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11] ... 
[72, 364, 99243, 11, 99243, 66, 5661, 262, 442, 15807, 6168, 25, 990, 6440, 9083, 7986, 369, 28040, 6, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 1009, 8250, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 23, 11, 99869, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 262, 220, 99419, 1572, 262, 220, 99243, 2760, 442, 15807, 6168, 25, 990, 6440, 9083, 7986, 369, 28040, 198, 262, 220, 98729, 2760, 8822, 2512, 25, 341, 262, 220, 98360, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 54205, 1951, 3299, 262, 220, 99146, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 11711, 3299, 262, 220, 99241, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 22159, 3299, 262, 220, 99619, 2760, 1153, 262, 220, 99590, 1572, 262, 220, 99446, 2760, 442, 5586, 18872, 58068, 198, 262, 220, 99916, 2760, 15010, 25, 501, 55008, 69337, 1636, 492, 36143, 13, 51726, 19539, 3299, 262, 220, 99951, 2760, 1034, 16819, 25, 501, 55008, 69337, 1636, 492, 1318, 3939, 54205, 1951, 3299, 262, 220, 99869, 2760, 8127, 9855, 25, 501, 55008, 69337, 1636, 492, 8866, 14533, 54205, 1951, 3299, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 100104, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 21, 100235, 102114, 19, 24, 100096, 100002, 19, 21, 99916, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 24, 13, 100899, 21, 18, 100557, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 100235, 19, 13, 19, 100104, 116265, 22, 100933, 11, 9276, 1131, 99619, 23, 100557, 69, 98360, 65, 17, 69, 101135, 66, 104127, 65, 20, 69, 99618, 22, 24, 101135, 40138, 24, 98965, 68, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 
17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n",,terminal_output +759,1184202,"TERMINAL",0,0,"[2026-01-05 16:04:30 TP0] Prefill batch, #new-seq: 3, #new-token: 8192, #cached-token: 17578, token usage: 0.43, #running-req: 21, #queue-req: 2, \r\n",,terminal_output +760,1184501,"TERMINAL",0,0,"[2026-01-05 16:04:31 TP0] Prefill batch, #new-seq: 1, #new-token: 8046, #cached-token: 0, token usage: 0.44, #running-req: 23, #queue-req: 
2, \r\n",,terminal_output +761,1184841,"TERMINAL",0,0,"[2026-01-05 16:04:31 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 594, token usage: 0.46, #running-req: 24, #queue-req: 1, \r\n",,terminal_output +762,1185071,"TERMINAL",0,0,"[2026-01-05 16:04:31 TP0] Prefill batch, #new-seq: 1, #new-token: 7849, #cached-token: 0, token usage: 0.48, #running-req: 24, #queue-req: 1, \r\n",,terminal_output +763,1185448,"TERMINAL",0,0,"[2026-01-05 16:04:32 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.50, #running-req: 25, #queue-req: 0, \r\n",,terminal_output +764,1185728,"TERMINAL",0,0,"[2026-01-05 16:04:32] Receive: obj=GenerateReqInput(validation_time=1.5758909285068512e-05, received_time=1767625472.4368727, received_time_perf=2480357.109798078, rid='dd8c2ab9f8ca49d9ab9adffd8402ca64', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 
9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 18, 11, 99619, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 18, 197, 1747, 257, 220, 19, 197, 353, 16428, 7986, 369, 12959, 41095, 3941, 3100, 3446, 838, 19740, 624, 257, 220, 20, 197, 735, 257, 220, 21, 59028, 733, 86102, 284, 341, 257, 220, 22, 2760, 442, 6119, 4344, 481, 990, 30530, 6119, 594, 5798, 3419, 3638, 7986, 198, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 262, 220, 99419, 1572, 262, 220, 99243, 2760, 442, 15807, 6168, 481, 990, 6440, 9083, 7986, 369, 28040, 198, 262, 220, 98729, 2760, 8822, 2512, 25, 341, 262, 220, 98360, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 54205, 1951, 3299, 262, 220, 99146, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 11711, 3299, 262, 220, 99241, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 22159, 3299, 262, 220, 99619, 2760, 1153, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99241, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 102573, 16, 19, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99241, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 15, 99446, 22, 101478, 100104, 23, 127031, 23, 101729, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 17, 13, 101135, 19, 100919, 23, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 22, 13, 15, 99419, 18, 99367, 101135] ... 
[220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99064, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 102340, 11, 671, 27084, 12, 2958, 25, 220, 99146, 11, 671, 4584, 12, 2958, 25, 220, 18, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99064, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 20, 100899, 100614, 24, 120911, 18, 99243, 19, 99869, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101655, 15, 13, 103093, 19, 101562, 17, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 100235, 20, 13, 100632, 22, 100919, 23, 100562, 17, 11, 9276, 1131, 66, 24, 65, 99419, 69, 19, 100899, 66, 19, 65, 19, 6066, 10793, 103205, 19, 124184, 103825, 5918, 100372, 19, 67, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 
220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:04:32 TP0] Prefill batch, #new-seq: 1, #new-token: 7848, #cached-token: 0, token usage: 0.51, #running-req: 25, #queue-req: 1, \r\n",,terminal_output +765,1186036,"TERMINAL",0,0,"[2026-01-05 16:04:32 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 616, token usage: 0.53, #running-req: 26, #queue-req: 0, \r\n",,terminal_output +766,1186311,"TERMINAL",0,0,"[2026-01-05 16:04:33 TP0] Prefill batch, #new-seq: 1, #new-token: 7791, #cached-token: 0, token usage: 0.55, #running-req: 26, #queue-req: 0, 
\r\n",,terminal_output +767,1187170,"TERMINAL",0,0,"[2026-01-05 16:04:33] Finish: obj=GenerateReqInput(validation_time=1.575099304318428e-05, received_time=1767625470.6944628, received_time_perf=2480355.367388082, rid='c9b17f475c4b4bbfb97451292ae074d7', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=1.6357749700546265e-05, received_time=1767625469.7563312, received_time_perf=2480354.429256748, rid='23831f20b2f34c83b5f607934ddd911e', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99146, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 100928, 21, 99869, 19, 18, 
98886, 99618, 20, 23, 101729, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 16, 13, 22, 98668, 22, 100809, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 21, 13, 100919, 18, 22, 99446, 123786, 11, 9276, 1131, 64, 99200, 22, 66, 98886, 66, 23, 101917, 65, 19, 65, 17, 64, 24, 99951, 22, 64, 110234, 126293, 15, 68, 21, 65, 15, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11] ... 
[72, 364, 99243, 11, 99243, 66, 5661, 262, 442, 15807, 6168, 25, 990, 6440, 9083, 7986, 369, 28040, 6, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 1009, 8250, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 23, 11, 99869, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 262, 220, 99419, 1572, 262, 220, 99243, 2760, 442, 15807, 6168, 25, 990, 6440, 9083, 7986, 369, 28040, 198, 262, 220, 98729, 2760, 8822, 2512, 25, 341, 262, 220, 98360, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 54205, 1951, 3299, 262, 220, 99146, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 11711, 3299, 262, 220, 99241, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 22159, 3299, 262, 220, 99619, 2760, 1153, 262, 220, 99590, 1572, 262, 220, 99446, 2760, 442, 5586, 18872, 58068, 198, 262, 220, 99916, 2760, 15010, 25, 501, 55008, 69337, 1636, 492, 36143, 13, 51726, 19539, 3299, 262, 220, 99951, 2760, 1034, 16819, 25, 501, 55008, 69337, 1636, 492, 1318, 3939, 54205, 1951, 3299, 262, 220, 99869, 2760, 8127, 9855, 25, 501, 55008, 69337, 1636, 492, 8866, 14533, 54205, 1951, 3299, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 100104, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 21, 100235, 102114, 19, 24, 100096, 100002, 19, 21, 99916, 20, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 24, 13, 100899, 21, 18, 100557, 17, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 100235, 19, 13, 19, 100104, 116265, 22, 100933, 11, 9276, 1131, 99619, 23, 100557, 69, 98360, 65, 17, 69, 101135, 66, 104127, 65, 20, 69, 99618, 22, 24, 101135, 40138, 24, 98965, 68, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 
17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\ncat -n /home/franz.srambical/crowd-pilot-extension/src/preview/decorations.ts | sed -n '25,35p'\n```"", 'output_ids': [198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 99446, 11, 100235, 79, 1248, 73022, 151336], 
'meta_info': {'id': 'c9b17f475c4b4bbfb97451292ae074d7', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 16636, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-1.2755313036905136e-05, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-1.6878174543380737, 4616, 'cat'), (-0.03308946266770363, 481, ' -'), (-2.0265558760002023e-06, 77, 'n'), (-8.940656698541716e-06, 608, ' /'), (-1.7881377516459906e-06, 5117, 'home'), (-2.3841855067985307e-07, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (-9.894321920000948e-06, 2899, '/c'), (-1.1920928244535389e-07, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (-1.1920928244535389e-07, 79888, '-extension'), (-0.002371001522988081, 13428, '/src'), (-0.00035601475974544883, 14, '/'), (-8.356221951544285e-05, 27082, 'preview'), (-0.0020292659755796194, 22490, '/de'), (-0.0027554186526685953, 6005, 'cor'), (-0.00032395837479270995, 804, 'ations'), (-5.006777428206988e-06, 21239, '.ts'), (-0.0016169581795111299, 760, ' |'), (-0.0008632985409349203, 10918, ' sed'), (-1.1920928244535389e-07, 481, ' -'), (-1.1920928244535389e-07, 77, 'n'), (-5.960462772236497e-07, 364, "" '""), (-0.4587922692298889, 99446, '25'), (-1.2755313036905136e-05, 11, ','), (-0.12087145447731018, 100235, '35'), (-1.1920928244535389e-07, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-1.549708758830093e-05, 73022, '```'), (-2.2172682292875834e-05, 151336, '<|user|>')], 'completion_tokens': 40, 'cached_tokens': 596, 'spec_accept_rate': 0.7777777777777778, 'spec_accept_length': 3.3333333333333335, 'spec_verify_ct': 12, 'spec_accept_token_num': 28, 'spec_draft_token_num': 36, 'e2e_latency': 3.2495713233947754, 'response_sent_to_client_ts': 1767625473.9440815}}\r\n",,terminal_output +768,1187350,"TERMINAL",0,0,"[2026-01-05 16:04:34] Finish: obj=GenerateReqInput(validation_time=1.5758909285068512e-05, received_time=1767625472.4368727, received_time_perf=2480357.109798078, rid='dd8c2ab9f8ca49d9ab9adffd8402ca64', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... 
""n: 8192, #cached-token: 0, token usage: 0.41, #running-req: 21, #queue-req: 3, \n[2026-01-05 16:04:30] Receive: obj=GenerateReqInput(validation_time=1.575099304318428e-05, received_time=1767625470.6944628, received_time_perf=2480355.367388082, rid='c9b17f475c4b4bbfb97451292ae074d7', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, \n... [truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 
5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 18, 11, 99619, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 257, 220, 18, 197, 1747, 257, 220, 19, 197, 353, 16428, 7986, 369, 12959, 41095, 3941, 3100, 3446, 838, 19740, 624, 257, 220, 20, 197, 735, 257, 220, 21, 59028, 733, 86102, 284, 341, 257, 220, 22, 2760, 442, 6119, 4344, 481, 990, 30530, 6119, 594, 5798, 3419, 3638, 7986, 198, 257, 220, 23, 2760, 35884, 25, 341, 257, 220, 24, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 8705, 3299, 262, 220, 98668, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 24004, 4941, 1178, 10686, 3299, 262, 220, 98965, 2760, 1153, 262, 220, 98886, 2760, 35747, 25, 341, 262, 220, 99366, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 8705, 3299, 262, 220, 99367, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 13481, 9406, 7030, 291, 1178, 10686, 3299, 262, 220, 99082, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 54205, 1951, 3299, 262, 220, 99317, 2760, 1153, 262, 220, 99419, 1572, 262, 220, 99243, 2760, 442, 15807, 6168, 481, 990, 6440, 9083, 7986, 369, 28040, 198, 262, 220, 98729, 2760, 8822, 2512, 25, 341, 262, 220, 98360, 7472, 39093, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 54205, 1951, 3299, 262, 220, 99146, 7472, 4004, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 11711, 3299, 262, 220, 99241, 7472, 3886, 25, 501, 55008, 69337, 1636, 492, 8866, 4548, 22159, 3299, 262, 220, 99619, 2760, 1153, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99241, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 102573, 16, 19, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99241, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 15, 99446, 22, 101478, 100104, 23, 127031, 23, 101729, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 17, 13, 101135, 19, 100919, 23, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 22, 13, 15, 99419, 18, 99367, 101135] ... 
[220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99064, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 23, 98729, 17, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 102340, 11, 671, 27084, 12, 2958, 25, 220, 99146, 11, 671, 4584, 12, 2958, 25, 220, 18, 11, 715, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99064, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 20, 100899, 100614, 24, 120911, 18, 99243, 19, 99869, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101655, 15, 13, 103093, 19, 101562, 17, 23, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 100235, 20, 13, 100632, 22, 100919, 23, 100562, 17, 11, 9276, 1131, 66, 24, 65, 99419, 69, 19, 100899, 66, 19, 65, 19, 6066, 10793, 103205, 19, 124184, 103825, 5918, 100372, 19, 67, 22, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 
220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\nsed -i '25,25c\\\n // Action-specific accents use VS Code theme colors' /home/franz.srambical/crowd-pilot-extension/src/preview/decorations.ts && cat -n /home/franz.srambical/crowd-pilot-extension/src/preview/decorations.ts | sed -n '8,28p'\n```"", 'output_ids': [198, 73022, 45937, 198, 31193, 481, 72, 364, 99446, 11, 99446, 66, 5661, 262, 442, 5586, 18872, 58068, 990, 30530, 6119, 6912, 7986, 6, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 
21239, 1009, 8250, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 23, 11, 99869, 79, 1248, 73022, 151336], 'meta_info': {'id': 'dd8c2ab9f8ca49d9ab9adffd8402ca64', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 16599, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-4.7205765440594405e-05, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.4712340831756592, 31193, 'sed'), (0.0, 481, ' -'), (-7.629365427419543e-06, 72, 'i'), (-2.7418097943154862e-06, 364, "" '""), (-0.03059755451977253, 99446, '25'), (-0.0002944036095868796, 11, ','), (-0.02504933997988701, 99446, '25'), (-3.4689302992774174e-05, 66, 'c'), (-1.3351351299206726e-05, 5661, '\\\n'), (-2.50339189733495e-06, 262, ' '), (-2.1576648578047752e-05, 442, ' //'), (-0.006577034946531057, 5586, ' Action'), (-0.028472570702433586, 18872, '-specific'), (-0.147026926279068, 58068, ' accents'), (-1.0255582332611084, 990, ' use'), (-1.2552826404571533, 30530, ' VS'), (-0.055200811475515366, 6119, ' Code'), (-0.1333451271057129, 6912, ' theme'), (-0.00020311199477873743, 7986, ' colors'), (-0.00013684290752280504, 6, ""'""), (0.0, 608, ' /'), (0.0, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (0.0, 2899, '/c'), (-1.1920928244535389e-07, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (0.0, 13428, '/src'), (0.0, 14, '/'), (0.0, 27082, 'preview'), (0.0, 22490, '/de'), (0.0, 6005, 'cor'), (0.0, 804, 'ations'), (0.0, 21239, '.ts'), (-1.1920928244535389e-07, 1009, ' &&'), (0.0, 8250, ' cat'), (0.0, 481, ' -'), (0.0, 77, 'n'), (0.0, 608, ' /'), (0.0, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (0.0, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (0.0, 13428, '/src'), (0.0, 14, '/'), (0.0, 27082, 'preview'), (0.0, 22490, '/de'), (0.0, 6005, 'cor'), (0.0, 804, 'ations'), (0.0, 21239, '.ts'), (0.0, 760, ' |'), (0.0, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (0.0, 364, "" '""), (-0.2046738862991333, 23, '8'), (0.0, 11, ','), (-0.004333035554736853, 99869, '28'), (0.0, 79, 'p'), (-1.1920928244535389e-07, 1248, ""'\n""), (-1.7881377516459906e-06, 73022, '```'), (-2.2172682292875834e-05, 151336, '<|user|>')], 'completion_tokens': 82, 'cached_tokens': 616, 'spec_accept_rate': 0.9090909090909091, 'spec_accept_length': 3.727272727272727, 'spec_verify_ct': 22, 'spec_accept_token_num': 60, 'spec_draft_token_num': 66, 'e2e_latency': 1.677563190460205, 'response_sent_to_client_ts': 1767625474.1144807}}\r\n[2026-01-05 16:04:34] INFO: 10.86.2.252:51656 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +769,1198411,"TERMINAL",0,0,"[2026-01-05 16:04:45] Receive: obj=GenerateReqInput(validation_time=1.9612256437540054e-05, received_time=1767625485.1554787, received_time_perf=2480369.828404127, rid='849c45613219445abce5d909745e57a8', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 
3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99241, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 220, 102573, 16, 19, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 
25, 99241, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 15, 99446, 22, 101478, 100104, 23, 127031, 23, 101729, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 17, 13, 101135, 19, 100919, 23, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 22, 13, 15, 99419, 18, 99367, 101135, 18, 11, 9276, 1131, 21, 66, 450, 16, 7221, 100614, 22, 101478, 19, 65, 24, 101474, 15, 64, 23, 102340, 69, 15, 98886, 12502, 99200, 16, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21] ... 
[123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 20, 100899, 23, 100067, 24, 99869, 99200, 21, 102284, 16, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101655, 17, 13, 19, 100632, 23, 102650, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 100235, 22, 13, 110610, 22, 101663, 100372, 23, 11, 9276, 1131, 631, 23, 66, 17, 370, 24, 69, 23, 924, 101474, 67, 24, 370, 24, 329, 71998, 23, 99698, 17, 924, 102636, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 
11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:04:45 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +770,1198685,"TERMINAL",0,0,"[2026-01-05 16:04:45 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 0, token usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +771,1199028,"TERMINAL",0,0,"[2026-01-05 16:04:45 TP0] Prefill batch, #new-seq: 1, #new-token: 9, #cached-token: 0, token usage: 0.04, #running-req: 0, #queue-req: 0, 
\r\n",,terminal_output +772,1199191,"TERMINAL",0,0,"[2026-01-05 16:04:45 TP0] Decode batch, #running-req: 1, #token: 17007, token usage: 0.04, accept len: 2.89, accept rate: 0.72, cuda graph: True, gen throughput (token/s): 8.97, #queue-req: 0, \r\n",,terminal_output +773,1199355,"src/preview/decorations.ts",1968,0,"",typescript,selection_command +774,1199472,"TERMINAL",0,0,"[2026-01-05 16:04:46] Finish: obj=GenerateReqInput(validation_time=1.9612256437540054e-05, received_time=1767625485.1554787, received_time_perf=2480369.828404127, rid='849c45613219445abce5d909745e57a8', http_worker_ipc=None, text=""[gMASK]<|system|>\nYou are a helpful assistant that interacts with a computer shell to solve programming tasks.\nYour goal is to predict the next bash command a developer would most likely execute, given their editing and navigation history.\n\n=== CONVERSATION FORMAT ===\nThe conversation history alternates between:\n- Assistant messages: bash commands in fenced code blocks\n- User messages: command output wrapped in ... tags\n\nAfter each edit, you should show the resulting file contents using `cat -n FILE | sed -n 'START,ENDp'`, which produces 6-character right-aligned line numbers followed by a tab, e.g.:\n 1\tfirst line\n 2\tsecond line\n\nThe chained cat command should show 21 lines around the edited region.\n\n=== RESPONSE FORMAT ===\nYour response must contain exactly ONE bash code block with one command or two commands connected with &&.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected.\n\n=== EDIT COMMA"" ... ""time=1.5758909285068512e-05, received_time=1767625472.4368727, received_time_perf=2480357.109798078, rid='dd8c2ab9f8ca49d9ab9adffd8402ca64', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198\n... 
[truncated]\n/nothink<|assistant|>\n"", input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99241, 28964, 15, 60, 42344, 483, 7162, 11, 671, 931, 7806, 80, 25, 220, 16, 11, 671, 931, 34668, 25, 
220, 102573, 16, 19, 11, 671, 32762, 34668, 25, 220, 15, 11, 3950, 10426, 25, 220, 15, 13, 100772, 11, 671, 27084, 12, 2958, 25, 220, 15, 11, 671, 4584, 12, 2958, 25, 220, 15, 345, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99241, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 17, 13, 15, 99446, 22, 101478, 100104, 23, 127031, 23, 101729, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 17, 13, 101135, 19, 100919, 23, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 22, 13, 15, 99419, 18, 99367, 101135, 18, 11, 9276, 1131, 21, 66, 450, 16, 7221, 100614, 22, 101478, 19, 65, 24, 101474, 15, 64, 23, 102340, 69, 15, 98886, 12502, 99200, 16, 65, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21] ... 
[123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 101175, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 20, 100899, 23, 100067, 24, 99869, 99200, 21, 102284, 16, 17, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101655, 17, 13, 19, 100632, 23, 102650, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 15, 100235, 22, 13, 110610, 22, 101663, 100372, 23, 11, 9276, 1131, 631, 23, 66, 17, 370, 24, 69, 23, 924, 101474, 67, 24, 370, 24, 329, 71998, 23, 99698, 17, 924, 102636, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 
11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False), out={'text': ""\n```bash\nsed -i '25,25c\\\n // Action-specific accents use appropriate VS Code theme colors' /home/franz.srambical/crowd-pilot-extension/src/preview/decorations.ts && cat -n /home/franz.srambical/crowd-pilot-extension/src/preview/decorations.ts | sed -n '8,28p'\n```"", 'output_ids': [198, 73022, 45937, 198, 31193, 481, 72, 364, 99446, 11, 99446, 66, 5661, 262, 442, 5586, 18872, 58068, 990, 8310, 30530, 6119, 6912, 7986, 6, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 
22490, 6005, 804, 21239, 1009, 8250, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 23, 11, 99869, 79, 1248, 73022, 151336], 'meta_info': {'id': '849c45613219445abce5d909745e57a8', 'finish_reason': {'type': 'stop', 'matched': 151336}, 'prompt_tokens': 16989, 'weight_version': 'default', 'total_retractions': 0, 'input_token_logprobs': [(None, 151351, '')], 'output_token_logprobs': [(0.0, 198, '\n'), (-9.83428253675811e-05, 73022, '```'), (0.0, 45937, 'bash'), (0.0, 198, '\n'), (-0.44101521372795105, 31193, 'sed'), (0.0, 481, ' -'), (-1.8596476365928538e-05, 72, 'i'), (-2.706014311115723e-05, 364, "" '""), (-0.3637892007827759, 99446, '25'), (-0.0004702892620116472, 11, ','), (-0.032303761690855026, 99446, '25'), (-9.142934868577868e-05, 66, 'c'), (-1.585470999998506e-05, 5661, '\\\n'), (-6.794906312279636e-06, 262, ' '), (-2.539125671319198e-05, 442, ' //'), (-0.00775493448600173, 5586, ' Action'), (-0.012448233552277088, 18872, '-specific'), (-0.05767015367746353, 58068, ' accents'), (-1.211035966873169, 990, ' use'), (-1.3828096389770508, 8310, ' appropriate'), (-0.5303966403007507, 30530, ' VS'), (-0.00029404606902971864, 6119, ' Code'), (-0.7904463410377502, 6912, ' theme'), (-2.8729025871143676e-05, 7986, ' colors'), (-6.532455881824717e-05, 6, ""'""), (0.0, 608, ' /'), (0.0, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (0.0, 2899, '/c'), (0.0, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (0.0, 13428, '/src'), (0.0, 14, '/'), (0.0, 27082, 'preview'), (0.0, 22490, '/de'), (0.0, 6005, 'cor'), (-5.960462772236497e-07, 804, 'ations'), (0.0, 21239, '.ts'), (-2.3841855067985307e-07, 1009, ' &&'), (-3.576278118089249e-07, 8250, ' cat'), (0.0, 481, ' -'), (0.0, 77, 'n'), (0.0, 608, ' /'), (0.0, 5117, 'home'), (0.0, 80215, '/fr'), (0.0, 12064, 'anz'), (0.0, 514, '.s'), (0.0, 2396, 'ram'), (0.0, 65, 'b'), (0.0, 938, 'ical'), (0.0, 2899, '/c'), (-1.1920928244535389e-07, 651, 'row'), (0.0, 67, 'd'), (0.0, 2268, '-p'), (0.0, 23891, 'ilot'), (0.0, 79888, '-extension'), (0.0, 13428, '/src'), (0.0, 14, '/'), (0.0, 27082, 'preview'), (0.0, 22490, '/de'), (0.0, 6005, 'cor'), (-1.1920928244535389e-07, 804, 'ations'), (0.0, 21239, '.ts'), (0.0, 760, ' |'), (0.0, 10918, ' sed'), (0.0, 481, ' -'), (0.0, 77, 'n'), (0.0, 364, "" '""), (-0.019702211022377014, 23, '8'), (0.0, 11, ','), (-0.0010059778578579426, 99869, '28'), (0.0, 79, 'p'), (0.0, 1248, ""'\n""), (-4.410734163684538e-06, 73022, '```'), (-5.364403477869928e-06, 151336, '<|user|>')], 'completion_tokens': 83, 'cached_tokens': 596, 'spec_accept_rate': 0.76, 'spec_accept_length': 3.32, 'spec_verify_ct': 25, 'spec_accept_token_num': 57, 'spec_draft_token_num': 75, 'e2e_latency': 1.102778434753418, 'response_sent_to_client_ts': 1767625486.2583067}}\r\n[2026-01-05 16:04:46] INFO: 10.86.2.252:57868 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +775,1199637,"TERMINAL",0,0,"[2026-01-05 16:04:46] Receive: obj=GenerateReqInput(validation_time=1.6591046005487442e-05, received_time=1767625486.3850005, received_time_perf=2480371.057925691, rid='574c785847294341a999a06f666bf51e', http_worker_ipc=None, text=None, input_ids=[151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 
3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 41951, 1555, 198, 257, 220, 17, 197, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 6896, 23970, 26935, 2038, 2504, 448, 825, 3210, 476, 1378, 11288, 8432, 448, 1009, 382, 27, 2243, 39092, 397, 73022, 45937, 198, 21680, 10806, 72704, 198, 13865, 3989, 522, 2243, 39092, 1339, 17483, 311, 1795, 1493, 5601, 686, 5240, 697, 2033, 311, 387, 17526, 382, 8706, 32113, 39997, 51882, 320, 97906, 8, 2049, 198, 4498, 498, 1366, 311, 32113, 264, 1034, 11, 498, 27639, 16150, 279, 4499, 1667, 1555, 5980, 10918, 11288, 304, 23970, 315, 279, 2701, 7586, 345, 437, 498, 27639, 4183, 990, 48657, 11288, 1075, 330, 47069, 14, 813, 25298, 4846, 11431, 5615, 3885, 678, 1555, 5109, 525, 220, 16, 5980, 323, 12710, 525, 10735, 624, 35207, 4499, 3209, 57937, 320, 26964, 6896, 825, 817, 2033, 7731, 16, 8, 29446, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 66, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 17, 8, 10423, 264, 66019, 2504, 315, 5128, 510, 256, 10918, 481, 72, 364, 22505, 11, 4689, 67, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 18, 8, 17082, 501, 5128, 56102, 264, 2661, 1555, 510, 256, 10918, 481, 72, 364, 22505, 72, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 19, 8, 29692, 501, 5128, 518, 279, 11171, 315, 279, 1034, 510, 256, 10918, 481, 72, 8247, 64, 5661, 20551, 15645, 62, 16, 5661, 20551, 15645, 62, 17, 5661, 9335, 6, 608, 3435, 50642, 32283, 23836, 1009, 8250, 481, 77, 608, 3435, 50642, 32283, 23836, 760, 10918, 481, 77, 364, 53, 22505, 39736, 4689, 79, 3876, 9061, 647, 22505, 323, 647, 4689, 13828, 264, 2613, 32109, 2163, 279, 19102, 5537, 382, 5404, 4183, 16674, 11288, 1075, 330, 18, 82, 91447, 91447, 28761, 70, 1, 476, 894, 1008, 330, 82, 14, 813, 25298, 11220, 1707, 10918, 48657, 26, 4518, 345, 31978, 18102, 279, 11490, 5128, 1667, 825, 315, 279, 1555, 5980, 7586, 3403, 382, 4498, 498, 525, 4183, 15652, 3542, 320, 68, 1302, 2572, 4303, 7032, 11, 16331, 11288, 11, 7375, 11, 4992, 24316, 498, 1231, 16674, 24099, 26935, 11288, 13, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 99619, 60, 37228, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 22, 98965, 101294, 23, 100632, 18, 114240, 99698, 18, 18, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 101562, 18, 13, 103005, 100614, 20, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 19, 22, 13, 102626, 18, 15, 98360, 18, 100557, 
11, 9276, 1131, 19, 99317, 66, 19, 1371, 24, 102340, 329, 102259, 64, 102284, 65, 99695, 75918, 101135, 65, 108345, 20, 69, 516, 1758, 40163, 83114, 5856, 11, 1467, 5856, 11, 1946, 8076, 5818, 123720, 18, 100557, 11, 220, 123720, 121577, 11, 220, 123720, 18, 100235, 11, 220, 100759, 11, 220, 99916, 16, 15, 11, 220, 20, 99446, 11, 220, 126743, 11, 220, 110610, 100461, 11, 220, 120392, 99146, 11, 220, 19, 100104, 11, 220, 23, 99590, 102650, 11, 220, 19, 100933, 11, 220, 126743, 11, 220, 21, 100632, 21, 11, 220, 109641, 99241, 11, 220, 18, 98965, 11, 220, 114495, 98729, 11, 220, 124211, 103595, 11, 220, 100067, 22, 21, 11, 220, 21, 99590, 11, 220, 102114, 22, 16, 11, 220, 102486, 24, 20, 11, 220, 101140, 19, 11, 220, 18, 98965, 11, 220, 100096, 17, 18, 11, 220, 99951, 24, 11, 220, 126612, 15, 11, 220, 99916, 24, 100235, 11, 220, 18, 99146, 15, 11, 220, 126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18] ... 
[126743, 11, 220, 123006, 101961, 11, 220, 110248, 20, 11, 220, 122866, 24, 11, 220, 19, 100632, 18, 11, 220, 100067, 17, 18, 11, 220, 98965, 11, 220, 99916, 21, 16, 11, 220, 102807, 17, 11, 220, 119953, 102501, 11, 220, 18, 99619, 11, 220, 109626, 102340, 11, 220, 100919, 19, 15, 11, 220, 100919, 17, 11, 220, 23, 100096, 21, 11, 220, 101135, 16, 23, 11, 220, 22, 99317, 102650, 11, 220, 101135, 24, 20, 11, 220, 20, 99243, 104160, 11, 220, 119621, 24, 11, 220, 100759, 11, 220, 100928, 20, 11, 220, 109803, 99064, 11, 220, 100919, 19, 15, 11, 220, 103093, 16, 24, 11, 220, 103205, 18, 11, 220, 98729, 19, 23, 11, 220, 20, 98668, 11, 220, 98886, 11, 220, 121860, 100919, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 99916, 24, 100235, 11, 220, 110800, 101252, 11, 220, 120911, 11, 220, 23, 99951, 100441, 11, 220, 121755, 23, 11, 220, 99457, 100002, 11, 220, 100759, 11, 220, 98886, 11, 220, 99916, 20, 22, 11, 220, 21, 99618, 20, 11, 220, 99446, 11, 220, 18, 99146, 15, 11, 220, 122300, 15, 11, 220, 98729, 19, 100235, 11, 220, 120911, 11, 220, 100632, 21, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 99916, 18, 100702, 11, 220, 100632, 16, 103498, 11, 220, 100104, 11, 220, 24, 100933, 23, 11, 220, 99951, 16, 11, 220, 99618, 17, 20, 11, 220, 127020, 22, 11, 220, 19, 101474, 24, 11, 220, 98965, 11, 220, 101474, 23, 11, 220, 114062, 20, 11, 220, 122463, 18, 11, 220, 99951, 24, 11, 220, 117305, 100235, 11, 220, 110248, 19, 11, 220, 23, 102626, 18, 11, 220, 122569, 22, 11, 220, 119953, 20, 11, 220, 101562, 16, 21, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 112891, 100557, 11, 220, 22, 99618, 11, 220, 110610, 99243, 11, 220, 100933, 16, 11, 220, 102114, 11, 220, 100632, 19, 11, 220, 119651, 100002, 11, 220, 98965, 11, 220, 101562, 23, 24, 11, 220, 102626, 11, 220, 21, 11, 220, 22, 99695, 23, 11, 220, 103502, 17, 11, 220, 122541, 98965, 11, 220, 108499, 11, 220, 99146, 11, 220, 100928, 20, 101252, 11, 220, 117305, 15, 11, 220, 24, 101478, 101175, 11, 220, 117290, 20, 11, 220, 20, 98668, 24, 11, 220, 23, 98668, 24, 11, 220, 101130, 18, 11, 220, 126743, 11, 220, 20, 101411, 16, 11, 220, 98965, 11, 220, 100919, 19, 11, 220, 106464, 17, 11, 220, 99366, 11, 220, 20, 98668, 11, 220, 99446, 22, 11, 220, 108499, 11, 220, 99317, 11, 220, 19, 98729, 102624, 11, 220, 117290, 20, 11, 220, 100759, 198, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351, 198, 73022, 45937, 198, 4616, 481, 77, 608, 5117, 80215, 12064, 514, 2396, 65, 938, 2899, 651, 67, 2268, 23891, 79888, 13428, 14, 27082, 22490, 6005, 804, 21239, 760, 10918, 481, 77, 364, 102340, 11, 103595, 79, 1248, 73022, 151336, 198, 8049, 411, 397, 262, 220, 102340, 13056, 12688, 25, 86102, 2285, 52279, 11711, 345, 262, 220, 101961, 13056, 35647, 25, 86102, 2285, 52279, 22159, 345, 262, 220, 102088, 13056, 3886, 2323, 25, 364, 30901, 751, 262, 220, 101723, 13056, 43850, 25, 364, 16, 1767, 751, 262, 220, 100461, 13056, 91174, 25, 364, 1056, 42503, 751, 262, 220, 101562, 7472, 17804, 262, 220, 101655, 1572, 262, 220, 100933, 7472, 442, 15621, 11586, 35884, 11162, 320, 1958, 86227, 304, 4499, 23044, 340, 262, 220, 101474, 7472, 419, 23163, 980, 492, 450, 52279, 42294, 516, 55008, 19227, 2520, 1178, 9406, 19394, 929, 2262, 262, 220, 99200, 13056, 12688, 25, 86102, 2285, 52279, 11711, 345, 262, 220, 102624, 13056, 91174, 25, 364, 1056, 42503, 751, 262, 220, 102501, 13056, 442, 2308, 3886, 481, 31746, 369, 3842, 3668, 21265, 198, 262, 220, 102721, 7472, 17804, 262, 220, 102856, 1572, 262, 220, 101130, 7472, 442, 17082, 290, 
2504, 481, 4933, 501, 1467, 448, 6176, 11162, 389, 1790, 1555, 198, 262, 220, 101917, 7472, 442, 2213, 1178, 374, 738, 817, 30149, 4566, 3141, 3798, 198, 262, 220, 102486, 7472, 419, 23163, 980, 492, 4208, 290, 9421, 516, 55008, 19227, 2520, 1178, 9406, 19394, 929, 2262, 262, 220, 101729, 13056, 442, 5351, 9219, 481, 2213, 738, 4566, 63561, 3798, 8739, 3798, 198, 262, 220, 102573, 7472, 17804, 262, 220, 99618, 1572, 262, 220, 103595, 7472, 442, 53873, 35747, 481, 4933, 501, 1467, 1290, 1283, 11057, 1467, 320, 23995, 1555, 340, 522, 36174, 29, 151360, 151336, 198, 8049, 411, 397, 58, 115937, 21, 12, 100286, 12, 100002, 220, 99317, 25, 100590, 25, 101562, 60, 35357, 25, 2839, 28, 30989, 27145, 2505, 78775, 3009, 28, 16, 13, 102487, 16, 119651, 21, 102088, 100899, 19, 98503, 20, 19, 68, 12, 100002, 11, 3949, 3009, 28, 123853, 102269, 17, 20, 100933, 20, 13, 117290, 19, 100928, 22, 11, 3949, 3009, 76167, 28, 99590, 23, 100441, 21, 24, 13, 23, 99869, 99698, 19, 115925, 11, 9276, 1131, 23, 101474, 66, 100461, 21, 118843, 98729, 19, 100461, 370, 346, 20, 67, 100067, 24, 22, 100461, 68, 102486, 64, 23, 516, 1758, 40163, 83114, 5856, 11, 1467, 428, 151331, 151333, 151335, 198, 2610, 525, 264, 10945, 17821, 429, 82472, 448, 264, 6366, 12522, 311, 11619, 15461, 9076, 624, 7771, 5795, 374, 311, 7023, 279, 1790, 26935, 3210, 264, 15742, 1035, 1429, 4363, 9023, 11, 2661, 862, 15652, 323, 10641, 3840, 382, 8706, 3418, 71672, 3495, 51882, 2049, 198, 785, 10430, 3840, 6919, 973, 1948, 510, 12, 21338, 6605, 25, 26935, 11288, 304, 82703, 2038, 10005, 198, 12, 2657, 6605, 25, 3210, 2550, 19435, 304, 366, 36174, 29, 26333, 36174, 29, 9488, 271, 6025, 1817, 4499, 11, 498, 1265, 1473, 279, 12935, 1034, 8793, 1667, 1565, 4616, 481, 77, 11831, 760, 10918, 481, 77, 364, 22505, 11, 4689, 79, 6, 7808, 892, 18611, 220, 21, 78588, 1290, 96632, 1555, 5109, 8109, 553, 264, 5651, 11, 384, 1302, 13, 510, 257, 220, 16, 4955, 3896, 1555, 198, 257, 220, 17, 4955, 5569, 1555, 271, 785, 66481, 8250, 3210, 1265, 1473, 220, 99146, 5128, 2163, 279, 19102, 5537, 382, 8706, 75566, 51882, 2049, 198, 7771, 2033, 1969, 6644, 715, 1112, 508, 376, 37809, 921, 522, 36174, 29, 151360, 151337, 198, 151350, 151351], input_embeds=None, image_data=None, video_data=None, audio_data=None, sampling_params={'temperature': 0.7, 'max_new_tokens': None, 'min_new_tokens': 0, 'stop': None, 'stop_token_ids': None, 'stop_regex': None, 'top_p': 0.8, 'top_k': 20, 'min_p': 0.0, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'repetition_penalty': 1.0, 'regex': None, 'ebnf': None, 'n': 1, 'no_stop_trim': False, 'ignore_eos': False, 'skip_special_tokens': True, 'logit_bias': None, 'custom_params': None}, return_logprob=True, logprob_start_len=-1, top_logprobs_num=0, token_ids_logprob=None, return_text_in_logprobs=True, stream=False, log_metrics=True, return_hidden_states=False, modalities=[], session_params=None, lora_path=None, lora_id=None, custom_logit_processor=None, bootstrap_host=None, bootstrap_port=None, bootstrap_room=None, bootstrap_pair_key=None, decode_tp_size=None, reasoning=False, data_parallel_rank=None, background=False, conversation_id=None, priority=None, extra_key=None, no_logs=False, custom_labels=None, return_bytes=False, return_entropy=False)\r\n[2026-01-05 16:04:46 TP0] Prefill batch, #new-seq: 1, #new-token: 8192, #cached-token: 596, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +776,1199895,"TERMINAL",0,0,"[2026-01-05 16:04:46 TP0] Prefill batch, #new-seq: 1, #new-token: 7178, #cached-token: 0, token 
usage: 0.02, #running-req: 0, #queue-req: 0, \r\n",,terminal_output diff --git a/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-12ac3267-b673-44fd-8ea3-37e3e74cb0101755540018956-2025_08_18-20.00.27.475/source.csv b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-12ac3267-b673-44fd-8ea3-37e3e74cb0101755540018956-2025_08_18-20.00.27.475/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..62208ac25409b10829c5782a159287de819b99d5 --- /dev/null +++ b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-12ac3267-b673-44fd-8ea3-37e3e74cb0101755540018956-2025_08_18-20.00.27.475/source.csv @@ -0,0 +1,566 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,788,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"8:00:27 PM [info] Activating crowd-code\n8:00:27 PM [info] Recording started\n8:00:27 PM [info] Initializing git provider using file system watchers...\n",Log,tab +3,1099,"TERMINAL",0,0,"python3",,terminal_focus +4,1105,"TERMINAL",0,0,"bash",,terminal_focus +5,2785,"test/test_nan.ipynb",0,0,"# Restore a dynamics checkpoint and enable sowing\nimport os\nfrom typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nimport optax\nimport orbax.checkpoint as ocp\nimport grain\n\nfrom utils.dataloader import get_dataloader\nfrom models.lam import LatentActionModel\n\n# Adjust to your checkpoint directory, dataset directory, and dynamics type\nckpt_dir = ""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/train_lam_coinrun_reproduction_20067/100000_ckpt""\ndata_dir = ""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_10m""\nnum_steps: int = 200_000\nseed: int = 0\nseq_len: int = 16\nimage_channels: int = 3\nimage_height: int = 64\nimage_width: int = 64\nsave_ckpt: bool = False\nrestore_ckpt: bool = False\n# Optimization\nbatch_size: int = 36\nvq_beta: float = 0.25\ninit_lr: float = 0.0\nmax_lr: float = 3e-5\ndecay_end: float = 0.0\nwsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n)\nwarmup_steps: int = 5000\nlr_schedule: str = ""wsd"" # supported options: wsd, cos\nvq_reset_thresh: int = 50\n# LAM\nmodel_dim: int = 512\nffn_dim: int = 2048\nlatent_dim: int = 32\nnum_latents: int = 6\npatch_size: int = 16\nnum_blocks: int = 4\nnum_heads: int = 8\ndropout: float = 0.0\ncodebook_dropout: float = 0.0\nparam_dtype = jnp.float32\ndtype = jnp.bfloat16\n# Logging\nlog_interval: int = 5\nlog_image_interval: int = 250\nuse_flash_attention: bool = True\n\n# Build model graph matching the checkpoint\nrng = jax.random.key(seed)\nrng, _rng = jax.random.split(rng)\nrngs = nnx.Rngs(_rng)\nlam = LatentActionModel(\n in_dim=image_channels,\n model_dim=model_dim,\n ffn_dim=ffn_dim,\n latent_dim=latent_dim,\n num_latents=num_latents,\n patch_size=patch_size,\n num_blocks=num_blocks,\n num_heads=num_heads,\n dropout=dropout,\n codebook_dropout=codebook_dropout,\n param_dtype=param_dtype,\n dtype=dtype,\n use_flash_attention=use_flash_attention,\n rngs=rngs,\n)\n\n# Optimizer (matches training opt hyperparams; lr value is irrelevant for restore)\ntx = optax.adamw(\n learning_rate=max_lr,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=param_dtype,\n)\noptimizer = nnx.Optimizer(lam, tx)\n",python,tab +6,325278,"test/test_nan.ipynb",186,0,"",python,selection_mouse +7,325279,"test/test_nan.ipynb",185,0,"",python,selection_command 
+8,331804,"test/test_nan.ipynb",1007,8,"schedule",python,selection_command +9,334786,"test/test_nan.ipynb",968,8,"schedule",python,selection_command +10,338868,"test/test_nan.ipynb",1007,8,"schedule",python,selection_command +11,341147,"test/test_nan.ipynb",1014,0,"",python,selection_command +12,341595,"test/test_nan.ipynb",1004,0,"",python,selection_command +13,343378,"test/test_nan.ipynb",0,0,"# Restore latest checkpoint: optimizer and dataloader state, like in training\nfrom typing import cast\n\nhandler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\nhandler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n)\nhandler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n)\nhandler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n)\nhandler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n)\n\nckpt_mgr = ocp.CheckpointManager(\n directory=ckpt_dir,\n options=ocp.CheckpointManagerOptions(step_format_fixed_length=6),\n handler_registry=handler_registry,\n)\n\n# Recreate dataloader and iterator exactly like training\narray_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n]\ngrain_dataloader = get_dataloader(\n array_record_files,\n seq_len,\n batch_size,\n image_height,\n image_width,\n image_channels,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=seed,\n)\ninitial_state = grain_dataloader._create_initial_state()\nloader_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n# Restore optimizer and dataloader iterator states\nabstract_optimizer = nnx.eval_shape(lambda: optimizer)\nabstract_optimizer_state = nnx.state(abstract_optimizer)\nrestored = ckpt_mgr.restore(\n ckpt_mgr.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(loader_iterator), # type: ignore\n ),\n)\n\nnnx.update(optimizer, restored[""model_state""]) # type: ignore\nloader_iterator = restored[""dataloader_state""]\nstep = ckpt_mgr.latest_step() or 0\nckpt_mgr.close()\n\n# Convenience handle\nlam = optimizer.model\nprint(f""Restored optimizer and dataloader at step {step}."")\n",python,tab +14,347677,"test/test_nan.ipynb",0,0,"",python,tab +15,348788,"test/test_nan.ipynb",2041,0,"",python,selection_command +16,350687,"test/test_nan.ipynb",2059,0,"",python,selection_command +17,350907,"test/test_nan.ipynb",2085,0,"",python,selection_command +18,350927,"test/test_nan.ipynb",2097,0,"",python,selection_command +19,350996,"test/test_nan.ipynb",2109,0,"",python,selection_command +20,351167,"test/test_nan.ipynb",2132,0,"",python,selection_command +21,351315,"test/test_nan.ipynb",2158,0,"",python,selection_command +22,351707,"test/test_nan.ipynb",2160,0,"",python,selection_command +23,352067,"test/test_nan.ipynb",2158,0,"",python,selection_command +24,360027,"test/test_nan.ipynb",2081,0,"",python,selection_mouse +25,361167,"test/test_nan.ipynb",2077,0,"",python,selection_command +26,361336,"test/test_nan.ipynb",2077,6,"",python,content +27,364567,"test/test_nan.ipynb",2076,0,"",python,selection_command +28,366495,"test/test_nan.ipynb",2077,0,"max_lr",python,content 
+29,366497,"test/test_nan.ipynb",2077,0,"",python,selection_command +30,376216,"test/test_nan.ipynb",0,0,"%pwd",python,tab +31,377207,"test/test_nan.ipynb",0,0,"",python,tab +32,379207,"test/test_nan.ipynb",2057,0,"",python,selection_command +33,380927,"test/test_nan.ipynb",2040,0,"\n",python,content +34,381867,"test/test_nan.ipynb",2040,0,"\n",python,content +35,384135,"test/test_nan.ipynb",2041,0,"l",python,content +36,384136,"test/test_nan.ipynb",2042,0,"",python,selection_keyboard +37,384176,"test/test_nan.ipynb",2042,0,"r",python,content +38,384177,"test/test_nan.ipynb",2043,0,"",python,selection_keyboard +39,384387,"test/test_nan.ipynb",2043,0,"_",python,content +40,384388,"test/test_nan.ipynb",2044,0,"",python,selection_keyboard +41,384707,"test/test_nan.ipynb",2044,0,"f",python,content +42,384709,"test/test_nan.ipynb",2045,0,"",python,selection_keyboard +43,384792,"test/test_nan.ipynb",2045,0,"n",python,content +44,384793,"test/test_nan.ipynb",2046,0,"",python,selection_keyboard +45,384911,"test/test_nan.ipynb",2046,0," ",python,content +46,384912,"test/test_nan.ipynb",2047,0,"",python,selection_keyboard +47,385055,"test/test_nan.ipynb",2047,0,"=",python,content +48,385056,"test/test_nan.ipynb",2048,0,"",python,selection_keyboard +49,385127,"test/test_nan.ipynb",2048,0," ",python,content +50,385128,"test/test_nan.ipynb",2049,0,"",python,selection_keyboard +51,386627,"test/test_nan.ipynb",2049,0,"g",python,content +52,386628,"test/test_nan.ipynb",2050,0,"",python,selection_keyboard +53,386707,"test/test_nan.ipynb",2050,0,"e",python,content +54,386708,"test/test_nan.ipynb",2051,0,"",python,selection_keyboard +55,386787,"test/test_nan.ipynb",2051,0,"t",python,content +56,386788,"test/test_nan.ipynb",2052,0,"",python,selection_keyboard +57,387024,"test/test_nan.ipynb",2052,0,"_",python,content +58,387025,"test/test_nan.ipynb",2053,0,"",python,selection_keyboard +59,387227,"test/test_nan.ipynb",2053,0,"l",python,content +60,387228,"test/test_nan.ipynb",2054,0,"",python,selection_keyboard +61,387302,"test/test_nan.ipynb",2054,0,"r",python,content +62,387303,"test/test_nan.ipynb",2055,0,"",python,selection_keyboard +63,387576,"test/test_nan.ipynb",2055,0,"_",python,content +64,387577,"test/test_nan.ipynb",2056,0,"",python,selection_keyboard +65,387696,"test/test_nan.ipynb",2056,0,"s",python,content +66,387697,"test/test_nan.ipynb",2057,0,"",python,selection_keyboard +67,387791,"test/test_nan.ipynb",2057,0,"c",python,content +68,387792,"test/test_nan.ipynb",2058,0,"",python,selection_keyboard +69,387847,"test/test_nan.ipynb",2058,0,"h",python,content +70,387848,"test/test_nan.ipynb",2059,0,"",python,selection_keyboard +71,388047,"test/test_nan.ipynb",2059,0,"e",python,content +72,388048,"test/test_nan.ipynb",2060,0,"",python,selection_keyboard +73,388049,"test/test_nan.ipynb",2060,0,"d",python,content +74,388049,"test/test_nan.ipynb",2061,0,"",python,selection_keyboard +75,388258,"test/test_nan.ipynb",2061,0,"u",python,content +76,388258,"test/test_nan.ipynb",2062,0,"",python,selection_keyboard +77,388259,"test/test_nan.ipynb",2062,0,"l",python,content +78,388260,"test/test_nan.ipynb",2063,0,"",python,selection_keyboard +79,388260,"test/test_nan.ipynb",2063,0,"e",python,content +80,388261,"test/test_nan.ipynb",2064,0,"",python,selection_keyboard +81,388447,"test/test_nan.ipynb",2064,0,"{}",python,content +82,388448,"test/test_nan.ipynb",2065,0,"",python,selection_keyboard +83,388795,"test/test_nan.ipynb",2064,2,"",python,content +84,389027,"test/test_nan.ipynb",2064,0,"()",python,content 
+85,389028,"test/test_nan.ipynb",2065,0,"",python,selection_keyboard +86,389975,"test/test_nan.ipynb",2065,0,"\n \n",python,content +87,390187,"test/test_nan.ipynb",2070,0,"l",python,content +88,390188,"test/test_nan.ipynb",2071,0,"",python,selection_keyboard +89,390547,"test/test_nan.ipynb",2071,0,"r",python,content +90,390548,"test/test_nan.ipynb",2072,0,"",python,selection_keyboard +91,391007,"test/test_nan.ipynb",2072,0,"_",python,content +92,391008,"test/test_nan.ipynb",2073,0,"",python,selection_keyboard +93,391190,"test/test_nan.ipynb",2073,0,"s",python,content +94,391191,"test/test_nan.ipynb",2074,0,"",python,selection_keyboard +95,391192,"test/test_nan.ipynb",2074,0,"c",python,content +96,391193,"test/test_nan.ipynb",2075,0,"",python,selection_keyboard +97,391283,"test/test_nan.ipynb",2075,0,"h",python,content +98,391284,"test/test_nan.ipynb",2076,0,"",python,selection_keyboard +99,391387,"test/test_nan.ipynb",2076,0,"e",python,content +100,391388,"test/test_nan.ipynb",2077,0,"",python,selection_keyboard +101,391407,"test/test_nan.ipynb",2077,0,"d",python,content +102,391408,"test/test_nan.ipynb",2078,0,"",python,selection_keyboard +103,391535,"test/test_nan.ipynb",2078,0,"u",python,content +104,391536,"test/test_nan.ipynb",2079,0,"",python,selection_keyboard +105,391608,"test/test_nan.ipynb",2079,0,"l",python,content +106,391609,"test/test_nan.ipynb",2080,0,"",python,selection_keyboard +107,391695,"test/test_nan.ipynb",2080,0,"e",python,content +108,391696,"test/test_nan.ipynb",2081,0,"",python,selection_keyboard +109,391827,"test/test_nan.ipynb",2081,0,",",python,content +110,391828,"test/test_nan.ipynb",2082,0,"",python,selection_keyboard +111,392035,"test/test_nan.ipynb",2082,0,"\n ",python,content +112,394907,"test/test_nan.ipynb",2087,0,"i",python,content +113,394908,"test/test_nan.ipynb",2088,0,"",python,selection_keyboard +114,394947,"test/test_nan.ipynb",2088,0,"n",python,content +115,394948,"test/test_nan.ipynb",2089,0,"",python,selection_keyboard +116,395075,"test/test_nan.ipynb",2089,0,"i",python,content +117,395076,"test/test_nan.ipynb",2090,0,"",python,selection_keyboard +118,395187,"test/test_nan.ipynb",2090,0,"t",python,content +119,395188,"test/test_nan.ipynb",2091,0,"",python,selection_keyboard +120,395415,"test/test_nan.ipynb",2091,0,"_",python,content +121,395416,"test/test_nan.ipynb",2092,0,"",python,selection_keyboard +122,395607,"test/test_nan.ipynb",2092,0,"l",python,content +123,395608,"test/test_nan.ipynb",2093,0,"",python,selection_keyboard +124,395755,"test/test_nan.ipynb",2093,0,"r",python,content +125,395756,"test/test_nan.ipynb",2094,0,"",python,selection_keyboard +126,395835,"test/test_nan.ipynb",2094,0,",",python,content +127,395836,"test/test_nan.ipynb",2095,0,"",python,selection_keyboard +128,396307,"test/test_nan.ipynb",2095,0,"\n ",python,content +129,396576,"test/test_nan.ipynb",2100,0,"m",python,content +130,396577,"test/test_nan.ipynb",2101,0,"",python,selection_keyboard +131,396656,"test/test_nan.ipynb",2101,0,"a",python,content +132,396657,"test/test_nan.ipynb",2102,0,"",python,selection_keyboard +133,396829,"test/test_nan.ipynb",2102,0,"x",python,content +134,396830,"test/test_nan.ipynb",2103,0,"",python,selection_keyboard +135,396987,"test/test_nan.ipynb",2103,0,"_",python,content +136,396988,"test/test_nan.ipynb",2104,0,"",python,selection_keyboard +137,397223,"test/test_nan.ipynb",2104,0,"l",python,content +138,397224,"test/test_nan.ipynb",2105,0,"",python,selection_keyboard +139,397315,"test/test_nan.ipynb",2105,0,"r",python,content 
+140,397316,"test/test_nan.ipynb",2106,0,"",python,selection_keyboard +141,397415,"test/test_nan.ipynb",2106,0,",",python,content +142,397416,"test/test_nan.ipynb",2107,0,"",python,selection_keyboard +143,398196,"test/test_nan.ipynb",2107,0,"\n ",python,content +144,398675,"test/test_nan.ipynb",2112,0,"d",python,content +145,398676,"test/test_nan.ipynb",2113,0,"",python,selection_keyboard +146,398835,"test/test_nan.ipynb",2113,0,"e",python,content +147,398836,"test/test_nan.ipynb",2114,0,"",python,selection_keyboard +148,398907,"test/test_nan.ipynb",2114,0,"c",python,content +149,398908,"test/test_nan.ipynb",2115,0,"",python,selection_keyboard +150,399015,"test/test_nan.ipynb",2115,0,"a",python,content +151,399016,"test/test_nan.ipynb",2116,0,"",python,selection_keyboard +152,399115,"test/test_nan.ipynb",2116,0,"y",python,content +153,399116,"test/test_nan.ipynb",2117,0,"",python,selection_keyboard +154,399356,"test/test_nan.ipynb",2117,0,"_",python,content +155,399357,"test/test_nan.ipynb",2118,0,"",python,selection_keyboard +156,399542,"test/test_nan.ipynb",2118,0,"e",python,content +157,399543,"test/test_nan.ipynb",2119,0,"",python,selection_keyboard +158,399667,"test/test_nan.ipynb",2119,0,"n",python,content +159,399668,"test/test_nan.ipynb",2120,0,"",python,selection_keyboard +160,399739,"test/test_nan.ipynb",2120,0,"d",python,content +161,399740,"test/test_nan.ipynb",2121,0,"",python,selection_keyboard +162,399867,"test/test_nan.ipynb",2121,0,",",python,content +163,399868,"test/test_nan.ipynb",2122,0,"",python,selection_keyboard +164,400175,"test/test_nan.ipynb",2122,0,"\n ",python,content +165,400447,"test/test_nan.ipynb",2127,0,"n",python,content +166,400448,"test/test_nan.ipynb",2128,0,"",python,selection_keyboard +167,400676,"test/test_nan.ipynb",2128,0,"u",python,content +168,400677,"test/test_nan.ipynb",2129,0,"",python,selection_keyboard +169,400943,"test/test_nan.ipynb",2129,0,"m",python,content +170,400944,"test/test_nan.ipynb",2130,0,"",python,selection_keyboard +171,401175,"test/test_nan.ipynb",2130,0,"_",python,content +172,401175,"test/test_nan.ipynb",2131,0,"",python,selection_keyboard +173,401267,"test/test_nan.ipynb",2131,0,"s",python,content +174,401268,"test/test_nan.ipynb",2132,0,"",python,selection_keyboard +175,401435,"test/test_nan.ipynb",2132,0,"t",python,content +176,401436,"test/test_nan.ipynb",2133,0,"",python,selection_keyboard +177,401507,"test/test_nan.ipynb",2133,0,"p",python,content +178,401508,"test/test_nan.ipynb",2134,0,"",python,selection_keyboard +179,401515,"test/test_nan.ipynb",2134,0,"e",python,content +180,401515,"test/test_nan.ipynb",2135,0,"",python,selection_keyboard +181,401767,"test/test_nan.ipynb",2135,0,"s",python,content +182,401768,"test/test_nan.ipynb",2136,0,"",python,selection_keyboard +183,402067,"test/test_nan.ipynb",2135,0,"",python,selection_command +184,404783,"train_lam.py",0,0,"from dataclasses import dataclass, field\nimport os\nfrom typing import cast\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.lam import LatentActionModel\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\njax.config.update(""jax_transfer_guard"", 
""allow"")\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n vq_beta: float = 0.25\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n vq_reset_thresh: int = 50\n # LAM\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 6\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_lam""\n tags: list[str] = field(default_factory=lambda: [""lam""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_checkpoint_keep_period: int = 20000\n wandb_id: str = """"\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\n\ndef lam_loss_fn(\n model: LatentActionModel, inputs: dict\n) -> tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n # --- Compute loss ---\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n model.train()\n outputs = model(inputs, training=True)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n gt_future_frames = gt[:, 1:]\n mse = jnp.square(gt_future_frames - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = gt_future_frames.clip(0, 1).reshape(-1, *gt_future_frames.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt, recon)).mean()\n count_fn = jax.vmap(lambda i: (outputs[""indices""] == i).sum())\n index_counts = count_fn(jnp.arange(args.num_latents))\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),\n )\n return loss, (outputs[""recon""], index_counts, metrics)\n\n\n@nnx.jit\ndef train_step(\n lam: LatentActionModel,\n optimizer: nnx.Optimizer,\n inputs: dict,\n action_last_active: jax.Array,\n rng: jax.Array,\n) -> tuple[jax.Array, jax.Array, jax.Array, dict]:\n def loss_fn(\n model: LatentActionModel,\n ) -> tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n return lam_loss_fn(model, inputs)\n\n # --- Update model ---\n (loss, (recon, idx_counts, metrics)), grads = nnx.value_and_grad(\n loss_fn, has_aux=True\n )(lam)\n optimizer.update(grads)\n\n # --- Reset inactive latent actions ---\n codebook = lam.vq.codebook\n num_codes = len(codebook)\n active_codes = idx_counts != 0.0\n action_last_active = jnp.where(active_codes, 0, action_last_active + 1)\n p_code = active_codes / active_codes.sum()\n reset_idxs = jax.random.choice(rng, num_codes, shape=(num_codes,), p=p_code)\n do_reset = action_last_active >= 
args.vq_reset_thresh\n new_codebook = jnp.where(\n jnp.expand_dims(do_reset, -1), codebook[reset_idxs], codebook.value\n )\n lam.vq.codebook.value = new_codebook\n action_last_active = jnp.where(do_reset, 0, action_last_active)\n return loss, recon, action_last_active, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n\n # Count parameters\n _, params, _ = nnx.split(lam, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.Optimizer(lam, tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, 
grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n\n # --- TRAIN LOOP ---\n dataloader = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in grain_iterator\n )\n print(f""Starting training from step {step}..."")\n action_last_active = jnp.zeros(args.num_latents, dtype=jnp.int32)\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng = jax.random.split(rng)\n\n inputs = dict(videos=videos, rng=_rng)\n rng, _rng = jax.random.split(rng)\n loss, recon, action_last_active, metrics = train_step(\n lam, optimizer, inputs, action_last_active, _rng\n )\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # 
--- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +185,409447,"train_lam.py",502,0,"",python,selection_command +186,409727,"train_lam.py",6619,0,"",python,selection_command +187,409907,"train_lam.py",502,0,"",python,selection_command +188,410427,"train_lam.py",6619,0,"",python,selection_command +189,411011,"train_lam.py",6601,34," lr_schedule = get_lr_schedule(",python,selection_command +190,411091,"train_lam.py",6601,60," lr_schedule = get_lr_schedule(\n args.lr_schedule,",python,selection_command +191,411349,"train_lam.py",6601,82," lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,",python,selection_command +192,411371,"train_lam.py",6601,103," lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,",python,selection_command +193,411412,"train_lam.py",6601,127," lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,",python,selection_command +194,411451,"train_lam.py",6601,151," lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,",python,selection_command +195,411467,"train_lam.py",6601,178," lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,",python,selection_command +196,411491,"train_lam.py",6601,208," lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,",python,selection_command +197,411651,"train_lam.py",6601,214," lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )",python,selection_command +198,412171,"train_lam.py",6601,208," lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,",python,selection_command +199,412387,"train_lam.py",6601,214," lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )",python,selection_command +200,412987,"train_lam.py",6601,0,"",python,selection_command +201,413529,"test/test_nan.ipynb",0,0,"",python,tab +202,416723,"test/test_nan.ipynb",2075,0,"",python,selection_mouse +203,416847,"test/test_nan.ipynb",2070,11,"lr_schedule",python,selection_mouse +204,416967,"test/test_nan.ipynb",2066,17," lr_schedule,\n",python,selection_mouse +205,417544,"test/test_nan.ipynb",2048,0,"",python,selection_mouse +206,417727,"test/test_nan.ipynb",2048,1," ",python,selection_mouse +207,417827,"test/test_nan.ipynb",2041,25,"lr_fn = get_lr_schedule(\n",python,selection_mouse +208,417967,"test/test_nan.ipynb",2041,42,"lr_fn = get_lr_schedule(\n lr_schedule,\n",python,selection_mouse +209,418007,"test/test_nan.ipynb",2041,55,"lr_fn = get_lr_schedule(\n lr_schedule,\n init_lr,\n",python,selection_mouse +210,418047,"test/test_nan.ipynb",2041,67,"lr_fn = 
get_lr_schedule(\n lr_schedule,\n init_lr,\n max_lr,\n",python,selection_mouse +211,418055,"test/test_nan.ipynb",2041,82,"lr_fn = get_lr_schedule(\n lr_schedule,\n init_lr,\n max_lr,\n decay_end,\n",python,selection_mouse +212,418155,"test/test_nan.ipynb",2041,96,"lr_fn = get_lr_schedule(\n lr_schedule,\n init_lr,\n max_lr,\n decay_end,\n num_stpes\n",python,selection_mouse +213,418407,"test/test_nan.ipynb",2041,98,"lr_fn = get_lr_schedule(\n lr_schedule,\n init_lr,\n max_lr,\n decay_end,\n num_stpes\n)\n",python,selection_mouse +214,419235,"test/test_nan.ipynb",2041,97,"\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n",python,content +215,419247,"test/test_nan.ipynb",2046,0,"",python,selection_command +216,420707,"test/test_nan.ipynb",2042,34," lr_schedule = get_lr_schedule(",python,selection_command +217,420827,"test/test_nan.ipynb",2042,60," lr_schedule = get_lr_schedule(\n args.lr_schedule,",python,selection_command +218,421087,"test/test_nan.ipynb",2042,82," lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,",python,selection_command +219,421107,"test/test_nan.ipynb",2042,103," lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,",python,selection_command +220,421147,"test/test_nan.ipynb",2042,127," lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,",python,selection_command +221,421167,"test/test_nan.ipynb",2042,151," lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,",python,selection_command +222,421207,"test/test_nan.ipynb",2042,178," lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,",python,selection_command +223,421227,"test/test_nan.ipynb",2042,208," lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,",python,selection_command +224,421375,"test/test_nan.ipynb",2042,214," lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )",python,selection_command +225,421543,"test/test_nan.ipynb",2042,215," lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n",python,selection_command +226,421775,"test/test_nan.ipynb",2251,4,"",python,content +227,421775,"test/test_nan.ipynb",2221,8," ",python,content +228,421775,"test/test_nan.ipynb",2194,8," ",python,content +229,421775,"test/test_nan.ipynb",2170,8," ",python,content +230,421775,"test/test_nan.ipynb",2146,8," ",python,content +231,421775,"test/test_nan.ipynb",2125,8," ",python,content +232,421775,"test/test_nan.ipynb",2103,8," ",python,content +233,421775,"test/test_nan.ipynb",2077,8," ",python,content +234,421775,"test/test_nan.ipynb",2042,4,"",python,content +235,421776,"test/test_nan.ipynb",2042,0,"",python,selection_command +236,422167,"test/test_nan.ipynb",2041,0,"",python,selection_command +237,422475,"test/test_nan.ipynb",2041,1,"",python,content +238,422707,"test/test_nan.ipynb",2072,0,"",python,selection_command +239,422947,"test/test_nan.ipynb",2094,0,"",python,selection_command 
+240,422987,"test/test_nan.ipynb",2112,0,"",python,selection_command +241,423027,"test/test_nan.ipynb",2129,0,"",python,selection_command +242,423075,"test/test_nan.ipynb",2149,0,"",python,selection_command +243,423087,"test/test_nan.ipynb",2169,0,"",python,selection_command +244,423127,"test/test_nan.ipynb",2192,0,"",python,selection_command +245,423147,"test/test_nan.ipynb",2218,0,"",python,selection_command +246,423267,"test/test_nan.ipynb",2220,0,"",python,selection_command +247,423867,"test/test_nan.ipynb",2220,1,"",python,content +248,424007,"test/test_nan.ipynb",2218,0,"",python,selection_command +249,424248,"test/test_nan.ipynb",2192,0,"",python,selection_command +250,424295,"test/test_nan.ipynb",2169,0,"",python,selection_command +251,424327,"test/test_nan.ipynb",2149,0,"",python,selection_command +252,424355,"test/test_nan.ipynb",2129,0,"",python,selection_command +253,424387,"test/test_nan.ipynb",2112,0,"",python,selection_command +254,424415,"test/test_nan.ipynb",2094,0,"",python,selection_command +255,424447,"test/test_nan.ipynb",2072,0,"",python,selection_command +256,424487,"test/test_nan.ipynb",2041,0,"",python,selection_command +257,424767,"test/test_nan.ipynb",1958,0,"",python,selection_command +258,424927,"test/test_nan.ipynb",1960,0,"",python,selection_command +259,425187,"test/test_nan.ipynb",1970,0,"",python,selection_command +260,425207,"test/test_nan.ipynb",1971,0,"",python,selection_command +261,425247,"test/test_nan.ipynb",1979,0,"",python,selection_command +262,425275,"test/test_nan.ipynb",1988,0,"",python,selection_command +263,425307,"test/test_nan.ipynb",1992,0,"",python,selection_command +264,425335,"test/test_nan.ipynb",2003,0,"",python,selection_command +265,425567,"test/test_nan.ipynb",2005,0,"",python,selection_command +266,425807,"test/test_nan.ipynb",2004,0,"",python,selection_command +267,426127,"test/test_nan.ipynb",2003,0,"",python,selection_command +268,426208,"test/test_nan.ipynb",2003,1,";",python,selection_command +269,426227,"test/test_nan.ipynb",2003,4,"; lr",python,selection_command +270,426387,"test/test_nan.ipynb",2003,10,"; lr value",python,selection_command +271,426575,"test/test_nan.ipynb",2003,13,"; lr value is",python,selection_command +272,426727,"test/test_nan.ipynb",2003,24,"; lr value is irrelevant",python,selection_command +273,427107,"test/test_nan.ipynb",2003,28,"; lr value is irrelevant for",python,selection_command +274,427527,"test/test_nan.ipynb",2003,36,"; lr value is irrelevant for restore",python,selection_command +275,427747,"test/test_nan.ipynb",2003,36,"",python,content +276,428547,"test/test_nan.ipynb",1958,0,"",python,selection_command +277,428907,"test/test_nan.ipynb",2005,0,"",python,selection_command +278,429187,"test/test_nan.ipynb",2017,0,"",python,selection_command +279,430215,"test/test_nan.ipynb",0,0,"",python,selection_command +280,431287,"test/test_nan.ipynb",50,0,"",python,selection_command +281,431527,"test/test_nan.ipynb",60,0,"",python,selection_command +282,431567,"test/test_nan.ipynb",84,0,"",python,selection_command +283,431590,"test/test_nan.ipynb",85,0,"",python,selection_command +284,431607,"test/test_nan.ipynb",96,0,"",python,selection_command +285,431647,"test/test_nan.ipynb",120,0,"",python,selection_command +286,431687,"test/test_nan.ipynb",143,0,"",python,selection_command +287,431727,"test/test_nan.ipynb",156,0,"",python,selection_command +288,431887,"test/test_nan.ipynb",187,0,"",python,selection_command +289,432027,"test/test_nan.ipynb",200,0,"",python,selection_command 
+290,432167,"test/test_nan.ipynb",201,0,"",python,selection_command +291,433475,"test/test_nan.ipynb",199,0,"\nfrom utils.lr_schedule import get_lr_schedule",python,content +292,434547,"test/test_nan.ipynb",246,0,"",python,selection_command +293,434727,"test/test_nan.ipynb",200,0,"",python,selection_command +294,435147,"test/test_nan.ipynb",246,0,"",python,selection_command +295,435467,"test/test_nan.ipynb",246,1,"",python,content +296,435527,"test/test_nan.ipynb",200,0,"",python,selection_command +297,435667,"test/test_nan.ipynb",187,0,"",python,selection_command +298,435967,"test/test_nan.ipynb",200,0,"",python,selection_command +299,436315,"test/test_nan.ipynb",199,0,"\n",python,content +300,436895,"test/test_nan.ipynb",201,0,"",python,selection_command +301,437067,"test/test_nan.ipynb",206,0,"",python,selection_command +302,437247,"test/test_nan.ipynb",211,0,"",python,selection_command +303,437429,"test/test_nan.ipynb",212,0,"",python,selection_command +304,438267,"test/test_nan.ipynb",211,0,"",python,selection_command +305,438415,"test/test_nan.ipynb",206,0,"",python,selection_command +306,442867,"test/test_nan.ipynb",206,0,"L",python,content +307,442868,"test/test_nan.ipynb",207,0,"",python,selection_keyboard +308,443555,"test/test_nan.ipynb",206,1,"",python,content +309,443739,"test/test_nan.ipynb",206,0,"l",python,content +310,443740,"test/test_nan.ipynb",207,0,"",python,selection_keyboard +311,443863,"test/test_nan.ipynb",207,0,"r",python,content +312,443863,"test/test_nan.ipynb",208,0,"",python,selection_keyboard +313,444355,"test/test_nan.ipynb",208,0,"_",python,content +314,444355,"test/test_nan.ipynb",209,0,"",python,selection_keyboard +315,444884,"test/test_nan.ipynb",208,0,"",python,selection_command +316,445667,"test/test_nan.ipynb",214,0,"",python,selection_command +317,445967,"test/test_nan.ipynb",206,0,"",python,selection_command +318,448816,"utils/lr_utils.py",0,0,"import optax\n\n\ndef get_lr_schedule(\n lr_schedule: str,\n init_lr: float,\n max_lr: float,\n decay_end: float,\n total_steps: int,\n warmup_steps: int,\n wsd_decay_steps: int,\n) -> optax.Schedule:\n supported_schedules = [""wsd"", ""cos""]\n if lr_schedule == ""cos"":\n assert (\n warmup_steps <= total_steps\n ), ""Warmup steps can't be greater than total steps.""\n return optax.warmup_cosine_decay_schedule(\n init_value=init_lr,\n peak_value=max_lr,\n warmup_steps=warmup_steps,\n decay_steps=total_steps, # Note: decay_steps includes the warmup steps, so we need to pass total value\n end_value=decay_end,\n )\n elif lr_schedule == ""wsd"":\n assert (\n warmup_steps + wsd_decay_steps <= total_steps\n ), ""Warmup and decay period is longer than total steps.""\n schedules = [\n optax.linear_schedule(\n init_value=init_lr, end_value=max_lr, transition_steps=warmup_steps\n ),\n optax.constant_schedule(value=max_lr),\n optax.linear_schedule(\n init_value=max_lr, end_value=decay_end, transition_steps=wsd_decay_steps\n ),\n ]\n boundaries = [warmup_steps, total_steps - wsd_decay_steps]\n return optax.join_schedules(schedules, boundaries)\n else:\n raise ValueError(\n f""Learning rate schedule not supported. 
Please use one of {supported_schedules}""\n )\n",python,tab +319,449457,"test/test_nan.ipynb",0,0,"",python,tab +320,449847,"test/test_nan.ipynb",214,0,"",python,selection_command +321,450007,"test/test_nan.ipynb",215,0,"",python,selection_command +322,450807,"test/test_nan.ipynb",214,0,"",python,selection_command +323,450947,"test/test_nan.ipynb",206,0,"",python,selection_command +324,451295,"test/test_nan.ipynb",206,1,"l",python,selection_command +325,451367,"test/test_nan.ipynb",206,8,"lr_utils",python,selection_command +326,451527,"test/test_nan.ipynb",206,9,"lr_utils.",python,selection_command +327,451947,"test/test_nan.ipynb",206,20,"lr_utils.lr_schedule",python,selection_command +328,452140,"test/test_nan.ipynb",206,20,"",python,content +329,452675,"test/test_nan.ipynb",206,0,"u",python,content +330,452675,"test/test_nan.ipynb",207,0,"",python,selection_keyboard +331,452787,"test/test_nan.ipynb",207,0,"t",python,content +332,452787,"test/test_nan.ipynb",208,0,"",python,selection_keyboard +333,452915,"test/test_nan.ipynb",208,0,"i",python,content +334,452916,"test/test_nan.ipynb",209,0,"",python,selection_keyboard +335,453095,"test/test_nan.ipynb",209,0,"s",python,content +336,453095,"test/test_nan.ipynb",210,0,"",python,selection_keyboard +337,453535,"test/test_nan.ipynb",209,1,"",python,content +338,453707,"test/test_nan.ipynb",209,0,"l",python,content +339,453708,"test/test_nan.ipynb",210,0,"",python,selection_keyboard +340,453807,"test/test_nan.ipynb",210,0,"s",python,content +341,453807,"test/test_nan.ipynb",211,0,"",python,selection_keyboard +342,453907,"test/test_nan.ipynb",211,0,".",python,content +343,453908,"test/test_nan.ipynb",212,0,"",python,selection_keyboard +344,454103,"test/test_nan.ipynb",212,0,"l",python,content +345,454104,"test/test_nan.ipynb",213,0,"",python,selection_keyboard +346,454147,"test/test_nan.ipynb",213,0,"r",python,content +347,454147,"test/test_nan.ipynb",214,0,"",python,selection_keyboard +348,454355,"test/test_nan.ipynb",214,0,"_",python,content +349,454355,"test/test_nan.ipynb",215,0,"",python,selection_keyboard +350,454555,"test/test_nan.ipynb",215,0,"s",python,content +351,454555,"test/test_nan.ipynb",216,0,"",python,selection_keyboard +352,454816,"test/test_nan.ipynb",215,1,"",python,content +353,454995,"test/test_nan.ipynb",215,0,"u",python,content +354,454995,"test/test_nan.ipynb",216,0,"",python,selection_keyboard +355,455043,"test/test_nan.ipynb",216,0,"t",python,content +356,455044,"test/test_nan.ipynb",217,0,"",python,selection_keyboard +357,455147,"test/test_nan.ipynb",217,0,"i",python,content +358,455147,"test/test_nan.ipynb",218,0,"",python,selection_keyboard +359,455251,"test/test_nan.ipynb",218,0,"s",python,content +360,455252,"test/test_nan.ipynb",219,0,"",python,selection_keyboard +361,455252,"test/test_nan.ipynb",219,0,"l",python,content +362,455252,"test/test_nan.ipynb",220,0,"",python,selection_keyboard +363,455811,"test/test_nan.ipynb",219,1,"",python,content +364,455944,"test/test_nan.ipynb",218,1,"",python,content +365,456131,"test/test_nan.ipynb",218,0,"l",python,content +366,456132,"test/test_nan.ipynb",219,0,"",python,selection_keyboard +367,456147,"test/test_nan.ipynb",219,0,"s",python,content +368,456147,"test/test_nan.ipynb",220,0,"",python,selection_keyboard +369,456356,"test/test_nan.ipynb",219,0,"",python,selection_command +370,458754,"test/test_nan.ipynb",221,0,"",python,selection_command +371,458761,"test/test_nan.ipynb",228,0,"",python,selection_command 
+372,459227,"test/test_nan.ipynb",2062,0,"",python,selection_command +373,460370,"test/test_nan.ipynb",2093,0,"",python,selection_command +374,460617,"test/test_nan.ipynb",2115,0,"",python,selection_command +375,460650,"test/test_nan.ipynb",2133,0,"",python,selection_command +376,460695,"test/test_nan.ipynb",2150,0,"",python,selection_command +377,460803,"test/test_nan.ipynb",2170,0,"",python,selection_command +378,461115,"test/test_nan.ipynb",2150,0,"",python,selection_command +379,461416,"test/test_nan.ipynb",2170,0,"",python,selection_command +380,461533,"test/test_nan.ipynb",2190,0,"",python,selection_command +381,461863,"test/test_nan.ipynb",2213,0,"",python,selection_command +382,462229,"test/test_nan.ipynb",2225,0,"",python,selection_command +383,463971,"test/test_nan.ipynb",2227,0,"",python,selection_command +384,464681,"test/test_nan.ipynb",2225,0,"",python,selection_command +385,464934,"test/test_nan.ipynb",2213,0,"",python,selection_command +386,464959,"test/test_nan.ipynb",2190,0,"",python,selection_command +387,465000,"test/test_nan.ipynb",2170,0,"",python,selection_command +388,465019,"test/test_nan.ipynb",2150,0,"",python,selection_command +389,465189,"test/test_nan.ipynb",2133,0,"",python,selection_command +390,465345,"test/test_nan.ipynb",2115,0,"",python,selection_command +391,465482,"test/test_nan.ipynb",2093,0,"",python,selection_command +392,465661,"test/test_nan.ipynb",2062,0,"",python,selection_command +393,469342,"test/test_nan.ipynb",0,0,"",python,tab +394,470114,"test/test_nan.ipynb",0,0,"",python,tab +395,470720,"test/test_nan.ipynb",0,0,"",python,tab +396,470912,"test/test_nan.ipynb",0,0,"",python,tab +397,477040,"test/test_nan.ipynb",2093,0,"",python,selection_command +398,477041,"test/test_nan.ipynb",2079,0,"",python,selection_command +399,477042,"test/test_nan.ipynb",2083,0,"",python,selection_command +400,477042,"test/test_nan.ipynb",2083,1,"a",python,selection_command +401,477043,"test/test_nan.ipynb",2083,2,"ar",python,selection_command +402,477043,"test/test_nan.ipynb",2083,3,"arg",python,selection_command +403,477044,"test/test_nan.ipynb",2083,4,"args",python,selection_command +404,477044,"test/test_nan.ipynb",2083,4,"args",python,selection_command +405,477045,"test/test_nan.ipynb",2083,5,"args.",python,selection_command +406,477046,"test/test_nan.ipynb",2083,5,"args.",python,selection_command +407,477046,"test/test_nan.ipynb",2083,5,"args.",python,selection_command +408,477046,"test/test_nan.ipynb",2083,5,"args.",python,selection_command +409,477047,"test/test_nan.ipynb",2083,5,"args.",python,selection_command +410,477047,"test/test_nan.ipynb",2083,5,"args.",python,selection_command +411,477047,"test/test_nan.ipynb",2080,4," a",python,selection_command +412,477048,"test/test_nan.ipynb",2083,5,"args.",python,selection_command +413,477122,"test/test_nan.ipynb",2203,5,"",python,content +414,477122,"test/test_nan.ipynb",2180,5,"",python,content +415,477122,"test/test_nan.ipynb",2160,5,"",python,content +416,477122,"test/test_nan.ipynb",2140,5,"",python,content +417,477122,"test/test_nan.ipynb",2123,5,"",python,content +418,477122,"test/test_nan.ipynb",2105,5,"",python,content +419,477122,"test/test_nan.ipynb",2083,5,"",python,content +420,477143,"test/test_nan.ipynb",2083,0,"",python,selection_command +421,485066,"test/test_nan.ipynb",2052,0,"",python,selection_command +422,485112,"test/test_nan.ipynb",2060,0,"",python,selection_command +423,485287,"test/test_nan.ipynb",2062,0,"",python,selection_command +424,485804,"utils/lr_utils.py",0,0,"",python,tab 
+425,487249,"test/test_nan.ipynb",0,0,"",python,tab +426,488274,"test/test_nan.ipynb",2093,0,"",python,selection_command +427,488285,"test/test_nan.ipynb",2083,0,"",python,selection_command +428,491359,"test/test_nan.ipynb",2048,11,"lr_schedule",python,selection_command +429,491916,"test/test_nan.ipynb",2058,0,"",python,selection_command +430,492057,"test/test_nan.ipynb",2083,0,"",python,selection_command +431,492512,"test/test_nan.ipynb",2052,0,"",python,selection_command +432,494140,"test/test_nan.ipynb",2058,0,"",python,selection_command +433,494543,"test/test_nan.ipynb",2059,0,"",python,selection_command +434,494855,"test/test_nan.ipynb",2059,0,"_",python,content +435,494856,"test/test_nan.ipynb",2060,0,"",python,selection_keyboard +436,495058,"test/test_nan.ipynb",2060,0,"f",python,content +437,495058,"test/test_nan.ipynb",2061,0,"",python,selection_keyboard +438,495198,"test/test_nan.ipynb",2061,0,"n",python,content +439,495198,"test/test_nan.ipynb",2062,0,"",python,selection_keyboard +440,495273,"test/test_nan.ipynb",2061,0,"",python,selection_command +441,495408,"test/test_nan.ipynb",2095,0,"",python,selection_command +442,495657,"test/test_nan.ipynb",2110,0,"",python,selection_command +443,495693,"test/test_nan.ipynb",2122,0,"",python,selection_command +444,495725,"test/test_nan.ipynb",2137,0,"",python,selection_command +445,495756,"test/test_nan.ipynb",2152,0,"",python,selection_command +446,495814,"test/test_nan.ipynb",2167,0,"",python,selection_command +447,495821,"test/test_nan.ipynb",2185,0,"",python,selection_command +448,495859,"test/test_nan.ipynb",2193,0,"",python,selection_command +449,495907,"test/test_nan.ipynb",2195,0,"",python,selection_command +450,495929,"test/test_nan.ipynb",2209,0,"",python,selection_command +451,496180,"test/test_nan.ipynb",2227,0,"",python,selection_command +452,496339,"test/test_nan.ipynb",2250,0,"",python,selection_command +453,496517,"test/test_nan.ipynb",2227,0,"",python,selection_command +454,496722,"test/test_nan.ipynb",2209,0,"",python,selection_command +455,496987,"test/test_nan.ipynb",2195,0,"",python,selection_command +456,497031,"test/test_nan.ipynb",2193,0,"",python,selection_command +457,497040,"test/test_nan.ipynb",2185,0,"",python,selection_command +458,497071,"test/test_nan.ipynb",2167,0,"",python,selection_command +459,497097,"test/test_nan.ipynb",2152,0,"",python,selection_command +460,497129,"test/test_nan.ipynb",2137,0,"",python,selection_command +461,497166,"test/test_nan.ipynb",2122,0,"",python,selection_command +462,497203,"test/test_nan.ipynb",2110,0,"",python,selection_command +463,497229,"test/test_nan.ipynb",2095,0,"",python,selection_command +464,497264,"test/test_nan.ipynb",2061,0,"",python,selection_command +465,497296,"test/test_nan.ipynb",2014,0,"",python,selection_command +466,497385,"test/test_nan.ipynb",2061,0,"",python,selection_command +467,497669,"test/test_nan.ipynb",2095,0,"",python,selection_command +468,497926,"test/test_nan.ipynb",2110,0,"",python,selection_command +469,497952,"test/test_nan.ipynb",2122,0,"",python,selection_command +470,497979,"test/test_nan.ipynb",2137,0,"",python,selection_command +471,498017,"test/test_nan.ipynb",2152,0,"",python,selection_command +472,498075,"test/test_nan.ipynb",2167,0,"",python,selection_command +473,498098,"test/test_nan.ipynb",2185,0,"",python,selection_command +474,498117,"test/test_nan.ipynb",2193,0,"",python,selection_command +475,498144,"test/test_nan.ipynb",2195,0,"",python,selection_command 
+476,498182,"test/test_nan.ipynb",2209,0,"",python,selection_command +477,498226,"test/test_nan.ipynb",2227,0,"",python,selection_command +478,498362,"test/test_nan.ipynb",2250,0,"",python,selection_command +479,498820,"test/test_nan.ipynb",2227,0,"",python,selection_command +480,498940,"test/test_nan.ipynb",2231,0,"",python,selection_command +481,499088,"test/test_nan.ipynb",2232,0,"",python,selection_command +482,499440,"test/test_nan.ipynb",2232,6,"",python,content +483,499929,"test/test_nan.ipynb",2232,0,"l",python,content +484,499929,"test/test_nan.ipynb",2233,0,"",python,selection_keyboard +485,500026,"test/test_nan.ipynb",2233,0,"r",python,content +486,500026,"test/test_nan.ipynb",2234,0,"",python,selection_keyboard +487,500321,"test/test_nan.ipynb",2234,0,"_",python,content +488,500322,"test/test_nan.ipynb",2235,0,"",python,selection_keyboard +489,500456,"test/test_nan.ipynb",2235,0,"s",python,content +490,500457,"test/test_nan.ipynb",2236,0,"",python,selection_keyboard +491,500543,"test/test_nan.ipynb",2236,0,"c",python,content +492,500544,"test/test_nan.ipynb",2237,0,"",python,selection_keyboard +493,500557,"test/test_nan.ipynb",2237,0,"h",python,content +494,500557,"test/test_nan.ipynb",2238,0,"",python,selection_keyboard +495,500684,"test/test_nan.ipynb",2238,0,"e",python,content +496,500685,"test/test_nan.ipynb",2239,0,"",python,selection_keyboard +497,500722,"test/test_nan.ipynb",2239,0,"d",python,content +498,500723,"test/test_nan.ipynb",2240,0,"",python,selection_keyboard +499,500829,"test/test_nan.ipynb",2240,0,"u",python,content +500,500830,"test/test_nan.ipynb",2241,0,"",python,selection_keyboard +501,500958,"test/test_nan.ipynb",2241,0,"l",python,content +502,500959,"test/test_nan.ipynb",2242,0,"",python,selection_keyboard +503,501100,"test/test_nan.ipynb",2242,0,"e",python,content +504,501101,"test/test_nan.ipynb",2243,0,"",python,selection_keyboard +505,501400,"test/test_nan.ipynb",2243,0,"_",python,content +506,501401,"test/test_nan.ipynb",2244,0,"",python,selection_keyboard +507,501576,"test/test_nan.ipynb",2244,0,"f",python,content +508,501577,"test/test_nan.ipynb",2245,0,"",python,selection_keyboard +509,501635,"test/test_nan.ipynb",2245,0,"n",python,content +510,501636,"test/test_nan.ipynb",2246,0,"",python,selection_keyboard +511,501797,"test/test_nan.ipynb",2245,0,"",python,selection_command +512,502157,"test/test_nan.ipynb",2214,0,"",python,selection_command +513,504365,"utils/lr_utils.py",0,0,"",python,tab +514,507250,"test/test_nan.ipynb",0,0,"",python,tab +515,509332,"train_lam.py",0,0,"",python,tab +516,512271,"test/test_nan.ipynb",0,0,"",python,tab +517,512870,"train_lam.py",0,0,"",python,tab +518,513510,"test/test_nan.ipynb",0,0,"",python,tab +519,2086061,"train_lam.py",0,0,"",python,tab +520,2086423,"train_lam.py",0,13598,"from dataclasses import dataclass, field\nimport os\nfrom typing import cast\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.lam import LatentActionModel\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\njax.config.update(""jax_transfer_guard"", ""allow"")\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 
0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n vq_beta: float = 0.25\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n vq_reset_thresh: int = 50\n # LAM\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 6\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_lam""\n tags: list[str] = field(default_factory=lambda: [""lam""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_checkpoint_keep_period: int = 20000\n wandb_id: str = """"\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\n\ndef lam_loss_fn(\n model: LatentActionModel, inputs: dict\n) -> tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n # --- Compute loss ---\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n model.train()\n outputs = model(inputs, training=True)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n gt_future_frames = gt[:, 1:]\n mse = jnp.square(gt_future_frames - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = gt_future_frames.clip(0, 1).reshape(-1, *gt_future_frames.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt, recon)).mean()\n count_fn = jax.vmap(lambda i: (outputs[""indices""] == i).sum())\n index_counts = count_fn(jnp.arange(args.num_latents))\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),\n )\n return loss, (outputs[""recon""], index_counts, metrics)\n\n\n@nnx.jit\ndef train_step(\n lam: LatentActionModel,\n optimizer: nnx.Optimizer,\n inputs: dict,\n action_last_active: jax.Array,\n rng: jax.Array,\n) -> tuple[jax.Array, jax.Array, jax.Array, dict]:\n def loss_fn(\n model: LatentActionModel,\n ) -> tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n return lam_loss_fn(model, inputs)\n\n # --- Update model ---\n (loss, (recon, idx_counts, metrics)), grads = nnx.value_and_grad(\n loss_fn, has_aux=True\n )(lam)\n optimizer.update(grads)\n\n # --- Reset inactive latent actions ---\n codebook = lam.vq.codebook\n num_codes = len(codebook)\n active_codes = idx_counts != 0.0\n action_last_active = jnp.where(active_codes, 0, action_last_active + 1)\n p_code = active_codes / active_codes.sum()\n reset_idxs = jax.random.choice(rng, num_codes, shape=(num_codes,), p=p_code)\n do_reset = action_last_active >= args.vq_reset_thresh\n new_codebook = jnp.where(\n jnp.expand_dims(do_reset, -1), codebook[reset_idxs], 
codebook.value\n )\n lam.vq.codebook.value = new_codebook\n action_last_active = jnp.where(do_reset, 0, action_last_active)\n return loss, recon, action_last_active, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n\n # Count parameters\n _, params, _ = nnx.split(lam, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.Optimizer(lam, tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n 
cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n\n # --- TRAIN LOOP ---\n dataloader = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in grain_iterator\n )\n print(f""Starting training from step {step}..."")\n action_last_active = jnp.zeros(args.num_latents, dtype=jnp.int32)\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng = jax.random.split(rng)\n\n inputs = dict(videos=videos, rng=_rng)\n rng, _rng = jax.random.split(rng)\n loss, recon, action_last_active, metrics = train_step(\n lam, optimizer, inputs, action_last_active, _rng\n )\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n 
checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,selection_command +521,2086571,"train_lam.py",13598,0,"",python,selection_command +522,2248550,"utils/nn.py",0,0,"import math\nfrom typing import Tuple, Callable, List\n\nfrom flax import nnx\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\nclass SpatioTemporalPositionalEncoding(nnx.Module):\n """"""\n Applies separate sinusoidal positional encodings to the temporal and spatial dimensions.\n """"""\n def __init__(self, d_model: int, max_len: int = 5000):\n self.d_model = d_model\n self.max_len = max_len\n\n pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n pe = pe.at[:, 0::2].set(jnp.sin(position * div_term))\n pe = pe.at[:, 1::2].set(jnp.cos(position * div_term))\n self.pe = nnx.Variable(pe)\n\n def __call__(self, x: jax.Array) -> jax.Array:\n """"""\n Args:\n x: The input tensor of shape (Batch, Time, Space, Dimension).\n\n Returns:\n The input tensor with positional encodings added.\n """"""\n assert x.ndim == 4, f""Input must be 4-dimensional, but got shape {x.shape}""\n\n num_timesteps = x.shape[1]\n num_spatial_patches = x.shape[2]\n\n # Temporal positional encoding: (1, T, 1, D)\n temporal_pe = self.pe.value[None, :num_timesteps, None, :]\n x = x + temporal_pe\n\n # Spatial positional encoding: (1, 1, S, D)\n spatial_pe = self.pe.value[None, None, :num_spatial_patches, :]\n x = x + spatial_pe\n\n return x\n\n\nclass STBlock(nnx.Module):\n def __init__(\n self,\n dim: int,\n ffn_dim: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n sow_weights: bool,\n sow_activations: bool,\n ):\n self.dim = dim\n self.ffn_dim = ffn_dim\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.sow_weights = sow_weights\n self.sow_activations = sow_activations\n\n self.spatial_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.spatial_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=False\n ),\n rngs=rngs,\n decode=False,\n )\n\n self.temporal_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.temporal_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=False,\n )\n\n self.ffn_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full 
precision\n rngs=rngs,\n )\n self.ffn_dense1 = nnx.Linear(\n in_features=self.dim,\n out_features=self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense2 = nnx.Linear(\n in_features=self.ffn_dim,\n out_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n @nnx.remat\n def __call__(self, x_BTNM: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z_BTNM = self.spatial_norm(x_BTNM)\n z_BTNM = self.spatial_attention(z_BTNM, sow_weights=self.sow_weights)\n x_BTNM = x_BTNM + z_BTNM\n\n # --- Temporal attention ---\n x_BNTM = x_BTNM.swapaxes(1, 2)\n z_BNTM = self.temporal_norm(x_BNTM)\n z_BNTM = self.temporal_attention(z_BNTM, sow_weights=self.sow_weights)\n x_BNTM = x_BNTM + z_BNTM\n x_BTNM = x_BNTM.swapaxes(1, 2)\n\n # --- Feedforward ---\n z_BTNM = self.ffn_norm(x_BTNM)\n z_BTND = self.ffn_dense1(z_BTNM)\n z_BTND = jax.nn.gelu(z_BTND)\n z_BTNM = self.ffn_dense2(z_BTND)\n x_BTNM = x_BTNM + z_BTNM\n if self.sow_activations:\n self.sow(nnx.Intermediate, 'activations', x_BTNM)\n return x_BTNM\n\n\nclass STTransformer(nnx.Module):\n """"""\n Dimension keys:\n B: batch size\n T: number of frames\n N: number of patches per frame\n I: number of input features\n M: model dimension\n D: FFN dimension\n V: vocabulary size\n """"""\n def __init__(\n self,\n input_dim: int,\n model_dim: int,\n ffn_dim: int,\n out_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n sow_weights: bool = False,\n sow_activations: bool = False,\n sow_logits: bool = False,\n max_len: int = 5000,\n ):\n self.input_dim = input_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.out_dim = out_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.sow_weights = sow_weights\n self.sow_activations = sow_activations\n self.sow_logits = sow_logits\n\n self.input_norm1 = nnx.LayerNorm(\n num_features=self.input_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.input_dense = nnx.Linear(\n in_features=self.input_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_norm2 = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n\n self.pos_enc = SpatioTemporalPositionalEncoding(self.model_dim, max_len=max_len)\n\n self.blocks = []\n for _ in range(self.num_blocks):\n self.blocks.append(\n STBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n sow_weights=self.sow_weights,\n sow_activations=self.sow_activations,\n )\n )\n\n self.output_dense = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(self, x_BTNI: jax.Array) -> jax.Array:\n x_BTNI = self.input_norm1(x_BTNI)\n x_BTNM = self.input_dense(x_BTNI)\n x_BTNM = self.input_norm2(x_BTNM)\n x_BTNM = self.pos_enc(x_BTNM)\n for block in self.blocks:\n x_BTNM = block(x_BTNM)\n\n x_BTNV = self.output_dense(x_BTNM)\n if self.sow_logits:\n 
self.sow(nnx.Intermediate, 'logits', x_BTNV)\n return x_BTNV\n\nclass TransformerBlock(nnx.Module):\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n sow_weights: bool,\n sow_activations: bool,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n self.sow_weights = sow_weights\n self.sow_activations = sow_activations\n\n self.temporal_norm = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.spatial_norm = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.ffn_norm = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.temporal_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.model_dim,\n qkv_features=self.model_dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=self.decode,\n )\n self.spatial_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.model_dim,\n qkv_features=self.model_dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=self.decode,\n )\n self.ffn_dense1 = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense2 = nnx.Linear(\n in_features=self.ffn_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n # @nnx.remat\n def __call__(self, x_BTNM: jax.Array, pos_index: Tuple[jax.Array, jax.Array] | None = None) -> jax.Array:\n # --- Spatial attention ---\n B, T, N, M = x_BTNM.shape\n z_FNM = einops.rearrange(x_BTNM, ""b t n m -> (b t) n m"")\n z_FNM = self.spatial_norm(z_FNM)\n z_FNM = self.spatial_attention(z_FNM, sow_weights=self.sow_weights)\n z_BTNM = einops.rearrange(z_FNM, ""(b t) n m -> b t n m"", t=T)\n x_BTNM = x_BTNM + z_BTNM\n # --- Temporal attention ---\n z_PTM = einops.rearrange(x_BTNM, ""b t n m -> (b n) t m"")\n z_PTM = self.temporal_norm(z_PTM)\n z_PTM = self.temporal_attention(z_PTM, sow_weights=self.sow_weights)\n z_BTNM = einops.rearrange(z_PTM, ""(b n) t m -> b t n m"", n=N)\n x_BTNM = x_BTNM + z_BTNM\n # --- Feedforward ---\n z_BTNM = self.ffn_norm(x_BTNM)\n z_BTND = self.ffn_dense1(z_BTNM)\n z_BTND = jax.nn.gelu(z_BTND)\n z_BTNM = self.ffn_dense2(z_BTND)\n x_BTNM = x_BTNM + z_BTNM\n if self.sow_activations:\n self.sow(nnx.Intermediate, 'activations', x_BTNM)\n\n return x_BTNM\n\nclass Transformer(nnx.Module):\n """"""\n Dimension keys:\n B: batch size\n T: number of frames\n N: number of patches per frame\n I: number of input features\n M: model dimension\n D: FFN dimension\n V: vocabulary size\n F: number of frames in batch\n P: number of patch positions in batch\n """"""\n def __init__(\n self,\n 
input_dim: int,\n model_dim: int,\n ffn_dim: int,\n out_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n sow_logits: bool = False,\n sow_weights: bool = False,\n sow_activations: bool = False,\n max_len: int = 5000,\n ):\n self.input_dim = input_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.out_dim = out_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.sow_logits = sow_logits\n self.sow_weights = sow_weights\n self.sow_activations = sow_activations\n\n self.input_norm1 = nnx.LayerNorm(\n num_features=self.input_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.input_dense = nnx.Linear(\n in_features=self.input_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_norm2 = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n\n self.pos_enc = SpatioTemporalPositionalEncoding(self.model_dim, max_len=max_len)\n\n self.blocks: List[TransformerBlock] = []\n for _ in range(self.num_blocks):\n self.blocks.append(\n TransformerBlock(\n model_dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=decode,\n sow_weights=self.sow_weights,\n sow_activations=self.sow_activations,\n rngs=rngs,\n )\n )\n self.output_dense = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(self, x_BTNI: jax.Array, pos_index: Tuple[jax.Array, jax.Array] | None = None) -> jax.Array:\n x_BTNI = self.input_norm1(x_BTNI)\n x_BTNM = self.input_dense(x_BTNI)\n x_BTNM = self.input_norm2(x_BTNM)\n x_BTNM = self.pos_enc(x_BTNM)\n for block in self.blocks:\n x_BTNM = block(x_BTNM, pos_index)\n\n x_BTNV = self.output_dense(x_BTNM)\n if self.sow_logits:\n self.sow(nnx.Intermediate, 'logits', x_BTNV)\n return x_BTNV\n\ndef normalize(x: jax.Array) -> jax.Array:\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nnx.Module):\n """"""\n Dimension keys:\n D: B * T * N\n K: number of latents\n L: latent dimension\n """"""\n def __init__(\n self, latent_dim: int, num_latents: int, dropout: float, rngs: nnx.Rngs\n ):\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.dropout = dropout\n\n self.codebook = nnx.Param(\n normalize(\n nnx.initializers.lecun_uniform()(\n rngs.params(), (self.num_latents, self.latent_dim)\n )\n )\n )\n self.drop = nnx.Dropout(self.dropout, rngs=rngs)\n\n def __call__(\n self, x_DL: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x_DL = normalize(x_DL)\n normalized_codebook_KL = normalize(self.codebook.value)\n distance_DK = -jnp.matmul(x_DL, normalized_codebook_KL.T)\n if training:\n distance_DK = self.drop(distance_DK)\n\n # --- Get indices and embeddings ---\n indices_D = jnp.argmin(distance_DK, axis=-1)\n z_DL = self.codebook[indices_D]\n\n # --- Straight through estimator ---\n z_q_DL = x_DL + 
jax.lax.stop_gradient(z_DL - x_DL)\n return z_q_DL, z_DL, x_DL, indices_D\n\n def get_codes(self, indices_E: jax.Array) -> jax.Array:\n return self.codebook[indices_E]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool) -> Callable:\n """"""\n Create an attention function that uses flash attention if enabled.\n\n flax.nnx.MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim),\n but jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim). We reshape to\n ensure compatibility. cuDNN's flash attention additionally requires a sequence length that\n is a multiple of 4. We pad the sequence length to the nearest multiple of 4 and mask\n accordingly. Note that cuDNN requires the mask to be broadcast before calling the attention\n function due to strict shape checking.\n """"""\n\n def attention_fn(query_BTHD, key_BSHD, value_BSHD, bias=None, mask_B111=None, **kwargs):\n implementation = ""cudnn"" if use_flash_attention else None\n\n def _merge_batch_dims(x):\n return einops.rearrange(x, ""... l h k -> (...) l h k"")\n\n def _pad(x, pad_size):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n original_shape = query_BTHD.shape\n T = query_BTHD.shape[-3]\n S = key_BSHD.shape[-3]\n\n # Pad to nearest multiple of 4\n Q = ((T + 3) // 4) * 4\n pad_size_Q = Q - T\n K = ((S + 3) // 4) * 4\n pad_size_K = K - S\n\n query_BQHD = _pad(_merge_batch_dims(query_BTHD), pad_size_Q)\n key_BKHD = _pad(_merge_batch_dims(key_BSHD), pad_size_K)\n value_BKHD = _pad(_merge_batch_dims(value_BSHD), pad_size_K)\n\n attention_mask = jnp.ones((Q, K), dtype=jnp.bool_)\n attention_mask = attention_mask.at[T:, :].set(False)\n attention_mask = attention_mask.at[:, S:].set(False)\n\n mask_11TS = attention_mask[jnp.newaxis, jnp.newaxis, :, :]\n\n bias_4d = jnp.pad(_merge_batch_dims(bias), ((0, 0), (0, 0), (0, pad_size_Q), (0, pad_size_K))) if bias is not None else None\n\n # NOTE: jax.nn.dot_product_attention does not support dropout\n output_4d = jax.nn.dot_product_attention(\n query=query_BQHD,\n key=key_BKHD,\n value=value_BKHD,\n bias=bias_4d,\n mask=mask_11TS,\n implementation=implementation,\n is_causal=is_causal,\n )\n return output_4d[..., :T, :, :].reshape(original_shape)\n\n return attention_fn\n",python,tab +523,2252301,"utils/nn.py",7559,0,"",python,selection_command +524,2252303,"utils/nn.py",7528,0,"",python,selection_command +525,2252303,"utils/nn.py",7462,0,"",python,selection_command +526,2252304,"utils/nn.py",7424,0,"",python,selection_command +527,2252305,"utils/nn.py",7374,0,"",python,selection_command +528,2252306,"utils/nn.py",7332,0,"",python,selection_command +529,2252307,"utils/nn.py",7286,0,"",python,selection_command +530,2252307,"utils/nn.py",7244,0,"",python,selection_command +531,2252308,"utils/nn.py",7204,0,"",python,selection_command +532,2252309,"utils/nn.py",7179,0,"",python,selection_command +533,2252503,"utils/nn.py",1547,0,"",python,selection_command +534,2253323,"utils/nn.py",1574,0,"",python,selection_command +535,2253579,"utils/nn.py",1592,0,"",python,selection_command +536,2253605,"utils/nn.py",1606,0,"",python,selection_command +537,2253631,"utils/nn.py",1624,0,"",python,selection_command +538,2253664,"utils/nn.py",1646,0,"",python,selection_command +539,2253700,"utils/nn.py",1670,0,"",python,selection_command +540,2253735,"utils/nn.py",1694,0,"",python,selection_command +541,2253761,"utils/nn.py",1726,0,"",python,selection_command +542,2253903,"utils/nn.py",1752,0,"",python,selection_command 
+543,2254063,"utils/nn.py",1787,0,"",python,selection_command +544,2254650,"utils/nn.py",1811,0,"",python,selection_command +545,2255170,"utils/nn.py",1813,0,"",python,selection_command +546,2255577,"utils/nn.py",2124,0,"",python,selection_command +547,2270643,"utils/nn.py",2138,0,"",python,selection_command +548,2271361,"utils/nn.py",4408,0,"",python,selection_command +549,2277137,"utils/nn.py",4406,0,"",python,selection_command +550,2277319,"utils/nn.py",4400,0,"",python,selection_command +551,2277484,"utils/nn.py",4399,0,"",python,selection_command +552,2277755,"utils/nn.py",4382,0,"",python,selection_command +553,2280545,"train_lam.py",0,0,"",python,tab +554,2282114,"utils/nn.py",0,0,"",python,tab +555,2553194,"utils/nn.py",4985,0,"",python,selection_command +556,2555180,"utils/nn.py",5018,0,"",python,selection_command +557,2556838,"utils/nn.py",4985,0,"",python,selection_command +558,2556969,"utils/nn.py",4952,0,"",python,selection_command +559,2557302,"utils/nn.py",4949,0,"",python,selection_command +560,2557470,"utils/nn.py",4947,0,"",python,selection_command +561,2557644,"utils/nn.py",4940,0,"",python,selection_command +562,2571436,"utils/nn.py",5757,0,"",python,selection_command +563,2572184,"utils/nn.py",6274,0,"",python,selection_command +564,2573118,"utils/nn.py",6287,0,"",python,selection_command +565,2573318,"utils/nn.py",8258,0,"",python,selection_command +566,2613940,"utils/nn.py",8257,0,"",python,selection_command diff --git a/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-1a3a8350-ade5-4f14-90d3-a2023f5be9fa1753600712073-2025_07_27-09.18.39.905/source.csv b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-1a3a8350-ade5-4f14-90d3-a2023f5be9fa1753600712073-2025_07_27-09.18.39.905/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..ab9a0f803a60b55330daff42338847b85dc547e4 --- /dev/null +++ b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-1a3a8350-ade5-4f14-90d3-a2023f5be9fa1753600712073-2025_07_27-09.18.39.905/source.csv @@ -0,0 +1,9658 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,3,"models/dynamics_causal.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsCausal(nnx.Module):\n """"""Causal dynamics model""""""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n\n self.transformer = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n spatial_causal=True,\n decode=self.decode,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n 
def __call__(\n self, batch: Dict[str, jax.Array], training: bool = True\n ) -> tuple[jax.Array, jax.Array | None]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n # breakpoint()\n act_embed = self.action_up(batch[""latent_actions""])\n breakpoint()\n # FIXME: this is the culprit!\n # vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n vid_embed += act_embed\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n # breakpoint()\n\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\n\n mask = jnp.ones(vid_embed.shape[:-1])\n return logits, mask\n",python,tab +2,447,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"9:18:39 AM [info] Activating crowd-code\n9:18:39 AM [info] Recording started\n9:18:39 AM [info] Initializing git provider using file system watchers...\n",Log,tab +3,522,"extension-output-pdoom-org.crowd-code-#1-crowd-code",150,0,"9:18:40 AM [info] Git repository found\n9:18:40 AM [info] Git provider initialized successfully\n9:18:40 AM [info] Initial git state: [object Object]\n",Log,content +4,45873,"models/dynamics_causal.py",0,0,"",python,tab +5,66798,"TERMINAL",0,0,"undefined[franz.srambical@hai-login2.haicore.berlin:~/jafar] $ git stash",,terminal_command +6,66848,"TERMINAL",0,0,"]633;C",,terminal_output +7,66990,"TERMINAL",0,0,"Saved working directory and index state WIP on causal-transformer-nnx: cf36115 feat: refactor; only send single token to attn\r\n",,terminal_output +8,67038,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar",,terminal_output +9,67268,"models/dynamics_causal.py",1877,352," act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n",python,content +10,75376,"models/dynamics_causal.py",0,0,"Switched from branch 'causal-transformer-nnx' to 'main'",python,git_branch_checkout +11,89656,"README.md",0,0,"

🧞‍♀️ Jasmine: A simple, performant and scalable JAX-based world modeling codebase 🧞‍♀️

\n\n

\n \n \n \n \n

\n\nJasmine is a production-ready JAX-based world modeling codebase. It currently implements the high-level architecture of [Genie: Generative Interactive Environments](https://arxiv.org/abs/2402.15391) (Bruce et al., 2024) with [MaskGIT](https://arxiv.org/abs/2202.04200) (Chang et al., 2022), as well as an autoregressive (causal) baseline. A diffusion baseline is coming soon.\n\nJasmine scales from single hosts to hundreds of xPUs thanks to XLA and strives to be an easily hackable, batteries-included foundation for world modeling research.\n\n

Overview

\n\n- Asynchronous & distributed checkpointing thanks to [orbax.checkpoint](https://github.com/google/orbax)\n - Jasmine also supports mixing and matching hardware topologies (e.g. train on four nodes, load the checkpoint on a single node)\n- Optimized dataloading thanks to [Grain](https://github.com/google/grain)\n - Dataloading scales with the number of processes (i.e. nodes/xPUs)\n- Checkpointing of model weights, optimizer and dataloader states\n- Full reproducibility with **exact** training curves (thanks to seeded dataloading and training, and [JAX' approach to pseudo random numbers](https://docs.jax.dev/en/latest/random-numbers.html))\n- Automatic checkpoint deletion/retention according to specified retention policy thanks to `orbax.checkpoint.CheckpointManager`\n- Mixed precision training using `bfloat16`\n - `int8` training is on the roadmap via [aqt](https://github.com/google/aqt)\n- FlashAttention thanks to [cuDNN SDPA](https://github.com/jax-ml/jax/blob/a155c5a9997924170e0067d552351a9833c12c11/jax/_src/cudnn/fused_attention_stablehlo.py#L842)\n- Frame-level KV cache resets for accelerated spatiotemporal attention in causal baseline (still in PR)\n- Activation checkpointing (even onto host memory if desired)\n- DDP (changing to FSDP requires changing **a single line of code**)\n- WSD learning rate schedule\n - No need to retrain from scratch if you want to train for longer\n- Index-shuffling during dataloading\n- Google-native stack\n - https://github.com/google/orbax for checkpointing\n - https://github.com/google/grain for dataloading\n - https://github.com/google-deepmind/dm_pix for image manipulation\n - https://github.com/google/array_record as the data format\n- Easy model inspection thanks to [treescope](https://github.com/google-deepmind/treescope)\n- Easy model surgery thanks to the new [flax.nnx](https://flax.readthedocs.io/en/latest/guides/linen_to_nnx.html) API\n\n

Setup 🧗

\n\nJasmine requires `python 3.10`, `jax 0.6.2` and `flax 0.10.7`. To install the requirements, run:\n\n```bash\npip install -r requirements.txt\npre-commit install\n```\n\nDownload OpenAI's VPT dataset by running:\n\n```bash\nbash input_pipeline/download/openai/download_index_files.sh\npython input_pipeline/download/openai/download_videos.py\n```\n\nNote: this is a large dataset and may take a while to download.\n\nFor performant distributed training, we additionally preprocess the dataset into `arrayrecords`:\n\n```bash\npython input_pipeline/preprocess/video_to_array_records.py\n```\n\n

Quick Start 🚀

\n\nGenie has three components: a [video tokenizer](models/tokenizer.py), a [latent action model](models/lam.py), and a [dynamics model](models/dynamics.py). Each of these components are trained separately, however, the dynamics model requires a pre-trained video tokenizer (and latent action model).\n\nTo train the video tokenizer (similar for the LAM), run:\n\n```bash\npython train_tokenizer.py --ckpt_dir \n```\n\nOnce the tokenizer and LAM are trained, the dynamics model can be trained with:\n\n```bash\npython train_dynamics.py --tokenizer_checkpoint --lam_checkpoint \n```\n\nLogging with `wandb` is supported. To enable logging, set the `WANDB_API_KEY` environment variable or run:\n\n```bash\nwandb login\n```\n\nTraining can then be logged by setting the `--log` flag:\n\n```bash\npython train_tokenizer.py --log --entity --project \n```\n\n

Citing Jafar 📜

\n\nJasmine was built by [Mihir Mahajan](https://maharajamihir.github.io/), [Alfred Nguyen](https://avocadoali.github.io/) and [Franz Srambical](https://srambical.fr/), but started as a fork of [Jafar](https://github.com/flairox/jafar), built by [Matthew Jackson](https://matthewtjackson.com) and [Timon Willi](https://www.timonwilli.com).\n\nIf you use Jasmine in your work, please cite us, Jafar, and the original Genie paper as follows:\n\n```\n@inproceedings{\n mahajan2025jasmine,\n title={Jasmine: : A simple, performant and scalable JAX-based world modeling codebase},\n author={Mihir Mahajan and Alfred Nguyen and Franz Srambical and Stefan Bauer},\n journal = {p(doom) blog},\n year={2025},\n url={https://pdoom.org/jasmine.html}\n note = {https://pdoom.org/blog.html}\n}\n```\n```\n@inproceedings{\n willi2024jafar,\n title={Jafar: An Open-Source Genie Reimplemention in Jax},\n author={Timon Willi and Matthew Thomas Jackson and Jakob Nicolaus Foerster},\n booktitle={First Workshop on Controllable Video Generation @ ICML 2024},\n year={2024},\n url={https://openreview.net/forum?id=ZZGaQHs9Jb}\n}\n```\n```\n@inproceedings{\n bruce2024genie,\n title={Genie: Generative Interactive Environments},\n author={Jake Bruce and Michael D Dennis and Ashley Edwards and Jack Parker-Holder and Yuge Shi and Edward Hughes and Matthew Lai and Aditi Mavalankar and Richie Steigerwald and Chris Apps and Yusuf Aytar and Sarah Maria Elisabeth Bechtle and Feryal Behbahani and Stephanie C.Y. Chan and Nicolas Heess and Lucy Gonzalez and Simon Osindero and Sherjil Ozair and Scott Reed and Jingwei Zhang and Konrad Zolna and Jeff Clune and Nando de Freitas and Satinder Singh and Tim Rockt{\""a}schel},\n booktitle={Forty-first International Conference on Machine Learning},\n year={2024},\n url={https://openreview.net/forum?id=bJbSbJskOS}\n}\n```\n",markdown,tab +12,90377,"README.md",0,0,"Switched from branch 'main' to 'readme-typo'",markdown,git_branch_checkout +13,91053,"README.md",6374,0,"",markdown,selection_command +14,91750,"README.md",6370,0,"",markdown,selection_command +15,91915,"README.md",6368,0,"",markdown,selection_command +16,91942,"README.md",6315,0,"",markdown,selection_command +17,91977,"README.md",6298,0,"",markdown,selection_command +18,92078,"README.md",6224,0,"",markdown,selection_command +19,92079,"README.md",5733,0,"",markdown,selection_command +20,92079,"README.md",5677,0,"",markdown,selection_command +21,92112,"README.md",5657,0,"",markdown,selection_command +22,92144,"README.md",5641,0,"",markdown,selection_command +23,92171,"README.md",5637,0,"",markdown,selection_command +24,92273,"README.md",5633,0,"",markdown,selection_command +25,92274,"README.md",5631,0,"",markdown,selection_command +26,92274,"README.md",5578,0,"",markdown,selection_command +27,92303,"README.md",5561,0,"",markdown,selection_command +28,92336,"README.md",5484,0,"",markdown,selection_command +29,92377,"README.md",5403,0,"",markdown,selection_command +30,92406,"README.md",5340,0,"",markdown,selection_command +31,92438,"README.md",5320,0,"",markdown,selection_command +32,92479,"README.md",5304,0,"",markdown,selection_command +33,92510,"README.md",5300,0,"",markdown,selection_command +34,92604,"README.md",5296,0,"",markdown,selection_command +35,92605,"README.md",5294,0,"",markdown,selection_command +36,92608,"README.md",5253,0,"",markdown,selection_command +37,92644,"README.md",5212,0,"",markdown,selection_command +38,92677,"README.md",5195,0,"",markdown,selection_command 
+39,92709,"README.md",5165,0,"",markdown,selection_command +40,92799,"README.md",5082,0,"",markdown,selection_command +41,92799,"README.md",4990,0,"",markdown,selection_command +42,92808,"README.md",4966,0,"",markdown,selection_command +43,92843,"README.md",4950,0,"",markdown,selection_command +44,93205,"README.md",4951,0,"",markdown,selection_command +45,93538,"README.md",4951,13,"",markdown,content +46,93682,"README.md",4951,0,"a",markdown,content +47,93682,"README.md",4952,0,"",markdown,selection_keyboard +48,93847,"README.md",4952,0,"r",markdown,content +49,93848,"README.md",4953,0,"",markdown,selection_keyboard +50,93950,"README.md",4953,0,"t",markdown,content +51,93950,"README.md",4954,0,"",markdown,selection_keyboard +52,94048,"README.md",4954,0,"i",markdown,content +53,94049,"README.md",4955,0,"",markdown,selection_keyboard +54,94174,"README.md",4955,0,"c",markdown,content +55,94175,"README.md",4956,0,"",markdown,selection_keyboard +56,94264,"README.md",4956,0,"l",markdown,content +57,94264,"README.md",4957,0,"",markdown,selection_keyboard +58,94371,"README.md",4957,0,"e",markdown,content +59,94371,"README.md",4958,0,"",markdown,selection_keyboard +60,94555,"README.md",4957,0,"",markdown,selection_command +61,95419,"README.md",4967,0,"",markdown,selection_command +62,95569,"README.md",4991,0,"",markdown,selection_command +63,95747,"README.md",5083,0,"",markdown,selection_command +64,95944,"README.md",5166,0,"",markdown,selection_command +65,100201,"README.md",5083,0,"",markdown,selection_command +66,100321,"README.md",4991,0,"",markdown,selection_command +67,100495,"README.md",4993,0,"",markdown,selection_command +68,100664,"README.md",4995,0,"",markdown,selection_command +69,100809,"README.md",5002,0,"",markdown,selection_command +70,100990,"README.md",5004,0,"",markdown,selection_command +71,101340,"README.md",5003,0,"",markdown,selection_command +72,101557,"README.md",5003,1,"",markdown,content +73,101635,"README.md",5004,0,"",markdown,selection_command +74,102320,"README.md",5003,0,"",markdown,selection_command +75,102404,"README.md",5003,1,"",markdown,content +76,103404,"README.md",4984,0,"",markdown,selection_command +77,167488,"README.md",0,0,"

🧞‍♀️ Jasmine: A simple, performant and scalable JAX-based world modeling codebase 🧞‍♀️

\n\n

\n \n \n \n \n

\n\nJasmine is a production-ready JAX-based world modeling codebase. It currently implements the high-level architecture of [Genie: Generative Interactive Environments](https://arxiv.org/abs/2402.15391) (Bruce et al., 2024) with [MaskGIT](https://arxiv.org/abs/2202.04200) (Chang et al., 2022), as well as an autoregressive (causal) baseline. A diffusion baseline is coming soon.\n\nJasmine scales from single hosts to hundreds of xPUs thanks to XLA and strives to be an easily hackable, batteries-included foundation for world modeling research.\n\n

Overview

\n\n- Asynchronous & distributed checkpointing thanks to [orbax.checkpoint](https://github.com/google/orbax)\n - Jasmine also supports mixing and matching hardware topologies (e.g. train on four nodes, load the checkpoint on a single node)\n- Optimized dataloading thanks to [Grain](https://github.com/google/grain)\n - Dataloading scales with the number of processes (i.e. nodes/xPUs)\n- Checkpointing of model weights, optimizer and dataloader states\n- Full reproducibility with **exact** training curves (thanks to seeded dataloading and training, and [JAX' approach to pseudo random numbers](https://docs.jax.dev/en/latest/random-numbers.html))\n- Automatic checkpoint deletion/retention according to specified retention policy thanks to `orbax.checkpoint.CheckpointManager`\n- Mixed precision training using `bfloat16`\n - `int8` training is on the roadmap via [aqt](https://github.com/google/aqt)\n- FlashAttention thanks to [cuDNN SDPA](https://github.com/jax-ml/jax/blob/a155c5a9997924170e0067d552351a9833c12c11/jax/_src/cudnn/fused_attention_stablehlo.py#L842)\n- Frame-level KV cache resets for accelerated spatiotemporal attention in causal baseline (still in PR)\n- Activation checkpointing (even onto host memory if desired)\n- DDP (changing to FSDP requires changing **a single line of code**)\n- WSD learning rate schedule\n - No need to retrain from scratch if you want to train for longer\n- Index-shuffling during dataloading\n- Google-native stack\n - https://github.com/google/orbax for checkpointing\n - https://github.com/google/grain for dataloading\n - https://github.com/google-deepmind/dm_pix for image manipulation\n - https://github.com/google/array_record as the data format\n- Easy model inspection thanks to [treescope](https://github.com/google-deepmind/treescope)\n- Easy model surgery thanks to the new [flax.nnx](https://flax.readthedocs.io/en/latest/guides/linen_to_nnx.html) API\n\n

Setup 🧗

\n\nJasmine requires `python 3.10`, `jax 0.6.2` and `flax 0.10.7`. To install the requirements, run:\n\n```bash\npip install -r requirements.txt\npre-commit install\n```\n\nDownload OpenAI's VPT dataset by running:\n\n```bash\nbash input_pipeline/download/openai/download_index_files.sh\npython input_pipeline/download/openai/download_videos.py\n```\n\nNote: this is a large dataset and may take a while to download.\n\nFor performant distributed training, we additionally preprocess the dataset into `arrayrecords`:\n\n```bash\npython input_pipeline/preprocess/video_to_array_records.py\n```\n\n

Quick Start 🚀

\n\nGenie has three components: a [video tokenizer](models/tokenizer.py), a [latent action model](models/lam.py), and a [dynamics model](models/dynamics.py). Each of these components are trained separately, however, the dynamics model requires a pre-trained video tokenizer (and latent action model).\n\nTo train the video tokenizer (similar for the LAM), run:\n\n```bash\npython train_tokenizer.py --ckpt_dir \n```\n\nOnce the tokenizer and LAM are trained, the dynamics model can be trained with:\n\n```bash\npython train_dynamics.py --tokenizer_checkpoint --lam_checkpoint \n```\n\nLogging with `wandb` is supported. To enable logging, set the `WANDB_API_KEY` environment variable or run:\n\n```bash\nwandb login\n```\n\nTraining can then be logged by setting the `--log` flag:\n\n```bash\npython train_tokenizer.py --log --entity --project \n```\n\n

Citing Jafar 📜

\n\nJasmine was built by [Mihir Mahajan](https://maharajamihir.github.io/), [Alfred Nguyen](https://avocadoali.github.io/) and [Franz Srambical](https://srambical.fr/), but started as a fork of [Jafar](https://github.com/flairox/jafar), built by [Matthew Jackson](https://matthewtjackson.com) and [Timon Willi](https://www.timonwilli.com).\n\nIf you use Jasmine in your work, please cite us, Jafar, and the original Genie paper as follows:\n\n```\n@article{\n mahajan2025jasmine,\n title={Jasmine: A simple, performant and scalable JAX-based world modeling codebase},\n author={Mihir Mahajan and Alfred Nguyen and Franz Srambical and Stefan Bauer},\n journal = {p(doom) blog},\n year={2025},\n url={https://pdoom.org/jasmine.html}\n note = {https://pdoom.org/blog.html}\n}\n```\n```\n@inproceedings{\n willi2024jafar,\n title={Jafar: An Open-Source Genie Reimplemention in Jax},\n author={Timon Willi and Matthew Thomas Jackson and Jakob Nicolaus Foerster},\n booktitle={First Workshop on Controllable Video Generation @ ICML 2024},\n year={2024},\n url={https://openreview.net/forum?id=ZZGaQHs9Jb}\n}\n```\n```\n@inproceedings{\n bruce2024genie,\n title={Genie: Generative Interactive Environments},\n author={Jake Bruce and Michael D Dennis and Ashley Edwards and Jack Parker-Holder and Yuge Shi and Edward Hughes and Matthew Lai and Aditi Mavalankar and Richie Steigerwald and Chris Apps and Yusuf Aytar and Sarah Maria Elisabeth Bechtle and Feryal Behbahani and Stephanie C.Y. Chan and Nicolas Heess and Lucy Gonzalez and Simon Osindero and Sherjil Ozair and Scott Reed and Jingwei Zhang and Konrad Zolna and Jeff Clune and Nando de Freitas and Satinder Singh and Tim Rockt{\""a}schel},\n booktitle={Forty-first International Conference on Machine Learning},\n year={2024},\n url={https://openreview.net/forum?id=bJbSbJskOS}\n}\n```\n",markdown,tab +78,167533,"README.md",4950,0,"",markdown,selection_command +79,210385,"README.md",0,0,"Switched from branch 'readme-typo' to 'readme-typo-bibtex'",markdown,git_branch_checkout +80,290081,"README.md",0,5292,"

Jafar: A JAX-based Genie Implementation 🧞

\n\n

\n \n \n \n \n

\n\nJafar is a JAX-based implementation of the DeepMind paper ""[Genie: Generative Interactive Environments](https://arxiv.org/abs/2402.15391)"" (Bruce et al., 2024).\n\nJafar supports training of all Genie components and can complete the CoinRun reproducibility experiment (Appendix F) on a single L40S GPU in under a week.\n\n

Setup 🧗

\n\nJafar was built with `python 3.10` and `jax 0.4.30`. To install requirements, run:\n\n```bash\npip install -r requirements.txt\npre-commit install\n```\n\nBefore training the models, generate the CoinRun dataset by running:\n\n```bash\npython generate_dataset.py --num_episodes 10000\n```\n\nNote: this is a large dataset (around 100GB) and may take a while to generate.\n\nFor performant distributed training, we additionally preprocess the dataset into `TFRecord`s:\n\n```bash\npython preprocess_dataset.py\n```\n\n

Quick Start 🚀

\n\nGenie has three components: a [video tokenizer](models/tokenizer.py), a [latent action model](models/lam.py), and a [dynamics model](models/dynamics.py). Each of these components are trained separately, however, the dynamics model requires a pre-trained video tokenizer and latent action model.\n\nTo train the video tokenizer (similar for the LAM), run:\n\n```bash\npython train_tokenizer.py --ckpt_dir \n```\n\nOnce the tokenizer and LAM are trained, the dynamics model can be trained with:\n\n```bash\npython train_dynamics.py --tokenizer_checkpoint --lam_checkpoint \n```\n\nLogging with `wandb` is supported. To enable logging, set the `WANDB_API_KEY` environment variable or run:\n\n```bash\nwandb login\n```\n\nTraining can then be logged by setting the `--log` flag:\n\n```bash\npython train_tokenizer.py --log --entity --project \n```\n\n

Citing Jafar 📜

\n\nJafar was built by [Matthew Jackson](https://matthewtjackson.com) and [Timon Willi](https://www.timonwilli.com).\n\nIf you use Jafar in your work, please cite us and the original Genie paper as follows:\n\n",markdown,content +81,290393,"README.md",0,0,"Switched from branch 'readme-typo-bibtex' to 'causal-transformer-nnx'",markdown,git_branch_checkout +82,294467,"TERMINAL",0,0,"git stash pop",,terminal_command +83,294518,"TERMINAL",0,0,"]633;C",,terminal_output +84,294627,"TERMINAL",0,0,"On branch causal-transformer-nnx\r\nYour branch is up to date with 'origin/causal-transformer-nnx'.\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: .gitignore\r\n\tmodified: jasmine.py\r\n\tmodified: models/dynamics_causal.py\r\n\tmodified: sample.py\r\n\tmodified: utils/nn.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tbatch_size_benchmark_tokenizer_flash_attention.json\r\n\tbatch_size_benchmark_tokenizer_no_flash_attention.json\r\n\tbenchmark_batch_sizes_tokenizer.py\r\n\tgenerate_arrayrecord_dataset.py\r\n\tsalient_restore_failing_minimal_example.py\r\n\tslurm/\r\n\ttest.ipynb\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\nDropped refs/stash@{0} (a97a7d9e39cde04418f63bc96658e34ccc847ae7)\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +85,300268,"README.md",0,0,"

Jafar: A JAX-based Genie Implementation 🧞

\n\n

\n \n \n \n \n

\n\nJafar is a JAX-based implementation of the DeepMind paper ""[Genie: Generative Interactive Environments](https://arxiv.org/abs/2402.15391)"" (Bruce et al., 2024).\n\nJafar supports training of all Genie components and can complete the CoinRun reproducibility experiment (Appendix F) on a single L40S GPU in under a week.\n\n

Setup 🧗

\n\nJafar was built with `python 3.10` and `jax 0.4.30`. To install requirements, run:\n\n```bash\npip install -r requirements.txt\npre-commit install\n```\n\nBefore training the models, generate the CoinRun dataset by running:\n\n```bash\npython generate_dataset.py --num_episodes 10000\n```\n\nNote: this is a large dataset (around 100GB) and may take a while to generate.\n\nFor performant distributed training, we additionally preprocess the dataset into `TFRecord`s:\n\n```bash\npython preprocess_dataset.py\n```\n\n

Quick Start 🚀

\n\nGenie has three components: a [video tokenizer](models/tokenizer.py), a [latent action model](models/lam.py), and a [dynamics model](models/dynamics.py). Each of these components are trained separately, however, the dynamics model requires a pre-trained video tokenizer and latent action model.\n\nTo train the video tokenizer (similar for the LAM), run:\n\n```bash\npython train_tokenizer.py --ckpt_dir \n```\n\nOnce the tokenizer and LAM are trained, the dynamics model can be trained with:\n\n```bash\npython train_dynamics.py --tokenizer_checkpoint --lam_checkpoint \n```\n\nLogging with `wandb` is supported. To enable logging, set the `WANDB_API_KEY` environment variable or run:\n\n```bash\nwandb login\n```\n\nTraining can then be logged by setting the `--log` flag:\n\n```bash\npython train_tokenizer.py --log --entity --project \n```\n\n

Citing Jafar 📜

\n\nJafar was built by [Matthew Jackson](https://matthewtjackson.com) and [Timon Willi](https://www.timonwilli.com).\n\nIf you use Jafar in your work, please cite us and the original Genie paper as follows:\n\n```\n@inproceedings{\n willi2024jafar,\n title={Jafar: An Open-Source Genie Reimplemention in Jax},\n author={Timon Willi and Matthew Thomas Jackson and Jakob Nicolaus Foerster},\n booktitle={First Workshop on Controllable Video Generation @ ICML 2024},\n year={2024},\n url={https://openreview.net/forum?id=ZZGaQHs9Jb}\n}\n```\n```\n@inproceedings{\n bruce2024genie,\n title={Genie: Generative Interactive Environments},\n author={Jake Bruce and Michael D Dennis and Ashley Edwards and Jack Parker-Holder and Yuge Shi and Edward Hughes and Matthew Lai and Aditi Mavalankar and Richie Steigerwald and Chris Apps and Yusuf Aytar and Sarah Maria Elisabeth Bechtle and Feryal Behbahani and Stephanie C.Y. Chan and Nicolas Heess and Lucy Gonzalez and Simon Osindero and Sherjil Ozair and Scott Reed and Jingwei Zhang and Konrad Zolna and Jeff Clune and Nando de Freitas and Satinder Singh and Tim Rockt{\""a}schel},\n booktitle={Forty-first International Conference on Machine Learning},\n year={2024},\n url={https://openreview.net/forum?id=bJbSbJskOS}\n}\n```\n",markdown,tab +86,301149,"models/dynamics_causal.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsCausal(nnx.Module):\n """"""Causal dynamics model""""""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n\n self.transformer = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n spatial_causal=True,\n decode=self.decode,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(\n self, batch: Dict[str, jax.Array], training: bool = True\n ) -> tuple[jax.Array, jax.Array | None]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n # breakpoint()\n act_embed = self.action_up(batch[""latent_actions""])\n breakpoint()\n # FIXME: this is the culprit!\n # vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n vid_embed += act_embed\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n # breakpoint()\n\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\n\n mask = jnp.ones(vid_embed.shape[:-1])\n return logits, mask\n",python,tab +87,696000,"TERMINAL",0,0,"squeue --me",,terminal_command +88,696014,"TERMINAL",0,0,"]633;C JOBID USER PARTITION NODES CPUS ST SUBMIT_TIME START_TIME TIME TIME_LIMIT NODELIST(REASON)\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output 
+89,699057,"TERMINAL",0,0,"salloc --gpus=1 --ntasks-per-node=1 --cpus-per-task=1 --mem=100G",,terminal_command +90,699111,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 13990\r\nsalloc: job 13990 queued and waiting for resources\r\n",,terminal_output +91,821294,"TERMINAL",0,0,"^Csalloc: Job allocation 13990 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +92,821353,"TERMINAL",0,0,"^C",,terminal_command +93,821823,"TERMINAL",0,0,"salloc --gpus=1 --ntasks-per-node=1 --cpus-per-task=1 --mem=100G",,terminal_command +94,821879,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 13991\r\nsalloc: job 13991 queued and waiting for resources\r\n",,terminal_output +95,827107,"TERMINAL",0,0,"",,terminal_focus +96,827643,"TERMINAL",0,0,"source /home/franz.srambical/jafar/.venv/bin/activate",,terminal_command +97,827655,"TERMINAL",0,0,"]633;C]0;franz.srambical@hai-login2:~/jafar",,terminal_output +98,831010,"TERMINAL",0,0,"squeue",,terminal_command +99,831030,"TERMINAL",0,0,"]633;C JOBID USER PARTITION NODES CPUS ST SUBMIT_TIME START_TIME TIME TIME_LIMIT NODELIST(REASON)\r\n 13991 franz.sram interacti 1 1 PD 2025-07-27T09:32:21 N/A 0:00 1-00:00:00 (ReqNodeNotAvail, Reserved for maintenance)\r\n 13989 xiao.liu interacti 1 16 R 2025-07-27T00:36:01 2025-07-27T00:36:01 8:56:29 23:59:00 hai001\r\n 13979 xiao.liu interacti 1 16 R 2025-07-26T16:58:15 2025-07-26T16:58:15 16:34:15 23:59:00 hai005\r\n 13988 nishant.ku standard 3 192 R 2025-07-26T21:47:33 2025-07-26T21:47:33 11:44:57 1-00:00:00 hai[002,004,006]\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +100,834447,"TERMINAL",0,0,"salloc",,terminal_focus +101,835200,"TERMINAL",0,0,"bash",,terminal_focus +102,837228,"TERMINAL",0,0,"salloc --gpus=1 --ntasks-per-node=1 --cpus-per-task=1 --mem=100G",,terminal_command +103,837278,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 13992\r\nsalloc: job 13992 queued and waiting for resources\r\n",,terminal_output +104,838232,"TERMINAL",0,0,"^Csalloc: Job allocation 13992 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +105,838368,"TERMINAL",0,0,"^C",,terminal_command +106,846831,"TERMINAL",0,0,"salloc gpus=1",,terminal_command +107,846884,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 13993\r\nsalloc: job 13993 queued and waiting for resources\r\n",,terminal_output +108,847885,"TERMINAL",0,0,"^Csalloc: Job aborted due to signal\r\nsalloc: Job allocation 13993 has been revoked.\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +109,848011,"TERMINAL",0,0,"^C",,terminal_command +110,851905,"TERMINAL",0,0,"salloc --gpus=1",,terminal_command +111,851954,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 13994\r\nsalloc: job 13994 queued and waiting for resources\r\n",,terminal_output +112,856893,"TERMINAL",0,0,"^Csalloc: Job allocation 13994 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +113,865096,"TERMINAL",0,0,"salloc --gpus=1 --ntasks-per-node=1 --cpus-per-task=1 --mem=100G --time=01:00:00",,terminal_command +114,865148,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 13995\r\n",,terminal_output +115,865246,"TERMINAL",0,0,"salloc: Nodes hai003 are ready for job\r\n",,terminal_output +116,865574,"TERMINAL",0,0,"Running inside SLURM, Job ID 13995.\r\n",,terminal_output 
+117,865657,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +118,868636,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +119,870520,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +120,870719,"TERMINAL",0,0,"[28@b': bash experiments/sample.sh\r",,terminal_output +121,870815,"TERMINAL",0,0,"[1@a': ba",,terminal_output +122,870869,"TERMINAL",0,0,"[1@s': bas",,terminal_output +123,870963,"TERMINAL",0,0,"[1@h': bash",,terminal_output +124,871424,"TERMINAL",0,0,"\r[24@[franz.srambical@hai003.haicore.berlin:~/jafar] $ bash",,terminal_output +125,871962,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +126,884953,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +127,897930,"TERMINAL",0,0,"2025-07-27 09:33:37.732438: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +128,899431,"TERMINAL",0,0,"2025-07-27 09:33:39.229434: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +129,902960,"TERMINAL",0,0,"2025-07-27 09:33:42.762908: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +130,903690,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/models/dynamics_causal.py(73)__call__()\r\n-> vid_embed += act_embed\r\n",,terminal_output +131,912207,"TERMINAL",0,0,"v",,terminal_output +132,912267,"TERMINAL",0,0,"i",,terminal_output +133,912537,"TERMINAL",0,0,"_",,terminal_output +134,912852,"TERMINAL",0,0," ",,terminal_output +135,912918,"TERMINAL",0,0,"d",,terminal_output +136,913104,"TERMINAL",0,0,"_",,terminal_output +137,913289,"TERMINAL",0,0,"e",,terminal_output +138,913374,"TERMINAL",0,0,"m",,terminal_output +139,913621,"TERMINAL",0,0,"bed",,terminal_output +140,914091,"TERMINAL",0,0,".",,terminal_output +141,914240,"TERMINAL",0,0,"s",,terminal_output +142,914341,"TERMINAL",0,0,"h",,terminal_output +143,914406,"TERMINAL",0,0,"a",,terminal_output +144,914521,"TERMINAL",0,0,"p",,terminal_output +145,914572,"TERMINAL",0,0,"e",,terminal_output +146,914735,"TERMINAL",0,0,"\r\n(Pdb) (1, 1, 920, 512)\r\n",,terminal_output +147,917471,"TERMINAL",0,0,"a",,terminal_output +148,917553,"TERMINAL",0,0,"c",,terminal_output +149,917742,"TERMINAL",0,0,"t",,terminal_output +150,917957,"TERMINAL",0,0,"_",,terminal_output +151,918126,"TERMINAL",0,0,"e",,terminal_output +152,918221,"TERMINAL",0,0,"m",,terminal_output +153,918408,"TERMINAL",0,0,"b",,terminal_output +154,918462,"TERMINAL",0,0,"e",,terminal_output +155,918563,"TERMINAL",0,0,"d",,terminal_output +156,918708,"TERMINAL",0,0,".",,terminal_output +157,918928,"TERMINAL",0,0,"s",,terminal_output +158,919046,"TERMINAL",0,0,"h",,terminal_output +159,919108,"TERMINAL",0,0,"a",,terminal_output +160,919210,"TERMINAL",0,0,"p",,terminal_output +161,919287,"TERMINAL",0,0,"e",,terminal_output +162,919378,"TERMINAL",0,0,"\r\n(Pdb) (1, 1, 1, 512)\r\n",,terminal_output +163,959548,"TERMINAL",0,0,"c",,terminal_output +164,959778,"TERMINAL",0,0,"\r\n",,terminal_output +165,959851,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 77, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 227, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 132, in __call__\r\n z = self.temporal_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 320, in attention_fn\r\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 299, in _fuse_masks\r\n expanded_mask = jnp.pad(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 4352, in pad\r\n pad_width = _broadcast_to_pairs(pad_width, np.ndim(array), ""pad_width"")\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 3937, in _broadcast_to_pairs\r\n raise ValueError(f""jnp.pad: {name} with {nd=} has unsupported shape {nvals.shape}. ""\r\nValueError: jnp.pad: pad_width with nd=5 has unsupported shape (2, 2). 
Valid shapes are (5, 2), (1, 2), (2,), (1,), or ().\r\n",,terminal_output +166,960693,"TERMINAL",0,0,"(Pdb) ",,terminal_output +167,960874,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +168,964184,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +169,964244,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +170,964521,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +171,976098,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +172,984197,"TERMINAL",0,0,"2025-07-27 09:35:04.002851: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +173,985689,"TERMINAL",0,0,"2025-07-27 09:35:05.486868: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +174,989267,"TERMINAL",0,0,"2025-07-27 09:35:09.031493: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +175,989949,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/models/dynamics_causal.py(73)__call__()\r\n-> vid_embed += act_embed\r\n",,terminal_output +176,991828,"TERMINAL",0,0,"n",,terminal_output +177,995829,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/models/dynamics_causal.py(74)__call__()\r\n-> vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\r\n",,terminal_output +178,1004263,"TERMINAL",0,0,"v",,terminal_output +179,1004511,"TERMINAL",0,0,"id",,terminal_output +180,1004706,"TERMINAL",0,0,"_",,terminal_output +181,1005232,"TERMINAL",0,0,"e",,terminal_output +182,1005313,"TERMINAL",0,0,"m",,terminal_output +183,1005556,"TERMINAL",0,0,"be",,terminal_output +184,1005749,"TERMINAL",0,0,"d.",,terminal_output +185,1005982,"TERMINAL",0,0,"s",,terminal_output +186,1006081,"TERMINAL",0,0,"ha",,terminal_output +187,1006278,"TERMINAL",0,0,"pe",,terminal_output +188,1006564,"TERMINAL",0,0,"\r\n(Pdb) (1, 1, 920, 512)\r\n",,terminal_output +189,1013517,"TERMINAL",0,0,"v",,terminal_output +190,1013657,"TERMINAL",0,0,"i",,terminal_output +191,1013726,"TERMINAL",0,0,"d",,terminal_output +192,1013948,"TERMINAL",0,0,"_",,terminal_output +193,1014367,"TERMINAL",0,0,"e",,terminal_output +194,1014468,"TERMINAL",0,0,"m",,terminal_output +195,1014772,"TERMINAL",0,0,"e",,terminal_output +196,1014998,"TERMINAL",0,0,"b",,terminal_output +197,1015269,"TERMINAL",0,0," ",,terminal_output +198,1015476,"TERMINAL",0,0," ",,terminal_output +199,1015613,"TERMINAL",0,0,"b",,terminal_output +200,1015673,"TERMINAL",0,0,"e",,terminal_output +201,1016043,"TERMINAL",0,0,"dd",,terminal_output +202,1016465,"TERMINAL",0,0," ",,terminal_output +203,1016733,"TERMINAL",0,0,"_",,terminal_output +204,1016891,"TERMINAL",0,0,"p",,terminal_output +205,1017095,"TERMINAL",0,0,"ad",,terminal_output +206,1017288,"TERMINAL",0,0,"d",,terminal_output +207,1017370,"TERMINAL",0,0,"e",,terminal_output +208,1017711,"TERMINAL",0,0,"d.",,terminal_output +209,1017810,"TERMINAL",0,0,"s",,terminal_output +210,1017906,"TERMINAL",0,0,"h",,terminal_output +211,1018038,"TERMINAL",0,0,"a",,terminal_output +212,1018140,"TERMINAL",0,0,"p",,terminal_output +213,1018338,"TERMINAL",0,0,"e",,terminal_output +214,1018432,"TERMINAL",0,0,"\r\n(Pdb) *** NameError: name 'vid_embed_padded' is not defined\r\n",,terminal_output +215,1020764,"TERMINAL",0,0,"n",,terminal_output +216,1021248,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/models/dynamics_causal.py(77)__call__()\r\n-> logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n",,terminal_output +217,1021777,"TERMINAL",0,0,"^P",,terminal_output +218,1021907,"TERMINAL",0,0,"^P",,terminal_output +219,1023479,"TERMINAL",0,0,"^[[A",,terminal_output +220,1023867,"TERMINAL",0,0,"^[[A",,terminal_output +221,1024635,"TERMINAL",0,0," ",,terminal_output +222,1025052,"TERMINAL",0,0,"       ",,terminal_output +223,1025154,"TERMINAL",0,0,"    ",,terminal_output +224,1026174,"TERMINAL",0,0,"v",,terminal_output +225,1026298,"TERMINAL",0,0,"i",,terminal_output +226,1026429,"TERMINAL",0,0,"d",,terminal_output +227,1026579,"TERMINAL",0,0,"_",,terminal_output +228,1026727,"TERMINAL",0,0,"e",,terminal_output +229,1026828,"TERMINAL",0,0,"m",,terminal_output +230,1027081,"TERMINAL",0,0,"be",,terminal_output +231,1027154,"TERMINAL",0,0,"d",,terminal_output +232,1027582,"TERMINAL",0,0,"_",,terminal_output 
+233,1027741,"TERMINAL",0,0,"pa",,terminal_output +234,1027807,"TERMINAL",0,0,"d",,terminal_output +235,1028001,"TERMINAL",0,0,"d",,terminal_output +236,1028110,"TERMINAL",0,0,"e",,terminal_output +237,1028216,"TERMINAL",0,0,"d",,terminal_output +238,1028314,"TERMINAL",0,0,".",,terminal_output +239,1028525,"TERMINAL",0,0,"s",,terminal_output +240,1028663,"TERMINAL",0,0,"ha",,terminal_output +241,1028732,"TERMINAL",0,0,"p",,terminal_output +242,1028838,"TERMINAL",0,0,"e",,terminal_output +243,1028963,"TERMINAL",0,0,"\r\n(Pdb) (1, 1, 921, 512)\r\n",,terminal_output +244,1037574,"TERMINAL",0,0,"s\r\n(Pdb) --Call--\r\n> /fast/home/franz.srambical/jafar/utils/nn.py(221)__call__()\r\n-> def __call__(self, x: jax.Array) -> jax.Array:\r\n",,terminal_output +245,1041109,"models/dynamics_causal.py",2019,0,"",python,selection_command +246,1041263,"models/dynamics_causal.py",2095,0,"",python,selection_command +247,1041416,"models/dynamics_causal.py",2126,0,"",python,selection_command +248,1041632,"models/dynamics_causal.py",2134,0,"",python,selection_command +249,1041757,"models/dynamics_causal.py",2214,0,"",python,selection_command +250,1041923,"models/dynamics_causal.py",2229,0,"",python,selection_command +251,1042065,"models/dynamics_causal.py",2238,0,"",python,selection_command +252,1042201,"models/dynamics_causal.py",2293,0,"",python,selection_command +253,1042284,"models/dynamics_causal.py",2302,0,"",python,selection_command +254,1042520,"models/dynamics_causal.py",2293,0,"",python,selection_command +255,1042686,"models/dynamics_causal.py",2238,0,"",python,selection_command +256,1042800,"models/dynamics_causal.py",2245,0,"",python,selection_command +257,1042968,"models/dynamics_causal.py",2247,0,"",python,selection_command +258,1043205,"models/dynamics_causal.py",2251,0,"",python,selection_command +259,1043268,"models/dynamics_causal.py",2252,0,"",python,selection_command +260,1043587,"models/dynamics_causal.py",966,0,"",python,selection_command +261,1044514,"models/dynamics_causal.py",978,0,"",python,selection_command +262,1044638,"models/dynamics_causal.py",980,0,"",python,selection_command +263,1045144,"utils/nn.py",0,0,"import math\nfrom typing import Tuple, Callable\n\nfrom flax import nnx\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\nclass PositionalEncoding(nnx.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n def __init__(self, d_model: int, max_len: int = 5000):\n self.d_model = d_model\n self.max_len = max_len\n\n pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n pe = pe.at[:, 0::2].set(jnp.sin(position * div_term))\n pe = pe.at[:, 1::2].set(jnp.cos(position * div_term))\n self.pe = nnx.Variable(pe)\n\n def __call__(self, x: jax.Array) -> jax.Array:\n x = x + self.pe[: x.shape[2]]\n return x\n\n\nclass STBlock(nnx.Module):\n def __init__(\n self,\n dim: int,\n ffn_dim: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n spatial_causal: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.dim = dim\n self.ffn_dim = ffn_dim\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.spatial_causal = spatial_causal\n self.decode = decode\n\n 
self.spatial_pos_enc = PositionalEncoding(self.dim)\n self.spatial_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.spatial_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention,\n is_causal=self.spatial_causal,\n ),\n rngs=rngs,\n # decode=self.decode,\n decode=False,\n )\n\n self.temporal_pos_enc = PositionalEncoding(self.dim)\n self.temporal_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.temporal_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=self.decode,\n )\n\n self.ffn_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense1 = nnx.Linear(\n in_features=self.dim,\n out_features=self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense2 = nnx.Linear(\n in_features=self.ffn_dim,\n out_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n @nnx.remat\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = self.spatial_pos_enc(x)\n z = self.spatial_norm(z)\n z = self.spatial_attention(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = self.temporal_pos_enc(x)\n z = self.temporal_norm(z)\n z = self.temporal_attention(z)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = self.ffn_norm(x)\n z = self.ffn_dense1(z)\n z = jax.nn.gelu(z)\n z = self.ffn_dense2(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nnx.Module):\n def __init__(\n self,\n input_dim: int,\n model_dim: int,\n ffn_dim: int,\n out_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n spatial_causal: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.input_dim = input_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.out_dim = out_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.spatial_causal = spatial_causal\n self.decode = decode\n\n self.input_norm1 = nnx.LayerNorm(\n num_features=self.input_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_dense = nnx.Linear(\n in_features=self.input_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_norm2 = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n self.blocks: list[STBlock] = []\n for _ in range(self.num_blocks):\n self.blocks.append(\n STBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n spatial_causal=self.spatial_causal,\n decode=self.decode,\n rngs=rngs,\n )\n )\n\n 
self.output_dense = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(self, x: jax.Array) -> jax.Array:\n x = self.input_norm1(x)\n x = self.input_dense(x)\n x = self.input_norm2(x)\n\n for block in self.blocks:\n x = block(x)\n\n x = self.output_dense(x)\n return x # (B, T, E)\n\n\ndef normalize(x: jax.Array) -> jax.Array:\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nnx.Module):\n def __init__(\n self, latent_dim: int, num_latents: int, dropout: float, rngs: nnx.Rngs\n ):\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.dropout = dropout\n\n self.codebook = nnx.Param(\n normalize(\n nnx.initializers.lecun_uniform()(\n rngs.params(), (self.num_latents, self.latent_dim)\n )\n )\n )\n self.drop = nnx.Dropout(self.dropout, rngs=rngs)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n normalized_codebook = normalize(self.codebook.value)\n distance = -jnp.matmul(x, normalized_codebook.T)\n if training:\n distance = self.drop(distance)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array) -> jax.Array:\n return self.codebook[indices]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool) -> Callable:\n """"""\n Create an attention function that uses flash attention if enabled.\n\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\n\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\n multiple of 4 and mask accordingly.\n """"""\n\n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\n implementation = ""cudnn"" if use_flash_attention else None\n\n def _rearrange(x):\n return einops.rearrange(x, ""... l h d -> (...) 
l h d"")\n\n def _pad(x):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )\n return jnp.logical_and(attention_mask, expanded_mask)\n\n original_shape = query.shape\n original_seq_len = query.shape[-3]\n\n # Pad to nearest multiple of 4\n target_seq_len = ((original_seq_len + 3) // 4) * 4\n pad_size = target_seq_len - original_seq_len\n\n query_4d = _pad(_rearrange(query))\n key_4d = _pad(_rearrange(key))\n value_4d = _pad(_rearrange(value))\n\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n mask_4d = (\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n )\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n\n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\n\n # NOTE: jax.nn.dot_product_attention does not support dropout\n output_4d = jax.nn.dot_product_attention(\n query=query_4d,\n key=key_4d,\n value=value_4d,\n bias=bias_4d,\n mask=mask_4d,\n implementation=implementation,\n is_causal=is_causal,\n )\n return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n\n return attention_fn\n",python,tab +264,1045145,"utils/nn.py",4299,0,"",python,selection_command +265,1045837,"utils/nn.py",6148,0,"",python,selection_command +266,1046654,"utils/nn.py",6583,0,"",python,selection_keyboard +267,1053373,"TERMINAL",0,0,"x",,terminal_output +268,1053586,"TERMINAL",0,0,".",,terminal_output +269,1053767,"TERMINAL",0,0,"s",,terminal_output +270,1053850,"TERMINAL",0,0,"h",,terminal_output +271,1053950,"TERMINAL",0,0,"a",,terminal_output +272,1054015,"TERMINAL",0,0,"p",,terminal_output +273,1054119,"TERMINAL",0,0,"e",,terminal_output +274,1054691,"TERMINAL",0,0,"\r\n(Pdb) (1, 1, 921, 512)\r\n",,terminal_output +275,1062413,"TERMINAL",0,0,"n",,terminal_output +276,1063296,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(222)__call__()\r\n-> x = self.input_norm1(x)\r\n",,terminal_output +277,1064269,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(223)__call__()\r\n-> x = self.input_dense(x)\r\n",,terminal_output +278,1065501,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(224)__call__()\r\n-> x = self.input_norm2(x)\r\n",,terminal_output +279,1066088,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(226)__call__()\r\n-> for block in self.blocks:\r\n",,terminal_output +280,1067867,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(227)__call__()\r\n-> x = block(x)\r\n",,terminal_output +281,1069492,"TERMINAL",0,0,"s",,terminal_output +282,1069598,"TERMINAL",0,0,"\r\n(Pdb) --Call--\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(70)resolve_kwargs_wrapper()\r\n-> @functools.wraps(f)\r\n",,terminal_output +283,1070709,"utils/nn.py",6634,0,"",python,selection_command +284,1070869,"utils/nn.py",6666,0,"",python,selection_command +285,1071127,"utils/nn.py",6698,0,"",python,selection_command +286,1071156,"utils/nn.py",6726,0,"",python,selection_command +287,1071188,"utils/nn.py",6731,0,"",python,selection_command 
+288,1071313,"utils/nn.py",6765,0,"",python,selection_command +289,1071623,"utils/nn.py",6773,0,"",python,selection_command +290,1071791,"utils/nn.py",6775,0,"",python,selection_command +291,1071931,"utils/nn.py",6777,0,"",python,selection_command +292,1072320,"utils/nn.py",6739,0,"",python,selection_command +293,1072921,"utils/nn.py",6745,0,"",python,selection_command +294,1073044,"utils/nn.py",6748,0,"",python,selection_command +295,1073244,"utils/nn.py",6752,0,"",python,selection_command +296,1073476,"utils/nn.py",6753,0,"",python,selection_command +297,1073792,"utils/nn.py",5745,0,"",python,selection_command +298,1074028,"utils/nn.py",5751,0,"",python,selection_command +299,1074231,"utils/nn.py",5753,0,"",python,selection_command +300,1074695,"utils/nn.py",5757,0,"",python,selection_command +301,1074860,"utils/nn.py",5758,0,"",python,selection_command +302,1075189,"utils/nn.py",929,0,"",python,selection_command +303,1078895,"TERMINAL",0,0,"n",,terminal_output +304,1079326,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(72)resolve_kwargs_wrapper()\r\n-> args = resolve_kwargs(f, args, kwargs)\r\n",,terminal_output +305,1082714,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(73)resolve_kwargs_wrapper()\r\n-> return f(*args)\r\n",,terminal_output +306,1084522,"TERMINAL",0,0,"\r\n",,terminal_output +307,1084650,"TERMINAL",0,0,"(Pdb) ValueError: jnp.pad: pad_width with nd=5 has unsupported shape (2, 2). Valid shapes are (5, 2), (1, 2), (2,), (1,), or ().\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(73)resolve_kwargs_wrapper()\r\n-> return f(*args)\r\n",,terminal_output +308,1085652,"TERMINAL",0,0,"\r\n(Pdb) --Return--\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(73)resolve_kwargs_wrapper()->None\r\n-> return f(*args)\r\n",,terminal_output +309,1086184,"TERMINAL",0,0,"\r\n(Pdb) ValueError: jnp.pad: pad_width with nd=5 has unsupported shape (2, 2). Valid shapes are (5, 2), (1, 2), (2,), (1,), or ().\r\n> /fast/home/franz.srambical/jafar/utils/nn.py(227)__call__()\r\n-> x = block(x)\r\n",,terminal_output +310,1093118,"TERMINAL",0,0,"s",,terminal_output +311,1095242,"TERMINAL",0,0,"\r\n(Pdb) --Return--\r\n> /fast/home/franz.srambical/jafar/utils/nn.py(227)__call__()->None\r\n-> x = block(x)\r\n",,terminal_output +312,1098826,"TERMINAL",0,0,"\r\n(Pdb) ValueError: jnp.pad: pad_width with nd=5 has unsupported shape (2, 2). Valid shapes are (5, 2), (1, 2), (2,), (1,), or ().\r\n> /fast/home/franz.srambical/jafar/models/dynamics_causal.py(77)__call__()\r\n-> logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n",,terminal_output +313,1108435,"TERMINAL",0,0,"s",,terminal_output +314,1108522,"TERMINAL",0,0,"\r\n(Pdb) --Return--\r\n> /fast/home/franz.srambical/jafar/models/dynamics_causal.py(77)__call__()->None\r\n-> logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n",,terminal_output +315,1109555,"TERMINAL",0,0,"\r\n(Pdb) ValueError: jnp.pad: pad_width with nd=5 has unsupported shape (2, 2). 
Valid shapes are (5, 2), (1, 2), (2,), (1,), or ().\r\n> /fast/home/franz.srambical/jafar/jasmine.py(352)token_step_fn()\r\n-> next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n",,terminal_output +316,1116264,"TERMINAL",0,0,"q",,terminal_output +317,1116371,"TERMINAL",0,0,"u",,terminal_output +318,1116452,"TERMINAL",0,0,"it",,terminal_output +319,1116745,"TERMINAL",0,0,")(",,terminal_output +320,1117045,"TERMINAL",0,0,"\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 96, in trace_dispatch\r\n return self.dispatch_exception(frame, arg)\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 176, in dispatch_exception\r\n if self.quitting: raise BdbQuit\r\nbdb.BdbQuit\r\n",,terminal_output +321,1117824,"TERMINAL",0,0,"(Pdb) ",,terminal_output +322,1118044,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=13995.1 task 0: running\r\nsrun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +323,1119022,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +324,1119124,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +325,1120080,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +326,1120663,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=13995.2 task 0: running\r\n",,terminal_output +327,1120842,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=13995.2\r\nsrun: forcing job termination\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 4, in \r\n import optax\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/optax/__init__.py"", line 22, in \r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\n[2025-07-27T09:37:20.621] error: *** STEP 13995.2 ON hai003 CANCELLED AT 2025-07-27T09:37:20 DUE to SIGNAL Killed ***\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +328,1120980,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +329,1124468,"utils/nn.py",2834,0,"",python,selection_command +330,1125642,"utils/nn.py",3603,0,"",python,selection_keyboard +331,1126002,"utils/nn.py",3633,0,"",python,selection_command 
+332,1126267,"utils/nn.py",3652,0,"",python,selection_command +333,1126287,"utils/nn.py",3654,0,"",python,selection_command +334,1126313,"utils/nn.py",3667,0,"",python,selection_command +335,1126347,"utils/nn.py",3682,0,"",python,selection_command +336,1126405,"utils/nn.py",3733,0,"",python,selection_command +337,1126444,"utils/nn.py",3769,0,"",python,selection_command +338,1126935,"utils/nn.py",3805,0,"",python,selection_command +339,1127260,"utils/nn.py",3838,0,"",python,selection_command +340,1127490,"utils/nn.py",3876,0,"",python,selection_command +341,1127856,"utils/nn.py",3838,0,"",python,selection_command +342,1128061,"utils/nn.py",3805,0,"",python,selection_command +343,1128640,"utils/nn.py",3838,0,"",python,selection_command +344,1128803,"utils/nn.py",3842,0,"",python,selection_command +345,1129004,"utils/nn.py",3843,0,"",python,selection_command +346,1130584,"utils/nn.py",1818,0,"",python,selection_command +347,1132320,"utils/nn.py",3843,0,"",python,selection_command +348,1139391,"utils/nn.py",3825,0,"\n ",python,content +349,1139597,"utils/nn.py",3834,0,"B",python,content +350,1139597,"utils/nn.py",3835,0,"",python,selection_keyboard +351,1140258,"utils/nn.py",3834,1,"",python,content +352,1140426,"utils/nn.py",3834,0,"b",python,content +353,1140426,"utils/nn.py",3835,0,"",python,selection_keyboard +354,1140529,"utils/nn.py",3835,0,"r",python,content +355,1140530,"utils/nn.py",3836,0,"",python,selection_keyboard +356,1140566,"utils/nn.py",3836,0,"e",python,content +357,1140567,"utils/nn.py",3837,0,"",python,selection_keyboard +358,1140614,"utils/nn.py",3837,0,"a",python,content +359,1140615,"utils/nn.py",3838,0,"",python,selection_keyboard +360,1140710,"utils/nn.py",3838,0,"k",python,content +361,1140711,"utils/nn.py",3839,0,"",python,selection_keyboard +362,1140928,"utils/nn.py",3839,0,"p",python,content +363,1140929,"utils/nn.py",3840,0,"",python,selection_keyboard +364,1140995,"utils/nn.py",3840,0,"o",python,content +365,1140995,"utils/nn.py",3841,0,"",python,selection_keyboard +366,1141028,"utils/nn.py",3841,0,"i",python,content +367,1141028,"utils/nn.py",3842,0,"",python,selection_keyboard +368,1141093,"utils/nn.py",3842,0,"n",python,content +369,1141094,"utils/nn.py",3843,0,"",python,selection_keyboard +370,1141194,"utils/nn.py",3843,0,"t",python,content +371,1141195,"utils/nn.py",3844,0,"",python,selection_keyboard +372,1141469,"utils/nn.py",3844,0,"()",python,content +373,1141469,"utils/nn.py",3845,0,"",python,selection_keyboard +374,1141549,"utils/nn.py",3845,1,")",python,content +375,1141549,"utils/nn.py",3846,0,"",python,selection_keyboard +376,1141780,"utils/nn.py",3845,0,"",python,selection_command +377,1142820,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +378,1142884,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +379,1143193,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +380,1154750,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +381,1159623,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/utils/nn.py(126)__call__()\r\n-> z = self.spatial_attention(z)\r\n",,terminal_output +382,1162088,"TERMINAL",0,0,"c",,terminal_output +383,1162199,"TERMINAL",0,0,"\r\n",,terminal_output +384,1165456,"TERMINAL",0,0,"(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(126)__call__()\r\n-> z = self.spatial_attention(z)\r\n",,terminal_output +385,1174056,"TERMINAL",0,0,"s",,terminal_output +386,1174140,"TERMINAL",0,0,"\r\n(Pdb) --Call--\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(453)__call__()\r\n-> def __call__(\r\n",,terminal_output +387,1177320,"TERMINAL",0,0,"n",,terminal_output +388,1177423,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(498)__call__()\r\n-> if rngs is None:\r\n",,terminal_output +389,1179721,"utils/nn.py",3866,0,"",python,selection_command +390,1179800,"utils/nn.py",3881,0,"",python,selection_command +391,1180209,"utils/nn.py",3864,0,"",python,selection_command +392,1180437,"utils/nn.py",1818,0,"",python,selection_command +393,1180751,"utils/nn.py",1836,0,"",python,selection_command +394,1180931,"utils/nn.py",1838,0,"",python,selection_command +395,1181096,"utils/nn.py",1841,0,"",python,selection_command +396,1181464,"utils/nn.py",1842,0,"",python,selection_command +397,1181847,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"# Copyright 2024 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n""""""Attention core modules for Flax.""""""\n\nfrom __future__ import annotations\n\nimport functools\nfrom typing import Any\nfrom collections.abc import Callable\nimport math\n\nimport jax\nimport jax.numpy as jnp\nfrom jax import lax, random\n\nfrom flax import nnx\nfrom flax.nnx import rnglib\nfrom flax.nnx.module import Module, first_from\nfrom flax.nnx.nn import initializers\nfrom flax.nnx.nn import dtypes\nfrom flax.nnx.nn.linear import (\n LinearGeneral,\n default_kernel_init,\n)\nfrom flax.nnx.nn.normalization import LayerNorm\nfrom flax.typing import (\n Dtype,\n PromoteDtypeFn,\n Shape,\n Initializer,\n PrecisionLike,\n DotGeneralT,\n)\n\nArray = jax.Array\n\n\ndef dot_product_attention_weights(\n query: Array,\n key: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention weights given query and key.\n\n Used by :func:`dot_product_attention`, which is what you'll most likely use.\n But if you want access to the attention weights for introspection, then\n you can 
directly call this function and call einsum yourself.\n\n Args:\n query: queries for calculating attention with shape of `[batch..., q_length,\n num_heads, qk_depth_per_head]`.\n key: keys for calculating attention with shape of `[batch..., kv_length,\n num_heads, qk_depth_per_head]`.\n bias: bias for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is `False`.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs and params)\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n ``nnx.Intermediate`` collection. If ``module`` is None, the attention\n weights will not be sowed.\n promote_dtype: function to promote the dtype of the arrays to the desired\n dtype. The function should accept a tuple of ``(query, key)`` and a ``dtype``\n keyword argument, and return a tuple of arrays with the promoted dtype.\n\n Returns:\n Output of shape `[batch..., num_heads, q_length, kv_length]`.\n """"""\n query, key = promote_dtype((query, key), dtype=dtype) # type: ignore[bad-unpacking]\n dtype = query.dtype\n\n assert query.ndim == key.ndim, 'q, k must have same rank.'\n assert query.shape[:-3] == key.shape[:-3], 'q, k batch dims must match.'\n assert query.shape[-2] == key.shape[-2], 'q, k num_heads must match.'\n assert query.shape[-1] == key.shape[-1], 'q, k depths must match.'\n\n # calculate attention matrix\n depth = query.shape[-1]\n query = query / jnp.sqrt(depth).astype(dtype)\n # attn weight shape is (batch..., num_heads, q_length, kv_length)\n attn_weights = jnp.einsum(\n '...qhd,...khd->...hqk', query, key, precision=precision\n )\n\n # apply attention bias: masking, dropout, proximity bias, etc.\n if bias is not None:\n attn_weights = attn_weights + bias\n # apply attention mask\n if mask is not None:\n big_neg = jnp.finfo(dtype).min\n attn_weights = jnp.where(mask, attn_weights, big_neg)\n\n # normalize the attention weights\n attn_weights = jax.nn.softmax(attn_weights).astype(dtype)\n\n if module:\n module.sow(nnx.Intermediate, 'attention_weights', attn_weights)\n\n # apply attention dropout\n if not deterministic and dropout_rate > 0.0:\n keep_prob = 1.0 - dropout_rate\n if broadcast_dropout:\n # dropout is broadcast across the batch + head dimensions\n dropout_shape = tuple([1] * (key.ndim - 2)) + attn_weights.shape[-2:]\n keep = random.bernoulli(dropout_rng, keep_prob, dropout_shape) # type: ignore\n else:\n keep = random.bernoulli(dropout_rng, keep_prob, attn_weights.shape) # type: ignore\n multiplier = keep.astype(dtype) / jnp.asarray(keep_prob, dtype=dtype)\n attn_weights = attn_weights * multiplier\n\n return attn_weights\n\n\ndef dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n 
deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n Will use the more optimized `jax.nn.dot_product_attention` if dropout is\n not activated and `module=None`.\n\n .. note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n\n Args:\n query: queries for calculating attention with shape of ``[batch..., q_length,\n num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n value: values to be used in attention with shape of ``[batch..., kv_length,\n num_heads, v_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is `False`.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs)\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n ``nnx.Intermediate`` collection. If ``module`` is None, the attention\n weights will not be sowed.\n promote_dtype: function to promote the dtype of the arrays to the desired\n dtype. 
The function should accept a tuple of ``(query, key, value)`` and a\n ``dtype`` keyword argument, and return a tuple of arrays with the promoted\n dtype.\n\n Returns:\n Output of shape `[batch..., q_length, num_heads, v_depth_per_head]`.\n """"""\n query, key, value = promote_dtype((query, key, value), dtype=dtype) # type: ignore[bad-unpacking]\n dtype = query.dtype\n assert key.ndim == query.ndim == value.ndim, 'q, k, v must have same rank.'\n assert (\n query.shape[:-3] == key.shape[:-3] == value.shape[:-3]\n ), 'q, k, v batch dims must match.'\n assert (\n query.shape[-2] == key.shape[-2] == value.shape[-2]\n ), 'q, k, v num_heads must match.'\n assert key.shape[-3] == value.shape[-3], 'k, v lengths must match.'\n\n # Criteria that invoke the more optimized dot product attention\n if dropout_rate == 0.0 and module == None:\n # make sure qkv batch are compressed to one dim\n query_shape = query.shape\n if len(query_shape) > 4:\n def reshape_4d(x):\n return jnp.reshape(x, (math.prod(x.shape[:-3]), *x.shape[-3:]))\n query, key, value, bias, mask = jax.tree.map(\n reshape_4d, (query, key, value, bias, mask))\n if mask is not None:\n mask = mask.astype(jnp.bool)\n out = jax.nn.dot_product_attention(query, key, value, bias, mask)\n if len(query_shape) > 4:\n out = jnp.reshape(out, query_shape)\n return out\n\n # compute attention weights\n attn_weights = dot_product_attention_weights(\n query,\n key,\n bias,\n mask,\n broadcast_dropout,\n dropout_rng,\n dropout_rate,\n deterministic,\n dtype,\n precision,\n module,\n )\n\n # return weighted sum over values for each query position\n return jnp.einsum(\n '...hqk,...khd->...qhd', attn_weights, value, precision=precision\n )\n\n\nclass MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n\n def init_cache(self, input_shape: Shape, dtype: Dtype = jnp.float32):\n """"""Initializes cache for fast autoregressive decoding. When\n ``decode=True``, this method must be called first before performing\n forward inference. When in decode mode, only one token must be passed\n at a time.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax.numpy as jnp\n ...\n >>> batch_size = 5\n >>> embed_dim = 3\n >>> x = jnp.ones((batch_size, 1, embed_dim)) # single token\n ...\n >>> model_nnx = nnx.MultiHeadAttention(\n ... num_heads=2,\n ... in_features=3,\n ... qkv_features=6,\n ... out_features=6,\n ... decode=True,\n ... rngs=nnx.Rngs(42),\n ... 
)\n ...\n >>> # out_nnx = model_nnx(x) <-- throws an error because cache isn't initialized\n ...\n >>> model_nnx.init_cache(x.shape)\n >>> out_nnx = model_nnx(x)\n """"""\n cache_shape = (*input_shape[:-1], self.num_heads, self.head_dim)\n self.cached_key = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cached_value = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cache_index = nnx.Cache(jnp.array(0, dtype=jnp.int32))\n\n\n# mask-making utility functions\n\n\ndef make_attention_mask(\n query_input: Array,\n key_input: Array,\n pairwise_fn: Callable[..., Any] = jnp.multiply,\n extra_batch_dims: int = 0,\n dtype: Dtype = jnp.float32,\n):\n """"""Mask-making helper for attention weights.\n\n In case of 1d inputs (i.e., `[batch..., len_q]`, `[batch..., len_kv]`, the\n attention weights will be `[batch..., heads, len_q, len_kv]` and this\n function will produce `[batch..., 1, len_q, len_kv]`.\n\n Args:\n query_input: a batched, flat input of query_length size\n key_input: a batched, flat input of key_length size\n pairwise_fn: broadcasting elementwise comparison function\n extra_batch_dims: number of extra batch dims to add singleton axes for, none\n by default\n dtype: mask return dtype\n\n Returns:\n A `[batch..., 1, len_q, len_kv]` shaped mask for 1d attention.\n """"""\n mask = pairwise_fn(\n jnp.expand_dims(query_input, axis=-1), jnp.expand_dims(key_input, axis=-2)\n )\n mask = jnp.expand_dims(mask, axis=-3)\n mask = jnp.expand_dims(mask, axis=tuple(range(extra_batch_dims)))\n return mask.astype(dtype)\n\n\ndef make_causal_mask(\n x: Array, extra_batch_dims: int = 0, dtype: Dtype = jnp.float32\n) -> Array:\n """"""Make a causal mask for self-attention.\n\n In case of 1d inputs (i.e., `[batch..., len]`, the self-attention weights\n will be `[batch..., heads, len, len]` and this function will produce a\n causal mask of shape `[batch..., 1, len, len]`.\n\n Args:\n x: input array of shape `[batch..., len]`\n extra_batch_dims: number of batch dims to add singleton axes for, none by\n default\n dtype: mask return dtype\n\n Returns:\n A `[batch..., 1, len, len]` shaped causal mask for 1d attention.\n """"""\n idxs = jnp.broadcast_to(jnp.arange(x.shape[-1], dtype=jnp.int32), x.shape)\n return make_attention_mask(\n idxs,\n idxs,\n jnp.greater_equal,\n extra_batch_dims=extra_batch_dims,\n dtype=dtype,\n )\n\n\ndef combine_masks(\n *masks: Array | None, dtype: Dtype = jnp.float32\n) -> Array | None:\n """"""Combine attention masks.\n\n Args:\n *masks: set of attention mask arguments to combine, some can be None.\n dtype: dtype for the returned mask.\n\n Returns:\n Combined mask, reduced by logical and, returns None if no masks given.\n """"""\n masks_list = [m for m in masks if m is not None]\n if not masks_list:\n return None\n assert all(\n map(lambda x: x.ndim == masks_list[0].ndim, masks_list)\n ), f'masks must have same rank: {tuple(map(lambda x: x.ndim, masks_list))}'\n mask, *other_masks = masks_list\n for other_mask in other_masks:\n mask = jnp.logical_and(mask, other_mask)\n return mask.astype(dtype)\n",python,tab +398,1181848,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9464,0,"",python,selection_command +399,1182655,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",12332,0,"",python,selection_command +400,1183822,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14092,0,"",python,selection_command +401,1196171,"TERMINAL",0,0,"n",,terminal_output +402,1196357,"TERMINAL",0,0,"\r\n(Pdb) > 
/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(499)__call__()\r\n-> rngs = self.rngs\r\n",,terminal_output +403,1198918,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"# Copyright 2024 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n""""""Attention core modules for Flax.""""""\n\nfrom __future__ import annotations\n\nimport functools\nfrom typing import Any\nfrom collections.abc import Callable\nimport math\n\nimport jax\nimport jax.numpy as jnp\nfrom jax import lax, random\n\nfrom flax import nnx\nfrom flax.nnx import rnglib\nfrom flax.nnx.module import Module, first_from\nfrom flax.nnx.nn import initializers\nfrom flax.nnx.nn import dtypes\nfrom flax.nnx.nn.linear import (\n LinearGeneral,\n default_kernel_init,\n)\nfrom flax.nnx.nn.normalization import LayerNorm\nfrom flax.typing import (\n Dtype,\n PromoteDtypeFn,\n Shape,\n Initializer,\n PrecisionLike,\n DotGeneralT,\n)\n\nArray = jax.Array\n\n\ndef dot_product_attention_weights(\n query: Array,\n key: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention weights given query and key.\n\n Used by :func:`dot_product_attention`, which is what you'll most likely use.\n But if you want access to the attention weights for introspection, then\n you can directly call this function and call einsum yourself.\n\n Args:\n query: queries for calculating attention with shape of `[batch..., q_length,\n num_heads, qk_depth_per_head]`.\n key: keys for calculating attention with shape of `[batch..., kv_length,\n num_heads, qk_depth_per_head]`.\n bias: bias for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is `False`.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs and params)\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n ``nnx.Intermediate`` collection. 
If ``module`` is None, the attention\n weights will not be sowed.\n promote_dtype: function to promote the dtype of the arrays to the desired\n dtype. The function should accept a tuple of ``(query, key)`` and a ``dtype``\n keyword argument, and return a tuple of arrays with the promoted dtype.\n\n Returns:\n Output of shape `[batch..., num_heads, q_length, kv_length]`.\n """"""\n query, key = promote_dtype((query, key), dtype=dtype) # type: ignore[bad-unpacking]\n dtype = query.dtype\n\n assert query.ndim == key.ndim, 'q, k must have same rank.'\n assert query.shape[:-3] == key.shape[:-3], 'q, k batch dims must match.'\n assert query.shape[-2] == key.shape[-2], 'q, k num_heads must match.'\n assert query.shape[-1] == key.shape[-1], 'q, k depths must match.'\n\n # calculate attention matrix\n depth = query.shape[-1]\n query = query / jnp.sqrt(depth).astype(dtype)\n # attn weight shape is (batch..., num_heads, q_length, kv_length)\n attn_weights = jnp.einsum(\n '...qhd,...khd->...hqk', query, key, precision=precision\n )\n\n # apply attention bias: masking, dropout, proximity bias, etc.\n if bias is not None:\n attn_weights = attn_weights + bias\n # apply attention mask\n if mask is not None:\n big_neg = jnp.finfo(dtype).min\n attn_weights = jnp.where(mask, attn_weights, big_neg)\n\n # normalize the attention weights\n attn_weights = jax.nn.softmax(attn_weights).astype(dtype)\n\n if module:\n module.sow(nnx.Intermediate, 'attention_weights', attn_weights)\n\n # apply attention dropout\n if not deterministic and dropout_rate > 0.0:\n keep_prob = 1.0 - dropout_rate\n if broadcast_dropout:\n # dropout is broadcast across the batch + head dimensions\n dropout_shape = tuple([1] * (key.ndim - 2)) + attn_weights.shape[-2:]\n keep = random.bernoulli(dropout_rng, keep_prob, dropout_shape) # type: ignore\n else:\n keep = random.bernoulli(dropout_rng, keep_prob, attn_weights.shape) # type: ignore\n multiplier = keep.astype(dtype) / jnp.asarray(keep_prob, dtype=dtype)\n attn_weights = attn_weights * multiplier\n\n return attn_weights\n\n\ndef dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n Will use the more optimized `jax.nn.dot_product_attention` if dropout is\n not activated and `module=None`.\n\n .. note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n\n Args:\n query: queries for calculating attention with shape of ``[batch..., q_length,\n num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n value: values to be used in attention with shape of ``[batch..., kv_length,\n num_heads, v_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. 
This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is `False`.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs)\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n ``nnx.Intermediate`` collection. If ``module`` is None, the attention\n weights will not be sowed.\n promote_dtype: function to promote the dtype of the arrays to the desired\n dtype. The function should accept a tuple of ``(query, key, value)`` and a\n ``dtype`` keyword argument, and return a tuple of arrays with the promoted\n dtype.\n\n Returns:\n Output of shape `[batch..., q_length, num_heads, v_depth_per_head]`.\n """"""\n query, key, value = promote_dtype((query, key, value), dtype=dtype) # type: ignore[bad-unpacking]\n dtype = query.dtype\n assert key.ndim == query.ndim == value.ndim, 'q, k, v must have same rank.'\n assert (\n query.shape[:-3] == key.shape[:-3] == value.shape[:-3]\n ), 'q, k, v batch dims must match.'\n assert (\n query.shape[-2] == key.shape[-2] == value.shape[-2]\n ), 'q, k, v num_heads must match.'\n assert key.shape[-3] == value.shape[-3], 'k, v lengths must match.'\n\n # Criteria that invoke the more optimized dot product attention\n if dropout_rate == 0.0 and module == None:\n # make sure qkv batch are compressed to one dim\n query_shape = query.shape\n if len(query_shape) > 4:\n def reshape_4d(x):\n return jnp.reshape(x, (math.prod(x.shape[:-3]), *x.shape[-3:]))\n query, key, value, bias, mask = jax.tree.map(\n reshape_4d, (query, key, value, bias, mask))\n if mask is not None:\n mask = mask.astype(jnp.bool)\n out = jax.nn.dot_product_attention(query, key, value, bias, mask)\n if len(query_shape) > 4:\n out = jnp.reshape(out, query_shape)\n return out\n\n # compute attention weights\n attn_weights = dot_product_attention_weights(\n query,\n key,\n bias,\n mask,\n broadcast_dropout,\n dropout_rng,\n dropout_rate,\n deterministic,\n dtype,\n precision,\n module,\n )\n\n # return weighted sum over values for each query position\n return jnp.einsum(\n '...hqk,...khd->...qhd', attn_weights, value, precision=precision\n )\n\n\nclass MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n\n def init_cache(self, input_shape: Shape, dtype: Dtype = jnp.float32):\n """"""Initializes cache for fast autoregressive decoding. When\n ``decode=True``, this method must be called first before performing\n forward inference. When in decode mode, only one token must be passed\n at a time.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax.numpy as jnp\n ...\n >>> batch_size = 5\n >>> embed_dim = 3\n >>> x = jnp.ones((batch_size, 1, embed_dim)) # single token\n ...\n >>> model_nnx = nnx.MultiHeadAttention(\n ... num_heads=2,\n ... in_features=3,\n ... qkv_features=6,\n ... out_features=6,\n ... decode=True,\n ... rngs=nnx.Rngs(42),\n ... 
)\n ...\n >>> # out_nnx = model_nnx(x) <-- throws an error because cache isn't initialized\n ...\n >>> model_nnx.init_cache(x.shape)\n >>> out_nnx = model_nnx(x)\n """"""\n cache_shape = (*input_shape[:-1], self.num_heads, self.head_dim)\n self.cached_key = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cached_value = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cache_index = nnx.Cache(jnp.array(0, dtype=jnp.int32))\n\n\n# mask-making utility functions\n\n\ndef make_attention_mask(\n query_input: Array,\n key_input: Array,\n pairwise_fn: Callable[..., Any] = jnp.multiply,\n extra_batch_dims: int = 0,\n dtype: Dtype = jnp.float32,\n):\n """"""Mask-making helper for attention weights.\n\n In case of 1d inputs (i.e., `[batch..., len_q]`, `[batch..., len_kv]`, the\n attention weights will be `[batch..., heads, len_q, len_kv]` and this\n function will produce `[batch..., 1, len_q, len_kv]`.\n\n Args:\n query_input: a batched, flat input of query_length size\n key_input: a batched, flat input of key_length size\n pairwise_fn: broadcasting elementwise comparison function\n extra_batch_dims: number of extra batch dims to add singleton axes for, none\n by default\n dtype: mask return dtype\n\n Returns:\n A `[batch..., 1, len_q, len_kv]` shaped mask for 1d attention.\n """"""\n mask = pairwise_fn(\n jnp.expand_dims(query_input, axis=-1), jnp.expand_dims(key_input, axis=-2)\n )\n mask = jnp.expand_dims(mask, axis=-3)\n mask = jnp.expand_dims(mask, axis=tuple(range(extra_batch_dims)))\n return mask.astype(dtype)\n\n\ndef make_causal_mask(\n x: Array, extra_batch_dims: int = 0, dtype: Dtype = jnp.float32\n) -> Array:\n """"""Make a causal mask for self-attention.\n\n In case of 1d inputs (i.e., `[batch..., len]`, the self-attention weights\n will be `[batch..., heads, len, len]` and this function will produce a\n causal mask of shape `[batch..., 1, len, len]`.\n\n Args:\n x: input array of shape `[batch..., len]`\n extra_batch_dims: number of batch dims to add singleton axes for, none by\n default\n dtype: mask return dtype\n\n Returns:\n A `[batch..., 1, len, len]` shaped causal mask for 1d attention.\n """"""\n idxs = jnp.broadcast_to(jnp.arange(x.shape[-1], dtype=jnp.int32), x.shape)\n return make_attention_mask(\n idxs,\n idxs,\n jnp.greater_equal,\n extra_batch_dims=extra_batch_dims,\n dtype=dtype,\n )\n\n\ndef combine_masks(\n *masks: Array | None, dtype: Dtype = jnp.float32\n) -> Array | None:\n """"""Combine attention masks.\n\n Args:\n *masks: set of attention mask arguments to combine, some can be None.\n dtype: dtype for the returned mask.\n\n Returns:\n Combined mask, reduced by logical and, returns None if no masks given.\n """"""\n masks_list = [m for m in masks if m is not None]\n if not masks_list:\n return None\n assert all(\n map(lambda x: x.ndim == masks_list[0].ndim, masks_list)\n ), f'masks must have same rank: {tuple(map(lambda x: x.ndim, masks_list))}'\n mask, *other_masks = masks_list\n for other_mask in other_masks:\n mask = jnp.logical_and(mask, other_mask)\n return mask.astype(dtype)\n",python,tab +404,1198918,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18951,0,"",python,selection_command +405,1203445,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +406,1222766,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab 
+407,1222767,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16883,0,"",python,selection_command +408,1249472,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +409,1254275,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",12408,0,"",python,selection_mouse +410,1254276,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",12407,0,"",python,selection_command +411,1255075,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",10880,0,"",python,selection_command +412,1255663,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9543,0,"",python,selection_command +413,1255838,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8980,0,"",python,selection_command +414,1256134,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7678,0,"",python,selection_command +415,1256395,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6046,0,"",python,selection_command +416,1256569,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5038,0,"",python,selection_command +417,1256716,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",3956,0,"",python,selection_command +418,1256887,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",2250,0,"",python,selection_command +419,1256958,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",1210,0,"",python,selection_command +420,1257218,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",621,0,"",python,selection_command +421,1257242,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,selection_command +422,1259302,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",1231,0,"",python,selection_command +423,1259552,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,0,"",python,selection_command +424,1260353,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8650,0,"",python,selection_command +425,1261088,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",12499,0,"",python,selection_command +426,1261602,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16885,0,"",python,selection_command +427,1262127,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23108,0,"",python,selection_command +428,1263635,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16885,0,"",python,selection_command +429,1263798,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",12499,0,"",python,selection_command +430,1265043,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16885,0,"",python,selection_command +431,1265542,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23108,0,"",python,selection_command +432,1266706,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16885,0,"",python,selection_command +433,1267754,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19381,0,"",python,selection_command +434,1268973,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19376,0,"",python,selection_command +435,1269220,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19354,0,"",python,selection_command +436,1269269,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19329,0,"",python,selection_command +437,1269290,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19303,0,"",python,selection_command +438,1269317,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19293,0,"",python,selection_command 
+439,1269341,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19231,0,"",python,selection_command +440,1269373,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19148,0,"",python,selection_command +441,1269407,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19082,0,"",python,selection_command +442,1269440,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19056,0,"",python,selection_command +443,1269474,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19025,0,"",python,selection_command +444,1272096,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20279,0,"",python,selection_command +445,1272712,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,selection_command +446,1272774,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,0,"",python,selection_command +447,1273491,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20279,0,"",python,selection_command +448,1274008,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22252,0,"",python,selection_command +449,1274423,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,0,"",python,selection_command +450,1276224,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22252,0,"",python,selection_command +451,1276396,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20279,0,"",python,selection_command +452,1276802,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,0,"",python,selection_command +453,1278170,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",12329,0,"",python,selection_command +454,1279798,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14061,0,"",python,selection_command +455,1280947,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16123,0,"",python,selection_command +456,1281611,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18636,0,"",python,selection_command +457,1285514,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18672,0,"",python,selection_command +458,1285757,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18753,0,"",python,selection_command +459,1285780,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18835,0,"",python,selection_command +460,1285838,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18869,0,"",python,selection_command +461,1285853,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18878,0,"",python,selection_command +462,1285889,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18891,0,"",python,selection_command +463,1285912,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18949,0,"",python,selection_command +464,1285946,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18959,0,"",python,selection_command +465,1285981,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18980,0,"",python,selection_command +466,1286011,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18995,0,"",python,selection_command +467,1286044,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19004,0,"",python,selection_command +468,1286078,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19029,0,"",python,selection_command +469,1286111,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19060,0,"",python,selection_command +470,1286146,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19086,0,"",python,selection_command 
+471,1286178,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19152,0,"",python,selection_command +472,1286213,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19235,0,"",python,selection_command +473,1286245,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19297,0,"",python,selection_command +474,1286279,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19307,0,"",python,selection_command +475,1286311,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19333,0,"",python,selection_command +476,1286345,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19358,0,"",python,selection_command +477,1286378,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19376,0,"",python,selection_command +478,1286411,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19385,0,"",python,selection_command +479,1286445,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19432,0,"",python,selection_command +480,1286478,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19456,0,"",python,selection_command +481,1286512,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19523,0,"",python,selection_command +482,1286544,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19571,0,"",python,selection_command +483,1286577,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19573,0,"",python,selection_command +484,1286612,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19582,0,"",python,selection_command +485,1286643,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19615,0,"",python,selection_command +486,1286677,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19644,0,"",python,selection_command +487,1286712,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19669,0,"",python,selection_command +488,1286746,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19678,0,"",python,selection_command +489,1286779,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19704,0,"",python,selection_command +490,1286811,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19771,0,"",python,selection_command +491,1286845,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19849,0,"",python,selection_command +492,1286938,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19771,0,"",python,selection_command +493,1287202,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19704,0,"",python,selection_command +494,1287235,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19678,0,"",python,selection_command +495,1287252,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19669,0,"",python,selection_command +496,1291053,"TERMINAL",0,0,"n",,terminal_output +497,1291352,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(501)__call__()\r\n-> if inputs_k is None:\r\n",,terminal_output +498,1293161,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(502)__call__()\r\n-> if inputs_v is not None:\r\n",,terminal_output +499,1294410,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(508)__call__()\r\n-> inputs_k = inputs_q\r\n",,terminal_output +500,1294722,"TERMINAL",0,0,"\r\n(Pdb) > 
/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(509)__call__()\r\n-> if inputs_v is None:\r\n",,terminal_output +501,1295109,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(510)__call__()\r\n-> inputs_v = inputs_k\r\n",,terminal_output +502,1295625,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(512)__call__()\r\n-> if inputs_q.shape[-1] != self.in_features:\r\n",,terminal_output +503,1296109,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(518)__call__()\r\n-> query = self.query(inputs_q)\r\n",,terminal_output +504,1299609,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(519)__call__()\r\n-> key = self.key(inputs_k)\r\n",,terminal_output +505,1300204,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(520)__call__()\r\n-> value = self.value(inputs_v)\r\n",,terminal_output +506,1300724,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(522)__call__()\r\n-> if self.normalize_qk:\r\n",,terminal_output +507,1301497,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(531)__call__()\r\n-> decode = first_from(\r\n",,terminal_output +508,1302982,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20649,0,"",python,selection_command +509,1305061,"TERMINAL",0,0,"n",,terminal_output +510,1305499,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(532)__call__()\r\n-> decode,\r\n",,terminal_output +511,1310415,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21945,0,"",python,selection_command +512,1315528,"TERMINAL",0,0,"n",,terminal_output +513,1315758,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(533)__call__()\r\n-> self.decode,\r\n",,terminal_output +514,1322077,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(534)__call__()\r\n-> error_msg=""""""No `decode` argument was provided to MultiHeadAttention\r\n",,terminal_output +515,1322645,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(531)__call__()\r\n-> decode = first_from(\r\n",,terminal_output +516,1324160,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(538)__call__()\r\n-> if decode:\r\n",,terminal_output +517,1326099,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(583)__call__()\r\n-> self.dropout_rate > 0.0\r\n",,terminal_output +518,1337629,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21946,0,"",python,selection_command +519,1337868,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21955,0,"",python,selection_command +520,1337891,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21985,0,"",python,selection_command 
+521,1337917,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22042,0,"",python,selection_command +522,1337948,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22076,0,"",python,selection_command +523,1337981,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22099,0,"",python,selection_command +524,1338016,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22127,0,"",python,selection_command +525,1338048,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22211,0,"",python,selection_command +526,1338082,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22286,0,"",python,selection_command +527,1338115,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22294,0,"",python,selection_command +528,1338149,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22322,0,"",python,selection_command +529,1338189,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22347,0,"",python,selection_command +530,1338217,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22375,0,"",python,selection_command +531,1338249,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22436,0,"",python,selection_command +532,1338284,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22511,0,"",python,selection_command +533,1338327,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22523,0,"",python,selection_command +534,1338351,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22560,0,"",python,selection_command +535,1338382,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22572,0,"",python,selection_command +536,1338427,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22599,0,"",python,selection_command +537,1338449,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22609,0,"",python,selection_command +538,1338484,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22636,0,"",python,selection_command +539,1338517,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22661,0,"",python,selection_command +540,1338593,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22636,0,"",python,selection_command +541,1338861,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22609,0,"",python,selection_command +542,1338879,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22599,0,"",python,selection_command +543,1338912,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22572,0,"",python,selection_command +544,1338943,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22560,0,"",python,selection_command +545,1338981,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22523,0,"",python,selection_command +546,1339014,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22511,0,"",python,selection_command +547,1339049,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22436,0,"",python,selection_command +548,1339099,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22375,0,"",python,selection_command +549,1339139,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22436,0,"",python,selection_command +550,1339399,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22511,0,"",python,selection_command +551,1339423,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22523,0,"",python,selection_command +552,1339455,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22560,0,"",python,selection_command 
+553,1339489,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22572,0,"",python,selection_command +554,1339515,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22599,0,"",python,selection_command +555,1339550,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22609,0,"",python,selection_command +556,1339582,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22636,0,"",python,selection_command +557,1339615,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22661,0,"",python,selection_command +558,1341041,"TERMINAL",0,0,"n",,terminal_output +559,1341264,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(601)__call__()\r\n-> deterministic = True\r\n",,terminal_output +560,1341643,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(602)__call__()\r\n-> dropout_rng = None\r\n",,terminal_output +561,1342810,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(605)__call__()\r\n-> x = self.attention_fn(\r\n",,terminal_output +562,1365570,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22662,0,"",python,selection_command +563,1365735,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22684,0,"",python,selection_command +564,1372136,"TERMINAL",0,0,"s",,terminal_output +565,1372299,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(606)__call__()\r\n-> query,\r\n",,terminal_output +566,1375138,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(607)__call__()\r\n-> key,\r\n",,terminal_output +567,1375589,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(608)__call__()\r\n-> value,\r\n",,terminal_output +568,1376006,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(609)__call__()\r\n-> mask=mask,\r\n",,terminal_output +569,1377925,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(610)__call__()\r\n-> dropout_rng=dropout_rng,\r\n",,terminal_output +570,1378351,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(611)__call__()\r\n-> dropout_rate=self.dropout_rate,\r\n",,terminal_output +571,1378711,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(612)__call__()\r\n-> broadcast_dropout=self.broadcast_dropout,\r\n",,terminal_output +572,1380138,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(613)__call__()\r\n-> deterministic=deterministic,\r\n",,terminal_output +573,1380524,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(614)__call__()\r\n-> dtype=self.dtype,\r\n",,terminal_output +574,1380924,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(615)__call__()\r\n-> precision=self.precision,\r\n",,terminal_output +575,1381294,"TERMINAL",0,0,"\r\n(Pdb) > 
/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(616)__call__()\r\n-> module=self if sow_weights else None,\r\n",,terminal_output +576,1382843,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(605)__call__()\r\n-> x = self.attention_fn(\r\n",,terminal_output +577,1385260,"TERMINAL",0,0,"s",,terminal_output +578,1385579,"TERMINAL",0,0,"\r\n(Pdb) --Call--\r\n> /fast/home/franz.srambical/jafar/utils/nn.py(289)attention_fn()\r\n-> def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\r\n",,terminal_output +579,1390600,"utils/nn.py",0,0,"",python,tab +580,1393396,"utils/nn.py",2092,0,"",python,selection_command +581,1394337,"utils/nn.py",2119,0,"",python,selection_command +582,1395377,"utils/nn.py",8241,0,"",python,selection_command +583,1396585,"utils/nn.py",8329,0,"",python,selection_command +584,1396840,"utils/nn.py",8337,0,"",python,selection_command +585,1397641,"utils/nn.py",8404,0,"",python,selection_command +586,1397887,"utils/nn.py",8409,0,"",python,selection_command +587,1397906,"utils/nn.py",8505,0,"",python,selection_command +588,1397938,"utils/nn.py",8580,0,"",python,selection_command +589,1397963,"utils/nn.py",8585,0,"",python,selection_command +590,1397996,"utils/nn.py",8670,0,"",python,selection_command +591,1398033,"utils/nn.py",8768,0,"",python,selection_command +592,1398911,"utils/nn.py",8808,0,"",python,selection_command +593,1399076,"utils/nn.py",8812,0,"",python,selection_command +594,1399213,"utils/nn.py",8817,0,"",python,selection_command +595,1401533,"utils/nn.py",10065,0,"",python,selection_command +596,1402069,"utils/nn.py",10783,0,"",python,selection_command +597,1402931,"utils/nn.py",10759,0,"",python,selection_command +598,1403182,"utils/nn.py",10758,0,"",python,selection_command +599,1403209,"utils/nn.py",10679,0,"",python,selection_command +600,1403243,"utils/nn.py",10669,0,"",python,selection_command +601,1403274,"utils/nn.py",10636,0,"",python,selection_command +602,1403309,"utils/nn.py",10593,0,"",python,selection_command +603,1403340,"utils/nn.py",10567,0,"",python,selection_command +604,1403374,"utils/nn.py",10541,0,"",python,selection_command +605,1403407,"utils/nn.py",10513,0,"",python,selection_command +606,1403440,"utils/nn.py",10489,0,"",python,selection_command +607,1403474,"utils/nn.py",10461,0,"",python,selection_command +608,1403507,"utils/nn.py",10411,0,"",python,selection_command +609,1403541,"utils/nn.py",10341,0,"",python,selection_command +610,1403574,"utils/nn.py",10340,0,"",python,selection_command +611,1403608,"utils/nn.py",10269,0,"",python,selection_command +612,1403640,"utils/nn.py",10268,0,"",python,selection_command +613,1403676,"utils/nn.py",10182,0,"",python,selection_command +614,1403707,"utils/nn.py",10172,0,"",python,selection_command +615,1403741,"utils/nn.py",10086,0,"",python,selection_command +616,1403774,"utils/nn.py",10066,0,"",python,selection_command +617,1403807,"utils/nn.py",10065,0,"",python,selection_command +618,1403841,"utils/nn.py",9989,0,"",python,selection_command +619,1403874,"utils/nn.py",9913,0,"",python,selection_command +620,1403907,"utils/nn.py",9828,0,"",python,selection_command +621,1403941,"utils/nn.py",9827,0,"",python,selection_command +622,1403974,"utils/nn.py",9784,0,"",python,selection_command +623,1404008,"utils/nn.py",9745,0,"",python,selection_command +624,1404042,"utils/nn.py",9702,0,"",python,selection_command +625,1404827,"TERMINAL",0,0,"n",,terminal_output 
+626,1405085,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(290)attention_fn()\r\n-> implementation = ""cudnn"" if use_flash_attention else None\r\n",,terminal_output +627,1409198,"TERMINAL",0,0,"q",,terminal_output +628,1409264,"TERMINAL",0,0,"u",,terminal_output +629,1409342,"TERMINAL",0,0,"e",,terminal_output +630,1409412,"TERMINAL",0,0,"r",,terminal_output +631,1409479,"TERMINAL",0,0,"y",,terminal_output +632,1409705,"TERMINAL",0,0,".",,terminal_output +633,1409849,"TERMINAL",0,0,"s",,terminal_output +634,1409929,"TERMINAL",0,0,"h",,terminal_output +635,1410033,"TERMINAL",0,0,"a",,terminal_output +636,1410112,"TERMINAL",0,0,"p",,terminal_output +637,1410317,"TERMINAL",0,0,"e",,terminal_output +638,1410524,"TERMINAL",0,0,"\r\n(Pdb) (1, 2, 61, 8, 64)\r\n",,terminal_output +639,1419809,"utils/nn.py",9701,0,"",python,selection_command +640,1420059,"utils/nn.py",9648,0,"",python,selection_command +641,1420091,"utils/nn.py",9589,0,"",python,selection_command +642,1420118,"utils/nn.py",9550,0,"",python,selection_command +643,1420152,"utils/nn.py",9549,0,"",python,selection_command +644,1420185,"utils/nn.py",9506,0,"",python,selection_command +645,1420218,"utils/nn.py",9469,0,"",python,selection_command +646,1420252,"utils/nn.py",9468,0,"",python,selection_command +647,1420287,"utils/nn.py",9402,0,"",python,selection_command +648,1420318,"utils/nn.py",9388,0,"",python,selection_command +649,1420353,"utils/nn.py",9307,0,"",python,selection_command +650,1420386,"utils/nn.py",9270,0,"",python,selection_command +651,1420420,"utils/nn.py",9223,0,"",python,selection_command +652,1420452,"utils/nn.py",9141,0,"",python,selection_command +653,1420491,"utils/nn.py",9140,0,"",python,selection_command +654,1420520,"utils/nn.py",9069,0,"",python,selection_command +655,1420558,"utils/nn.py",9048,0,"",python,selection_command +656,1420585,"utils/nn.py",9047,0,"",python,selection_command +657,1420617,"utils/nn.py",8980,0,"",python,selection_command +658,1420650,"utils/nn.py",8953,0,"",python,selection_command +659,1420685,"utils/nn.py",8952,0,"",python,selection_command +660,1420782,"utils/nn.py",8886,0,"",python,selection_command +661,1420952,"utils/nn.py",8813,0,"",python,selection_command +662,1421158,"utils/nn.py",8885,0,"\n ",python,content +663,1422076,"utils/nn.py",8894,0,"#",python,content +664,1422077,"utils/nn.py",8895,0,"",python,selection_keyboard +665,1422124,"utils/nn.py",8895,0," ",python,content +666,1422125,"utils/nn.py",8896,0,"",python,selection_keyboard +667,1422732,"utils/nn.py",8896,0,"q",python,content +668,1422732,"utils/nn.py",8897,0,"",python,selection_keyboard +669,1422831,"utils/nn.py",8897,0,"u",python,content +670,1422831,"utils/nn.py",8898,0,"",python,selection_keyboard +671,1422897,"utils/nn.py",8898,0,"e",python,content +672,1422897,"utils/nn.py",8899,0,"",python,selection_keyboard +673,1422963,"utils/nn.py",8899,0,"r",python,content +674,1422963,"utils/nn.py",8900,0,"",python,selection_keyboard +675,1423146,"utils/nn.py",8900,0,"y",python,content +676,1423146,"utils/nn.py",8901,0,"",python,selection_keyboard +677,1423431,"utils/nn.py",8901,0,".",python,content +678,1423432,"utils/nn.py",8902,0,"",python,selection_keyboard +679,1423577,"utils/nn.py",8902,0,"s",python,content +680,1423636,"utils/nn.py",8903,0,"h",python,content +681,1423636,"utils/nn.py",8904,0,"",python,selection_keyboard +682,1423695,"utils/nn.py",8904,0,"a",python,content +683,1423695,"utils/nn.py",8905,0,"",python,selection_keyboard 
+684,1423782,"utils/nn.py",8905,0,"p",python,content +685,1423783,"utils/nn.py",8906,0,"",python,selection_keyboard +686,1423880,"utils/nn.py",8906,0,"e",python,content +687,1423880,"utils/nn.py",8907,0,"",python,selection_keyboard +688,1424838,"utils/nn.py",8907,0," ",python,content +689,1424839,"utils/nn.py",8908,0,"",python,selection_keyboard +690,1425546,"utils/nn.py",8908,0,"()",python,content +691,1425547,"utils/nn.py",8909,0,"",python,selection_keyboard +692,1426734,"utils/nn.py",8909,0,"1",python,content +693,1426815,"utils/nn.py",8910,0,",",python,content +694,1426815,"utils/nn.py",8911,0,"",python,selection_keyboard +695,1427000,"utils/nn.py",8911,0," ",python,content +696,1427000,"utils/nn.py",8912,0,"",python,selection_keyboard +697,1427185,"utils/nn.py",8912,0,"2",python,content +698,1427185,"utils/nn.py",8913,0,"",python,selection_keyboard +699,1427316,"utils/nn.py",8913,0,",",python,content +700,1427317,"utils/nn.py",8914,0,"",python,selection_keyboard +701,1427469,"utils/nn.py",8914,0," ",python,content +702,1427470,"utils/nn.py",8915,0,"",python,selection_keyboard +703,1427986,"utils/nn.py",8915,0,"6",python,content +704,1427987,"utils/nn.py",8916,0,"",python,selection_keyboard +705,1428173,"utils/nn.py",8916,0,"1",python,content +706,1428301,"utils/nn.py",8917,0,",",python,content +707,1428301,"utils/nn.py",8918,0,"",python,selection_keyboard +708,1428486,"utils/nn.py",8918,0," ",python,content +709,1428487,"utils/nn.py",8919,0,"",python,selection_keyboard +710,1428792,"utils/nn.py",8919,0,"8",python,content +711,1428793,"utils/nn.py",8920,0,"",python,selection_keyboard +712,1429005,"utils/nn.py",8920,0,",",python,content +713,1429005,"utils/nn.py",8921,0,"",python,selection_keyboard +714,1429082,"utils/nn.py",8921,0," ",python,content +715,1429082,"utils/nn.py",8922,0,"",python,selection_keyboard +716,1430249,"utils/nn.py",8922,0,"6",python,content +717,1430249,"utils/nn.py",8923,0,"",python,selection_keyboard +718,1430304,"utils/nn.py",8923,0,"4",python,content +719,1430304,"utils/nn.py",8924,0,"",python,selection_keyboard +720,1430547,"utils/nn.py",8923,0,"",python,selection_command +721,1438542,"utils/nn.py",8922,0,"",python,selection_command +722,1438689,"utils/nn.py",8920,0,"",python,selection_command +723,1438825,"utils/nn.py",8919,0,"",python,selection_command +724,1438961,"utils/nn.py",8917,0,"",python,selection_command +725,1439254,"utils/nn.py",8915,0,"",python,selection_command +726,1453074,"TERMINAL",0,0,"k",,terminal_output +727,1453503,"TERMINAL",0,0,"e",,terminal_output +728,1453599,"TERMINAL",0,0,"y",,terminal_output +729,1453974,"TERMINAL",0,0,".",,terminal_output +730,1454136,"TERMINAL",0,0,"s",,terminal_output +731,1454213,"TERMINAL",0,0,"h",,terminal_output +732,1454301,"TERMINAL",0,0,"a",,terminal_output +733,1454383,"TERMINAL",0,0,"p",,terminal_output +734,1454470,"TERMINAL",0,0,"e",,terminal_output +735,1454589,"TERMINAL",0,0,"\r\n(Pdb) (1, 2, 61, 8, 64)\r\n",,terminal_output +736,1456932,"TERMINAL",0,0,"va",,terminal_output +737,1457017,"TERMINAL",0,0,"l",,terminal_output +738,1457136,"TERMINAL",0,0,"ue",,terminal_output +739,1457267,"TERMINAL",0,0,".",,terminal_output +740,1457368,"TERMINAL",0,0,"s",,terminal_output +741,1457438,"TERMINAL",0,0,"h",,terminal_output +742,1457521,"TERMINAL",0,0,"a",,terminal_output +743,1457600,"TERMINAL",0,0,"p",,terminal_output +744,1457687,"TERMINAL",0,0,"e",,terminal_output +745,1457787,"TERMINAL",0,0,"\r\n(Pdb) (1, 2, 61, 8, 64)\r\n",,terminal_output +746,1471934,"utils/nn.py",8913,0,"",python,selection_command 
+747,1472054,"utils/nn.py",8912,0,"",python,selection_command +748,1472195,"utils/nn.py",8910,0,"",python,selection_command +749,1472312,"utils/nn.py",8909,0,"",python,selection_command +750,1472457,"utils/nn.py",8908,0,"",python,selection_command +751,1472578,"utils/nn.py",8902,0,"",python,selection_command +752,1472726,"utils/nn.py",8901,0,"",python,selection_command +753,1472936,"utils/nn.py",8896,0,"",python,selection_command +754,1473273,"utils/nn.py",8896,5,"",python,content +755,1473759,"utils/nn.py",8896,0,"q",python,content +756,1473759,"utils/nn.py",8897,0,"",python,selection_keyboard +757,1473886,"utils/nn.py",8897,0,"k",python,content +758,1473886,"utils/nn.py",8898,0,"",python,selection_keyboard +759,1473964,"utils/nn.py",8898,0,"v",python,content +760,1473964,"utils/nn.py",8899,0,"",python,selection_keyboard +761,1474183,"utils/nn.py",8898,0,"",python,selection_command +762,1478306,"utils/nn.py",8825,0,"",python,selection_command +763,1478793,"utils/nn.py",10808,0,"",python,selection_command +764,1480311,"utils/nn.py",9694,0,"",python,selection_command +765,1481212,"utils/nn.py",8670,0,"",python,selection_keyboard +766,1481544,"utils/nn.py",8585,0,"",python,selection_command +767,1481803,"utils/nn.py",8580,0,"",python,selection_command +768,1481826,"utils/nn.py",8505,0,"",python,selection_command +769,1481850,"utils/nn.py",8409,0,"",python,selection_command +770,1481883,"utils/nn.py",8404,0,"",python,selection_command +771,1482027,"utils/nn.py",8337,0,"",python,selection_command +772,1482176,"utils/nn.py",8329,0,"",python,selection_command +773,1482347,"utils/nn.py",8241,0,"",python,selection_command +774,1483871,"utils/nn.py",2877,26,"_create_flash_attention_fn",python,selection_command +775,1484098,"utils/nn.py",2902,0,"",python,selection_command +776,1484283,"utils/nn.py",2826,0,"",python,selection_command +777,1484535,"utils/nn.py",2784,0,"",python,selection_command +778,1484563,"utils/nn.py",2745,0,"",python,selection_command +779,1484591,"utils/nn.py",2710,0,"",python,selection_command +780,1484627,"utils/nn.py",2676,0,"",python,selection_command +781,1484658,"utils/nn.py",2638,0,"",python,selection_command +782,1484693,"utils/nn.py",2580,0,"",python,selection_command +783,1484725,"utils/nn.py",2570,0,"",python,selection_command +784,1484758,"utils/nn.py",2547,0,"",python,selection_command +785,1484791,"utils/nn.py",2517,0,"",python,selection_command +786,1484826,"utils/nn.py",2475,0,"",python,selection_command +787,1484858,"utils/nn.py",2440,0,"",python,selection_command +788,1484892,"utils/nn.py",2396,0,"",python,selection_command +789,1484925,"utils/nn.py",2335,0,"",python,selection_command +790,1484958,"utils/nn.py",2330,0,"",python,selection_command +791,1484992,"utils/nn.py",2324,0,"",python,selection_command +792,1485025,"utils/nn.py",2298,0,"",python,selection_command +793,1485058,"utils/nn.py",2264,0,"",python,selection_command +794,1485094,"utils/nn.py",2241,0,"",python,selection_command +795,1485125,"utils/nn.py",2226,0,"",python,selection_command +796,1485159,"utils/nn.py",2179,0,"",python,selection_command +797,1485191,"utils/nn.py",2137,0,"",python,selection_command +798,1485225,"utils/nn.py",2084,0,"",python,selection_command +799,1485393,"utils/nn.py",2054,0,"",python,selection_command +800,1485576,"utils/nn.py",2062,0,"",python,selection_command +801,1485661,"utils/nn.py",2092,0,"",python,selection_command +802,1485812,"utils/nn.py",2104,0,"",python,selection_command +803,1485950,"utils/nn.py",2105,0,"",python,selection_command 
+804,1487800,"TERMINAL",0,0,"c",,terminal_output +805,1487907,"TERMINAL",0,0,"\r\n",,terminal_output +806,1487986,"TERMINAL",0,0,"(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(126)__call__()\r\n-> z = self.spatial_attention(z)\r\n",,terminal_output +807,1489612,"TERMINAL",0,0,"c\r\n",,terminal_output +808,1489665,"TERMINAL",0,0,"(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(126)__call__()\r\n-> z = self.spatial_attention(z)\r\n",,terminal_output +809,1490183,"TERMINAL",0,0,"c",,terminal_output +810,1490278,"TERMINAL",0,0,"\r\n",,terminal_output +811,1490392,"TERMINAL",0,0,"2025-07-27 09:43:30.192557: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +812,1490698,"TERMINAL",0,0,"\r\n",,terminal_output +813,1491891,"TERMINAL",0,0,"2025-07-27 09:43:31.695121: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +814,1492414,"TERMINAL",0,0,"(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(126)__call__()\r\n-> z = self.spatial_attention(z)\r\n",,terminal_output +815,1494821,"TERMINAL",0,0,"c",,terminal_output +816,1495557,"TERMINAL",0,0,"\r\n",,terminal_output +817,1498096,"TERMINAL",0,0,"(Pdb) (Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(126)__call__()\r\n-> z = self.spatial_attention(z)\r\n",,terminal_output +818,1498489,"TERMINAL",0,0,"\r\n",,terminal_output +819,1502156,"TERMINAL",0,0,"d",,terminal_output +820,1503926,"TERMINAL",0,0,"\r\n(Pdb) (Pdb) *** Newest frame\r\n",,terminal_output +821,1505583,"TERMINAL",0,0,"c",,terminal_output +822,1505844,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(126)__call__()\r\n-> z = self.spatial_attention(z)\r\n",,terminal_output +823,1506813,"TERMINAL",0,0,"d",,terminal_output +824,1506930,"TERMINAL",0,0,"\r\n(Pdb) *** Newest frame\r\n",,terminal_output +825,1508199,"TERMINAL",0,0,"c",,terminal_output +826,1508327,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(126)__call__()\r\n-> z = self.spatial_attention(z)\r\n",,terminal_output +827,1509651,"TERMINAL",0,0,"d",,terminal_output +828,1509833,"TERMINAL",0,0,"e",,terminal_output +829,1509980,"TERMINAL",0,0,"le",,terminal_output +830,1510051,"TERMINAL",0,0,"t",,terminal_output +831,1510172,"TERMINAL",0,0,"e",,terminal_output +832,1510349,"TERMINAL",0,0,"\r\n(Pdb) *** NameError: name 'delete' is not defined\r\n",,terminal_output +833,1532335,"TERMINAL",0,0,"c",,terminal_output +834,1532453,"TERMINAL",0,0,"\r\n",,terminal_output +835,1532892,"TERMINAL",0,0,"2025-07-27 09:44:12.692617: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +836,1533516,"TERMINAL",0,0,"(Pdb) > /fast/home/franz.srambical/jafar/models/dynamics_causal.py(73)__call__()\r\n-> vid_embed += act_embed\r\n",,terminal_output +837,1533698,"TERMINAL",0,0,"c",,terminal_output +838,1533801,"TERMINAL",0,0,"l",,terminal_output +839,1533929,"TERMINAL",0,0,"ea",,terminal_output +840,1534002,"TERMINAL",0,0,"r",,terminal_output +841,1534100,"TERMINAL",0,0,"\r\n",,terminal_output +842,1544494,"TERMINAL",0,0,"\r\n",,terminal_output +843,1544995,"TERMINAL",0,0,"d",,terminal_output +844,1545106,"TERMINAL",0,0,"i",,terminal_output +845,1545182,"TERMINAL",0,0,"s",,terminal_output +846,1545292,"TERMINAL",0,0,"a",,terminal_output +847,1545440,"TERMINAL",0,0,"b",,terminal_output +848,1545527,"TERMINAL",0,0,"l",,terminal_output +849,1545682,"TERMINAL",0,0,"e",,terminal_output +850,1545813,"TERMINAL",0,0,"\r\n",,terminal_output +851,1546800,"TERMINAL",0,0,"c",,terminal_output +852,1546913,"TERMINAL",0,0,"\r\n(Pdb) Clear all breaks? (Pdb) (Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(126)__call__()\r\n-> z = self.spatial_attention(z)\r\n",,terminal_output +853,1550184,"TERMINAL",0,0,"c",,terminal_output +854,1550291,"TERMINAL",0,0,"l",,terminal_output +855,1550397,"TERMINAL",0,0,"e",,terminal_output +856,1550452,"TERMINAL",0,0,"a",,terminal_output +857,1550737,"TERMINAL",0,0,"r",,terminal_output +858,1550900,"TERMINAL",0,0,"\r\n",,terminal_output +859,1553769,"TERMINAL",0,0,"\r\n",,terminal_output +860,1566414,"TERMINAL",0,0,"d",,terminal_output +861,1566545,"TERMINAL",0,0,"is",,terminal_output +862,1566715,"TERMINAL",0,0,"a",,terminal_output +863,1566829,"TERMINAL",0,0,"b",,terminal_output +864,1566897,"TERMINAL",0,0,"l",,terminal_output +865,1567000,"TERMINAL",0,0,"e",,terminal_output +866,1567111,"TERMINAL",0,0,"\r\n",,terminal_output +867,1570116,"TERMINAL",0,0,"\r\n",,terminal_output +868,1574098,"TERMINAL",0,0,"c",,terminal_output +869,1574186,"TERMINAL",0,0,"\r\n",,terminal_output +870,1574266,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 77, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 228, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 133, in __call__\r\n z = self.temporal_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 321, in attention_fn\r\n mask_4d = (\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 300, in _fuse_masks\r\n mask_bool = mask.astype(jnp.bool_)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 4352, in pad\r\n pad_width = _broadcast_to_pairs(pad_width, np.ndim(array), ""pad_width"")\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 3937, in _broadcast_to_pairs\r\n raise ValueError(f""jnp.pad: {name} with {nd=} has unsupported shape {nvals.shape}. ""\r\nValueError: jnp.pad: pad_width with nd=5 has unsupported shape (2, 2). Valid shapes are (5, 2), (1, 2), (2,), (1,), or ().\r\n",,terminal_output +871,1575053,"TERMINAL",0,0,"(Pdb) Clear all breaks? 
(Pdb) (Pdb) (Pdb) ",,terminal_output +872,1575280,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +873,1588964,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"import math\nfrom typing import Tuple, Callable\n\nfrom flax import nnx\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\nclass PositionalEncoding(nnx.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n def __init__(self, d_model: int, max_len: int = 5000):\n self.d_model = d_model\n self.max_len = max_len\n\n pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n pe = pe.at[:, 0::2].set(jnp.sin(position * div_term))\n pe = pe.at[:, 1::2].set(jnp.cos(position * div_term))\n self.pe = nnx.Variable(pe)\n\n def __call__(self, x: jax.Array) -> jax.Array:\n x = x + self.pe[: x.shape[2]]\n return x\n\n\nclass STBlock(nnx.Module):\n def __init__(\n self,\n dim: int,\n ffn_dim: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n spatial_causal: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.dim = dim\n self.ffn_dim = ffn_dim\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.spatial_causal = spatial_causal\n self.decode = decode\n\n self.spatial_pos_enc = PositionalEncoding(self.dim)\n self.spatial_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.spatial_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention,\n is_causal=self.spatial_causal,\n ),\n rngs=rngs,\n # decode=self.decode,\n decode=False,\n )\n\n self.temporal_pos_enc = PositionalEncoding(self.dim)\n self.temporal_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.temporal_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=self.decode,\n )\n\n self.ffn_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense1 = nnx.Linear(\n in_features=self.dim,\n out_features=self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense2 = nnx.Linear(\n in_features=self.ffn_dim,\n out_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n @nnx.remat\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = self.spatial_pos_enc(x)\n z = self.spatial_norm(z)\n breakpoint()\n z = self.spatial_attention(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = self.temporal_pos_enc(x)\n z = self.temporal_norm(z)\n z = 
self.temporal_attention(z)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = self.ffn_norm(x)\n z = self.ffn_dense1(z)\n z = jax.nn.gelu(z)\n z = self.ffn_dense2(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nnx.Module):\n def __init__(\n self,\n input_dim: int,\n model_dim: int,\n ffn_dim: int,\n out_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n spatial_causal: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.input_dim = input_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.out_dim = out_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.spatial_causal = spatial_causal\n self.decode = decode\n\n self.input_norm1 = nnx.LayerNorm(\n num_features=self.input_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_dense = nnx.Linear(\n in_features=self.input_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_norm2 = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n self.blocks: list[STBlock] = []\n for _ in range(self.num_blocks):\n self.blocks.append(\n STBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n spatial_causal=self.spatial_causal,\n decode=self.decode,\n rngs=rngs,\n )\n )\n\n self.output_dense = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(self, x: jax.Array) -> jax.Array:\n x = self.input_norm1(x)\n x = self.input_dense(x)\n x = self.input_norm2(x)\n\n for block in self.blocks:\n x = block(x)\n\n x = self.output_dense(x)\n return x # (B, T, E)\n\n\ndef normalize(x: jax.Array) -> jax.Array:\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nnx.Module):\n def __init__(\n self, latent_dim: int, num_latents: int, dropout: float, rngs: nnx.Rngs\n ):\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.dropout = dropout\n\n self.codebook = nnx.Param(\n normalize(\n nnx.initializers.lecun_uniform()(\n rngs.params(), (self.num_latents, self.latent_dim)\n )\n )\n )\n self.drop = nnx.Dropout(self.dropout, rngs=rngs)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n normalized_codebook = normalize(self.codebook.value)\n distance = -jnp.matmul(x, normalized_codebook.T)\n if training:\n distance = self.drop(distance)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array) -> jax.Array:\n return self.codebook[indices]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool) -> Callable:\n """"""\n Create an attention function that uses flash attention if enabled.\n\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\n 
jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\n\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\n multiple of 4 and mask accordingly.\n """"""\n\n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\n # qkv.shape (1, 2, 61, 8, 64)\n implementation = ""cudnn"" if use_flash_attention else None\n\n def _rearrange(x):\n return einops.rearrange(x, ""... l h d -> (...) l h d"")\n\n def _pad(x):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )\n return jnp.logical_and(attention_mask, expanded_mask)\n\n original_shape = query.shape\n original_seq_len = query.shape[-3]\n\n # Pad to nearest multiple of 4\n target_seq_len = ((original_seq_len + 3) // 4) * 4\n pad_size = target_seq_len - original_seq_len\n\n query_4d = _pad(_rearrange(query))\n key_4d = _pad(_rearrange(key))\n value_4d = _pad(_rearrange(value))\n\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n mask_4d = (\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n )\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n\n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\n\n # NOTE: jax.nn.dot_product_attention does not support dropout\n output_4d = jax.nn.dot_product_attention(\n query=query_4d,\n key=key_4d,\n value=value_4d,\n bias=bias_4d,\n mask=mask_4d,\n implementation=implementation,\n is_causal=is_causal,\n )\n return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n\n return attention_fn\n",python,tab +874,1588964,"/fast/home/franz.srambical/jafar/utils/nn.py",4041,0,"",python,selection_command +875,1613227,"/fast/home/franz.srambical/jafar/utils/nn.py",4080,0,"",python,selection_command +876,1614195,"/fast/home/franz.srambical/jafar/utils/nn.py",10821,0,"",python,selection_command +877,1614958,"/fast/home/franz.srambical/jafar/utils/nn.py",9694,0,"",python,selection_command +878,1615887,"/fast/home/franz.srambical/jafar/utils/nn.py",9635,0,"",python,selection_command +879,1616129,"/fast/home/franz.srambical/jafar/utils/nn.py",9596,0,"",python,selection_command +880,1616153,"/fast/home/franz.srambical/jafar/utils/nn.py",9587,0,"",python,selection_command +881,1616179,"/fast/home/franz.srambical/jafar/utils/nn.py",9552,0,"",python,selection_command +882,1616212,"/fast/home/franz.srambical/jafar/utils/nn.py",9515,0,"",python,selection_command +883,1616244,"/fast/home/franz.srambical/jafar/utils/nn.py",9506,0,"",python,selection_command +884,1616284,"/fast/home/franz.srambical/jafar/utils/nn.py",9448,0,"",python,selection_command +885,1616314,"/fast/home/franz.srambical/jafar/utils/nn.py",9434,0,"",python,selection_command +886,1616346,"/fast/home/franz.srambical/jafar/utils/nn.py",9353,0,"",python,selection_command +887,1616377,"/fast/home/franz.srambical/jafar/utils/nn.py",9316,0,"",python,selection_command +888,1616415,"/fast/home/franz.srambical/jafar/utils/nn.py",9269,0,"",python,selection_command 
+889,1616448,"/fast/home/franz.srambical/jafar/utils/nn.py",9187,0,"",python,selection_command +890,1616487,"/fast/home/franz.srambical/jafar/utils/nn.py",9178,0,"",python,selection_command +891,1616525,"/fast/home/franz.srambical/jafar/utils/nn.py",9115,0,"",python,selection_command +892,1616556,"/fast/home/franz.srambical/jafar/utils/nn.py",9094,0,"",python,selection_command +893,1616587,"/fast/home/franz.srambical/jafar/utils/nn.py",9085,0,"",python,selection_command +894,1616611,"/fast/home/franz.srambical/jafar/utils/nn.py",9026,0,"",python,selection_command +895,1616648,"/fast/home/franz.srambical/jafar/utils/nn.py",8999,0,"",python,selection_command +896,1616681,"/fast/home/franz.srambical/jafar/utils/nn.py",8990,0,"",python,selection_command +897,1616713,"/fast/home/franz.srambical/jafar/utils/nn.py",8932,0,"",python,selection_command +898,1616826,"/fast/home/franz.srambical/jafar/utils/nn.py",8894,0,"",python,selection_command +899,1622686,"/fast/home/franz.srambical/jafar/utils/nn.py",8821,0,"",python,selection_command +900,1622933,"/fast/home/franz.srambical/jafar/utils/nn.py",8812,0,"",python,selection_command +901,1622959,"/fast/home/franz.srambical/jafar/utils/nn.py",8810,0,"",python,selection_command +902,1622985,"/fast/home/franz.srambical/jafar/utils/nn.py",8772,0,"",python,selection_command +903,1623017,"/fast/home/franz.srambical/jafar/utils/nn.py",8674,0,"",python,selection_command +904,1623051,"/fast/home/franz.srambical/jafar/utils/nn.py",8589,0,"",python,selection_command +905,1623085,"/fast/home/franz.srambical/jafar/utils/nn.py",8580,0,"",python,selection_command +906,1623116,"/fast/home/franz.srambical/jafar/utils/nn.py",8509,0,"",python,selection_command +907,1623152,"/fast/home/franz.srambical/jafar/utils/nn.py",8413,0,"",python,selection_command +908,1623192,"/fast/home/franz.srambical/jafar/utils/nn.py",8404,0,"",python,selection_command +909,1623328,"/fast/home/franz.srambical/jafar/utils/nn.py",8341,0,"",python,selection_command +910,1623484,"/fast/home/franz.srambical/jafar/utils/nn.py",8331,0,"",python,selection_command +911,1623634,"/fast/home/franz.srambical/jafar/utils/nn.py",8245,0,"",python,selection_command +912,1623828,"/fast/home/franz.srambical/jafar/utils/nn.py",8236,0,"",python,selection_command +913,1623996,"/fast/home/franz.srambical/jafar/utils/nn.py",8245,0,"",python,selection_command +914,1627153,"/fast/home/franz.srambical/jafar/utils/nn.py",2877,26,"_create_flash_attention_fn",python,selection_command +915,1627262,"/fast/home/franz.srambical/jafar/utils/nn.py",2902,0,"",python,selection_command +916,1627649,"/fast/home/franz.srambical/jafar/utils/nn.py",2830,0,"",python,selection_command +917,1627890,"/fast/home/franz.srambical/jafar/utils/nn.py",2788,0,"",python,selection_command +918,1627913,"/fast/home/franz.srambical/jafar/utils/nn.py",2749,0,"",python,selection_command +919,1627938,"/fast/home/franz.srambical/jafar/utils/nn.py",2714,0,"",python,selection_command +920,1627972,"/fast/home/franz.srambical/jafar/utils/nn.py",2680,0,"",python,selection_command +921,1628011,"/fast/home/franz.srambical/jafar/utils/nn.py",2642,0,"",python,selection_command +922,1628153,"/fast/home/franz.srambical/jafar/utils/nn.py",2584,0,"",python,selection_command +923,1628589,"/fast/home/franz.srambical/jafar/utils/nn.py",2588,0,"",python,selection_command +924,1628722,"/fast/home/franz.srambical/jafar/utils/nn.py",2589,0,"",python,selection_command +925,1629080,"/fast/home/franz.srambical/jafar/utils/nn.py",4058,0,"",python,selection_command 
+926,1629615,"/fast/home/franz.srambical/jafar/utils/nn.py",4024,0,"",python,selection_command +927,1629875,"/fast/home/franz.srambical/jafar/utils/nn.py",3987,0,"",python,selection_command +928,1629899,"/fast/home/franz.srambical/jafar/utils/nn.py",3958,0,"",python,selection_command +929,1629930,"/fast/home/franz.srambical/jafar/utils/nn.py",3921,0,"",python,selection_command +930,1629965,"/fast/home/franz.srambical/jafar/utils/nn.py",3903,0,"",python,selection_command +931,1629999,"/fast/home/franz.srambical/jafar/utils/nn.py",3901,0,"",python,selection_command +932,1630332,"/fast/home/franz.srambical/jafar/utils/nn.py",3864,0,"",python,selection_command +933,1633640,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +934,1633896,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +935,1645569,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +936,1650416,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/utils/nn.py(126)__call__()\r\n-> z = self.spatial_attention(z)\r\n",,terminal_output +937,1652704,"TERMINAL",0,0,"z",,terminal_output +938,1652817,"TERMINAL",0,0,".",,terminal_output +939,1652934,"TERMINAL",0,0,"s",,terminal_output +940,1653032,"TERMINAL",0,0,"h",,terminal_output +941,1653117,"TERMINAL",0,0,"a",,terminal_output +942,1653184,"TERMINAL",0,0,"p",,terminal_output +943,1653354,"TERMINAL",0,0,"e",,terminal_output +944,1653462,"TERMINAL",0,0,"\r\n(Pdb) (1, 2, 61, 512)\r\n",,terminal_output +945,1656528,"/fast/home/franz.srambical/jafar/utils/nn.py",3846,0,"\n ",python,content +946,1656922,"/fast/home/franz.srambical/jafar/utils/nn.py",3855,0,"#",python,content +947,1656922,"/fast/home/franz.srambical/jafar/utils/nn.py",3856,0,"",python,selection_keyboard +948,1657005,"/fast/home/franz.srambical/jafar/utils/nn.py",3856,0," ",python,content +949,1657005,"/fast/home/franz.srambical/jafar/utils/nn.py",3857,0,"",python,selection_keyboard +950,1657541,"/fast/home/franz.srambical/jafar/utils/nn.py",3857,0,"z",python,content +951,1657542,"/fast/home/franz.srambical/jafar/utils/nn.py",3858,0,"",python,selection_keyboard +952,1657658,"/fast/home/franz.srambical/jafar/utils/nn.py",3858,0,".",python,content +953,1657658,"/fast/home/franz.srambical/jafar/utils/nn.py",3859,0,"",python,selection_keyboard +954,1657759,"/fast/home/franz.srambical/jafar/utils/nn.py",3859,0,"s",python,content +955,1657759,"/fast/home/franz.srambical/jafar/utils/nn.py",3860,0,"",python,selection_keyboard +956,1657855,"/fast/home/franz.srambical/jafar/utils/nn.py",3860,0,"h",python,content +957,1657855,"/fast/home/franz.srambical/jafar/utils/nn.py",3861,0,"",python,selection_keyboard +958,1657939,"/fast/home/franz.srambical/jafar/utils/nn.py",3861,0,"a",python,content +959,1657940,"/fast/home/franz.srambical/jafar/utils/nn.py",3862,0,"",python,selection_keyboard +960,1658023,"/fast/home/franz.srambical/jafar/utils/nn.py",3862,0,"p",python,content +961,1658024,"/fast/home/franz.srambical/jafar/utils/nn.py",3863,0,"",python,selection_keyboard +962,1658189,"/fast/home/franz.srambical/jafar/utils/nn.py",3863,0,"e",python,content 
+963,1658189,"/fast/home/franz.srambical/jafar/utils/nn.py",3864,0,"",python,selection_keyboard +964,1658487,"/fast/home/franz.srambical/jafar/utils/nn.py",3864,0," (1, 2, 61, 8, 64)",python,content +965,1658789,"/fast/home/franz.srambical/jafar/utils/nn.py",3881,0,"",python,selection_command +966,1661897,"/fast/home/franz.srambical/jafar/utils/nn.py",3879,0,"",python,selection_command +967,1662143,"/fast/home/franz.srambical/jafar/utils/nn.py",3877,0,"",python,selection_command +968,1662177,"/fast/home/franz.srambical/jafar/utils/nn.py",3876,0,"",python,selection_command +969,1662201,"/fast/home/franz.srambical/jafar/utils/nn.py",3874,0,"",python,selection_command +970,1662233,"/fast/home/franz.srambical/jafar/utils/nn.py",3872,0,"",python,selection_command +971,1662268,"/fast/home/franz.srambical/jafar/utils/nn.py",3870,0,"",python,selection_command +972,1665316,"/fast/home/franz.srambical/jafar/utils/nn.py",3869,0,"",python,selection_command +973,1665488,"/fast/home/franz.srambical/jafar/utils/nn.py",3867,0,"",python,selection_command +974,1667637,"/fast/home/franz.srambical/jafar/utils/nn.py",3845,0,"",python,selection_command +975,1667769,"/fast/home/franz.srambical/jafar/utils/nn.py",3813,0,"",python,selection_command +976,1668168,"/fast/home/franz.srambical/jafar/utils/nn.py",3777,0,"",python,selection_command +977,1669568,"/fast/home/franz.srambical/jafar/utils/nn.py",3813,0,"",python,selection_command +978,1669718,"/fast/home/franz.srambical/jafar/utils/nn.py",3845,0,"",python,selection_command +979,1670113,"/fast/home/franz.srambical/jafar/utils/nn.py",3826,21,"",python,content +980,1670138,"/fast/home/franz.srambical/jafar/utils/nn.py",3834,0,"",python,selection_command +981,1670352,"/fast/home/franz.srambical/jafar/utils/nn.py",3801,0,"",python,selection_command +982,1670515,"/fast/home/franz.srambical/jafar/utils/nn.py",3765,0,"",python,selection_command +983,1670638,"/fast/home/franz.srambical/jafar/utils/nn.py",3729,0,"",python,selection_command +984,1670850,"/fast/home/franz.srambical/jafar/utils/nn.py",3756,0,"\n breakpoint()",python,content +985,1670853,"/fast/home/franz.srambical/jafar/utils/nn.py",3765,0,"",python,selection_command +986,1673449,"TERMINAL",0,0,"q",,terminal_output +987,1673541,"TERMINAL",0,0,"ui",,terminal_output +988,1673985,"TERMINAL",0,0,"t",,terminal_output +989,1674192,"TERMINAL",0,0,"()",,terminal_output +990,1674396,"TERMINAL",0,0,"\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 193, in \r\n action_batch = jasmine.vq_encode(batch, training=False)\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 391, in vq_encode\r\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\r\n File ""/fast/home/franz.srambical/jafar/models/lam.py"", line 133, in vq_encode\r\n z = self.encoder(padded_patches) # (B, T, N, E)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 228, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 126, in __call__\r\n z = self.spatial_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 126, in __call__\r\n z = self.spatial_attention(z)\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 90, in trace_dispatch\r\n return self.dispatch_line(frame)\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 115, in dispatch_line\r\n if self.quitting: raise BdbQuit\r\nbdb.BdbQuit\r\n",,terminal_output +991,1674990,"TERMINAL",0,0,"(Pdb) ",,terminal_output +992,1675206,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +993,1675536,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ bash experiments/sample.sh ",,terminal_output +994,1675780,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +995,1687039,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +996,1691802,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/utils/nn.py(126)__call__()\r\n-> # z.shape (1, 2, 61, 8, 64)\r\n",,terminal_output +997,1701025,"TERMINAL",0,0,"z",,terminal_output +998,1701875,"TERMINAL",0,0,".",,terminal_output +999,1702137,"TERMINAL",0,0,"s",,terminal_output +1000,1702240,"TERMINAL",0,0,"h",,terminal_output +1001,1702341,"TERMINAL",0,0,"a",,terminal_output +1002,1702439,"TERMINAL",0,0,"p",,terminal_output +1003,1702614,"TERMINAL",0,0,"e",,terminal_output +1004,1702872,"TERMINAL",0,0,"\r\n(Pdb) (1, 2, 61, 512)\r\n",,terminal_output +1005,1705046,"TERMINAL",0,0,"l",,terminal_output +1006,1705238,"TERMINAL",0,0,"\r\n(Pdb) 121 \t def __call__(self, x: jax.Array) -> jax.Array:\r\n122 \t # --- Spatial attention ---\r\n123 \t breakpoint()\r\n124 \t z = self.spatial_pos_enc(x)\r\n125 \t z = self.spatial_norm(z)\r\n126 ->\t # z.shape (1, 2, 61, 8, 64)\r\n127 \t z = self.spatial_attention(z)\r\n128 \t x = x + z\r\n129 \t\r\n130 \t # --- Temporal attention ---\r\n131 \t x = x.swapaxes(1, 2)\r\n",,terminal_output +1007,1722908,"/fast/home/franz.srambical/jafar/utils/nn.py",3757,21,"",python,content +1008,1722933,"/fast/home/franz.srambical/jafar/utils/nn.py",3765,0,"",python,selection_command +1009,1723089,"/fast/home/franz.srambical/jafar/utils/nn.py",3729,0,"",python,selection_command +1010,1725596,"/fast/home/franz.srambical/jafar/utils/nn.py",2249,0,"",python,selection_command +1011,1726179,"/fast/home/franz.srambical/jafar/utils/nn.py",787,0,"",python,selection_command +1012,1726860,"/fast/home/franz.srambical/jafar/utils/nn.py",814,0,"",python,selection_command +1013,1727094,"/fast/home/franz.srambical/jafar/utils/nn.py",823,0,"",python,selection_command +1014,1727125,"/fast/home/franz.srambical/jafar/utils/nn.py",874,0,"",python,selection_command +1015,1727154,"/fast/home/franz.srambical/jafar/utils/nn.py",912,0,"",python,selection_command +1016,1727206,"/fast/home/franz.srambical/jafar/utils/nn.py",921,0,"",python,selection_command +1017,1727240,"/fast/home/franz.srambical/jafar/utils/nn.py",922,0,"",python,selection_command +1018,1727254,"/fast/home/franz.srambical/jafar/utils/nn.py",931,0,"",python,selection_command +1019,1727528,"/fast/home/franz.srambical/jafar/utils/nn.py",922,0,"",python,selection_command +1020,1727730,"/fast/home/franz.srambical/jafar/utils/nn.py",931,0,"",python,selection_command +1021,1728303,"/fast/home/franz.srambical/jafar/utils/nn.py",958,0,"",python,selection_command +1022,1728753,"/fast/home/franz.srambical/jafar/utils/nn.py",931,0,"",python,selection_command +1023,1731449,"/fast/home/franz.srambical/jafar/utils/nn.py",5897,7,"STBlock",python,selection_command +1024,1731759,"/fast/home/franz.srambical/jafar/utils/nn.py",5903,0,"",python,selection_command +1025,1734436,"/fast/home/franz.srambical/jafar/utils/nn.py",5914,0,"",python,selection_command +1026,1734678,"/fast/home/franz.srambical/jafar/utils/nn.py",5954,0,"",python,selection_command +1027,1734702,"/fast/home/franz.srambical/jafar/utils/nn.py",5996,0,"",python,selection_command +1028,1734732,"/fast/home/franz.srambical/jafar/utils/nn.py",6042,0,"",python,selection_command +1029,1734771,"/fast/home/franz.srambical/jafar/utils/nn.py",6084,0,"",python,selection_command +1030,1734801,"/fast/home/franz.srambical/jafar/utils/nn.py",6134,0,"",python,selection_command 
+1031,1734832,"/fast/home/franz.srambical/jafar/utils/nn.py",6172,0,"",python,selection_command +1032,1734869,"/fast/home/franz.srambical/jafar/utils/nn.py",6238,0,"",python,selection_command +1033,1734899,"/fast/home/franz.srambical/jafar/utils/nn.py",6294,0,"",python,selection_command +1034,1734933,"/fast/home/franz.srambical/jafar/utils/nn.py",6334,0,"",python,selection_command +1035,1734965,"/fast/home/franz.srambical/jafar/utils/nn.py",6365,0,"",python,selection_command +1036,1735000,"/fast/home/franz.srambical/jafar/utils/nn.py",6383,0,"",python,selection_command +1037,1735031,"/fast/home/franz.srambical/jafar/utils/nn.py",6389,0,"",python,selection_command +1038,1735065,"/fast/home/franz.srambical/jafar/utils/nn.py",6398,0,"",python,selection_command +1039,1735099,"/fast/home/franz.srambical/jafar/utils/nn.py",6438,0,"",python,selection_command +1040,1735138,"/fast/home/franz.srambical/jafar/utils/nn.py",6478,0,"",python,selection_command +1041,1735169,"/fast/home/franz.srambical/jafar/utils/nn.py",6517,0,"",python,selection_command +1042,1735200,"/fast/home/franz.srambical/jafar/utils/nn.py",6559,0,"",python,selection_command +1043,1735235,"/fast/home/franz.srambical/jafar/utils/nn.py",6589,0,"",python,selection_command +1044,1735267,"/fast/home/franz.srambical/jafar/utils/nn.py",6612,0,"",python,selection_command +1045,1735300,"/fast/home/franz.srambical/jafar/utils/nn.py",6614,0,"",python,selection_command +1046,1735333,"/fast/home/franz.srambical/jafar/utils/nn.py",6623,0,"",python,selection_command +1047,1735367,"/fast/home/franz.srambical/jafar/utils/nn.py",6674,0,"",python,selection_command +1048,1735403,"/fast/home/franz.srambical/jafar/utils/nn.py",6706,0,"",python,selection_command +1049,1735602,"/fast/home/franz.srambical/jafar/utils/nn.py",6738,0,"",python,selection_command +1050,1735776,"/fast/home/franz.srambical/jafar/utils/nn.py",6762,0,"",python,selection_command +1051,1735928,"/fast/home/franz.srambical/jafar/utils/nn.py",6771,0,"",python,selection_command +1052,1736092,"/fast/home/franz.srambical/jafar/utils/nn.py",6805,0,"",python,selection_command +1053,1736343,"/fast/home/franz.srambical/jafar/utils/nn.py",6771,0,"",python,selection_command +1054,1737642,"/fast/home/franz.srambical/jafar/utils/nn.py",6796,0,"\n ",python,content +1055,1738078,"/fast/home/franz.srambical/jafar/utils/nn.py",6809,0,"b",python,content +1056,1738078,"/fast/home/franz.srambical/jafar/utils/nn.py",6810,0,"",python,selection_keyboard +1057,1738114,"/fast/home/franz.srambical/jafar/utils/nn.py",6810,0,"r",python,content +1058,1738114,"/fast/home/franz.srambical/jafar/utils/nn.py",6811,0,"",python,selection_keyboard +1059,1738189,"/fast/home/franz.srambical/jafar/utils/nn.py",6811,0,"e",python,content +1060,1738189,"/fast/home/franz.srambical/jafar/utils/nn.py",6812,0,"",python,selection_keyboard +1061,1738210,"/fast/home/franz.srambical/jafar/utils/nn.py",6812,0,"a",python,content +1062,1738210,"/fast/home/franz.srambical/jafar/utils/nn.py",6813,0,"",python,selection_keyboard +1063,1738273,"/fast/home/franz.srambical/jafar/utils/nn.py",6813,0,"k",python,content +1064,1738274,"/fast/home/franz.srambical/jafar/utils/nn.py",6814,0,"",python,selection_keyboard +1065,1738405,"/fast/home/franz.srambical/jafar/utils/nn.py",6814,0,"p",python,content +1066,1738405,"/fast/home/franz.srambical/jafar/utils/nn.py",6815,0,"",python,selection_keyboard +1067,1738473,"/fast/home/franz.srambical/jafar/utils/nn.py",6815,0,"o",python,content 
+1068,1738474,"/fast/home/franz.srambical/jafar/utils/nn.py",6816,0,"",python,selection_keyboard +1069,1738513,"/fast/home/franz.srambical/jafar/utils/nn.py",6816,0,"i",python,content +1070,1738513,"/fast/home/franz.srambical/jafar/utils/nn.py",6817,0,"",python,selection_keyboard +1071,1738577,"/fast/home/franz.srambical/jafar/utils/nn.py",6817,0,"n",python,content +1072,1738577,"/fast/home/franz.srambical/jafar/utils/nn.py",6818,0,"",python,selection_keyboard +1073,1738627,"/fast/home/franz.srambical/jafar/utils/nn.py",6818,0,"t",python,content +1074,1738627,"/fast/home/franz.srambical/jafar/utils/nn.py",6819,0,"",python,selection_keyboard +1075,1738932,"/fast/home/franz.srambical/jafar/utils/nn.py",6819,0,"()",python,content +1076,1738933,"/fast/home/franz.srambical/jafar/utils/nn.py",6820,0,"",python,selection_keyboard +1077,1738947,"/fast/home/franz.srambical/jafar/utils/nn.py",6820,1,")",python,content +1078,1738947,"/fast/home/franz.srambical/jafar/utils/nn.py",6821,0,"",python,selection_keyboard +1079,1739478,"/fast/home/franz.srambical/jafar/utils/nn.py",6820,0,"",python,selection_command +1080,1741299,"TERMINAL",0,0,"q",,terminal_output +1081,1741824,"TERMINAL",0,0,"uit",,terminal_output +1082,1742069,"TERMINAL",0,0,"()",,terminal_output +1083,1742318,"TERMINAL",0,0,"\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 193, in \r\n action_batch = jasmine.vq_encode(batch, training=False)\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 391, in vq_encode\r\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\r\n File ""/fast/home/franz.srambical/jafar/models/lam.py"", line 133, in vq_encode\r\n z = self.encoder(padded_patches) # (B, T, N, E)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 228, in __call__\r\n breakpoint()\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 126, in __call__\r\n z = self.spatial_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 126, in __call__\r\n z = self.spatial_attention(z)\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 90, in trace_dispatch\r\n return self.dispatch_line(frame)\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 115, in dispatch_line\r\n if 
self.quitting: raise BdbQuit\r\nbdb.BdbQuit\r\n",,terminal_output +1084,1742900,"TERMINAL",0,0,"(Pdb) ",,terminal_output +1085,1743103,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +1086,1743222,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +1087,1743293,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +1088,1743375,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +1089,1743722,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1090,1746193,"/fast/home/franz.srambical/jafar/utils/nn.py",6821,0,"\n ",python,content +1091,1746313,"/fast/home/franz.srambical/jafar/utils/nn.py",6834,0,"x",python,content +1092,1746313,"/fast/home/franz.srambical/jafar/utils/nn.py",6835,0,"",python,selection_keyboard +1093,1746395,"/fast/home/franz.srambical/jafar/utils/nn.py",6835,0,".",python,content +1094,1746396,"/fast/home/franz.srambical/jafar/utils/nn.py",6836,0,"",python,selection_keyboard +1095,1746706,"/fast/home/franz.srambical/jafar/utils/nn.py",6835,1,"",python,content +1096,1746833,"/fast/home/franz.srambical/jafar/utils/nn.py",6834,1,"",python,content +1097,1747002,"/fast/home/franz.srambical/jafar/utils/nn.py",6834,0,"$",python,content +1098,1747002,"/fast/home/franz.srambical/jafar/utils/nn.py",6835,0,"",python,selection_keyboard +1099,1747588,"/fast/home/franz.srambical/jafar/utils/nn.py",6834,1,"",python,content +1100,1747732,"/fast/home/franz.srambical/jafar/utils/nn.py",6834,0,"#",python,content +1101,1747732,"/fast/home/franz.srambical/jafar/utils/nn.py",6835,0,"",python,selection_keyboard +1102,1748026,"/fast/home/franz.srambical/jafar/utils/nn.py",6835,0,".",python,content +1103,1748026,"/fast/home/franz.srambical/jafar/utils/nn.py",6836,0,"",python,selection_keyboard +1104,1748189,"/fast/home/franz.srambical/jafar/utils/nn.py",6836,0,"x",python,content +1105,1748189,"/fast/home/franz.srambical/jafar/utils/nn.py",6837,0,"",python,selection_keyboard +1106,1748705,"/fast/home/franz.srambical/jafar/utils/nn.py",6836,1,"",python,content +1107,1748809,"/fast/home/franz.srambical/jafar/utils/nn.py",6835,1,"",python,content +1108,1748866,"/fast/home/franz.srambical/jafar/utils/nn.py",6835,0,"x",python,content +1109,1748866,"/fast/home/franz.srambical/jafar/utils/nn.py",6836,0,"",python,selection_keyboard +1110,1748987,"/fast/home/franz.srambical/jafar/utils/nn.py",6836,0,".",python,content +1111,1748988,"/fast/home/franz.srambical/jafar/utils/nn.py",6837,0,"",python,selection_keyboard +1112,1749071,"/fast/home/franz.srambical/jafar/utils/nn.py",6837,0,"s",python,content +1113,1749071,"/fast/home/franz.srambical/jafar/utils/nn.py",6838,0,"",python,selection_keyboard +1114,1749167,"/fast/home/franz.srambical/jafar/utils/nn.py",6838,0,"h",python,content +1115,1749167,"/fast/home/franz.srambical/jafar/utils/nn.py",6839,0,"",python,selection_keyboard +1116,1749290,"/fast/home/franz.srambical/jafar/utils/nn.py",6839,0,"a",python,content +1117,1749290,"/fast/home/franz.srambical/jafar/utils/nn.py",6840,0,"",python,selection_keyboard +1118,1749331,"/fast/home/franz.srambical/jafar/utils/nn.py",6840,0,"p",python,content +1119,1749331,"/fast/home/franz.srambical/jafar/utils/nn.py",6841,0,"",python,selection_keyboard +1120,1749572,"/fast/home/franz.srambical/jafar/utils/nn.py",6841,0,"e",python,content 
+1121,1749572,"/fast/home/franz.srambical/jafar/utils/nn.py",6842,0,"",python,selection_keyboard +1122,1750466,"/fast/home/franz.srambical/jafar/utils/nn.py",6842,0," ",python,content +1123,1750466,"/fast/home/franz.srambical/jafar/utils/nn.py",6843,0,"",python,selection_keyboard +1124,1755358,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +1125,1760204,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/utils/nn.py(229)__call__()\r\n-> x = block(x)\r\n",,terminal_output +1126,1760873,"TERMINAL",0,0,"x",,terminal_output +1127,1760987,"TERMINAL",0,0,".",,terminal_output +1128,1761123,"TERMINAL",0,0,"s",,terminal_output +1129,1761248,"TERMINAL",0,0,"h",,terminal_output +1130,1761321,"TERMINAL",0,0,"a",,terminal_output +1131,1761443,"TERMINAL",0,0,"p",,terminal_output +1132,1761509,"TERMINAL",0,0,"e",,terminal_output +1133,1761691,"TERMINAL",0,0,"\r\n(Pdb) (1, 2, 61, 512)\r\n",,terminal_output +1134,1763967,"/fast/home/franz.srambical/jafar/utils/nn.py",6843,0,"()",python,content +1135,1763967,"/fast/home/franz.srambical/jafar/utils/nn.py",6844,0,"",python,selection_keyboard +1136,1764199,"/fast/home/franz.srambical/jafar/utils/nn.py",6844,0,"1",python,content +1137,1764200,"/fast/home/franz.srambical/jafar/utils/nn.py",6845,0,"",python,selection_keyboard +1138,1764345,"/fast/home/franz.srambical/jafar/utils/nn.py",6845,0,",",python,content +1139,1764345,"/fast/home/franz.srambical/jafar/utils/nn.py",6846,0,"",python,selection_keyboard +1140,1764982,"/fast/home/franz.srambical/jafar/utils/nn.py",6846,0," ",python,content +1141,1764982,"/fast/home/franz.srambical/jafar/utils/nn.py",6847,0,"",python,selection_keyboard +1142,1765474,"/fast/home/franz.srambical/jafar/utils/nn.py",6847,0,"2, 61, 8, 64",python,content +1143,1765474,"/fast/home/franz.srambical/jafar/utils/nn.py",6835,0," ",python,content +1144,1765680,"/fast/home/franz.srambical/jafar/utils/nn.py",6859,0,"",python,selection_command +1145,1766515,"/fast/home/franz.srambical/jafar/utils/nn.py",6822,0,"",python,selection_command +1146,1769175,"/fast/home/franz.srambical/jafar/utils/nn.py",6834,0,"",python,selection_command +1147,1769436,"/fast/home/franz.srambical/jafar/utils/nn.py",6836,0,"",python,selection_command +1148,1769445,"/fast/home/franz.srambical/jafar/utils/nn.py",6837,0,"",python,selection_command +1149,1769481,"/fast/home/franz.srambical/jafar/utils/nn.py",6838,0,"",python,selection_command +1150,1769514,"/fast/home/franz.srambical/jafar/utils/nn.py",6844,0,"",python,selection_command +1151,1769555,"/fast/home/franz.srambical/jafar/utils/nn.py",6845,0,"",python,selection_command +1152,1769585,"/fast/home/franz.srambical/jafar/utils/nn.py",6846,0,"",python,selection_command +1153,1769839,"/fast/home/franz.srambical/jafar/utils/nn.py",6848,0,"",python,selection_command +1154,1769972,"/fast/home/franz.srambical/jafar/utils/nn.py",6849,0,"",python,selection_command +1155,1770317,"/fast/home/franz.srambical/jafar/utils/nn.py",6851,0,"",python,selection_command +1156,1770592,"/fast/home/franz.srambical/jafar/utils/nn.py",6853,0,"",python,selection_command 
+1157,1770926,"/fast/home/franz.srambical/jafar/utils/nn.py",6855,0,"",python,selection_command +1158,1771123,"/fast/home/franz.srambical/jafar/utils/nn.py",6855,1,"8",python,selection_command +1159,1771189,"/fast/home/franz.srambical/jafar/utils/nn.py",6855,2,"8,",python,selection_command +1160,1771308,"/fast/home/franz.srambical/jafar/utils/nn.py",6855,5,"8, 64",python,selection_command +1161,1771650,"/fast/home/franz.srambical/jafar/utils/nn.py",6855,5,"",python,content +1162,1772289,"/fast/home/franz.srambical/jafar/utils/nn.py",6855,0,"5",python,content +1163,1772290,"/fast/home/franz.srambical/jafar/utils/nn.py",6856,0,"",python,selection_keyboard +1164,1772412,"/fast/home/franz.srambical/jafar/utils/nn.py",6856,0,"1",python,content +1165,1772413,"/fast/home/franz.srambical/jafar/utils/nn.py",6857,0,"",python,selection_keyboard +1166,1772470,"/fast/home/franz.srambical/jafar/utils/nn.py",6857,0,"2",python,content +1167,1772470,"/fast/home/franz.srambical/jafar/utils/nn.py",6858,0,"",python,selection_keyboard +1168,1772692,"/fast/home/franz.srambical/jafar/utils/nn.py",6857,0,"",python,selection_command +1169,1776173,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=13995.6 task 0: running\r\n",,terminal_output +1170,1776327,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=13995.6\r\nsrun: forcing job termination\r\n(Pdb) --KeyboardInterrupt--\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\n[2025-07-27T09:48:16.101] error: *** STEP 13995.6 ON hai003 CANCELLED AT 2025-07-27T09:48:16 DUE to SIGNAL Killed ***\r\n",,terminal_output +1171,1776419,"TERMINAL",0,0,"(Pdb) ",,terminal_output +1172,1776696,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +1173,1778826,"TERMINAL",0,0,"\r[franz.srambical@hai003.haicore.berlin:~/jafar] $ \r[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +1174,1781781,"TERMINAL",0,0,"\r[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +1175,1783201,"/fast/home/franz.srambical/jafar/utils/nn.py",6820,0,"",python,selection_command +1176,1783511,"/fast/home/franz.srambical/jafar/utils/nn.py",6797,25,"",python,content +1177,1783535,"/fast/home/franz.srambical/jafar/utils/nn.py",6809,0,"",python,selection_command +1178,1785588,"/fast/home/franz.srambical/jafar/utils/nn.py",6775,0,"",python,selection_command +1179,1786030,"/fast/home/franz.srambical/jafar/utils/nn.py",5776,0,"",python,selection_command +1180,1786448,"/fast/home/franz.srambical/jafar/utils/nn.py",4844,0,"",python,selection_command +1181,1787096,"/fast/home/franz.srambical/jafar/utils/nn.py",4813,0,"",python,selection_command +1182,1787350,"/fast/home/franz.srambical/jafar/utils/nn.py",4778,0,"",python,selection_command +1183,1787367,"/fast/home/franz.srambical/jafar/utils/nn.py",4743,0,"",python,selection_command +1184,1787397,"/fast/home/franz.srambical/jafar/utils/nn.py",4733,0,"",python,selection_command +1185,1787433,"/fast/home/franz.srambical/jafar/utils/nn.py",4712,0,"",python,selection_command +1186,1787461,"/fast/home/franz.srambical/jafar/utils/nn.py",4690,0,"",python,selection_command +1187,1787496,"/fast/home/franz.srambical/jafar/utils/nn.py",4660,0,"",python,selection_command +1188,1787530,"/fast/home/franz.srambical/jafar/utils/nn.py",4625,0,"",python,selection_command +1189,1787562,"/fast/home/franz.srambical/jafar/utils/nn.py",4599,0,"",python,selection_command 
+1190,1787595,"/fast/home/franz.srambical/jafar/utils/nn.py",4567,0,"",python,selection_command +1191,1787629,"/fast/home/franz.srambical/jafar/utils/nn.py",4543,0,"",python,selection_command +1192,1787668,"/fast/home/franz.srambical/jafar/utils/nn.py",4519,0,"",python,selection_command +1193,1787698,"/fast/home/franz.srambical/jafar/utils/nn.py",4494,0,"",python,selection_command +1194,1787733,"/fast/home/franz.srambical/jafar/utils/nn.py",4472,0,"",python,selection_command +1195,1787764,"/fast/home/franz.srambical/jafar/utils/nn.py",4450,0,"",python,selection_command +1196,1787798,"/fast/home/franz.srambical/jafar/utils/nn.py",4426,0,"",python,selection_command +1197,1787841,"/fast/home/franz.srambical/jafar/utils/nn.py",4402,0,"",python,selection_command +1198,1787995,"/fast/home/franz.srambical/jafar/utils/nn.py",4388,0,"",python,selection_command +1199,1788129,"/fast/home/franz.srambical/jafar/utils/nn.py",4370,0,"",python,selection_command +1200,1788278,"/fast/home/franz.srambical/jafar/utils/nn.py",4337,0,"",python,selection_command +1201,1789144,"/fast/home/franz.srambical/jafar/utils/nn.py",4335,0,"",python,selection_command +1202,1790825,"/fast/home/franz.srambical/jafar/utils/nn.py",4368,0,"",python,selection_command +1203,1791093,"/fast/home/franz.srambical/jafar/utils/nn.py",4335,0,"",python,selection_command +1204,1798182,"utils/nn.py",0,0,"",python,tab +1205,1799887,"utils/nn.py",1110,0,"",python,selection_command +1206,1800520,"utils/nn.py",119,0,"",python,selection_command +1207,1802348,"utils/nn.py",4335,0,"",python,selection_command +1208,1808775,"models/dynamics_causal.py",0,0,"",python,tab +1209,1808775,"models/dynamics_causal.py",980,13,"STTransformer",python,selection_command +1210,1809047,"models/dynamics_causal.py",992,0,"",python,selection_command +1211,1813310,"models/dynamics_causal.py",953,0,"",python,selection_command +1212,1813404,"models/dynamics_causal.py",961,0,"",python,selection_command +1213,1813652,"models/dynamics_causal.py",965,0,"",python,selection_command +1214,1813886,"models/dynamics_causal.py",966,0,"",python,selection_command +1215,1814122,"models/dynamics_causal.py",978,0,"",python,selection_command +1216,1814337,"models/dynamics_causal.py",980,0,"",python,selection_command +1217,1814633,"models/dynamics_causal.py",978,0,"",python,selection_command +1218,1814806,"models/dynamics_causal.py",966,0,"",python,selection_command +1219,1814991,"models/dynamics_causal.py",965,0,"",python,selection_command +1220,1815294,"models/dynamics_causal.py",966,0,"",python,selection_command +1221,1815611,"models/dynamics_causal.py",2252,0,"",python,selection_command +1222,1819736,"models/dynamics_causal.py",2229,0,"",python,selection_command +1223,1819983,"models/dynamics_causal.py",2227,0,"",python,selection_command +1224,1820005,"models/dynamics_causal.py",2148,0,"",python,selection_command +1225,1820040,"models/dynamics_causal.py",2117,0,"",python,selection_command +1226,1820065,"models/dynamics_causal.py",2041,0,"",python,selection_command +1227,1820241,"models/dynamics_causal.py",2003,0,"",python,selection_command +1228,1820411,"models/dynamics_causal.py",1979,0,"",python,selection_command +1229,1820764,"models/dynamics_causal.py",1960,21,"",python,content +1230,1820768,"models/dynamics_causal.py",1968,0,"",python,selection_command +1231,1820938,"models/dynamics_causal.py",2006,0,"",python,selection_command +1232,1821190,"models/dynamics_causal.py",2082,0,"",python,selection_command 
+1233,1821222,"models/dynamics_causal.py",2113,0,"",python,selection_command +1234,1821461,"models/dynamics_causal.py",2193,0,"",python,selection_command +1235,1821630,"models/dynamics_causal.py",2208,0,"",python,selection_command +1236,1822421,"models/dynamics_causal.py",2208,0,"\n breakpoint()",python,content +1237,1822430,"models/dynamics_causal.py",2217,0,"",python,selection_command +1238,1824608,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ bash experiments/sample.sh ",,terminal_output +1239,1825550,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1240,1837105,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +1241,1845386,"TERMINAL",0,0,"2025-07-27 09:49:25.186297: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1242,1847011,"TERMINAL",0,0,"2025-07-27 09:49:26.742513: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1243,1850600,"TERMINAL",0,0,"2025-07-27 09:49:30.400543: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1244,1851343,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/models/dynamics_causal.py(77)__call__()\r\n-> logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n",,terminal_output +1245,1863913,"TERMINAL",0,0,"v",,terminal_output +1246,1863990,"TERMINAL",0,0,"i",,terminal_output +1247,1864074,"TERMINAL",0,0,"d",,terminal_output +1248,1864357,"TERMINAL",0,0,"_",,terminal_output +1249,1864513,"TERMINAL",0,0,"e",,terminal_output +1250,1864610,"TERMINAL",0,0,"m",,terminal_output +1251,1864812,"TERMINAL",0,0,"be",,terminal_output +1252,1864891,"TERMINAL",0,0,"d",,terminal_output +1253,1865059,"TERMINAL",0,0,"d",,terminal_output +1254,1865445,"TERMINAL",0,0," ",,terminal_output +1255,1865598,"TERMINAL",0,0,"_",,terminal_output +1256,1865880,"TERMINAL",0,0,"pad",,terminal_output +1257,1866065,"TERMINAL",0,0,"d",,terminal_output +1258,1866210,"TERMINAL",0,0,"e",,terminal_output +1259,1866312,"TERMINAL",0,0,"s",,terminal_output +1260,1866593,"TERMINAL",0,0,"d",,terminal_output +1261,1866705,"TERMINAL",0,0," ",,terminal_output +1262,1866864,"TERMINAL",0,0," ",,terminal_output +1263,1866945,"TERMINAL",0,0,"d",,terminal_output +1264,1867086,"TERMINAL",0,0,".",,terminal_output +1265,1867264,"TERMINAL",0,0,"s",,terminal_output +1266,1867479,"TERMINAL",0,0,"h",,terminal_output +1267,1867557,"TERMINAL",0,0,"a",,terminal_output +1268,1867660,"TERMINAL",0,0,"p",,terminal_output +1269,1867744,"TERMINAL",0,0,"e",,terminal_output +1270,1867877,"TERMINAL",0,0,"\r\n(Pdb) (1, 1, 921, 512)\r\n",,terminal_output +1271,1869838,"models/dynamics_causal.py",2229,0,"\n ",python,content +1272,1870022,"models/dynamics_causal.py",2238,0,"#",python,content +1273,1870022,"models/dynamics_causal.py",2239,0,"",python,selection_keyboard +1274,1871041,"models/dynamics_causal.py",2239,0," vid_embed_padded.shape (1, 2, 62, 8, 64)",python,content +1275,1871413,"models/dynamics_causal.py",2279,0,"",python,selection_command +1276,1871615,"models/dynamics_causal.py",2278,0,"",python,selection_command +1277,1871838,"models/dynamics_causal.py",2277,0,"",python,selection_command +1278,1871989,"models/dynamics_causal.py",2275,0,"",python,selection_command +1279,1872119,"models/dynamics_causal.py",2274,0,"",python,selection_command +1280,1872252,"models/dynamics_causal.py",2272,0,"",python,selection_command +1281,1872384,"models/dynamics_causal.py",2270,0,"",python,selection_command +1282,1872540,"models/dynamics_causal.py",2268,0,"",python,selection_command +1283,1872679,"models/dynamics_causal.py",2267,0,"",python,selection_command +1284,1873572,"models/dynamics_causal.py",2267,1,"1",python,content +1285,1874318,"models/dynamics_causal.py",2268,0,"",python,selection_command +1286,1874492,"models/dynamics_causal.py",2270,0,"",python,selection_command +1287,1875004,"models/dynamics_causal.py",2270,2,"",python,content +1288,1875978,"models/dynamics_causal.py",2270,0,"9",python,content +1289,1875979,"models/dynamics_causal.py",2271,0,"",python,selection_keyboard +1290,1876075,"models/dynamics_causal.py",2271,0,"2",python,content +1291,1876075,"models/dynamics_causal.py",2272,0,"",python,selection_keyboard +1292,1876120,"models/dynamics_causal.py",2272,0,"1",python,content +1293,1876120,"models/dynamics_causal.py",2273,0,"",python,selection_keyboard +1294,1876343,"models/dynamics_causal.py",2272,0,"",python,selection_command 
+1295,1876486,"models/dynamics_causal.py",2273,0,"",python,selection_command +1296,1876654,"models/dynamics_causal.py",2274,0,"",python,selection_command +1297,1876846,"models/dynamics_causal.py",2275,0,"",python,selection_command +1298,1877825,"models/dynamics_causal.py",2275,1,"8",python,selection_command +1299,1877839,"models/dynamics_causal.py",2275,2,"8,",python,selection_command +1300,1878023,"models/dynamics_causal.py",2275,5,"8, 64",python,selection_command +1301,1878201,"models/dynamics_causal.py",2275,5,"",python,content +1302,1878791,"models/dynamics_causal.py",2275,0,"5",python,content +1303,1878792,"models/dynamics_causal.py",2276,0,"",python,selection_keyboard +1304,1878913,"models/dynamics_causal.py",2276,0,"1",python,content +1305,1878914,"models/dynamics_causal.py",2277,0,"",python,selection_keyboard +1306,1878984,"models/dynamics_causal.py",2277,0,"2",python,content +1307,1878984,"models/dynamics_causal.py",2278,0,"",python,selection_keyboard +1308,1879177,"models/dynamics_causal.py",2277,0,"",python,selection_command +1309,1881107,"TERMINAL",0,0,"s",,terminal_output +1310,1882406,"TERMINAL",0,0,"\r\n(Pdb) --Call--\r\n> /fast/home/franz.srambical/jafar/utils/nn.py(222)__call__()\r\n-> def __call__(self, x: jax.Array) -> jax.Array:\r\n",,terminal_output +1311,1884494,"TERMINAL",0,0,"n",,terminal_output +1312,1885893,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(223)__call__()\r\n-> x = self.input_norm1(x)\r\n",,terminal_output +1313,1887181,"models/dynamics_causal.py",2327,0,"",python,selection_command +1314,1887705,"models/dynamics_causal.py",2314,0,"",python,selection_command +1315,1887850,"models/dynamics_causal.py",2313,0,"",python,selection_command +1316,1887993,"models/dynamics_causal.py",2302,0,"",python,selection_command +1317,1888126,"models/dynamics_causal.py",2301,0,"",python,selection_command +1318,1888498,"models/dynamics_causal.py",2302,0,"",python,selection_command +1319,1891890,"models/dynamics_causal.py",2252,0,"",python,selection_command +1320,1892119,"models/dynamics_causal.py",2228,0,"",python,selection_command +1321,1892150,"models/dynamics_causal.py",2208,0,"",python,selection_command +1322,1892182,"models/dynamics_causal.py",2206,0,"",python,selection_command +1323,1892215,"models/dynamics_causal.py",2127,0,"",python,selection_command +1324,1892253,"models/dynamics_causal.py",2096,0,"",python,selection_command +1325,1892282,"models/dynamics_causal.py",2020,0,"",python,selection_command +1326,1892316,"models/dynamics_causal.py",1982,0,"",python,selection_command +1327,1892350,"models/dynamics_causal.py",1922,0,"",python,selection_command +1328,1892518,"models/dynamics_causal.py",1898,0,"",python,selection_command +1329,1892889,"models/dynamics_causal.py",1877,23,"",python,content +1330,1892895,"models/dynamics_causal.py",1885,0,"",python,selection_command +1331,1893030,"models/dynamics_causal.py",1945,0,"",python,selection_command +1332,1893288,"models/dynamics_causal.py",1983,0,"",python,selection_command +1333,1893315,"models/dynamics_causal.py",2059,0,"",python,selection_command +1334,1893347,"models/dynamics_causal.py",2090,0,"",python,selection_command +1335,1893581,"models/dynamics_causal.py",2170,0,"",python,selection_command +1336,1893912,"models/dynamics_causal.py",2162,23,"",python,content +1337,1894616,"models/dynamics_causal.py",2163,0,"",python,selection_command +1338,1894788,"models/dynamics_causal.py",2184,0,"",python,selection_command 
+1339,1895637,"models/dynamics_causal.py",2234,0,"",python,selection_command +1340,1895782,"models/dynamics_causal.py",2242,0,"",python,selection_command +1341,1895932,"models/dynamics_causal.py",2249,0,"",python,selection_command +1342,1896129,"models/dynamics_causal.py",2251,0,"",python,selection_command +1343,1896388,"models/dynamics_causal.py",2255,0,"",python,selection_command +1344,1896404,"models/dynamics_causal.py",2256,0,"",python,selection_command +1345,1896499,"models/dynamics_causal.py",2267,0,"",python,selection_command +1346,1896937,"models/dynamics_causal.py",2256,0,"",python,selection_command +1347,1898434,"models/dynamics_causal.py",966,0,"",python,selection_command +1348,1900193,"models/dynamics_causal.py",2256,0,"",python,selection_command +1349,1901276,"models/dynamics_causal.py",966,0,"",python,selection_command +1350,1901576,"models/dynamics_causal.py",978,0,"",python,selection_command +1351,1901709,"models/dynamics_causal.py",980,0,"",python,selection_command +1352,1902080,"utils/nn.py",0,0,"",python,tab +1353,1902080,"utils/nn.py",4335,0,"",python,selection_command +1354,1902546,"utils/nn.py",6146,0,"",python,selection_command +1355,1904469,"utils/nn.py",6184,0,"",python,selection_command +1356,1904719,"utils/nn.py",6250,0,"",python,selection_command +1357,1904739,"utils/nn.py",6306,0,"",python,selection_command +1358,1904773,"utils/nn.py",6346,0,"",python,selection_command +1359,1904806,"utils/nn.py",6373,0,"",python,selection_command +1360,1904840,"utils/nn.py",6387,0,"",python,selection_command +1361,1904874,"utils/nn.py",6389,0,"",python,selection_command +1362,1904907,"utils/nn.py",6410,0,"",python,selection_command +1363,1904939,"utils/nn.py",6450,0,"",python,selection_command +1364,1904970,"utils/nn.py",6490,0,"",python,selection_command +1365,1905006,"utils/nn.py",6529,0,"",python,selection_command +1366,1905042,"utils/nn.py",6571,0,"",python,selection_command +1367,1905073,"utils/nn.py",6601,0,"",python,selection_command +1368,1905132,"utils/nn.py",6612,0,"",python,selection_command +1369,1905151,"utils/nn.py",6614,0,"",python,selection_command +1370,1905179,"utils/nn.py",6635,0,"",python,selection_command +1371,1905205,"utils/nn.py",6686,0,"",python,selection_command +1372,1905530,"utils/nn.py",6635,0,"",python,selection_command +1373,1907469,"utils/nn.py",6686,0,"",python,selection_command +1374,1909445,"TERMINAL",0,0,"x",,terminal_output +1375,1909647,"TERMINAL",0,0,".",,terminal_output +1376,1909862,"TERMINAL",0,0,"s",,terminal_output +1377,1909920,"TERMINAL",0,0,"h",,terminal_output +1378,1910111,"TERMINAL",0,0,"a",,terminal_output +1379,1910196,"TERMINAL",0,0,"p",,terminal_output +1380,1910504,"TERMINAL",0,0,"e",,terminal_output +1381,1910775,"TERMINAL",0,0,"\r\n(Pdb) (1, 1, 921, 512)\r\n",,terminal_output +1382,1912868,"utils/nn.py",6665,0,"\n ",python,content +1383,1913014,"utils/nn.py",6674,0,"#",python,content +1384,1913015,"utils/nn.py",6675,0,"",python,selection_keyboard +1385,1913036,"utils/nn.py",6675,0," ",python,content +1386,1913036,"utils/nn.py",6676,0,"",python,selection_keyboard +1387,1914342,"utils/nn.py",6676,0,"x.shape (1, 1, 921, 512)",python,content +1388,1914840,"utils/nn.py",6699,0,"",python,selection_command +1389,1916803,"TERMINAL",0,0,"n",,terminal_output +1390,1917129,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(224)__call__()\r\n-> x = self.input_dense(x)\r\n",,terminal_output +1391,1917810,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(225)__call__()\r\n-> x = 
self.input_norm2(x)\r\n",,terminal_output +1392,1921666,"TERMINAL",0,0,"x",,terminal_output +1393,1921809,"TERMINAL",0,0,".",,terminal_output +1394,1921938,"TERMINAL",0,0,"s",,terminal_output +1395,1922043,"TERMINAL",0,0,"h",,terminal_output +1396,1922234,"TERMINAL",0,0,"ap",,terminal_output +1397,1922288,"TERMINAL",0,0,"e",,terminal_output +1398,1922462,"TERMINAL",0,0,"\r\n(Pdb) (1, 1, 921, 512)\r\n",,terminal_output +1399,1923613,"TERMINAL",0,0,"n",,terminal_output +1400,1923885,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(227)__call__()\r\n-> for block in self.blocks:\r\n",,terminal_output +1401,1924942,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(229)__call__()\r\n-> x = block(x)\r\n",,terminal_output +1402,1925907,"TERMINAL",0,0,"x",,terminal_output +1403,1925983,"TERMINAL",0,0,".",,terminal_output +1404,1926136,"TERMINAL",0,0,"s",,terminal_output +1405,1926333,"TERMINAL",0,0,"h",,terminal_output +1406,1926434,"TERMINAL",0,0,"ap",,terminal_output +1407,1926503,"TERMINAL",0,0,"e",,terminal_output +1408,1926721,"TERMINAL",0,0,"\r\n(Pdb) (1, 1, 921, 512)\r\n",,terminal_output +1409,1928017,"utils/nn.py",6731,0,"",python,selection_command +1410,1928232,"utils/nn.py",6763,0,"",python,selection_command +1411,1928308,"utils/nn.py",6795,0,"",python,selection_command +1412,1928453,"utils/nn.py",6797,0,"",python,selection_command +1413,1928595,"utils/nn.py",6830,0,"",python,selection_command +1414,1928757,"utils/nn.py",6865,0,"",python,selection_command +1415,1929114,"utils/nn.py",6863,0,"",python,selection_command +1416,1929280,"utils/nn.py",6861,0,"",python,selection_command +1417,1929403,"utils/nn.py",6859,0,"",python,selection_command +1418,1929555,"utils/nn.py",6858,0,"",python,selection_command +1419,1930039,"utils/nn.py",6858,11,"",python,content +1420,1930475,"utils/nn.py",6858,0,",",python,content +1421,1930475,"utils/nn.py",6859,0,"",python,selection_keyboard +1422,1930753,"utils/nn.py",6858,1,"",python,content +1423,1930890,"utils/nn.py",6858,0,"1",python,content +1424,1930891,"utils/nn.py",6859,0,"",python,selection_keyboard +1425,1930988,"utils/nn.py",6859,0,",",python,content +1426,1930989,"utils/nn.py",6860,0,"",python,selection_keyboard +1427,1931378,"utils/nn.py",6860,0," ",python,content +1428,1931378,"utils/nn.py",6861,0,"",python,selection_keyboard +1429,1932424,"utils/nn.py",6861,0,"921, 512)",python,content +1430,1932605,"utils/nn.py",6869,0,"",python,selection_command +1431,1935749,"TERMINAL",0,0,"s",,terminal_output +1432,1935949,"TERMINAL",0,0,"\r\n(Pdb) --Call--\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(70)resolve_kwargs_wrapper()\r\n-> @functools.wraps(f)\r\n",,terminal_output +1433,1938910,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(72)resolve_kwargs_wrapper()\r\n-> args = resolve_kwargs(f, args, kwargs)\r\n",,terminal_output +1434,1941123,"TERMINAL",0,0,"\r\n(Pdb) --Call--\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(62)resolve_kwargs()\r\n-> def resolve_kwargs(\r\n",,terminal_output +1435,1943214,"TERMINAL",0,0,"n",,terminal_output +1436,1943307,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(67)resolve_kwargs()\r\n-> if isinstance(fun, Missing):\r\n",,terminal_output +1437,1945009,"TERMINAL",0,0,"\r\n(Pdb) > 
/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(79)resolve_kwargs()\r\n-> if isinstance(args, Missing):\r\n",,terminal_output +1438,1945342,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(81)resolve_kwargs()\r\n-> if isinstance(kwargs, Missing):\r\n",,terminal_output +1439,1945883,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(84)resolve_kwargs()\r\n-> if isinstance(fun, functools.partial):\r\n",,terminal_output +1440,1946254,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(87)resolve_kwargs()\r\n-> ba = inspect.signature(fun).bind(*args, **kwargs)\r\n",,terminal_output +1441,1946778,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(88)resolve_kwargs()\r\n-> ba.apply_defaults()\r\n",,terminal_output +1442,1947212,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(89)resolve_kwargs()\r\n-> if ba.kwargs:\r\n",,terminal_output +1443,1947629,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(92)resolve_kwargs()\r\n-> return ba.args\r\n",,terminal_output +1444,1948156,"TERMINAL",0,0,"\r\n(Pdb) --Return--\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(92)resolve_kwargs()->(STBlock( # Pa...1567400>\r\n )\r\n), Traced)\r\n-> return ba.args\r\n",,terminal_output +1445,1950579,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(73)resolve_kwargs_wrapper()\r\n-> return f(*args)\r\n",,terminal_output +1446,1952263,"TERMINAL",0,0,"s",,terminal_output +1447,1952459,"TERMINAL",0,0,"\r\n(Pdb) --Call--\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py(2048)update_context_manager_wrapper()\r\n-> @functools.wraps(f)\r\n",,terminal_output +1448,1956201,"TERMINAL",0,0,"n",,terminal_output +1449,1956350,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py(2050)update_context_manager_wrapper()\r\n-> with self:\r\n",,terminal_output +1450,1957497,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py(2051)update_context_manager_wrapper()\r\n-> return f(*args, **kwargs)\r\n",,terminal_output +1451,1959274,"TERMINAL",0,0,"s\r\n(Pdb) --Call--\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py(2048)update_context_manager_wrapper()\r\n-> @functools.wraps(f)\r\n",,terminal_output +1452,1961156,"TERMINAL",0,0,"n\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py(2050)update_context_manager_wrapper()\r\n-> with self:\r\n",,terminal_output +1453,1962152,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py(2051)update_context_manager_wrapper()\r\n-> return f(*args, **kwargs)\r\n",,terminal_output +1454,1963013,"TERMINAL",0,0,"\r\n",,terminal_output +1455,1963139,"TERMINAL",0,0,"(Pdb) ValueError: jnp.pad: pad_width with nd=5 has unsupported shape (2, 2). 
Valid shapes are (5, 2), (1, 2), (2,), (1,), or ().\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py(2051)update_context_manager_wrapper()\r\n-> return f(*args, **kwargs)\r\n",,terminal_output +1456,1964128,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py(2050)update_context_manager_wrapper()\r\n-> with self:\r\n",,terminal_output +1457,1965658,"TERMINAL",0,0,"\r\n(Pdb) --Return--\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py(2051)update_context_manager_wrapper()->None\r\n-> return f(*args, **kwargs)\r\n",,terminal_output +1458,1966323,"TERMINAL",0,0,"\r\n(Pdb) ValueError: jnp.pad: pad_width with nd=5 has unsupported shape (2, 2). Valid shapes are (5, 2), (1, 2), (2,), (1,), or ().\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py(2051)update_context_manager_wrapper()\r\n-> return f(*args, **kwargs)\r\n",,terminal_output +1459,1966900,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py(2050)update_context_manager_wrapper()\r\n-> with self:\r\n",,terminal_output +1460,1968194,"TERMINAL",0,0,"\r\n(Pdb) --Return--\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py(2051)update_context_manager_wrapper()->None\r\n-> return f(*args, **kwargs)\r\n",,terminal_output +1461,1968779,"TERMINAL",0,0,"\r\n(Pdb) ValueError: jnp.pad: pad_width with nd=5 has unsupported shape (2, 2). Valid shapes are (5, 2), (1, 2), (2,), (1,), or ().\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(73)resolve_kwargs_wrapper()\r\n-> return f(*args)\r\n",,terminal_output +1462,1969383,"TERMINAL",0,0,"\r\n(Pdb) --Return--\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(73)resolve_kwargs_wrapper()->None\r\n-> return f(*args)\r\n",,terminal_output +1463,1969901,"TERMINAL",0,0,"\r\n(Pdb) ValueError: jnp.pad: pad_width with nd=5 has unsupported shape (2, 2). 
Valid shapes are (5, 2), (1, 2), (2,), (1,), or ().\r\n> /fast/home/franz.srambical/jafar/utils/nn.py(229)__call__()\r\n-> # x.shape (1, 1, 921, 512)\r\n",,terminal_output +1464,1975970,"TERMINAL",0,0,"l",,terminal_output +1465,1976203,"TERMINAL",0,0,"\r\n(Pdb) 224 \t x = self.input_norm1(x)\r\n225 \t x = self.input_dense(x)\r\n226 \t x = self.input_norm2(x)\r\n227 \t\r\n228 \t for block in self.blocks:\r\n229 ->\t # x.shape (1, 1, 921, 512)\r\n230 \t x = block(x)\r\n231 \t\r\n232 \t x = self.output_dense(x)\r\n233 \t return x # (B, T, E)\r\n234 \t\r\n",,terminal_output +1466,1979701,"TERMINAL",0,0,"n",,terminal_output +1467,1979995,"TERMINAL",0,0,"\r\n(Pdb) --Return--\r\n> /fast/home/franz.srambical/jafar/utils/nn.py(229)__call__()->None\r\n-> # x.shape (1, 1, 921, 512)\r\n",,terminal_output +1468,1999466,"utils/nn.py",6894,0,"",python,selection_command +1469,1999648,"utils/nn.py",6893,0,"",python,selection_command +1470,1999823,"utils/nn.py",6892,0,"",python,selection_command +1471,1999997,"utils/nn.py",6887,0,"",python,selection_command +1472,2000527,"utils/nn.py",6810,0,"",python,selection_command +1473,2001548,"utils/nn.py",6797,0,"",python,selection_command +1474,2001861,"utils/nn.py",6810,0,"",python,selection_command +1475,2002186,"utils/nn.py",6816,0,"",python,selection_command +1476,2002342,"utils/nn.py",6819,0,"",python,selection_command +1477,2002591,"utils/nn.py",6823,0,"",python,selection_command +1478,2002614,"utils/nn.py",6824,0,"",python,selection_command +1479,2002642,"utils/nn.py",6830,0,"",python,selection_command +1480,2003033,"utils/nn.py",6824,0,"",python,selection_command +1481,2003385,"utils/nn.py",5781,0,"",python,selection_command +1482,2003565,"utils/nn.py",5787,0,"",python,selection_command +1483,2003757,"utils/nn.py",5789,0,"",python,selection_command +1484,2003883,"utils/nn.py",5793,0,"",python,selection_command +1485,2004072,"utils/nn.py",5794,0,"",python,selection_command +1486,2004456,"utils/nn.py",929,0,"",python,selection_command +1487,2005584,"utils/nn.py",956,0,"",python,selection_command +1488,2005833,"utils/nn.py",974,0,"",python,selection_command +1489,2005855,"utils/nn.py",988,0,"",python,selection_command +1490,2005890,"utils/nn.py",1006,0,"",python,selection_command +1491,2005921,"utils/nn.py",1028,0,"",python,selection_command +1492,2005957,"utils/nn.py",1052,0,"",python,selection_command +1493,2005989,"utils/nn.py",1076,0,"",python,selection_command +1494,2006022,"utils/nn.py",1108,0,"",python,selection_command +1495,2006056,"utils/nn.py",1134,0,"",python,selection_command +1496,2006088,"utils/nn.py",1169,0,"",python,selection_command +1497,2006120,"utils/nn.py",1199,0,"",python,selection_command +1498,2006154,"utils/nn.py",1221,0,"",python,selection_command +1499,2006188,"utils/nn.py",1244,0,"",python,selection_command +1500,2006281,"utils/nn.py",1252,0,"",python,selection_command +1501,2006282,"utils/nn.py",1275,0,"",python,selection_command +1502,2006287,"utils/nn.py",1306,0,"",python,selection_command +1503,2006321,"utils/nn.py",1341,0,"",python,selection_command +1504,2006355,"utils/nn.py",1372,0,"",python,selection_command +1505,2006388,"utils/nn.py",1411,0,"",python,selection_command +1506,2006422,"utils/nn.py",1438,0,"",python,selection_command +1507,2006456,"utils/nn.py",1493,0,"",python,selection_command +1508,2006670,"utils/nn.py",1538,0,"",python,selection_command +1509,2006928,"utils/nn.py",1561,0,"",python,selection_command +1510,2006944,"utils/nn.py",1568,0,"",python,selection_command 
+1511,2006974,"utils/nn.py",1628,0,"",python,selection_command +1512,2007009,"utils/nn.py",1671,0,"",python,selection_command +1513,2007044,"utils/nn.py",1706,0,"",python,selection_command +1514,2007075,"utils/nn.py",1748,0,"",python,selection_command +1515,2007112,"utils/nn.py",1778,0,"",python,selection_command +1516,2007144,"utils/nn.py",1801,0,"",python,selection_command +1517,2007183,"utils/nn.py",1811,0,"",python,selection_command +1518,2007231,"utils/nn.py",1868,0,"",python,selection_command +1519,2007246,"utils/nn.py",1906,0,"",python,selection_command +1520,2007279,"utils/nn.py",1940,0,"",python,selection_command +1521,2007317,"utils/nn.py",1975,0,"",python,selection_command +1522,2007351,"utils/nn.py",2014,0,"",python,selection_command +1523,2007385,"utils/nn.py",2056,0,"",python,selection_command +1524,2007417,"utils/nn.py",2086,0,"",python,selection_command +1525,2007450,"utils/nn.py",2139,0,"",python,selection_command +1526,2007484,"utils/nn.py",2181,0,"",python,selection_command +1527,2007517,"utils/nn.py",2228,0,"",python,selection_command +1528,2007553,"utils/nn.py",2243,0,"",python,selection_command +1529,2007585,"utils/nn.py",2266,0,"",python,selection_command +1530,2007618,"utils/nn.py",2300,0,"",python,selection_command +1531,2007652,"utils/nn.py",2326,0,"",python,selection_command +1532,2007685,"utils/nn.py",2330,0,"",python,selection_command +1533,2007721,"utils/nn.py",2337,0,"",python,selection_command +1534,2007753,"utils/nn.py",2398,0,"",python,selection_command +1535,2007785,"utils/nn.py",2442,0,"",python,selection_command +1536,2007819,"utils/nn.py",2477,0,"",python,selection_command +1537,2007852,"utils/nn.py",2519,0,"",python,selection_command +1538,2008202,"utils/nn.py",2549,0,"",python,selection_command +1539,2008455,"utils/nn.py",2572,0,"",python,selection_command +1540,2008476,"utils/nn.py",2582,0,"",python,selection_command +1541,2008503,"utils/nn.py",2640,0,"",python,selection_command +1542,2008535,"utils/nn.py",2678,0,"",python,selection_command +1543,2008568,"utils/nn.py",2712,0,"",python,selection_command +1544,2008602,"utils/nn.py",2747,0,"",python,selection_command +1545,2008635,"utils/nn.py",2786,0,"",python,selection_command +1546,2008667,"utils/nn.py",2828,0,"",python,selection_command +1547,2008704,"utils/nn.py",2858,0,"",python,selection_command +1548,2008737,"utils/nn.py",2911,0,"",python,selection_command +1549,2008769,"utils/nn.py",2968,0,"",python,selection_command +1550,2008804,"utils/nn.py",2983,0,"",python,selection_command +1551,2008836,"utils/nn.py",3006,0,"",python,selection_command +1552,2008869,"utils/nn.py",3038,0,"",python,selection_command +1553,2008902,"utils/nn.py",3042,0,"",python,selection_command +1554,2008935,"utils/nn.py",3049,0,"",python,selection_command +1555,2008970,"utils/nn.py",3088,0,"",python,selection_command +1556,2009017,"utils/nn.py",3123,0,"",python,selection_command +1557,2009048,"utils/nn.py",3165,0,"",python,selection_command +1558,2009068,"utils/nn.py",3195,0,"",python,selection_command +1559,2009100,"utils/nn.py",3218,0,"",python,selection_command +1560,2009135,"utils/nn.py",3228,0,"",python,selection_command +1561,2009167,"utils/nn.py",3266,0,"",python,selection_command +1562,2009202,"utils/nn.py",3300,0,"",python,selection_command +1563,2009236,"utils/nn.py",3339,0,"",python,selection_command +1564,2013759,"utils/nn.py",3381,0,"",python,selection_command +1565,2014009,"utils/nn.py",3411,0,"",python,selection_command +1566,2014030,"utils/nn.py",3434,0,"",python,selection_command 
+1567,2014064,"utils/nn.py",3444,0,"",python,selection_command +1568,2014095,"utils/nn.py",3482,0,"",python,selection_command +1569,2014517,"utils/nn.py",3520,0,"",python,selection_command +1570,2014770,"utils/nn.py",3555,0,"",python,selection_command +1571,2014783,"utils/nn.py",3597,0,"",python,selection_command +1572,2014817,"utils/nn.py",3627,0,"",python,selection_command +1573,2014851,"utils/nn.py",3650,0,"",python,selection_command +1574,2014882,"utils/nn.py",3654,0,"",python,selection_command +1575,2014917,"utils/nn.py",3661,0,"",python,selection_command +1576,2014952,"utils/nn.py",3676,0,"",python,selection_command +1577,2014984,"utils/nn.py",3727,0,"",python,selection_command +1578,2015018,"utils/nn.py",3763,0,"",python,selection_command +1579,2015050,"utils/nn.py",3799,0,"",python,selection_command +1580,2015084,"utils/nn.py",3832,0,"",python,selection_command +1581,2015117,"utils/nn.py",3868,0,"",python,selection_command +1582,2015150,"utils/nn.py",3906,0,"",python,selection_command +1583,2015185,"utils/nn.py",3918,0,"",python,selection_command +1584,2015218,"utils/nn.py",3925,0,"",python,selection_command +1585,2015252,"utils/nn.py",3962,0,"",python,selection_command +1586,2015293,"utils/nn.py",3991,0,"",python,selection_command +1587,2015326,"utils/nn.py",4028,0,"",python,selection_command +1588,2015358,"utils/nn.py",4062,0,"",python,selection_command +1589,2015391,"utils/nn.py",4101,0,"",python,selection_command +1590,2015424,"utils/nn.py",4119,0,"",python,selection_command +1591,2016944,"utils/nn.py",4101,0,"",python,selection_command +1592,2017197,"utils/nn.py",4062,0,"",python,selection_command +1593,2017218,"utils/nn.py",4028,0,"",python,selection_command +1594,2017258,"utils/nn.py",3991,0,"",python,selection_command +1595,2017279,"utils/nn.py",3962,0,"",python,selection_command +1596,2017310,"utils/nn.py",3925,0,"",python,selection_command +1597,2017344,"utils/nn.py",3918,0,"",python,selection_command +1598,2017383,"utils/nn.py",3906,0,"",python,selection_command +1599,2017440,"utils/nn.py",3868,0,"",python,selection_command +1600,2024160,"utils/nn.py",3861,0,"\n ",python,content +1601,2024495,"utils/nn.py",3870,0,"i",python,content +1602,2024496,"utils/nn.py",3871,0,"",python,selection_keyboard +1603,2024524,"utils/nn.py",3871,0,"f",python,content +1604,2024524,"utils/nn.py",3872,0,"",python,selection_keyboard +1605,2024590,"utils/nn.py",3872,0," ",python,content +1606,2024590,"utils/nn.py",3873,0,"",python,selection_keyboard +1607,2027918,"utils/nn.py",3873,0,"x",python,content +1608,2027918,"utils/nn.py",3874,0,"",python,selection_keyboard +1609,2031864,"utils/nn.py",3874,0,".",python,content +1610,2031864,"utils/nn.py",3875,0,"",python,selection_keyboard +1611,2032042,"utils/nn.py",3875,0,"s",python,content +1612,2032043,"utils/nn.py",3876,0,"",python,selection_keyboard +1613,2032134,"utils/nn.py",3876,0,"h",python,content +1614,2032134,"utils/nn.py",3877,0,"",python,selection_keyboard +1615,2032199,"utils/nn.py",3877,0,"a",python,content +1616,2032199,"utils/nn.py",3878,0,"",python,selection_keyboard +1617,2032340,"utils/nn.py",3878,0,"p",python,content +1618,2032341,"utils/nn.py",3879,0,"",python,selection_keyboard +1619,2032419,"utils/nn.py",3879,0,"e",python,content +1620,2032420,"utils/nn.py",3880,0,"",python,selection_keyboard +1621,2032558,"utils/nn.py",3880,0," ",python,content +1622,2032559,"utils/nn.py",3881,0,"",python,selection_keyboard +1623,2032716,"utils/nn.py",3881,0,"=",python,content +1624,2032717,"utils/nn.py",3882,0,"",python,selection_keyboard 
+1625,2032833,"utils/nn.py",3882,0,"=",python,content +1626,2032833,"utils/nn.py",3883,0,"",python,selection_keyboard +1627,2034073,"utils/nn.py",3883,0," ",python,content +1628,2034073,"utils/nn.py",3884,0,"",python,selection_keyboard +1629,2034360,"utils/nn.py",3884,0,"(1, 1, 921, 512):",python,content +1630,2034611,"utils/nn.py",3900,0,"",python,selection_command +1631,2036428,"utils/nn.py",3901,0,"",python,selection_command +1632,2037304,"utils/nn.py",3901,0,"\n ",python,content +1633,2038207,"utils/nn.py",3914,0,"breakpoint()",python,content +1634,2038460,"utils/nn.py",3925,0,"",python,selection_command +1635,2038978,"utils/nn.py",3885,0,"",python,selection_command +1636,2039425,"utils/nn.py",3925,0,"",python,selection_command +1637,2039641,"utils/nn.py",3885,0,"",python,selection_command +1638,2040779,"utils/nn.py",3925,0,"",python,selection_command +1639,2040951,"utils/nn.py",3885,0,"",python,selection_command +1640,2042806,"TERMINAL",0,0,"q",,terminal_output +1641,2042994,"TERMINAL",0,0,"uit",,terminal_output +1642,2043273,"TERMINAL",0,0,"()",,terminal_output +1643,2043502,"TERMINAL",0,0,"\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 77, in __call__\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 229, in __call__\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 94, in trace_dispatch\r\n return self.dispatch_return(frame, arg)\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 156, in dispatch_return\r\n if self.quitting: raise BdbQuit\r\nbdb.BdbQuit\r\n",,terminal_output +1644,2044330,"TERMINAL",0,0,"(Pdb) ^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=13995.7 task 0: running\r\n",,terminal_output +1645,2044556,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +1646,2044876,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +1647,2045244,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +1648,2045498,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1649,2048379,"utils/nn.py",3849,0,"",python,selection_command +1650,2057094,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. 
Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +1651,2065257,"TERMINAL",0,0,"2025-07-27 09:53:05.049691: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1652,2066789,"TERMINAL",0,0,"2025-07-27 09:53:06.535645: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1653,2070332,"TERMINAL",0,0,"2025-07-27 09:53:10.105608: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1654,2070930,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/models/dynamics_causal.py(76)__call__()\r\n-> logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n",,terminal_output +1655,2081781,"TERMINAL",0,0,"c",,terminal_output +1656,2081887,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(128)__call__()\r\n-> z = self.spatial_attention(z)\r\n",,terminal_output +1657,2101355,"TERMINAL",0,0,"x.",,terminal_output +1658,2101693,"TERMINAL",0,0,"s",,terminal_output +1659,2101798,"TERMINAL",0,0,"h",,terminal_output +1660,2101902,"TERMINAL",0,0,"a",,terminal_output +1661,2102016,"TERMINAL",0,0,"p",,terminal_output +1662,2102117,"TERMINAL",0,0,"e",,terminal_output +1663,2105758,"TERMINAL",0,0,"\r\n(Pdb) (1, 1, 921, 512)\r\n",,terminal_output +1664,2111337,"TERMINAL",0,0,"z",,terminal_output +1665,2111488,"TERMINAL",0,0,".",,terminal_output +1666,2111562,"TERMINAL",0,0,"s",,terminal_output +1667,2111668,"TERMINAL",0,0,"h",,terminal_output +1668,2111747,"TERMINAL",0,0,"a",,terminal_output +1669,2111819,"TERMINAL",0,0,"p",,terminal_output +1670,2111940,"TERMINAL",0,0,"e",,terminal_output +1671,2112031,"TERMINAL",0,0,"\r\n(Pdb) (1, 1, 921, 512)\r\n",,terminal_output +1672,2114316,"TERMINAL",0,0,"1",,terminal_output +1673,2115151,"TERMINAL",0,0," ",,terminal_output +1674,2117590,"utils/nn.py",3861,0,"\n # z.shape (1, 2, 61, 8, 64)",python,content +1675,2117595,"utils/nn.py",3870,0,"",python,selection_command +1676,2117737,"utils/nn.py",3872,0,"",python,selection_command +1677,2117888,"utils/nn.py",3873,0,"",python,selection_command +1678,2118135,"utils/nn.py",3874,0,"",python,selection_command +1679,2118238,"utils/nn.py",3880,0,"",python,selection_command +1680,2118573,"utils/nn.py",3881,0,"",python,selection_command +1681,2118717,"utils/nn.py",3882,0,"",python,selection_command +1682,2118914,"utils/nn.py",3884,0,"",python,selection_command +1683,2119349,"utils/nn.py",3884,1,"1",python,content +1684,2119624,"utils/nn.py",3885,0,"",python,selection_command +1685,2119791,"utils/nn.py",3887,0,"",python,selection_command +1686,2120601,"utils/nn.py",3887,1,"6",python,selection_command +1687,2120653,"utils/nn.py",3887,2,"61",python,selection_command 
+1688,2120793,"utils/nn.py",3887,3,"61,",python,selection_command +1689,2120994,"utils/nn.py",3887,5,"61, 8",python,selection_command +1690,2121160,"utils/nn.py",3887,6,"61, 8,",python,selection_command +1691,2121345,"utils/nn.py",3887,9,"61, 8, 64",python,selection_command +1692,2121968,"utils/nn.py",3887,9,"",python,content +1693,2122443,"utils/nn.py",3887,0,"9",python,content +1694,2122443,"utils/nn.py",3888,0,"",python,selection_keyboard +1695,2122525,"utils/nn.py",3888,0,"2",python,content +1696,2122526,"utils/nn.py",3889,0,"",python,selection_keyboard +1697,2122603,"utils/nn.py",3889,0,"1",python,content +1698,2122603,"utils/nn.py",3890,0,"",python,selection_keyboard +1699,2122699,"utils/nn.py",3890,0,",",python,content +1700,2122700,"utils/nn.py",3891,0,"",python,selection_keyboard +1701,2123462,"utils/nn.py",3891,0," 512",python,content +1702,2123656,"utils/nn.py",3894,0,"",python,selection_command +1703,2124428,"utils/nn.py",3862,0,"",python,selection_command +1704,2128364,"utils/nn.py",3870,0,"",python,selection_command +1705,2128666,"utils/nn.py",3834,0,"",python,selection_command +1706,2129058,"utils/nn.py",3826,36,"",python,content +1707,2129088,"utils/nn.py",3834,0,"",python,selection_command +1708,2141052,"utils/nn.py",3826,35,"",python,content +1709,2141081,"utils/nn.py",3834,0,"",python,selection_command +1710,2141187,"utils/nn.py",3874,0,"",python,selection_command +1711,2141343,"utils/nn.py",3899,0,"",python,selection_command +1712,2141569,"utils/nn.py",3874,0,"",python,selection_command +1713,2141883,"utils/nn.py",3890,0,"\n # z.shape (1, 1, 921, 512)",python,content +1714,2141886,"utils/nn.py",3899,0,"",python,selection_command +1715,2143107,"utils/nn.py",3934,0,"",python,selection_command +1716,2143237,"utils/nn.py",3936,0,"",python,selection_command +1717,2143407,"utils/nn.py",3938,0,"",python,selection_command +1718,2143579,"utils/nn.py",3942,0,"",python,selection_command +1719,2143707,"utils/nn.py",3943,0,"",python,selection_command +1720,2150365,"utils/nn.py",1818,0,"",python,selection_command +1721,2154608,"utils/nn.py",2864,0,"",python,selection_command +1722,2155966,"utils/nn.py",3729,0,"",python,selection_command +1723,2159749,"TERMINAL",0,0,"s",,terminal_output +1724,2160050,"TERMINAL",0,0,"\r\n(Pdb) --Call--\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(453)__call__()\r\n-> def __call__(\r\n",,terminal_output +1725,2161746,"TERMINAL",0,0,"n",,terminal_output +1726,2162038,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(498)__call__()\r\n-> if rngs is None:\r\n",,terminal_output +1727,2164644,"utils/nn.py",3678,0,"",python,selection_command +1728,2165596,"utils/nn.py",3729,0,"",python,selection_command +1729,2165742,"utils/nn.py",3765,0,"",python,selection_command +1730,2166091,"utils/nn.py",3801,0,"",python,selection_command +1731,2166415,"utils/nn.py",3834,0,"",python,selection_command +1732,2166669,"utils/nn.py",3874,0,"",python,selection_command +1733,2166692,"utils/nn.py",3899,0,"",python,selection_command +1734,2166724,"utils/nn.py",3934,0,"",python,selection_command +1735,2167111,"utils/nn.py",3972,0,"",python,selection_command +1736,2167364,"utils/nn.py",3982,0,"",python,selection_command +1737,2167393,"utils/nn.py",3991,0,"",python,selection_command +1738,2167423,"utils/nn.py",4028,0,"",python,selection_command +1739,2167457,"utils/nn.py",4057,0,"",python,selection_command 
+1740,2167491,"utils/nn.py",4094,0,"",python,selection_command +1741,2167525,"utils/nn.py",4128,0,"",python,selection_command +1742,2167598,"utils/nn.py",4094,0,"",python,selection_command +1743,2167857,"utils/nn.py",4057,0,"",python,selection_command +1744,2167880,"utils/nn.py",4028,0,"",python,selection_command +1745,2167918,"utils/nn.py",3991,0,"",python,selection_command +1746,2167951,"utils/nn.py",3982,0,"",python,selection_command +1747,2168078,"utils/nn.py",3972,0,"",python,selection_command +1748,2168208,"utils/nn.py",3934,0,"",python,selection_command +1749,2168670,"utils/nn.py",3936,0,"",python,selection_command +1750,2168801,"utils/nn.py",3938,0,"",python,selection_command +1751,2168952,"utils/nn.py",3942,0,"",python,selection_command +1752,2169118,"utils/nn.py",3943,0,"",python,selection_command +1753,2169246,"utils/nn.py",3960,0,"",python,selection_command +1754,2169544,"utils/nn.py",3943,0,"",python,selection_command +1755,2169790,"utils/nn.py",1818,0,"",python,selection_command +1756,2170365,"utils/nn.py",1836,0,"",python,selection_command +1757,2170532,"utils/nn.py",1838,0,"",python,selection_command +1758,2170691,"utils/nn.py",1841,0,"",python,selection_command +1759,2170853,"utils/nn.py",1842,0,"",python,selection_command +1760,2171258,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +1761,2171258,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9464,0,"",python,selection_command +1762,2171810,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",12332,0,"",python,selection_command +1763,2172173,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14092,0,"",python,selection_command +1764,2172445,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16150,0,"",python,selection_command +1765,2172726,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18670,0,"",python,selection_command +1766,2177431,"TERMINAL",0,0,"n",,terminal_output +1767,2177719,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(499)__call__()\r\n-> rngs = self.rngs\r\n",,terminal_output +1768,2178110,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(501)__call__()\r\n-> if inputs_k is None:\r\n",,terminal_output +1769,2178288,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(502)__call__()\r\n-> if inputs_v is not None:\r\n",,terminal_output +1770,2178674,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(508)__call__()\r\n-> inputs_k = inputs_q\r\n",,terminal_output +1771,2178919,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(509)__call__()\r\n-> if inputs_v is None:\r\n",,terminal_output +1772,2179402,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(510)__call__()\r\n-> inputs_v = inputs_k\r\n",,terminal_output +1773,2179770,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(512)__call__()\r\n-> if inputs_q.shape[-1] != self.in_features:\r\n",,terminal_output +1774,2180187,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(518)__call__()\r\n-> query = 
self.query(inputs_q)\r\n",,terminal_output +1775,2181434,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(519)__call__()\r\n-> key = self.key(inputs_k)\r\n",,terminal_output +1776,2182087,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(520)__call__()\r\n-> value = self.value(inputs_v)\r\n",,terminal_output +1777,2182557,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(522)__call__()\r\n-> if self.normalize_qk:\r\n",,terminal_output +1778,2182958,"TERMINAL",0,0,"q",,terminal_output +1779,2183021,"TERMINAL",0,0,"u",,terminal_output +1780,2183133,"TERMINAL",0,0,"e",,terminal_output +1781,2183233,"TERMINAL",0,0,"r",,terminal_output +1782,2183871,"TERMINAL",0,0,"y",,terminal_output +1783,2184088,"TERMINAL",0,0,"_",,terminal_output +1784,2184455,"TERMINAL",0,0," ",,terminal_output +1785,2184641,"TERMINAL",0,0,".",,terminal_output +1786,2184904,"TERMINAL",0,0,"s",,terminal_output +1787,2185003,"TERMINAL",0,0,"h",,terminal_output +1788,2185087,"TERMINAL",0,0,"a",,terminal_output +1789,2185163,"TERMINAL",0,0,"p",,terminal_output +1790,2185269,"TERMINAL",0,0,"e",,terminal_output +1791,2185400,"TERMINAL",0,0,"\r\n(Pdb) (1, 1, 921, 8, 64)\r\n",,terminal_output +1792,2189993,"TERMINAL",0,0,"k",,terminal_output +1793,2190106,"TERMINAL",0,0,"e",,terminal_output +1794,2190211,"TERMINAL",0,0,"y",,terminal_output +1795,2190443,"TERMINAL",0,0,".",,terminal_output +1796,2190578,"TERMINAL",0,0,"s",,terminal_output +1797,2190807,"TERMINAL",0,0,"h",,terminal_output +1798,2191006,"TERMINAL",0,0,"a",,terminal_output +1799,2191139,"TERMINAL",0,0,"p",,terminal_output +1800,2191221,"TERMINAL",0,0,"e",,terminal_output +1801,2191426,"TERMINAL",0,0,"\r\n(Pdb) (1, 1, 921, 8, 64)\r\n",,terminal_output +1802,2193458,"TERMINAL",0,0,"v",,terminal_output +1803,2193545,"TERMINAL",0,0,"a",,terminal_output +1804,2193704,"TERMINAL",0,0,"lu",,terminal_output +1805,2193773,"TERMINAL",0,0,"e",,terminal_output +1806,2193978,"TERMINAL",0,0,".",,terminal_output +1807,2194227,"TERMINAL",0,0,"s",,terminal_output +1808,2194325,"TERMINAL",0,0,"h",,terminal_output +1809,2194442,"TERMINAL",0,0,"a",,terminal_output +1810,2194513,"TERMINAL",0,0,"p",,terminal_output +1811,2194627,"TERMINAL",0,0,"e",,terminal_output +1812,2194796,"TERMINAL",0,0,"\r\n(Pdb) (1, 1, 921, 8, 64)\r\n",,terminal_output +1813,2202409,"TERMINAL",0,0,"n",,terminal_output +1814,2202599,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(531)__call__()\r\n-> decode = first_from(\r\n",,terminal_output +1815,2203224,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(532)__call__()\r\n-> decode,\r\n",,terminal_output +1816,2203593,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(533)__call__()\r\n-> self.decode,\r\n",,terminal_output +1817,2203806,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(534)__call__()\r\n-> error_msg=""""""No `decode` argument was provided to MultiHeadAttention\r\n",,terminal_output +1818,2204090,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(531)__call__()\r\n-> decode = 
first_from(\r\n",,terminal_output +1819,2204324,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(538)__call__()\r\n-> if decode:\r\n",,terminal_output +1820,2204512,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(583)__call__()\r\n-> self.dropout_rate > 0.0\r\n",,terminal_output +1821,2204741,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(601)__call__()\r\n-> deterministic = True\r\n",,terminal_output +1822,2204991,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(602)__call__()\r\n-> dropout_rng = None\r\n",,terminal_output +1823,2205209,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(605)__call__()\r\n-> x = self.attention_fn(\r\n",,terminal_output +1824,2206457,"TERMINAL",0,0,"s",,terminal_output +1825,2206526,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(606)__call__()\r\n-> query,\r\n",,terminal_output +1826,2209461,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(607)__call__()\r\n-> key,\r\n",,terminal_output +1827,2209947,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(608)__call__()\r\n-> value,\r\n",,terminal_output +1828,2210216,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(609)__call__()\r\n-> mask=mask,\r\n",,terminal_output +1829,2210731,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(610)__call__()\r\n-> dropout_rng=dropout_rng,\r\n",,terminal_output +1830,2211136,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(611)__call__()\r\n-> dropout_rate=self.dropout_rate,\r\n",,terminal_output +1831,2211564,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(612)__call__()\r\n-> broadcast_dropout=self.broadcast_dropout,\r\n",,terminal_output +1832,2212036,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(613)__call__()\r\n-> deterministic=deterministic,\r\n",,terminal_output +1833,2212499,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(614)__call__()\r\n-> dtype=self.dtype,\r\n",,terminal_output +1834,2212962,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(615)__call__()\r\n-> precision=self.precision,\r\n",,terminal_output +1835,2213480,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(616)__call__()\r\n-> module=self if sow_weights else None,\r\n",,terminal_output +1836,2214217,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(605)__call__()\r\n-> x = self.attention_fn(\r\n",,terminal_output +1837,2215632,"TERMINAL",0,0,"\r\n(Pdb) --Call--\r\n> 
/fast/home/franz.srambical/jafar/utils/nn.py(293)attention_fn()\r\n-> def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\r\n",,terminal_output +1838,2218897,"utils/nn.py",0,0,"",python,tab +1839,2220462,"utils/nn.py",10974,0,"",python,selection_command +1840,2221129,"utils/nn.py",10950,0,"",python,selection_command +1841,2221478,"utils/nn.py",10459,0,"",python,selection_command +1842,2221928,"utils/nn.py",9514,0,"",python,selection_command +1843,2222422,"utils/nn.py",9477,0,"",python,selection_command +1844,2222676,"utils/nn.py",9430,0,"",python,selection_command +1845,2222701,"utils/nn.py",9348,0,"",python,selection_command +1846,2222737,"utils/nn.py",9331,0,"",python,selection_command +1847,2222766,"utils/nn.py",9276,0,"",python,selection_command +1848,2222802,"utils/nn.py",9255,0,"",python,selection_command +1849,2222835,"utils/nn.py",9238,0,"",python,selection_command +1850,2222869,"utils/nn.py",9187,0,"",python,selection_command +1851,2222902,"utils/nn.py",9160,0,"",python,selection_command +1852,2223178,"utils/nn.py",9143,0,"",python,selection_command +1853,2223326,"utils/nn.py",9093,0,"",python,selection_command +1854,2223478,"utils/nn.py",9055,0,"",python,selection_command +1855,2225859,"TERMINAL",0,0,"q",,terminal_output +1856,2226123,"TERMINAL",0,0,"u",,terminal_output +1857,2226166,"TERMINAL",0,0,"e",,terminal_output +1858,2226234,"TERMINAL",0,0,"r",,terminal_output +1859,2226322,"TERMINAL",0,0,".",,terminal_output +1860,2226595,"TERMINAL",0,0,"s",,terminal_output +1861,2227059,"TERMINAL",0,0," ",,terminal_output +1862,2227516,"TERMINAL",0,0," ",,terminal_output +1863,2227689,"TERMINAL",0,0,"y",,terminal_output +1864,2228516,"TERMINAL",0,0,"     ",,terminal_output +1865,2228820,"TERMINAL",0,0,"n",,terminal_output +1866,2229007,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(295)attention_fn()\r\n-> implementation = ""cudnn"" if use_flash_attention else None\r\n",,terminal_output +1867,2229724,"TERMINAL",0,0,"u",,terminal_output +1868,2230043,"TERMINAL",0,0," ",,terminal_output +1869,2230232,"TERMINAL",0,0,"q",,terminal_output +1870,2230325,"TERMINAL",0,0,"u",,terminal_output +1871,2230426,"TERMINAL",0,0,"e",,terminal_output +1872,2230524,"TERMINAL",0,0,"ry",,terminal_output +1873,2230959,"TERMINAL",0,0,".",,terminal_output +1874,2231044,"TERMINAL",0,0,".",,terminal_output +1875,2231326,"TERMINAL",0,0,"sh",,terminal_output +1876,2231547,"TERMINAL",0,0," ",,terminal_output +1877,2231662,"TERMINAL",0,0," ",,terminal_output +1878,2231791,"TERMINAL",0,0," ",,terminal_output +1879,2231898,"TERMINAL",0,0,"s",,terminal_output +1880,2231957,"TERMINAL",0,0,"h",,terminal_output +1881,2232044,"TERMINAL",0,0,"a",,terminal_output +1882,2232132,"TERMINAL",0,0,"p",,terminal_output +1883,2232225,"TERMINAL",0,0,"e",,terminal_output +1884,2232328,"TERMINAL",0,0,"\r\n(Pdb) (1, 1, 921, 8, 64)\r\n",,terminal_output +1885,2232741,"TERMINAL",0,0,"k",,terminal_output +1886,2232829,"TERMINAL",0,0,"e",,terminal_output +1887,2232924,"TERMINAL",0,0,"y",,terminal_output +1888,2233098,"TERMINAL",0,0,".",,terminal_output +1889,2233337,"TERMINAL",0,0,"sh",,terminal_output +1890,2233409,"TERMINAL",0,0,"a",,terminal_output +1891,2233494,"TERMINAL",0,0,"p",,terminal_output +1892,2233610,"TERMINAL",0,0,"e",,terminal_output +1893,2233727,"TERMINAL",0,0,"\r\n(Pdb) (1, 1, 921, 8, 64)\r\n",,terminal_output +1894,2234479,"TERMINAL",0,0,"v",,terminal_output +1895,2234563,"TERMINAL",0,0,"a",,terminal_output +1896,2234623,"TERMINAL",0,0,"l",,terminal_output 
+1897,2234692,"TERMINAL",0,0,"u",,terminal_output +1898,2234775,"TERMINAL",0,0,"e",,terminal_output +1899,2234897,"TERMINAL",0,0,".",,terminal_output +1900,2235045,"TERMINAL",0,0,"s",,terminal_output +1901,2235098,"TERMINAL",0,0,"h",,terminal_output +1902,2235236,"TERMINAL",0,0,"a",,terminal_output +1903,2235340,"TERMINAL",0,0,"p",,terminal_output +1904,2235440,"TERMINAL",0,0,"e",,terminal_output +1905,2235561,"TERMINAL",0,0,"\r\n(Pdb) (1, 1, 921, 8, 64)\r\n",,terminal_output +1906,2236593,"utils/nn.py",9059,0,"",python,selection_command +1907,2236735,"utils/nn.py",9060,0,"",python,selection_command +1908,2236872,"utils/nn.py",9061,0,"",python,selection_command +1909,2237092,"utils/nn.py",9063,0,"",python,selection_command +1910,2238045,"utils/nn.py",9063,13,"",python,content +1911,2238921,"utils/nn.py",9063,0,"1",python,content +1912,2238921,"utils/nn.py",9064,0,"",python,selection_keyboard +1913,2239047,"utils/nn.py",9064,0,",",python,content +1914,2239047,"utils/nn.py",9065,0,"",python,selection_keyboard +1915,2239195,"utils/nn.py",9065,0," ",python,content +1916,2239196,"utils/nn.py",9066,0,"",python,selection_keyboard +1917,2239580,"utils/nn.py",9066,0,"9",python,content +1918,2239580,"utils/nn.py",9067,0,"",python,selection_keyboard +1919,2239748,"utils/nn.py",9067,0,"1",python,content +1920,2239748,"utils/nn.py",9068,0,"",python,selection_keyboard +1921,2240077,"utils/nn.py",9067,1,"",python,content +1922,2240155,"utils/nn.py",9067,0,"2",python,content +1923,2240156,"utils/nn.py",9068,0,"",python,selection_keyboard +1924,2240225,"utils/nn.py",9068,0,"1",python,content +1925,2240225,"utils/nn.py",9069,0,"",python,selection_keyboard +1926,2240271,"utils/nn.py",9069,0,",",python,content +1927,2240271,"utils/nn.py",9070,0,"",python,selection_keyboard +1928,2240454,"utils/nn.py",9070,0," ",python,content +1929,2240454,"utils/nn.py",9071,0,"",python,selection_keyboard +1930,2241547,"utils/nn.py",9071,0,"8",python,content +1931,2241548,"utils/nn.py",9072,0,"",python,selection_keyboard +1932,2241774,"utils/nn.py",9072,0,",",python,content +1933,2241774,"utils/nn.py",9073,0,"",python,selection_keyboard +1934,2241891,"utils/nn.py",9073,0," ",python,content +1935,2241891,"utils/nn.py",9074,0,"",python,selection_keyboard +1936,2242723,"utils/nn.py",9074,0,"6",python,content +1937,2242723,"utils/nn.py",9075,0,"",python,selection_keyboard +1938,2242809,"utils/nn.py",9075,0,"4",python,content +1939,2242809,"utils/nn.py",9076,0,"",python,selection_keyboard +1940,2243243,"utils/nn.py",9076,0,")",python,content +1941,2243243,"utils/nn.py",9077,0,"",python,selection_keyboard +1942,2243453,"utils/nn.py",9076,0,"",python,selection_command +1943,2286433,"TERMINAL",0,0,"n",,terminal_output +1944,2286800,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(297)attention_fn()\r\n-> def _rearrange(x):\r\n",,terminal_output +1945,2287108,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(300)attention_fn()\r\n-> def _pad(x):\r\n",,terminal_output +1946,2287475,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(303)attention_fn()\r\n-> def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\r\n",,terminal_output +1947,2287641,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(310)attention_fn()\r\n-> original_shape = query.shape\r\n",,terminal_output +1948,2287790,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(311)attention_fn()\r\n-> original_seq_len = 
query.shape[-3]\r\n",,terminal_output +1949,2287978,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(314)attention_fn()\r\n-> target_seq_len = ((original_seq_len + 3) // 4) * 4\r\n",,terminal_output +1950,2288092,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(315)attention_fn()\r\n-> pad_size = target_seq_len - original_seq_len\r\n",,terminal_output +1951,2288241,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(317)attention_fn()\r\n-> query_4d = _pad(_rearrange(query))\r\n",,terminal_output +1952,2288543,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(318)attention_fn()\r\n-> key_4d = _pad(_rearrange(key))\r\n",,terminal_output +1953,2288709,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(319)attention_fn()\r\n-> value_4d = _pad(_rearrange(value))\r\n",,terminal_output +1954,2289007,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(321)attention_fn()\r\n-> attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\r\n",,terminal_output +1955,2289161,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(322)attention_fn()\r\n-> attention_mask = attention_mask.at[original_seq_len:, :].set(False)\r\n",,terminal_output +1956,2289298,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(323)attention_fn()\r\n-> attention_mask = attention_mask.at[:, original_seq_len:].set(False)\r\n",,terminal_output +1957,2289793,"TERMINAL",0,0,"\r\n",,terminal_output +1958,2289892,"TERMINAL",0,0,"(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(326)attention_fn()\r\n-> _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\r\n",,terminal_output +1959,2289958,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(325)attention_fn()\r\n-> mask_4d = (\r\n",,terminal_output +1960,2290515,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(328)attention_fn()\r\n-> mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\r\n",,terminal_output +1961,2290718,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(330)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +1962,2290879,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(333)attention_fn()\r\n-> output_4d = jax.nn.dot_product_attention(\r\n",,terminal_output +1963,2291041,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(334)attention_fn()\r\n-> query=query_4d,\r\n",,terminal_output +1964,2291199,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(335)attention_fn()\r\n-> key=key_4d,\r\n",,terminal_output +1965,2291636,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(336)attention_fn()\r\n-> value=value_4d,\r\n",,terminal_output +1966,2291885,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(337)attention_fn()\r\n-> bias=bias_4d,\r\n",,terminal_output +1967,2292133,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(338)attention_fn()\r\n-> mask=mask_4d,\r\n",,terminal_output +1968,2292494,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(339)attention_fn()\r\n-> implementation=implementation,\r\n",,terminal_output +1969,2292735,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(340)attention_fn()\r\n-> 
is_causal=is_causal,\r\n",,terminal_output +1970,2293171,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(333)attention_fn()\r\n-> output_4d = jax.nn.dot_product_attention(\r\n",,terminal_output +1971,2293381,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(342)attention_fn()\r\n-> return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\r\n",,terminal_output +1972,2293565,"TERMINAL",0,0,"\r\n(Pdb) --Return--\r\n> /fast/home/franz.srambical/jafar/utils/nn.py(342)attention_fn()->Traced\r\n-> return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\r\n",,terminal_output +1973,2294466,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(619)__call__()\r\n-> out = self.out(x)\r\n",,terminal_output +1974,2297350,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(620)__call__()\r\n-> return out\r\n",,terminal_output +1975,2297933,"TERMINAL",0,0,"\r\n(Pdb) --Return--\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(620)__call__()->Traced\r\n-> return out\r\n",,terminal_output +1976,2298319,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(129)__call__()\r\n-> x = x + z\r\n",,terminal_output +1977,2301695,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +1978,2301696,"/fast/home/franz.srambical/jafar/utils/nn.py",3964,0,"",python,selection_command +1979,2305367,"utils/nn.py",0,0,"",python,tab +1980,2309556,"utils/nn.py",3878,0,"",python,selection_command +1981,2316858,"utils/nn.py",3903,0,"",python,selection_command +1982,2317007,"utils/nn.py",3938,0,"",python,selection_command +1983,2317158,"utils/nn.py",3976,0,"",python,selection_command +1984,2317373,"utils/nn.py",3938,0,"",python,selection_command +1985,2317574,"utils/nn.py",3903,0,"",python,selection_command +1986,2317719,"utils/nn.py",3878,0,"",python,selection_command +1987,2317854,"utils/nn.py",3838,0,"",python,selection_command +1988,2318153,"utils/nn.py",3878,0,"",python,selection_command +1989,2319128,"utils/nn.py",3866,24," breakpoint()",python,selection_command +1990,2319305,"utils/nn.py",3826,64," if x.shape == (1, 1, 921, 512):\n breakpoint()",python,selection_command +1991,2321668,"utils/nn.py",3826,65,"",python,content +1992,2321685,"utils/nn.py",3834,0,"",python,selection_command +1993,2321873,"utils/nn.py",3869,0,"",python,selection_command +1994,2322121,"utils/nn.py",3907,0,"",python,selection_command +1995,2322150,"utils/nn.py",3917,0,"",python,selection_command +1996,2322183,"utils/nn.py",3926,0,"",python,selection_command +1997,2322217,"utils/nn.py",3963,0,"",python,selection_command +1998,2322249,"utils/nn.py",3992,0,"",python,selection_command +1999,2322283,"utils/nn.py",4029,0,"",python,selection_command +2000,2322603,"utils/nn.py",4063,0,"",python,selection_command +2001,2322940,"utils/nn.py",4029,0,"",python,selection_command +2002,2324742,"utils/nn.py",4054,0,"\n if x.shape == (1, 1, 921, 512):\n breakpoint()",python,content +2003,2324745,"utils/nn.py",4063,0,"",python,selection_command +2004,2326334,"TERMINAL",0,0,"q",,terminal_output +2005,2326502,"TERMINAL",0,0,"uit",,terminal_output +2006,2326769,"TERMINAL",0,0,"()",,terminal_output +2007,2327003,"TERMINAL",0,0,"\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 76, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 232, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 129, in __call__\r\n # --- Temporal attention ---\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 129, in __call__\r\n # --- Temporal attention ---\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 90, in trace_dispatch\r\n return self.dispatch_line(frame)\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 115, in dispatch_line\r\n if self.quitting: raise BdbQuit\r\nbdb.BdbQuit\r\n",,terminal_output +2008,2327912,"TERMINAL",0,0,"(Pdb) ",,terminal_output +2009,2328035,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +2010,2328938,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +2011,2329037,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +2012,2329540,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +2013,2341296,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +2014,2349566,"TERMINAL",0,0,"2025-07-27 09:57:49.367350: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2015,2351122,"TERMINAL",0,0,"2025-07-27 09:57:50.924455: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2016,2354880,"TERMINAL",0,0,"2025-07-27 09:57:54.683002: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2017,2355628,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/models/dynamics_causal.py(76)__call__()\r\n-> logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n",,terminal_output +2018,2371522,"utils/nn.py",4066,0,"",python,selection_command +2019,2371764,"utils/nn.py",4067,0,"",python,selection_command +2020,2371796,"utils/nn.py",4068,0,"",python,selection_command +2021,2371822,"utils/nn.py",4074,0,"",python,selection_command +2022,2371854,"utils/nn.py",4077,0,"",python,selection_command +2023,2372033,"utils/nn.py",4078,0,"",python,selection_command +2024,2372206,"utils/nn.py",4079,0,"",python,selection_command +2025,2372527,"utils/nn.py",4081,0,"",python,selection_command +2026,2373241,"utils/nn.py",4081,1,"",python,content +2027,2374101,"utils/nn.py",4081,0,"9",python,content +2028,2374102,"utils/nn.py",4082,0,"",python,selection_keyboard +2029,2374136,"utils/nn.py",4082,0,"2",python,content +2030,2374136,"utils/nn.py",4083,0,"",python,selection_keyboard +2031,2374197,"utils/nn.py",4083,0,"1",python,content +2032,2374198,"utils/nn.py",4084,0,"",python,selection_keyboard +2033,2374585,"utils/nn.py",4083,0,"",python,selection_command +2034,2374921,"utils/nn.py",4084,0,"",python,selection_command +2035,2375100,"utils/nn.py",4086,0,"",python,selection_command +2036,2375355,"utils/nn.py",4086,3,"",python,content +2037,2375585,"utils/nn.py",4086,0,"1",python,content +2038,2375585,"utils/nn.py",4087,0,"",python,selection_keyboard +2039,2375767,"utils/nn.py",4086,0,"",python,selection_command +2040,2378294,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=13995.9 task 0: running\r\n",,terminal_output +2041,2378434,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=13995.9\r\nsrun: forcing job termination\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\n[2025-07-27T09:58:18.240] error: *** STEP 13995.9 ON hai003 CANCELLED AT 2025-07-27T09:58:18 DUE to SIGNAL Killed ***\r\n",,terminal_output +2042,2378581,"TERMINAL",0,0,"(Pdb) ",,terminal_output +2043,2378823,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +2044,2379152,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output 
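The pdb walkthrough recorded above steps through the padding wrapper in utils/nn.py's attention_fn: the query shape printed in the debugger is (1, 1, 921, 8, 64), the sequence length (921) is rounded up to a multiple of 4, the padded positions are masked out, jax.nn.dot_product_attention is called on the padded 4-D tensors, and the output is sliced back to the original shape. A minimal sketch of that pattern follows; the bodies of _rearrange and _pad are not visible in the trace, so their implementations here are assumptions.

import jax
import jax.numpy as jnp

def padded_attention(query, key, value, implementation=None, is_causal=False):
    # e.g. query.shape == (1, 1, 921, 8, 64), as inspected in the pdb session
    original_shape = query.shape
    original_seq_len = query.shape[-3]                   # 921
    target_seq_len = ((original_seq_len + 3) // 4) * 4   # 924
    pad_size = target_seq_len - original_seq_len

    def _rearrange(x):
        # assumed: collapse leading axes so the array is 4-D (batch, seq, heads, head_dim)
        return x.reshape(-1, *x.shape[-3:])

    def _pad(x):
        # assumed: zero-pad along the sequence axis up to target_seq_len
        return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))

    query_4d = _pad(_rearrange(query))
    key_4d = _pad(_rearrange(key))
    value_4d = _pad(_rearrange(value))

    # Mask out the padded positions on both the query and key axes,
    # mirroring the .at[original_seq_len:, :].set(False) steps in the trace.
    attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)
    attention_mask = attention_mask.at[original_seq_len:, :].set(False)
    attention_mask = attention_mask.at[:, original_seq_len:].set(False)
    mask_4d = attention_mask[jnp.newaxis, jnp.newaxis, :, :]  # (1, 1, seq, seq)

    output_4d = jax.nn.dot_product_attention(
        query=query_4d,
        key=key_4d,
        value=value_4d,
        mask=mask_4d,
        implementation=implementation,
        is_causal=is_causal,
    )
    # Drop the padded rows and restore the original leading axes.
    return output_4d[..., :original_seq_len, :, :].reshape(original_shape)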
+2045,2379217,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +2046,2379455,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +2047,2390775,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +2048,2418628,"TERMINAL",0,0,"2025-07-27 09:58:58.431014: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2049,2420281,"TERMINAL",0,0,"2025-07-27 09:58:59.929754: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2050,2423762,"TERMINAL",0,0,"2025-07-27 09:59:03.521912: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2051,2423967,"sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\nimport optax\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nimport orbax.checkpoint as ocp\nfrom PIL import Image, ImageDraw\nimport tyro\nfrom flax import nnx\n\nfrom jasmine import Jasmine\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_co_train: bool = False\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n dynamics_type: str = ""maskgit""\n\n\nargs = tyro.cli(Args)\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Load Dynamics model checkpoint ---\n rngs = nnx.Rngs(rng)\n jasmine = Jasmine(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n 
latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=args.lam_co_train,\n # Dynamics\n dynamics_type=args.dynamics_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=True,\n rngs=rngs,\n )\n\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n dummy_tx = optax.adamw(\n learning_rate=optax.linear_schedule(0.0001, 0.0001, 10000),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n dummy_optimizer = nnx.Optimizer(jasmine, dummy_tx)\n\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(dummy_optimizer, restored_optimizer_state)\n\n # --- Define sampling function ---\n def _sampling_fn(model: Jasmine, batch: dict) -> jax.Array:\n """"""Runs Jasmine.sample with pre-defined generation hyper-parameters.""""""\n if args.dynamics_type == ""maskgit"":\n return model.sample_maskgit(\n batch,\n args.seq_len,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n )\n else:\n return model.sample_causal(\n batch,\n args.seq_len,\n args.temperature,\n args.sample_argmax,\n )\n\n # --- Define autoregressive sampling loop ---\n # @nnx.jit\n def _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = _sampling_fn(jasmine, batch)\n return generated_vid\n\n # --- Get video + latent actions ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n # We don't use workers in order to avoid grain shutdown issues (https://github.com/google/grain/issues/398)\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n dataloader = iter(dataloader)\n video_batch = next(dataloader)\n video_batch = video_batch.astype(args.dtype) / 255.0\n # Get latent actions for all videos in the batch\n batch = dict(videos=video_batch)\n action_batch = jasmine.vq_encode(batch, training=False)\n action_batch = 
jnp.asarray(action_batch).reshape(\n video_batch.shape[0], args.seq_len - 1, 1\n )\n\n # --- Sample + evaluate video ---\n # The autoregressive cache needs to be initialized with the shape of the tokenized inputs, not the raw video.\n # The number of spatial tokens is derived from the image dimensions and patch size.\n # It appears the 90x160 image is padded to 92x160, and a CLS token is added.\n # (92 // args.patch_size) * (160 // args.patch_size) + 1 = 23 * 40 + 1 = 921\n num_patches = ((args.image_height + 3) // 4 * 4 // args.patch_size) * (\n args.image_width // args.patch_size\n ) + 1\n # Shape for spatial attention: (batch, time, patches, num_heads, head_dim)\n spatial_token_shape = (\n args.batch_size,\n 1,\n num_patches,\n args.dyna_dim,\n )\n # Shape for temporal attention: (batch, patches, time, num_heads, head_dim)\n temporal_token_shape = (\n args.batch_size,\n num_patches,\n 1,\n args.dyna_dim,\n )\n if args.dynamics_type == ""causal"":\n transformer_blocks = jasmine.dynamics.transformer.blocks\n for block in transformer_blocks:\n block.spatial_attention.init_cache(spatial_token_shape, dtype=args.dtype)\n block.temporal_attention.init_cache(temporal_token_shape, dtype=args.dtype)\n vid = _autoreg_sample(rng, video_batch, action_batch)\n gt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\n recon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\n ssim = jnp.asarray(\n pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :])\n ).mean()\n print(f""SSIM: {ssim}"")\n\n # --- Construct video ---\n true_videos = (video_batch * 255).astype(np.uint8)\n pred_videos = (vid * 255).astype(np.uint8)\n video_comparison = np.zeros((2, *vid.shape), dtype=np.uint8)\n video_comparison[0] = true_videos[:, : args.seq_len]\n video_comparison[1] = pred_videos\n frames = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n # --- Save video ---\n imgs = [Image.fromarray(img) for img in frames]\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(action_batch.shape[0]):\n action = action_batch[row, t, 0]\n y_offset = row * video_batch.shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\n imgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n )\n",python,tab +2052,2424449,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/models/dynamics_causal.py(76)__call__()\r\n-> logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n",,terminal_output +2053,2425488,"sample.py",6001,0,"",python,selection_command +2054,2425742,"sample.py",6055,0,"",python,selection_command +2055,2425772,"sample.py",6094,0,"",python,selection_command +2056,2425804,"sample.py",6096,0,"",python,selection_command +2057,2425832,"sample.py",6112,0,"",python,selection_command +2058,2425865,"sample.py",6150,0,"",python,selection_command +2059,2425895,"sample.py",6264,0,"",python,selection_command +2060,2433407,"utils/nn.py",0,0,"",python,tab +2061,2434439,"utils/nn.py",4052,0,"",python,selection_command +2062,2434695,"utils/nn.py",4015,0,"",python,selection_command +2063,2434710,"utils/nn.py",3982,0,"",python,selection_command +2064,2434746,"utils/nn.py",3949,0,"",python,selection_command +2065,2434775,"utils/nn.py",3917,0,"",python,selection_command +2066,2434809,"utils/nn.py",3915,0,"",python,selection_command +2067,2434913,"utils/nn.py",3892,0,"",python,selection_command 
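The sample.py content recorded above derives the number of spatial tokens from the image dimensions and patch size, which is where the 921 seen throughout the pdb session comes from, and then uses that count to size the autoregressive caches. A short worked restatement of that arithmetic, using the defaults from the Args dataclass shown in the recording:

# 90x160 frame, patch_size=4: height is padded up to a multiple of 4 (92),
# patchified, and one CLS token is appended.
image_height, image_width, patch_size = 90, 160, 4

padded_height = (image_height + 3) // 4 * 4                                  # 92
num_patches = (padded_height // patch_size) * (image_width // patch_size) + 1
assert num_patches == 23 * 40 + 1 == 921

# Cache shapes passed to init_cache in sample.py: (batch, time, patches, dim)
# for spatial attention and (batch, patches, time, dim) for temporal attention.
batch_size, dyna_dim = 1, 512
spatial_token_shape = (batch_size, 1, num_patches, dyna_dim)    # (1, 1, 921, 512)
temporal_token_shape = (batch_size, num_patches, 1, dyna_dim)   # (1, 921, 1, 512)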
+2068,2435404,"utils/nn.py",3861,37," z = self.spatial_attention(z)",python,selection_command +2069,2495684,"utils/nn.py",3892,0,"",python,selection_command +2070,2496199,"utils/nn.py",1818,0,"",python,selection_command +2071,2496439,"utils/nn.py",1836,0,"",python,selection_command +2072,2496606,"utils/nn.py",1838,0,"",python,selection_command +2073,2496752,"utils/nn.py",1841,0,"",python,selection_command +2074,2496828,"utils/nn.py",1842,0,"",python,selection_command +2075,2497379,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +2076,2497379,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9464,0,"",python,selection_command +2077,2498042,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,33,"class MultiHeadAttention(Module):",python,selection_command +2078,2498117,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,61,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n",python,selection_command +2079,2498383,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,80,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n",python,selection_command +2080,2498405,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,129,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n",python,selection_command +2081,2498436,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,539,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n",python,selection_command +2082,2498469,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,878,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n",python,selection_command +2083,2498524,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,2871,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... 
)\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n",python,selection_command +2084,2498538,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,3038,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n",python,selection_command +2085,2498573,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,5094,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n",python,selection_command +2086,2498608,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,5296,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n",python,selection_command +2087,2498651,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,5353,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n",python,selection_command +2088,2498938,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,6058,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n",python,selection_command +2089,2499068,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,6711,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... 
decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n",python,selection_command +2090,2499437,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,7264,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n",python,selection_command +2091,2499588,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,7424,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n",python,selection_command +2092,2499885,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,7791,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. 
Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, 
rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n",python,selection_command +2093,2500016,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,7944,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n",python,selection_command +2094,2500338,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,8119,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... 
jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n",python,selection_command +2095,2500451,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,9411,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n",python,selection_command +2096,2500643,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,9537,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. 
`self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n",python,selection_command +2097,2500952,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,9918,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... 
jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. 
'\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n",python,selection_command +2098,2501069,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,10115,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. 
'\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n",python,selection_command +2099,2501354,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,10211,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. 
'\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n",python,selection_command +2100,2501357,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,10523,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. 
'\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n",python,selection_command +2101,2501481,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,10861,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. 
'\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n",python,selection_command +2102,2501925,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,12487,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. 
Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, 
rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. 
'\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] += 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n",python,selection_command +2103,2502068,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,13203,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n",python,selection_command +2104,2502673,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,13647,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. 
Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, 
rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. 
'\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n",python,selection_command +2105,2502777,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,13945,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n\n def init_cache(self, input_shape: Shape, dtype: Dtype = jnp.float32):\n """"""Initializes cache for fast autoregressive decoding. When\n ``decode=True``, this method must be called first before performing\n forward inference. When in decode mode, only one token must be passed\n at a time.\n",python,selection_command +2106,2503772,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,13647,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n",python,selection_command +2107,2504027,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,13945,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n\n def init_cache(self, input_shape: Shape, dtype: Dtype = jnp.float32):\n """"""Initializes cache for fast autoregressive decoding. When\n ``decode=True``, this method must be called first before performing\n forward inference. When in decode mode, only one token must be passed\n at a time.\n",python,selection_command +2108,2504196,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,13966,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n\n def init_cache(self, input_shape: Shape, dtype: Dtype = jnp.float32):\n """"""Initializes cache for fast autoregressive decoding. When\n ``decode=True``, this method must be called first before performing\n forward inference. When in decode mode, only one token must be passed\n at a time.\n\n Example usage::\n",python,selection_command +2109,2504319,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,14839,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n\n def init_cache(self, input_shape: Shape, dtype: Dtype = jnp.float32):\n """"""Initializes cache for fast autoregressive decoding. When\n ``decode=True``, this method must be called first before performing\n forward inference. When in decode mode, only one token must be passed\n at a time.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax.numpy as jnp\n ...\n >>> batch_size = 5\n >>> embed_dim = 3\n >>> x = jnp.ones((batch_size, 1, embed_dim)) # single token\n ...\n >>> model_nnx = nnx.MultiHeadAttention(\n ... num_heads=2,\n ... in_features=3,\n ... qkv_features=6,\n ... out_features=6,\n ... decode=True,\n ... rngs=nnx.Rngs(42),\n ... )\n ...\n >>> # out_nnx = model_nnx(x) <-- throws an error because cache isn't initialized\n ...\n >>> model_nnx.init_cache(x.shape)\n >>> out_nnx = model_nnx(x)\n """"""\n cache_shape = (*input_shape[:-1], self.num_heads, self.head_dim)\n self.cached_key = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cached_value = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cache_index = nnx.Cache(jnp.array(0, dtype=jnp.int32))\n",python,selection_command +2110,2504831,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,14873,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n\n def init_cache(self, input_shape: Shape, dtype: Dtype = jnp.float32):\n """"""Initializes cache for fast autoregressive decoding. When\n ``decode=True``, this method must be called first before performing\n forward inference. When in decode mode, only one token must be passed\n at a time.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax.numpy as jnp\n ...\n >>> batch_size = 5\n >>> embed_dim = 3\n >>> x = jnp.ones((batch_size, 1, embed_dim)) # single token\n ...\n >>> model_nnx = nnx.MultiHeadAttention(\n ... num_heads=2,\n ... in_features=3,\n ... qkv_features=6,\n ... out_features=6,\n ... decode=True,\n ... rngs=nnx.Rngs(42),\n ... )\n ...\n >>> # out_nnx = model_nnx(x) <-- throws an error because cache isn't initialized\n ...\n >>> model_nnx.init_cache(x.shape)\n >>> out_nnx = model_nnx(x)\n """"""\n cache_shape = (*input_shape[:-1], self.num_heads, self.head_dim)\n self.cached_key = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cached_value = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cache_index = nnx.Cache(jnp.array(0, dtype=jnp.int32))\n\n\n# mask-making utility functions\n",python,selection_command +2111,2508485,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,15101,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n\n def init_cache(self, input_shape: Shape, dtype: Dtype = jnp.float32):\n """"""Initializes cache for fast autoregressive decoding. When\n ``decode=True``, this method must be called first before performing\n forward inference. When in decode mode, only one token must be passed\n at a time.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax.numpy as jnp\n ...\n >>> batch_size = 5\n >>> embed_dim = 3\n >>> x = jnp.ones((batch_size, 1, embed_dim)) # single token\n ...\n >>> model_nnx = nnx.MultiHeadAttention(\n ... num_heads=2,\n ... in_features=3,\n ... qkv_features=6,\n ... out_features=6,\n ... decode=True,\n ... rngs=nnx.Rngs(42),\n ... )\n ...\n >>> # out_nnx = model_nnx(x) <-- throws an error because cache isn't initialized\n ...\n >>> model_nnx.init_cache(x.shape)\n >>> out_nnx = model_nnx(x)\n """"""\n cache_shape = (*input_shape[:-1], self.num_heads, self.head_dim)\n self.cached_key = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cached_value = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cache_index = nnx.Cache(jnp.array(0, dtype=jnp.int32))\n\n\n# mask-making utility functions\n\n\ndef make_attention_mask(\n query_input: Array,\n key_input: Array,\n pairwise_fn: Callable[..., Any] = jnp.multiply,\n extra_batch_dims: int = 0,\n dtype: Dtype = jnp.float32,\n):\n """"""Mask-making helper for attention weights.\n",python,selection_command +2112,2509233,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,14874,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... 
)\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. 
'\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n\n def init_cache(self, input_shape: Shape, dtype: Dtype = jnp.float32):\n """"""Initializes cache for fast autoregressive decoding. When\n ``decode=True``, this method must be called first before performing\n forward inference. When in decode mode, only one token must be passed\n at a time.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax.numpy as jnp\n ...\n >>> batch_size = 5\n >>> embed_dim = 3\n >>> x = jnp.ones((batch_size, 1, embed_dim)) # single token\n ...\n >>> model_nnx = nnx.MultiHeadAttention(\n ... num_heads=2,\n ... in_features=3,\n ... qkv_features=6,\n ... out_features=6,\n ... decode=True,\n ... rngs=nnx.Rngs(42),\n ... )\n ...\n >>> # out_nnx = model_nnx(x) <-- throws an error because cache isn't initialized\n ...\n >>> model_nnx.init_cache(x.shape)\n >>> out_nnx = model_nnx(x)\n """"""\n cache_shape = (*input_shape[:-1], self.num_heads, self.head_dim)\n self.cached_key = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cached_value = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cache_index = nnx.Cache(jnp.array(0, dtype=jnp.int32))\n\n\n# mask-making utility functions\n\n",python,selection_command +2113,2509466,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,14840,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n\n def init_cache(self, input_shape: Shape, dtype: Dtype = jnp.float32):\n """"""Initializes cache for fast autoregressive decoding. When\n ``decode=True``, this method must be called first before performing\n forward inference. When in decode mode, only one token must be passed\n at a time.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax.numpy as jnp\n ...\n >>> batch_size = 5\n >>> embed_dim = 3\n >>> x = jnp.ones((batch_size, 1, embed_dim)) # single token\n ...\n >>> model_nnx = nnx.MultiHeadAttention(\n ... num_heads=2,\n ... in_features=3,\n ... qkv_features=6,\n ... out_features=6,\n ... decode=True,\n ... rngs=nnx.Rngs(42),\n ... 
)\n ...\n >>> # out_nnx = model_nnx(x) <-- throws an error because cache isn't initialized\n ...\n >>> model_nnx.init_cache(x.shape)\n >>> out_nnx = model_nnx(x)\n """"""\n cache_shape = (*input_shape[:-1], self.num_heads, self.head_dim)\n self.cached_key = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cached_value = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cache_index = nnx.Cache(jnp.array(0, dtype=jnp.int32))\n\n",python,selection_command +2114,2517333,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",24298,0,"",python,selection_command +2115,2517917,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",24332,0,"",python,selection_command +2116,2518335,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",24559,0,"",python,selection_command +2117,2518697,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",24331,0,"",python,selection_command +2118,2519056,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",24297,0,"",python,selection_command +2119,2519247,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23424,0,"",python,selection_command +2120,2519451,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23403,0,"",python,selection_command +2121,2519627,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23105,0,"",python,selection_command +2122,2519783,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22661,0,"",python,selection_command +2123,2519919,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21945,0,"",python,selection_command +2124,2520083,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20319,0,"",python,selection_command +2125,2520231,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19981,0,"",python,selection_command +2126,2520514,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19669,0,"",python,selection_command +2127,2520801,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19573,0,"",python,selection_command +2128,2520968,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19376,0,"",python,selection_command +2129,2521222,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18995,0,"",python,selection_command +2130,2521249,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18869,0,"",python,selection_command +2131,2521278,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17577,0,"",python,selection_command +2132,2521318,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17402,0,"",python,selection_command +2133,2521552,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17249,0,"",python,selection_command +2134,2521804,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16882,0,"",python,selection_command +2135,2521837,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16722,0,"",python,selection_command +2136,2521869,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16169,0,"",python,selection_command +2137,2521902,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15516,0,"",python,selection_command +2138,2522542,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16169,0,"",python,selection_command +2139,2524407,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15516,0,"",python,selection_command +2140,2524545,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14811,0,"",python,selection_command 
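The init_cache docstring captured in the selection above already gives the intended decode workflow; a minimal sketch of that flow, using the same toy sizes as the docstring (nothing here goes beyond what the docstring shows):

from flax import nnx
import jax.numpy as jnp

layer = nnx.MultiHeadAttention(
    num_heads=2, in_features=3, qkv_features=6, out_features=6,
    decode=True, rngs=nnx.Rngs(42),
)
x = jnp.ones((5, 1, 3))      # one token per step: (batch, 1, features)
layer.init_cache(x.shape)    # allocates cached_key / cached_value / cache_index
out = layer(x)               # each call advances cache_index by one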
+2141,2524704,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14754,0,"",python,selection_command +2142,2525011,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14552,0,"",python,selection_command +2143,2525367,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",12496,0,"",python,selection_command +2144,2525652,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",12329,0,"",python,selection_command +2145,2526162,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",10336,0,"",python,selection_command +2146,2526642,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9997,0,"",python,selection_command +2147,2527023,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9587,0,"",python,selection_command +2148,2527398,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9538,0,"",python,selection_command +2149,2527691,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9519,0,"",python,selection_command +2150,2527967,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9464,0,"",python,selection_command +2151,2528286,"utils/nn.py",0,0,"",python,tab +2152,2528286,"utils/nn.py",1842,0,"",python,selection_command +2153,2552745,"utils/nn.py",3674,0,"",python,selection_command +2154,2592979,"utils/nn.py",3725,0,"",python,selection_command +2155,2593217,"utils/nn.py",3761,0,"",python,selection_command +2156,2593244,"utils/nn.py",3797,0,"",python,selection_command +2157,2593276,"utils/nn.py",3830,0,"",python,selection_command +2158,2593312,"utils/nn.py",3865,0,"",python,selection_command +2159,2593342,"utils/nn.py",3903,0,"",python,selection_command +2160,2593375,"utils/nn.py",3917,0,"",python,selection_command +2161,2593407,"utils/nn.py",3922,0,"",python,selection_command +2162,2593439,"utils/nn.py",3959,0,"",python,selection_command +2163,2593476,"utils/nn.py",3988,0,"",python,selection_command +2164,2595148,"TERMINAL",0,0,"c",,terminal_output +2165,2595268,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(135)__call__()\r\n-> z = self.temporal_attention(z)\r\n",,terminal_output +2166,2597382,"TERMINAL",0,0,"z",,terminal_output +2167,2597495,"TERMINAL",0,0,".",,terminal_output +2168,2597613,"TERMINAL",0,0,"s",,terminal_output +2169,2597682,"TERMINAL",0,0,"h",,terminal_output +2170,2597797,"TERMINAL",0,0,"a",,terminal_output +2171,2597883,"TERMINAL",0,0,"p",,terminal_output +2172,2597985,"TERMINAL",0,0,"e",,terminal_output +2173,2598064,"TERMINAL",0,0,"\r\n(Pdb) (1, 921, 1, 512)\r\n",,terminal_output +2174,2600085,"utils/nn.py",4025,0,"",python,selection_command +2175,2600254,"utils/nn.py",4059,0,"",python,selection_command +2176,2602126,"utils/nn.py",4054,0,"\n ",python,content +2177,2602168,"utils/nn.py",4063,0,"#",python,content +2178,2602169,"utils/nn.py",4064,0,"",python,selection_keyboard +2179,2602235,"utils/nn.py",4064,0," ",python,content +2180,2602236,"utils/nn.py",4065,0,"",python,selection_keyboard +2181,2602792,"utils/nn.py",4065,0,"z",python,content +2182,2602792,"utils/nn.py",4066,0,"",python,selection_keyboard +2183,2603027,"utils/nn.py",4066,0,".",python,content +2184,2603027,"utils/nn.py",4067,0,"",python,selection_keyboard +2185,2603088,"utils/nn.py",4067,0,"s",python,content +2186,2603088,"utils/nn.py",4068,0,"",python,selection_keyboard +2187,2603240,"utils/nn.py",4068,0,"h",python,content +2188,2603240,"utils/nn.py",4069,0,"",python,selection_keyboard +2189,2603306,"utils/nn.py",4069,0,"a",python,content 
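The shape comment just added in utils/nn.py ((1, 921, 1, 512)) is consistent with the head split computed in the constructor selected above (head_dim = qkv_features // num_heads) and with the (1, 921, 1, 8, 64) projection shapes printed later in this session; num_heads = 8 is inferred from those printouts, not stated in utils/nn.py itself:

num_heads = 8                      # inferred from the (..., 8, 64) shapes printed below
qkv_features = 512                 # last dim of z at the breakpoint
head_dim = qkv_features // num_heads
assert head_dim == 64
# LinearGeneral(out_features=(num_heads, head_dim)) then maps
# (1, 921, 1, 512) -> (1, 921, 1, 8, 64) for each of query/key/value.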
+2190,2603307,"utils/nn.py",4070,0,"",python,selection_keyboard +2191,2603422,"utils/nn.py",4070,0,"p",python,content +2192,2603422,"utils/nn.py",4071,0,"",python,selection_keyboard +2193,2603470,"utils/nn.py",4071,0,"e",python,content +2194,2603471,"utils/nn.py",4072,0,"",python,selection_keyboard +2195,2604414,"utils/nn.py",4072,0," (1, 921, 1, 512)",python,content +2196,2604754,"utils/nn.py",4088,0,"",python,selection_command +2197,2606278,"utils/nn.py",4055,35,"",python,content +2198,2606294,"utils/nn.py",4063,0,"",python,selection_command +2199,2606439,"utils/nn.py",4103,0,"",python,selection_command +2200,2606587,"utils/nn.py",4128,0,"",python,selection_command +2201,2606861,"utils/nn.py",4103,0,"",python,selection_command +2202,2606993,"utils/nn.py",4119,0,"\n # z.shape (1, 921, 1, 512)",python,content +2203,2606996,"utils/nn.py",4128,0,"",python,selection_command +2204,2611559,"utils/nn.py",4163,0,"",python,selection_command +2205,2611731,"utils/nn.py",4165,0,"",python,selection_command +2206,2611873,"utils/nn.py",4167,0,"",python,selection_command +2207,2612012,"utils/nn.py",4171,0,"",python,selection_command +2208,2612109,"utils/nn.py",4172,0,"",python,selection_command +2209,2616596,"utils/nn.py",2589,0,"",python,selection_command +2210,2616996,"utils/nn.py",2608,0,"",python,selection_command +2211,2617153,"utils/nn.py",2610,0,"",python,selection_command +2212,2617317,"utils/nn.py",2613,0,"",python,selection_command +2213,2617476,"utils/nn.py",2614,0,"",python,selection_command +2214,2617843,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +2215,2617843,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9464,0,"",python,selection_command +2216,2619297,"TERMINAL",0,0,"n",,terminal_output +2217,2619658,"TERMINAL",0,0," ",,terminal_output +2218,2619748,"TERMINAL",0,0,"s",,terminal_output +2219,2620035,"TERMINAL",0,0,"\r\n(Pdb) --Call--\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(453)__call__()\r\n-> def __call__(\r\n",,terminal_output +2220,2620407,"TERMINAL",0,0,"n",,terminal_output +2221,2621141,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(498)__call__()\r\n-> if rngs is None:\r\n",,terminal_output +2222,2621766,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(499)__call__()\r\n-> rngs = self.rngs\r\n",,terminal_output +2223,2621918,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(501)__call__()\r\n-> if inputs_k is None:\r\n",,terminal_output +2224,2622012,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(502)__call__()\r\n-> if inputs_v is not None:\r\n",,terminal_output +2225,2622432,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(508)__call__()\r\n-> inputs_k = inputs_q\r\n",,terminal_output +2226,2622719,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(509)__call__()\r\n-> if inputs_v is None:\r\n\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(510)__call__()\r\n-> inputs_v = inputs_k\r\n",,terminal_output +2227,2622936,"TERMINAL",0,0,"\r\n(Pdb) > 
/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(512)__call__()\r\n-> if inputs_q.shape[-1] != self.in_features:\r\n",,terminal_output +2228,2623182,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(518)__call__()\r\n-> query = self.query(inputs_q)\r\n",,terminal_output +2229,2623740,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(519)__call__()\r\n-> key = self.key(inputs_k)\r\n",,terminal_output +2230,2623900,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(520)__call__()\r\n-> value = self.value(inputs_v)\r\n",,terminal_output +2231,2624406,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(522)__call__()\r\n-> if self.normalize_qk:\r\n",,terminal_output +2232,2625387,"TERMINAL",0,0,"q",,terminal_output +2233,2625534,"TERMINAL",0,0,"u",,terminal_output +2234,2625663,"TERMINAL",0,0,"er",,terminal_output +2235,2625903,"TERMINAL",0,0,"y",,terminal_output +2236,2626154,"TERMINAL",0,0,".",,terminal_output +2237,2626250,"TERMINAL",0,0,"s",,terminal_output +2238,2626401,"TERMINAL",0,0,"ha",,terminal_output +2239,2626528,"TERMINAL",0,0,"p",,terminal_output +2240,2626598,"TERMINAL",0,0,"e",,terminal_output +2241,2626737,"TERMINAL",0,0,"\r\n(Pdb) (1, 921, 1, 8, 64)\r\n",,terminal_output +2242,2627287,"TERMINAL",0,0,"k",,terminal_output +2243,2627451,"TERMINAL",0,0,"e",,terminal_output +2244,2627517,"TERMINAL",0,0,"y",,terminal_output +2245,2627790,"TERMINAL",0,0,".",,terminal_output +2246,2627905,"TERMINAL",0,0,"s",,terminal_output +2247,2628014,"TERMINAL",0,0,"h",,terminal_output +2248,2628072,"TERMINAL",0,0,"a",,terminal_output +2249,2628202,"TERMINAL",0,0,"p",,terminal_output +2250,2628270,"TERMINAL",0,0,"e",,terminal_output +2251,2628431,"TERMINAL",0,0,"\r\n(Pdb) (1, 921, 1, 8, 64)\r\n",,terminal_output +2252,2628737,"TERMINAL",0,0,"v",,terminal_output +2253,2628805,"TERMINAL",0,0,"a",,terminal_output +2254,2628870,"TERMINAL",0,0,"l",,terminal_output +2255,2628966,"TERMINAL",0,0,"u",,terminal_output +2256,2629069,"TERMINAL",0,0,"e",,terminal_output +2257,2629186,"TERMINAL",0,0,".",,terminal_output +2258,2629276,"TERMINAL",0,0,"s",,terminal_output +2259,2629380,"TERMINAL",0,0,"h",,terminal_output +2260,2629472,"TERMINAL",0,0,"a",,terminal_output +2261,2629587,"TERMINAL",0,0,"p",,terminal_output +2262,2629669,"TERMINAL",0,0,"e",,terminal_output +2263,2629783,"TERMINAL",0,0,"\r\n(Pdb) (1, 921, 1, 8, 64)\r\n",,terminal_output +2264,2638248,"TERMINAL",0,0,"n",,terminal_output +2265,2638620,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(531)__call__()\r\n-> decode = first_from(\r\n",,terminal_output +2266,2639211,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(532)__call__()\r\n-> decode,\r\n",,terminal_output +2267,2639399,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(533)__call__()\r\n-> self.decode,\r\n",,terminal_output +2268,2639551,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(534)__call__()\r\n-> error_msg=""""""No `decode` argument was provided to 
MultiHeadAttention\r\n",,terminal_output +2269,2639683,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(531)__call__()\r\n-> decode = first_from(\r\n",,terminal_output +2270,2639817,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(538)__call__()\r\n-> if decode:\r\n",,terminal_output +2271,2639967,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(540)__call__()\r\n-> self.cached_key is None\r\n",,terminal_output +2272,2640100,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(541)__call__()\r\n-> or self.cached_value is None\r\n",,terminal_output +2273,2640485,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(542)__call__()\r\n-> or self.cache_index is None\r\n",,terminal_output +2274,2640669,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(552)__call__()\r\n-> ) = self.cached_key.value.shape\r\n",,terminal_output +2275,2640835,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(547)__call__()\r\n-> (\r\n",,terminal_output +2276,2641186,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(548)__call__()\r\n-> *batch_dims,\r\n",,terminal_output +2277,2641497,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(549)__call__()\r\n-> max_length,\r\n",,terminal_output +2278,2641814,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(550)__call__()\r\n-> num_heads,\r\n",,terminal_output +2279,2642154,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(551)__call__()\r\n-> depth_per_head,\r\n",,terminal_output +2280,2642508,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(554)__call__()\r\n-> expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\r\n",,terminal_output +2281,2642994,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(555)__call__()\r\n-> if expected_shape != query.shape:\r\n",,terminal_output +2282,2643779,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(562)__call__()\r\n-> cur_index = self.cache_index[...]\r\n",,terminal_output +2283,2644207,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(563)__call__()\r\n-> zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\r\n",,terminal_output +2284,2644567,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(564)__call__()\r\n-> indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\r\n",,terminal_output +2285,2645006,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(565)__call__()\r\n-> key = lax.dynamic_update_slice(self.cached_key[...], key, 
indices)\r\n",,terminal_output +2286,2645896,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(566)__call__()\r\n-> value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\r\n",,terminal_output +2287,2646383,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(567)__call__()\r\n-> self.cached_key[...] = key\r\n",,terminal_output +2288,2647010,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(568)__call__()\r\n-> self.cached_value[...] = value\r\n",,terminal_output +2289,2651874,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(569)__call__()\r\n-> self.cache_index[...] += 1\r\n",,terminal_output +2290,2655750,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",11910,0,"",python,selection_command +2291,2656138,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",12428,0,"",python,selection_command +2292,2656842,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",12446,0,"",python,selection_command +2293,2657768,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",12466,0,"",python,selection_command +2294,2658122,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16732,0,"",python,selection_command +2295,2663992,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16748,0,"",python,selection_command +2296,2664147,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16784,0,"",python,selection_command +2297,2664475,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16802,0,"",python,selection_command +2298,2664652,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16838,0,"",python,selection_command +2299,2664809,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16855,0,"",python,selection_command +2300,2665127,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18723,0,"",python,selection_command +2301,2666031,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20067,0,"",python,selection_command +2302,2667203,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20359,0,"",python,selection_command +2303,2692273,"TERMINAL",0,0,"s",,terminal_output +2304,2692328,"TERMINAL",0,0,"e",,terminal_output +2305,2692458,"TERMINAL",0,0,"l",,terminal_output +2306,2692601,"TERMINAL",0,0,"f",,terminal_output +2307,2692736,"TERMINAL",0,0,".",,terminal_output +2308,2692926,"TERMINAL",0,0,"c",,terminal_output +2309,2693120,"TERMINAL",0,0,"a",,terminal_output +2310,2693239,"TERMINAL",0,0,"c",,terminal_output +2311,2693328,"TERMINAL",0,0,"h",,terminal_output +2312,2693441,"TERMINAL",0,0,"e",,terminal_output +2313,2693594,"TERMINAL",0,0,"d",,terminal_output +2314,2693840,"TERMINAL",0,0,"_",,terminal_output +2315,2694043,"TERMINAL",0,0,"k",,terminal_output +2316,2694165,"TERMINAL",0,0,"e",,terminal_output +2317,2694282,"TERMINAL",0,0,"y",,terminal_output +2318,2694478,"TERMINAL",0,0,".",,terminal_output +2319,2694671,"TERMINAL",0,0,"v",,terminal_output +2320,2694749,"TERMINAL",0,0,"a",,terminal_output +2321,2694990,"TERMINAL",0,0," ",,terminal_output +2322,2695495,"TERMINAL",0,0," ",,terminal_output +2323,2695852,"TERMINAL",0,0,"v",,terminal_output +2324,2695927,"TERMINAL",0,0,"a",,terminal_output +2325,2696295,"TERMINAL",0,0," ",,terminal_output +2326,2696425,"TERMINAL",0,0," 
",,terminal_output +2327,2698367,"TERMINAL",0,0,"v",,terminal_output +2328,2698434,"TERMINAL",0,0,"a",,terminal_output +2329,2698526,"TERMINAL",0,0,"l",,terminal_output +2330,2698624,"TERMINAL",0,0,"u",,terminal_output +2331,2698724,"TERMINAL",0,0,"e",,terminal_output +2332,2698850,"TERMINAL",0,0,".",,terminal_output +2333,2698949,"TERMINAL",0,0,"s",,terminal_output +2334,2699063,"TERMINAL",0,0,"h",,terminal_output +2335,2699133,"TERMINAL",0,0,"a",,terminal_output +2336,2699259,"TERMINAL",0,0,"p",,terminal_output +2337,2699316,"TERMINAL",0,0,"e",,terminal_output +2338,2699465,"TERMINAL",0,0,"\r\n(Pdb) (1, 921, 1, 8, 64)\r\n",,terminal_output +2339,2725126,"TERMINAL",0,0,"e",,terminal_output +2340,2725253,"TERMINAL",0,0,"p",,terminal_output +2341,2726008,"TERMINAL",0,0," ",,terminal_output +2342,2726103,"TERMINAL",0,0,"x",,terminal_output +2343,2726205,"TERMINAL",0,0,"p",,terminal_output +2344,2726305,"TERMINAL",0,0,"e",,terminal_output +2345,2726404,"TERMINAL",0,0,"c",,terminal_output +2346,2726638,"TERMINAL",0,0,"t",,terminal_output +2347,2726724,"TERMINAL",0,0,"e",,terminal_output +2348,2727077,"TERMINAL",0,0,"d",,terminal_output +2349,2727639,"TERMINAL",0,0,"_",,terminal_output +2350,2727991,"TERMINAL",0,0,"s",,terminal_output +2351,2728167,"TERMINAL",0,0,"ha",,terminal_output +2352,2728343,"TERMINAL",0,0,"pe",,terminal_output +2353,2729293,"TERMINAL",0,0,"\r\n(Pdb) (1, 921, 1, 8, 64)\r\n",,terminal_output +2354,2752717,"TERMINAL",0,0,"se",,terminal_output +2355,2752867,"TERMINAL",0,0,"l",,terminal_output +2356,2753068,"TERMINAL",0,0,"f",,terminal_output +2357,2753190,"TERMINAL",0,0,".",,terminal_output +2358,2753913,"TERMINAL",0,0,"c",,terminal_output +2359,2754050,"TERMINAL",0,0,"a",,terminal_output +2360,2754139,"TERMINAL",0,0,"c",,terminal_output +2361,2754256,"TERMINAL",0,0,"h",,terminal_output +2362,2754358,"TERMINAL",0,0,"e",,terminal_output +2363,2754615,"TERMINAL",0,0,"_",,terminal_output +2364,2754987,"TERMINAL",0,0,"ind",,terminal_output +2365,2755236,"TERMINAL",0,0,"e",,terminal_output +2366,2755311,"TERMINAL",0,0,"x",,terminal_output +2367,2755488,"TERMINAL",0,0,"[",,terminal_output +2368,2755842,"TERMINAL",0,0,"xx",,terminal_output +2369,2756196,"TERMINAL",0,0,"x]",,terminal_output +2370,2756551,"TERMINAL",0,0," ",,terminal_output +2371,2756902,"TERMINAL",0,0," ",,terminal_output +2372,2757082,"TERMINAL",0,0," ",,terminal_output +2373,2757156,"TERMINAL",0,0,"  ",,terminal_output +2374,2757676,"TERMINAL",0,0,".",,terminal_output +2375,2758130,"TERMINAL",0,0," ",,terminal_output +2376,2758312,"TERMINAL",0,0,"[",,terminal_output +2377,2758578,"TERMINAL",0,0,".",,terminal_output +2378,2758779,"TERMINAL",0,0,".",,terminal_output +2379,2758922,"TERMINAL",0,0,".",,terminal_output +2380,2759097,"TERMINAL",0,0,"]",,terminal_output +2381,2759330,"TERMINAL",0,0,"\r\n(Pdb) Tracedwith\r\n",,terminal_output +2382,2759500,"TERMINAL",0,0,"\r\n(Pdb) Tracedwith\r\n",,terminal_output +2383,2763168,"TERMINAL",0,0,"^P",,terminal_output +2384,2763451,"TERMINAL",0,0,".",,terminal_output +2385,2763564,"TERMINAL",0,0,"s",,terminal_output +2386,2763987,"TERMINAL",0,0," ",,terminal_output +2387,2764123,"TERMINAL",0,0," ",,terminal_output +2388,2764270,"TERMINAL",0,0,"  ",,terminal_output +2389,2764779,"TERMINAL",0,0,"sel",,terminal_output +2390,2764893,"TERMINAL",0,0,"f",,terminal_output +2391,2764980,"TERMINAL",0,0,".",,terminal_output +2392,2765101,"TERMINAL",0,0,"c",,terminal_output +2393,2765215,"TERMINAL",0,0,"a",,terminal_output +2394,2765284,"TERMINAL",0,0,"c",,terminal_output 
+2395,2765374,"TERMINAL",0,0,"h",,terminal_output +2396,2765500,"TERMINAL",0,0,"e",,terminal_output +2397,2765937,"TERMINAL",0,0," ",,terminal_output +2398,2766323,"TERMINAL",0,0,"e",,terminal_output +2399,2766518,"TERMINAL",0,0,"_",,terminal_output +2400,2766684,"TERMINAL",0,0,"i",,terminal_output +2401,2766768,"TERMINAL",0,0,"nd",,terminal_output +2402,2766935,"TERMINAL",0,0,"e",,terminal_output +2403,2767068,"TERMINAL",0,0,"x",,terminal_output +2404,2767134,"TERMINAL",0,0,".",,terminal_output +2405,2767282,"TERMINAL",0,0,"s",,terminal_output +2406,2767398,"TERMINAL",0,0,"h",,terminal_output +2407,2767462,"TERMINAL",0,0,"a",,terminal_output +2408,2767580,"TERMINAL",0,0,"p",,terminal_output +2409,2767669,"TERMINAL",0,0,"e",,terminal_output +2410,2767800,"TERMINAL",0,0,"\r\n(Pdb) ()\r\n",,terminal_output +2411,2768839,"TERMINAL",0,0,"^P",,terminal_output +2412,2769705,"TERMINAL",0,0,"  ",,terminal_output +2413,2770273,"TERMINAL",0,0,"^[[A",,terminal_output +2414,2771093,"TERMINAL",0,0," ",,terminal_output +2415,2771224,"TERMINAL",0,0," ",,terminal_output +2416,2771347,"TERMINAL",0,0,"  ",,terminal_output +2417,2771859,"TERMINAL",0,0,"s",,terminal_output +2418,2772154,"TERMINAL",0,0,"e",,terminal_output +2419,2772223,"TERMINAL",0,0,"l",,terminal_output +2420,2772371,"TERMINAL",0,0,"f",,terminal_output +2421,2772430,"TERMINAL",0,0,".",,terminal_output +2422,2772693,"TERMINAL",0,0,"c",,terminal_output +2423,2772794,"TERMINAL",0,0,"a",,terminal_output +2424,2772874,"TERMINAL",0,0,"c",,terminal_output +2425,2772990,"TERMINAL",0,0,"e",,terminal_output +2426,2773494,"TERMINAL",0,0," ",,terminal_output +2427,2773634,"TERMINAL",0,0,"h",,terminal_output +2428,2773703,"TERMINAL",0,0,"e",,terminal_output +2429,2773939,"TERMINAL",0,0,"_",,terminal_output +2430,2774141,"TERMINAL",0,0,"i",,terminal_output +2431,2774241,"TERMINAL",0,0,"nd",,terminal_output +2432,2774418,"TERMINAL",0,0,"e",,terminal_output +2433,2774592,"TERMINAL",0,0,"x",,terminal_output +2434,2774993,"TERMINAL",0,0,"[",,terminal_output +2435,2775277,"TERMINAL",0,0,".",,terminal_output +2436,2775445,"TERMINAL",0,0,".",,terminal_output +2437,2775727,"TERMINAL",0,0,".",,terminal_output +2438,2776030,"TERMINAL",0,0,"]",,terminal_output +2439,2776335,"TERMINAL",0,0,".",,terminal_output +2440,2776525,"TERMINAL",0,0,"s",,terminal_output +2441,2776642,"TERMINAL",0,0,"h",,terminal_output +2442,2776696,"TERMINAL",0,0,"a",,terminal_output +2443,2776817,"TERMINAL",0,0,"p",,terminal_output +2444,2776900,"TERMINAL",0,0,"e",,terminal_output +2445,2777004,"TERMINAL",0,0,"\r\n(Pdb) ()\r\n",,terminal_output +2446,2877859,"TERMINAL",0,0,"k",,terminal_output +2447,2877960,"TERMINAL",0,0,"e",,terminal_output +2448,2878118,"TERMINAL",0,0,"y",,terminal_output +2449,2878546,"TERMINAL",0,0,".",,terminal_output +2450,2878775,"TERMINAL",0,0,"sh",,terminal_output +2451,2878900,"TERMINAL",0,0,"a",,terminal_output +2452,2878970,"TERMINAL",0,0,"p",,terminal_output +2453,2879167,"TERMINAL",0,0,"e",,terminal_output +2454,2879267,"TERMINAL",0,0,"\r\n(Pdb) (1, 921, 1, 8, 64)\r\n",,terminal_output +2455,2883783,"TERMINAL",0,0,"v",,terminal_output +2456,2883885,"TERMINAL",0,0,"a",,terminal_output +2457,2884034,"TERMINAL",0,0,"lu",,terminal_output +2458,2884157,"TERMINAL",0,0,"e",,terminal_output +2459,2884411,"TERMINAL",0,0,".",,terminal_output +2460,2884521,"TERMINAL",0,0,"sh",,terminal_output +2461,2884581,"TERMINAL",0,0,"a",,terminal_output +2462,2884749,"TERMINAL",0,0,"p",,terminal_output +2463,2884809,"TERMINAL",0,0,"e",,terminal_output 
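cache_index prints as a traced scalar (shape ()), and the lines selected above write the current step into the caches at that index with lax.dynamic_update_slice. A self-contained sketch of that mechanism outside the module, reusing the shapes from this session:

import jax.numpy as jnp
from jax import lax

cached_key = jnp.zeros((1, 921, 1, 8, 64))   # (*batch_dims, max_length, heads, head_dim)
new_key = jnp.ones((1, 921, 1, 8, 64))       # slice produced for the current step
cur_index = jnp.array(0, dtype=jnp.int32)    # scalar cache_index, shape ()
zero = jnp.array(0, dtype=cur_index.dtype)

indices = (zero, zero) + (cur_index, zero, zero)          # len(batch_dims) == 2 here
cached_key = lax.dynamic_update_slice(cached_key, new_key, indices)
cur_index = cur_index + 1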
+2464,2884996,"TERMINAL",0,0,"\r\n(Pdb) (1, 921, 1, 8, 64)\r\n",,terminal_output +2465,2888521,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20391,0,"",python,selection_command +2466,2888740,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20428,0,"",python,selection_command +2467,2888773,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20458,0,"",python,selection_command +2468,2888806,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20473,0,"",python,selection_command +2469,2888837,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20499,0,"",python,selection_command +2470,2888871,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20571,0,"",python,selection_command +2471,2888905,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20579,0,"",python,selection_command +2472,2888937,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20594,0,"",python,selection_command +2473,2888971,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20615,0,"",python,selection_command +2474,2889005,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20635,0,"",python,selection_command +2475,2889037,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20654,0,"",python,selection_command +2476,2889128,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20678,0,"",python,selection_command +2477,2889129,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20716,0,"",python,selection_command +2478,2889137,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20771,0,"",python,selection_command +2479,2889171,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20845,0,"",python,selection_command +2480,2889204,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20885,0,"",python,selection_command +2481,2889237,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20911,0,"",python,selection_command +2482,2889271,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20958,0,"",python,selection_command +2483,2889305,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21010,0,"",python,selection_command +2484,2889337,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21047,0,"",python,selection_command +2485,2889373,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21062,0,"",python,selection_command +2486,2889407,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21126,0,"",python,selection_command +2487,2889440,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21166,0,"",python,selection_command +2488,2889481,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21226,0,"",python,selection_command +2489,2889512,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21294,0,"",python,selection_command +2490,2889873,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21296,0,"",python,selection_command +2491,2890011,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21297,0,"",python,selection_command +2492,2890376,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21317,0,"",python,selection_command +2493,2890762,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21297,0,"",python,selection_command +2494,2916424,"TERMINAL",0,0,"se",,terminal_output +2495,2916492,"TERMINAL",0,0,"l",,terminal_output +2496,2916626,"TERMINAL",0,0,"f",,terminal_output +2497,2916688,"TERMINAL",0,0,".",,terminal_output 
+2498,2916830,"TERMINAL",0,0,"c",,terminal_output +2499,2917247,"TERMINAL",0,0,"a",,terminal_output +2500,2917311,"TERMINAL",0,0,"c",,terminal_output +2501,2917515,"TERMINAL",0,0,"e",,terminal_output +2502,2917764,"TERMINAL",0,0," ",,terminal_output +2503,2918167,"TERMINAL",0,0,"hed",,terminal_output +2504,2918415,"TERMINAL",0,0,"_",,terminal_output +2505,2918557,"TERMINAL",0,0,"k",,terminal_output +2506,2918733,"TERMINAL",0,0,"e",,terminal_output +2507,2918912,"TERMINAL",0,0,"y.",,terminal_output +2508,2919087,"TERMINAL",0,0,"s",,terminal_output +2509,2919264,"TERMINAL",0,0,"ha",,terminal_output +2510,2919443,"TERMINAL",0,0,"p",,terminal_output +2511,2919619,"TERMINAL",0,0,"e\r\n(Pdb) (1, 921, 1, 8, 64)\r\n",,terminal_output +2512,2931270,"TERMINAL",0,0,"k",,terminal_output +2513,2931484,"TERMINAL",0,0,"e",,terminal_output +2514,2931548,"TERMINAL",0,0,"y",,terminal_output +2515,2931787,"TERMINAL",0,0,".",,terminal_output +2516,2931905,"TERMINAL",0,0,"s",,terminal_output +2517,2932014,"TERMINAL",0,0,"h",,terminal_output +2518,2932104,"TERMINAL",0,0,"a",,terminal_output +2519,2932188,"TERMINAL",0,0,"p",,terminal_output +2520,2932305,"TERMINAL",0,0,"e",,terminal_output +2521,2932872,"TERMINAL",0,0,"\r\n(Pdb) (1, 921, 1, 8, 64)\r\n",,terminal_output +2522,2977733,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21315,0,"",python,selection_mouse +2523,2993489,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21308,0,"",python,selection_mouse +2524,3061359,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21281,72," key = lax.dynamic_update_slice(self.cached_key[...], key, indices)",python,selection_command +2525,3061537,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21281,151," key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)",python,selection_command +2526,3061671,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21281,184," key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key",python,selection_command +2527,3061803,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21281,221," key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value",python,selection_command +2528,3061974,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21281,254," key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1",python,selection_command +2529,3082239,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21535,0,"",python,selection_mouse +2530,3082245,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21534,0,"",python,selection_command +2531,3082897,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21464,0,"",python,selection_mouse +2532,3083450,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21498,0,"",python,selection_mouse +2533,3085526,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21289,0,"",python,selection_mouse +2534,3085850,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21362,0,"",python,selection_mouse +2535,3130796,"TERMINAL",0,0,"n",,terminal_output +2536,3132812,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(574)__call__()\r\n-> mask = combine_masks(\r\n",,terminal_output +2537,3134717,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(575)__call__()\r\n-> mask,\r\n",,terminal_output +2538,3135391,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(576)__call__()\r\n-> jnp.broadcast_to(\r\n",,terminal_output +2539,3135812,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(577)__call__()\r\n-> jnp.arange(max_length) <= cur_index,\r\n",,terminal_output +2540,3136226,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(578)__call__()\r\n-> tuple(batch_dims) + (1, 1, max_length),\r\n",,terminal_output +2541,3139981,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(576)__call__()\r\n-> jnp.broadcast_to(\r\n",,terminal_output +2542,3141784,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(574)__call__()\r\n-> mask = combine_masks(\r\n",,terminal_output +2543,3142927,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(583)__call__()\r\n-> self.dropout_rate > 0.0\r\n",,terminal_output +2544,3149743,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(601)__call__()\r\n-> deterministic = True\r\n",,terminal_output +2545,3154255,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(602)__call__()\r\n-> dropout_rng = None\r\n",,terminal_output +2546,3157885,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(605)__call__()\r\n-> x = self.attention_fn(\r\n",,terminal_output +2547,3160613,"TERMINAL",0,0,"s",,terminal_output +2548,3160792,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(606)__call__()\r\n-> query,\r\n",,terminal_output +2549,3163408,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(607)__call__()\r\n-> key,\r\n",,terminal_output +2550,3163967,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(608)__call__()\r\n-> value,\r\n",,terminal_output 
+2551,3164355,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(609)__call__()\r\n-> mask=mask,\r\n",,terminal_output +2552,3164738,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(610)__call__()\r\n-> dropout_rng=dropout_rng,\r\n",,terminal_output +2553,3165013,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(611)__call__()\r\n-> dropout_rate=self.dropout_rate,\r\n",,terminal_output +2554,3165465,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(612)__call__()\r\n-> broadcast_dropout=self.broadcast_dropout,\r\n",,terminal_output +2555,3165664,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(613)__call__()\r\n-> deterministic=deterministic,\r\n",,terminal_output +2556,3166035,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(614)__call__()\r\n-> dtype=self.dtype,\r\n",,terminal_output +2557,3166605,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(615)__call__()\r\n-> precision=self.precision,\r\n",,terminal_output +2558,3167030,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(616)__call__()\r\n-> module=self if sow_weights else None,\r\n",,terminal_output +2559,3167834,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(605)__call__()\r\n-> x = self.attention_fn(\r\n",,terminal_output +2560,3170067,"TERMINAL",0,0,"\r\n(Pdb) --Call--\r\n> /fast/home/franz.srambical/jafar/utils/nn.py(293)attention_fn()\r\n-> def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\r\n",,terminal_output +2561,3171305,"TERMINAL",0,0,"n",,terminal_output +2562,3171627,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(295)attention_fn()\r\n-> implementation = ""cudnn"" if use_flash_attention else None\r\n",,terminal_output +2563,3174741,"utils/nn.py",0,0,"",python,tab +2564,3177797,"utils/nn.py",11010,0,"",python,selection_command +2565,3178283,"utils/nn.py",10986,0,"",python,selection_command +2566,3178536,"utils/nn.py",10985,0,"",python,selection_command +2567,3178561,"utils/nn.py",10906,0,"",python,selection_command +2568,3178594,"utils/nn.py",10896,0,"",python,selection_command +2569,3178626,"utils/nn.py",10863,0,"",python,selection_command +2570,3178657,"utils/nn.py",10820,0,"",python,selection_command +2571,3178692,"utils/nn.py",10794,0,"",python,selection_command +2572,3178723,"utils/nn.py",10768,0,"",python,selection_command +2573,3178756,"utils/nn.py",10740,0,"",python,selection_command +2574,3178814,"utils/nn.py",10716,0,"",python,selection_command +2575,3178837,"utils/nn.py",10688,0,"",python,selection_command +2576,3178861,"utils/nn.py",10638,0,"",python,selection_command +2577,3178891,"utils/nn.py",10568,0,"",python,selection_command +2578,3178924,"utils/nn.py",10567,0,"",python,selection_command +2579,3178959,"utils/nn.py",10496,0,"",python,selection_command +2580,3178992,"utils/nn.py",10495,0,"",python,selection_command +2581,3179223,"utils/nn.py",9824,0,"",python,selection_command 
+2582,3179614,"utils/nn.py",8996,0,"",python,selection_command +2583,3180231,"utils/nn.py",9000,0,"",python,selection_command +2584,3180369,"utils/nn.py",9005,0,"",python,selection_command +2585,3180622,"utils/nn.py",9000,0,"",python,selection_command +2586,3181003,"utils/nn.py",9005,0,"",python,selection_command +2587,3181421,"utils/nn.py",9000,0,"",python,selection_command +2588,3182862,"utils/nn.py",9005,0,"",python,selection_command +2589,3183776,"utils/nn.py",9078,0,"",python,selection_command +2590,3184429,"utils/nn.py",9005,0,"",python,selection_command +2591,3184588,"utils/nn.py",9073,0,"\n ",python,content +2592,3185088,"utils/nn.py",9082,0,"#",python,content +2593,3185089,"utils/nn.py",9083,0,"",python,selection_keyboard +2594,3185165,"utils/nn.py",9083,0," ",python,content +2595,3185165,"utils/nn.py",9084,0,"",python,selection_keyboard +2596,3186440,"utils/nn.py",9084,0,"F",python,content +2597,3186441,"utils/nn.py",9085,0,"",python,selection_keyboard +2598,3186671,"utils/nn.py",9085,0,"I",python,content +2599,3186671,"utils/nn.py",9086,0,"",python,selection_keyboard +2600,3186720,"utils/nn.py",9086,0,"R",python,content +2601,3186720,"utils/nn.py",9087,0,"",python,selection_keyboard +2602,3186888,"utils/nn.py",9087,0,"S",python,content +2603,3186889,"utils/nn.py",9088,0,"",python,selection_keyboard +2604,3187025,"utils/nn.py",9088,0,"T",python,content +2605,3187025,"utils/nn.py",9089,0,"",python,selection_keyboard +2606,3187117,"utils/nn.py",9089,0," ",python,content +2607,3187117,"utils/nn.py",9090,0,"",python,selection_keyboard +2608,3187286,"utils/nn.py",9090,0,"P",python,content +2609,3187286,"utils/nn.py",9091,0,"",python,selection_keyboard +2610,3187355,"utils/nn.py",9091,0,"A",python,content +2611,3187355,"utils/nn.py",9092,0,"",python,selection_keyboard +2612,3187395,"utils/nn.py",9092,0,"S",python,content +2613,3187396,"utils/nn.py",9093,0,"",python,selection_keyboard +2614,3187554,"utils/nn.py",9093,0,"S",python,content +2615,3187554,"utils/nn.py",9094,0,"",python,selection_keyboard +2616,3187686,"utils/nn.py",9094,0,":",python,content +2617,3187686,"utils/nn.py",9095,0,"",python,selection_keyboard +2618,3187821,"utils/nn.py",9095,0," ",python,content +2619,3187822,"utils/nn.py",9096,0,"",python,selection_keyboard +2620,3190799,"utils/nn.py",9096,0,"q",python,content +2621,3190800,"utils/nn.py",9097,0,"",python,selection_keyboard +2622,3190875,"utils/nn.py",9097,0,"k",python,content +2623,3190875,"utils/nn.py",9098,0,"",python,selection_keyboard +2624,3190990,"utils/nn.py",9098,0,"v",python,content +2625,3190990,"utils/nn.py",9099,0,"",python,selection_keyboard +2626,3191215,"utils/nn.py",9099,0,".",python,content +2627,3191216,"utils/nn.py",9100,0,"",python,selection_keyboard +2628,3191303,"utils/nn.py",9100,0,"s",python,content +2629,3191304,"utils/nn.py",9101,0,"",python,selection_keyboard +2630,3191424,"utils/nn.py",9101,0,"h",python,content +2631,3191424,"utils/nn.py",9102,0,"",python,selection_keyboard +2632,3191642,"utils/nn.py",9102,0,"a",python,content +2633,3191643,"utils/nn.py",9103,0,"",python,selection_keyboard +2634,3191771,"utils/nn.py",9103,0,"p",python,content +2635,3191772,"utils/nn.py",9104,0,"",python,selection_keyboard +2636,3191856,"utils/nn.py",9104,0,"e",python,content +2637,3191857,"utils/nn.py",9105,0,"",python,selection_keyboard +2638,3192509,"utils/nn.py",9105,0," (1, 1, 921, 8, 64)",python,content +2639,3192767,"utils/nn.py",9123,0,"",python,selection_command +2640,3192976,"utils/nn.py",9162,0,"",python,selection_command 
+2641,3193309,"utils/nn.py",9125,39,"",python,content +2642,3193342,"utils/nn.py",9133,0,"",python,selection_command +2643,3193457,"utils/nn.py",9082,0,"",python,selection_command +2644,3194250,"utils/nn.py",9124,0,"\n ",python,content +2645,3195189,"utils/nn.py",9133,0,"# SECOND PASS: qkv.shape (1, 1, 921, 8, 64)",python,content +2646,3195378,"utils/nn.py",9175,0,"",python,selection_command +2647,3195473,"utils/nn.py",9173,0,"",python,selection_command +2648,3195734,"utils/nn.py",9171,0,"",python,selection_command +2649,3195762,"utils/nn.py",9170,0,"",python,selection_command +2650,3195791,"utils/nn.py",9168,0,"",python,selection_command +2651,3195825,"utils/nn.py",9165,0,"",python,selection_command +2652,3195859,"utils/nn.py",9163,0,"",python,selection_command +2653,3196241,"utils/nn.py",9162,0,"",python,selection_command +2654,3196517,"utils/nn.py",9160,0,"",python,selection_command +2655,3196695,"utils/nn.py",9159,0,"",python,selection_command +2656,3196866,"utils/nn.py",9158,0,"",python,selection_command +2657,3197089,"utils/nn.py",9158,18,"",python,content +2658,3197361,"utils/nn.py",9157,0,"",python,selection_command +2659,3201059,"TERMINAL",0,0,"q",,terminal_output +2660,3201225,"TERMINAL",0,0,"u",,terminal_output +2661,3201294,"TERMINAL",0,0,"e",,terminal_output +2662,3201358,"TERMINAL",0,0,"r",,terminal_output +2663,3201659,"TERMINAL",0,0,"y",,terminal_output +2664,3201823,"TERMINAL",0,0,"-",,terminal_output +2665,3202063,"TERMINAL",0,0,".",,terminal_output +2666,3202398,"TERMINAL",0,0," ",,terminal_output +2667,3202510,"TERMINAL",0,0," ",,terminal_output +2668,3202693,"TERMINAL",0,0,".",,terminal_output +2669,3202780,"TERMINAL",0,0,"s",,terminal_output +2670,3202904,"TERMINAL",0,0,"h",,terminal_output +2671,3203009,"TERMINAL",0,0,"a",,terminal_output +2672,3203125,"TERMINAL",0,0,"p",,terminal_output +2673,3203213,"TERMINAL",0,0,"e",,terminal_output +2674,3203332,"TERMINAL",0,0,"\r\n(Pdb) (1, 921, 1, 8, 64)\r\n",,terminal_output +2675,3204150,"TERMINAL",0,0,"k",,terminal_output +2676,3204250,"TERMINAL",0,0,"e",,terminal_output +2677,3204749,"TERMINAL",0,0,"y",,terminal_output +2678,3204974,"TERMINAL",0,0,".",,terminal_output +2679,3205131,"TERMINAL",0,0,"s",,terminal_output +2680,3205260,"TERMINAL",0,0,"h",,terminal_output +2681,3205354,"TERMINAL",0,0,"a",,terminal_output +2682,3205447,"TERMINAL",0,0,"p",,terminal_output +2683,3205750,"TERMINAL",0,0,"e",,terminal_output +2684,3206137,"TERMINAL",0,0,"\r\n(Pdb) (1, 921, 1, 8, 64)\r\n",,terminal_output +2685,3206613,"TERMINAL",0,0,"v",,terminal_output +2686,3206729,"TERMINAL",0,0,"al",,terminal_output +2687,3206827,"TERMINAL",0,0,"u",,terminal_output +2688,3206912,"TERMINAL",0,0,"e",,terminal_output +2689,3207032,"TERMINAL",0,0,".",,terminal_output +2690,3207162,"TERMINAL",0,0,"s",,terminal_output +2691,3207263,"TERMINAL",0,0,"h",,terminal_output +2692,3207382,"TERMINAL",0,0,"a",,terminal_output +2693,3207559,"TERMINAL",0,0,"e",,terminal_output +2694,3207868,"TERMINAL",0,0," ",,terminal_output +2695,3208030,"TERMINAL",0,0,"p",,terminal_output +2696,3208118,"TERMINAL",0,0,"e",,terminal_output +2697,3208249,"TERMINAL",0,0,"\r\n(Pdb) (1, 921, 1, 8, 64)\r\n",,terminal_output +2698,3212146,"utils/nn.py",9106,0,"",python,selection_command +2699,3212292,"utils/nn.py",9107,0,"",python,selection_command +2700,3213034,"utils/nn.py",9073,0,"\n ",python,content +2701,3213342,"utils/nn.py",9082,0,"#",python,content +2702,3213342,"utils/nn.py",9083,0,"",python,selection_keyboard +2703,3213410,"utils/nn.py",9083,0," ",python,content 
+2704,3213411,"utils/nn.py",9084,0,"",python,selection_keyboard +2705,3214084,"utils/nn.py",9084,0,"f",python,content +2706,3214085,"utils/nn.py",9085,0,"",python,selection_keyboard +2707,3214155,"utils/nn.py",9085,0,"o",python,content +2708,3214156,"utils/nn.py",9086,0,"",python,selection_keyboard +2709,3214277,"utils/nn.py",9086,0,"r",python,content +2710,3214277,"utils/nn.py",9087,0,"",python,selection_keyboard +2711,3214366,"utils/nn.py",9087,0," ",python,content +2712,3214366,"utils/nn.py",9088,0,"",python,selection_keyboard +2713,3214491,"utils/nn.py",9088,0,"t",python,content +2714,3214491,"utils/nn.py",9089,0,"",python,selection_keyboard +2715,3214578,"utils/nn.py",9089,0,"e",python,content +2716,3214579,"utils/nn.py",9090,0,"",python,selection_keyboard +2717,3214675,"utils/nn.py",9090,0,"m",python,content +2718,3214676,"utils/nn.py",9091,0,"",python,selection_keyboard +2719,3214705,"utils/nn.py",9091,0,"p",python,content +2720,3214705,"utils/nn.py",9092,0,"",python,selection_keyboard +2721,3214771,"utils/nn.py",9092,0,"o",python,content +2722,3214771,"utils/nn.py",9093,0,"",python,selection_keyboard +2723,3214849,"utils/nn.py",9093,0,"r",python,content +2724,3214850,"utils/nn.py",9094,0,"",python,selection_keyboard +2725,3214905,"utils/nn.py",9094,0,"a",python,content +2726,3214905,"utils/nn.py",9095,0,"",python,selection_keyboard +2727,3215025,"utils/nn.py",9095,0,"l",python,content +2728,3215025,"utils/nn.py",9096,0,"",python,selection_keyboard +2729,3215075,"utils/nn.py",9096,0," ",python,content +2730,3215075,"utils/nn.py",9097,0,"",python,selection_keyboard +2731,3215142,"utils/nn.py",9097,0,"a",python,content +2732,3215142,"utils/nn.py",9098,0,"",python,selection_keyboard +2733,3215239,"utils/nn.py",9098,0,"t",python,content +2734,3215239,"utils/nn.py",9099,0,"",python,selection_keyboard +2735,3215374,"utils/nn.py",9099,0,"t",python,content +2736,3215375,"utils/nn.py",9100,0,"",python,selection_keyboard +2737,3215421,"utils/nn.py",9100,0,"e",python,content +2738,3215422,"utils/nn.py",9101,0,"",python,selection_keyboard +2739,3215542,"utils/nn.py",9101,0,"n",python,content +2740,3215542,"utils/nn.py",9102,0,"",python,selection_keyboard +2741,3215625,"utils/nn.py",9102,0,"t",python,content +2742,3215625,"utils/nn.py",9103,0,"",python,selection_keyboard +2743,3215695,"utils/nn.py",9103,0,"i",python,content +2744,3215696,"utils/nn.py",9104,0,"",python,selection_keyboard +2745,3215711,"utils/nn.py",9104,0,"o",python,content +2746,3215712,"utils/nn.py",9105,0,"",python,selection_keyboard +2747,3215774,"utils/nn.py",9105,0,"n",python,content +2748,3215774,"utils/nn.py",9106,0,"",python,selection_keyboard +2749,3215991,"utils/nn.py",9106,0,":",python,content +2750,3215991,"utils/nn.py",9107,0,"",python,selection_keyboard +2751,3216135,"utils/nn.py",9107,0," ",python,content +2752,3216135,"utils/nn.py",9108,0,"",python,selection_keyboard +2753,3216267,"utils/nn.py",9108,0,"()",python,content +2754,3216267,"utils/nn.py",9109,0,"",python,selection_keyboard +2755,3216402,"utils/nn.py",9109,1,")",python,content +2756,3216402,"utils/nn.py",9110,0,"",python,selection_keyboard +2757,3216680,"utils/nn.py",9109,0,"",python,selection_command +2758,3217559,"utils/nn.py",9108,0,"",python,selection_command +2759,3219065,"utils/nn.py",9107,0,"",python,selection_command +2760,3219201,"utils/nn.py",9106,0,"",python,selection_command +2761,3219403,"utils/nn.py",9106,1,"",python,content +2762,3219955,"utils/nn.py",9107,0,"",python,selection_command 
+2763,3220450,"utils/nn.py",9108,0,"",python,selection_command +2764,3220781,"utils/nn.py",9108,0,"u",python,content +2765,3220781,"utils/nn.py",9109,0,"",python,selection_keyboard +2766,3220861,"utils/nn.py",9109,0,"s",python,content +2767,3220862,"utils/nn.py",9110,0,"",python,selection_keyboard +2768,3220938,"utils/nn.py",9110,0,"i",python,content +2769,3220938,"utils/nn.py",9111,0,"",python,selection_keyboard +2770,3221011,"utils/nn.py",9111,0,"n",python,content +2771,3221011,"utils/nn.py",9112,0,"",python,selection_keyboard +2772,3221054,"utils/nn.py",9112,0,"g",python,content +2773,3221055,"utils/nn.py",9113,0,"",python,selection_keyboard +2774,3221147,"utils/nn.py",9113,0," ",python,content +2775,3221147,"utils/nn.py",9114,0,"",python,selection_keyboard +2776,3221345,"utils/nn.py",9114,0,"k",python,content +2777,3221346,"utils/nn.py",9115,0,"",python,selection_keyboard +2778,3221433,"utils/nn.py",9115,0,"v",python,content +2779,3221433,"utils/nn.py",9116,0,"",python,selection_keyboard +2780,3221595,"utils/nn.py",9116,0," ",python,content +2781,3221596,"utils/nn.py",9117,0,"",python,selection_keyboard +2782,3221712,"utils/nn.py",9117,0,"c",python,content +2783,3221712,"utils/nn.py",9118,0,"",python,selection_keyboard +2784,3221774,"utils/nn.py",9118,0,"a",python,content +2785,3221774,"utils/nn.py",9119,0,"",python,selection_keyboard +2786,3221904,"utils/nn.py",9119,0,"c",python,content +2787,3221904,"utils/nn.py",9120,0,"",python,selection_keyboard +2788,3222096,"utils/nn.py",9120,0,"h",python,content +2789,3222096,"utils/nn.py",9121,0,"",python,selection_keyboard +2790,3222352,"utils/nn.py",9121,0,"e",python,content +2791,3222353,"utils/nn.py",9122,0,"",python,selection_keyboard +2792,3222563,"utils/nn.py",9121,0,"",python,selection_command +2793,3223118,"utils/nn.py",9171,0,"",python,selection_command +2794,3223336,"utils/nn.py",9207,0,"",python,selection_command +2795,3223740,"utils/nn.py",9171,0,"",python,selection_command +2796,3224293,"utils/nn.py",9169,0,"",python,selection_command +2797,3224441,"utils/nn.py",9168,0,"",python,selection_command +2798,3224570,"utils/nn.py",9166,0,"",python,selection_command +2799,3224699,"utils/nn.py",9163,0,"",python,selection_command +2800,3224837,"utils/nn.py",9161,0,"",python,selection_command +2801,3225066,"utils/nn.py",9160,0,"",python,selection_command +2802,3225540,"utils/nn.py",9160,1,"1",python,selection_command +2803,3225551,"utils/nn.py",9160,2,"1,",python,selection_command +2804,3225722,"utils/nn.py",9160,3,"1, ",python,selection_command +2805,3225831,"utils/nn.py",9160,3,"",python,content +2806,3226234,"utils/nn.py",9161,0,"",python,selection_command +2807,3226394,"utils/nn.py",9162,0,"",python,selection_command +2808,3226552,"utils/nn.py",9163,0,"",python,selection_command +2809,3226691,"utils/nn.py",9164,0,"",python,selection_command +2810,3226948,"utils/nn.py",9165,0,"1, ",python,content +2811,3226948,"utils/nn.py",9167,0,"",python,selection_command +2812,3231957,"TERMINAL",0,0,"n",,terminal_output +2813,3233879,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(297)attention_fn()\r\n-> def _rearrange(x):\r\n",,terminal_output +2814,3234288,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(300)attention_fn()\r\n-> def _pad(x):\r\n\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(303)attention_fn()\r\n-> def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\r\n\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(310)attention_fn()\r\n-> original_shape = 
query.shape\r\n\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(311)attention_fn()\r\n-> original_seq_len = query.shape[-3]\r\n\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(314)attention_fn()\r\n-> target_seq_len = ((original_seq_len + 3) // 4) * 4\r\n\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(315)attention_fn()\r\n-> pad_size = target_seq_len - original_seq_len\r\n",,terminal_output +2815,3234527,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(317)attention_fn()\r\n-> query_4d = _pad(_rearrange(query))\r\n",,terminal_output +2816,3234773,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(318)attention_fn()\r\n-> key_4d = _pad(_rearrange(key))\r\n",,terminal_output +2817,3234938,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(319)attention_fn()\r\n-> value_4d = _pad(_rearrange(value))\r\n",,terminal_output +2818,3235092,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(321)attention_fn()\r\n-> attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\r\n",,terminal_output +2819,3235248,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(322)attention_fn()\r\n-> attention_mask = attention_mask.at[original_seq_len:, :].set(False)\r\n",,terminal_output +2820,3235380,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(323)attention_fn()\r\n-> attention_mask = attention_mask.at[:, original_seq_len:].set(False)\r\n",,terminal_output +2821,3235530,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(326)attention_fn()\r\n-> _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\r\n",,terminal_output +2822,3236098,"TERMINAL",0,0,"\r\n(Pdb) ValueError: jnp.pad: pad_width with nd=5 has unsupported shape (2, 2). Valid shapes are (5, 2), (1, 2), (2,), (1,), or ().\r\n> /fast/home/franz.srambical/jafar/utils/nn.py(326)attention_fn()\r\n-> _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\r\n",,terminal_output +2823,3236275,"TERMINAL",0,0,"\r\n(Pdb) --Return--\r\n> /fast/home/franz.srambical/jafar/utils/nn.py(326)attention_fn()->None\r\n-> _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\r\n",,terminal_output +2824,3236474,"TERMINAL",0,0,"\r\n(Pdb) ValueError: jnp.pad: pad_width with nd=5 has unsupported shape (2, 2). Valid shapes are (5, 2), (1, 2), (2,), (1,), or ().\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(605)__call__()\r\n-> x = self.attention_fn(\r\n",,terminal_output +2825,3236643,"TERMINAL",0,0,"\r\n(Pdb) --Return--\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py(605)__call__()->None\r\n-> x = self.attention_fn(\r\n",,terminal_output +2826,3237885,"TERMINAL",0,0,"\r\n(Pdb) ValueError: jnp.pad: pad_width with nd=5 has unsupported shape (2, 2). Valid shapes are (5, 2), (1, 2), (2,), (1,), or ().\r\n> /fast/home/franz.srambical/jafar/utils/nn.py(135)__call__()\r\n-> z = self.temporal_attention(z)\r\n",,terminal_output +2827,3238896,"TERMINAL",0,0,"\r\n(Pdb) --Return--\r\n> /fast/home/franz.srambical/jafar/utils/nn.py(135)__call__()->None\r\n-> z = self.temporal_attention(z)\r\n",,terminal_output +2828,3239773,"TERMINAL",0,0,"\r\n(Pdb) ValueError: jnp.pad: pad_width with nd=5 has unsupported shape (2, 2). 
Valid shapes are (5, 2), (1, 2), (2,), (1,), or ().\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py(198)merge_inputs_wrapper()\r\n-> out = f(*args)\r\n",,terminal_output +2829,3240898,"TERMINAL",0,0,"\r\n(Pdb) --Return--\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py(198)merge_inputs_wrapper()->None\r\n-> out = f(*args)\r\n",,terminal_output +2830,3242443,"TERMINAL",0,0,"\r\n(Pdb) ValueError: jnp.pad: pad_width with nd=5 has unsupported shape (2, 2). Valid shapes are (5, 2), (1, 2), (2,), (1,), or ().\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/linear_util.py(396)_get_result_paths_thunk()\r\n-> ans = _fun(*args, **kwargs)\r\n",,terminal_output +2831,3243033,"TERMINAL",0,0,"\r\n(Pdb) --Return--\r\n> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/linear_util.py(396)_get_result_paths_thunk()->None\r\n-> ans = _fun(*args, **kwargs)\r\n",,terminal_output +2832,3322415,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +2833,3322416,"/fast/home/franz.srambical/jafar/utils/nn.py",10312,0,"",python,selection_command +2834,3327468,"/fast/home/franz.srambical/jafar/utils/nn.py",10611,0,"",python,selection_mouse +2835,3330928,"/fast/home/franz.srambical/jafar/utils/nn.py",9462,0,"",python,selection_mouse +2836,3330928,"/fast/home/franz.srambical/jafar/utils/nn.py",9461,0,"",python,selection_command +2837,3354408,"/fast/home/franz.srambical/jafar/utils/nn.py",9724,0,"",python,selection_mouse +2838,3354410,"/fast/home/franz.srambical/jafar/utils/nn.py",9723,0,"",python,selection_command +2839,3362802,"/fast/home/franz.srambical/jafar/utils/nn.py",10434,0,"",python,selection_mouse +2840,3369832,"/fast/home/franz.srambical/jafar/utils/nn.py",9911,0,"",python,selection_command +2841,3385615,"/fast/home/franz.srambical/jafar/utils/nn.py",9228,0,"",python,selection_mouse +2842,3388533,"/fast/home/franz.srambical/jafar/utils/nn.py",9208,0,"\n ",python,content +2843,3388796,"/fast/home/franz.srambical/jafar/utils/nn.py",9217,0,"i",python,content +2844,3388796,"/fast/home/franz.srambical/jafar/utils/nn.py",9218,0,"",python,selection_keyboard +2845,3388819,"/fast/home/franz.srambical/jafar/utils/nn.py",9218,0,"f",python,content +2846,3388820,"/fast/home/franz.srambical/jafar/utils/nn.py",9219,0,"",python,selection_keyboard +2847,3388926,"/fast/home/franz.srambical/jafar/utils/nn.py",9219,0," ",python,content +2848,3388926,"/fast/home/franz.srambical/jafar/utils/nn.py",9220,0,"",python,selection_keyboard +2849,3390428,"/fast/home/franz.srambical/jafar/utils/nn.py",9220,0,"query.shape == (1, 921, 1, 512):",python,content +2850,3390899,"/fast/home/franz.srambical/jafar/utils/nn.py",9252,0,"\n ",python,content +2851,3391332,"/fast/home/franz.srambical/jafar/utils/nn.py",9265,0,"breakpoint()",python,content +2852,3391504,"/fast/home/franz.srambical/jafar/utils/nn.py",9276,0,"",python,selection_command +2853,3395076,"models/dynamics_causal.py",0,0,"",python,tab +2854,3395076,"models/dynamics_causal.py",2171,10,"breakpoint",python,selection_command +2855,3397310,"models/dynamics_causal.py",2180,0,"",python,selection_mouse +2856,3397676,"models/dynamics_causal.py",2163,21,"",python,content +2857,3397692,"models/dynamics_causal.py",2171,0,"",python,selection_command +2858,3400688,"utils/nn.py",0,0,"",python,tab +2859,3400688,"utils/nn.py",4107,10,"breakpoint",python,selection_command 
+2860,3402329,"utils/nn.py",4113,0,"",python,selection_mouse +2861,3403476,"utils/nn.py",4095,24," breakpoint()",python,selection_command +2862,3403708,"utils/nn.py",4055,64," if x.shape == (1, 921, 1, 512):\n breakpoint()",python,selection_command +2863,3404342,"utils/nn.py",4055,65,"",python,content +2864,3404357,"utils/nn.py",4063,0,"",python,selection_command +2865,3404635,"utils/nn.py",4098,0,"",python,selection_command +2866,3404780,"utils/nn.py",4137,0,"",python,selection_command +2867,3407735,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +2868,3407735,"/fast/home/franz.srambical/jafar/utils/nn.py",4107,10,"breakpoint",python,selection_command +2869,3407828,"/fast/home/franz.srambical/jafar/utils/nn.py",4055,65,"",python,content +2870,3409823,"/fast/home/franz.srambical/jafar/utils/nn.py",9200,10,"breakpoint",python,selection_command +2871,3412190,"utils/nn.py",0,0,"",python,tab +2872,3412191,"utils/nn.py",9200,10,"breakpoint",python,selection_command +2873,3414501,"TERMINAL",0,0,"q",,terminal_output +2874,3414689,"TERMINAL",0,0,"ui",,terminal_output +2875,3414751,"TERMINAL",0,0,"t",,terminal_output +2876,3415017,"TERMINAL",0,0,"(",,terminal_output +2877,3415071,"TERMINAL",0,0,")",,terminal_output +2878,3415266,"TERMINAL",0,0,"\r\n",,terminal_output +2879,3415319,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 76, in __call__\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 232, in __call__\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 94, in trace_dispatch\r\n return self.dispatch_return(frame, arg)\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 156, in dispatch_return\r\n if self.quitting: raise 
BdbQuit\r\nbdb.BdbQuit\r\n",,terminal_output +2880,3415917,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=13995.10 task 0: running\r\n",,terminal_output +2881,3416076,"TERMINAL",0,0,"(Pdb) ",,terminal_output +2882,3416147,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=13995.10\r\nsrun: forcing job termination\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\n[2025-07-27T10:15:35.952] error: *** STEP 13995.10 ON hai003 CANCELLED AT 2025-07-27T10:15:35 DUE to SIGNAL Killed ***\r\n",,terminal_output +2883,3416304,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +2884,3417136,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +2885,3417239,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +2886,3417810,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +2887,3424565,"TERMINAL",0,0,"salloc",,terminal_focus +2888,3427520,"TERMINAL",0,0,"srun",,terminal_focus +2889,3429483,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +2890,3437676,"TERMINAL",0,0,"2025-07-27 10:15:57.478966: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2891,3439184,"TERMINAL",0,0,"2025-07-27 10:15:58.985599: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2892,3442783,"TERMINAL",0,0,"2025-07-27 10:16:02.585736: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2893,3443569,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 231, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 134, in __call__\r\n z = self.temporal_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 329, in attention_fn\r\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 308, in _fuse_masks\r\n expanded_mask = jnp.pad(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 4352, in pad\r\n pad_width = _broadcast_to_pairs(pad_width, np.ndim(array), ""pad_width"")\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 3937, in _broadcast_to_pairs\r\n raise ValueError(f""jnp.pad: {name} with {nd=} has unsupported shape {nvals.shape}. ""\r\nValueError: jnp.pad: pad_width with nd=5 has unsupported shape (2, 2). 
Valid shapes are (5, 2), (1, 2), (2,), (1,), or ().\r\n",,terminal_output +2894,3444641,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +2895,4473077,"TERMINAL",0,0,"salloc: Job 13995 has exceeded its time limit and its allocation has been revoked.\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\n[2025-07-27T10:33:09.015] error: *** STEP 13995.interactive ON hai003 CANCELLED AT 2025-07-27T10:33:09 DUE TO TIME LIMIT ***\r\n",,terminal_output +2896,4533065,"TERMINAL",0,0,"srun: error: hai003: task 0: Killed\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +2897,11470079,"TERMINAL",0,0,"salloc --gpus=1 --ntasks-per-node=1 --cpus-per-task=1 --mem=100G",,terminal_command +2898,11470134,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 13996\r\nsalloc: job 13996 queued and waiting for resources\r\n",,terminal_output +2899,11474332,"TERMINAL",0,0,"salloc",,terminal_focus +2900,11475260,"TERMINAL",0,0,"^Csalloc: Job allocation 13991 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +2901,11475399,"TERMINAL",0,0,"^C",,terminal_command +2902,11476197,"TERMINAL",0,0,"salloc",,terminal_focus +2903,11476821,"TERMINAL",0,0,"^Csalloc: Job allocation 13996 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +2904,11478436,"TERMINAL",0,0,"salloc --gpus=1 --ntasks-per-node=1 --cpus-per-task=1 --mem=100G --time=01:00:00",,terminal_command +2905,11478500,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 13997\r\n",,terminal_output +2906,11478594,"TERMINAL",0,0,"salloc: Nodes hai003 are ready for job\r\n",,terminal_output +2907,11478939,"TERMINAL",0,0,"Running inside SLURM, Job ID 13997.\r\n",,terminal_output +2908,11479014,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +2909,11479662,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +2910,11481168,"TERMINAL",0,0,"b",,terminal_output +2911,11481241,"TERMINAL",0,0,"a",,terminal_output +2912,11481359,"TERMINAL",0,0,"sh",,terminal_output +2913,11481499,"TERMINAL",0,0," ",,terminal_output +2914,11481962,"TERMINAL",0,0,"s",,terminal_output +2915,11482027,"TERMINAL",0,0,"a",,terminal_output +2916,11482197,"TERMINAL",0,0,"mpl",,terminal_output +2917,11482297,"TERMINAL",0,0,"e",,terminal_output +2918,11482502,"TERMINAL",0,0,".py ",,terminal_output +2919,11485164,"utils/nn.py",9209,0,"",python,selection_command +2920,11489192,"utils/nn.py",9095,0,"",python,selection_mouse +2921,11489318,"utils/nn.py",9095,3,"921",python,selection_mouse +2922,11489442,"utils/nn.py",9059,51," # FIRST PASS: qkv.shape (1, 921, 1, 8, 64)\n",python,selection_mouse +2923,11489861,"utils/nn.py",9097,0,"",python,selection_mouse +2924,11490295,"utils/nn.py",9106,0,"",python,selection_mouse +2925,11490554,"utils/nn.py",9109,0,"",python,selection_mouse +2926,11490555,"utils/nn.py",9108,0,"",python,selection_command +2927,11525242,"TERMINAL",0,0,"\r\n[?2004l\rsample.py: line 1: from: command not found\r\nsample.py: line 2: import: command not found\r\nsample.py: line 3: import: command not found\r\nsample.py: line 4: import: command not found\r\nsample.py: line 6: import: command not found\r\nsample.py: line 7: import: command not found\r\nsample.py: line 8: import: command not 
found\r\nsample.py: line 9: import: command not found\r\nsample.py: line 10: import: command not found\r\nsample.py: line 11: import: command not found\r\nsample.py: line 12: import: command not found\r\nsample.py: line 13: from: command not found\r\nsample.py: line 14: import: command not found\r\nsample.py: line 15: from: command not found\r\nsample.py: line 17: from: command not found\r\nsample.py: line 18: from: command not found\r\nsample.py: line 21: @dataclass: command not found\r\nsample.py: line 22: class: command not found\r\nsample.py: line 24: seed:: command not found\r\nsample.py: line 25: seq_len:: command not found\r\nsample.py: line 26: image_channels:: command not found\r\nsample.py: line 27: image_height:: command not found\r\nsample.py: line 28: image_width:: command not found\r\nsample.py: line 29: data_dir:: command not found\r\nsample.py: line 30: checkpoint:: command not found\r\nsample.py: line 32: batch_size:: command not found\r\nsample.py: line 33: maskgit_steps:: command not found\r\nsample.py: line 34: temperature:: command not found\r\nsample.py: line 35: sample_argmax:: command not found\r\nsample.py: line 36: start_frame:: command not found\r\nsample.py: line 38: tokenizer_dim:: command not found\r\nsample.py: line 39: tokenizer_ffn_dim:: command not found\r\nsample.py: line 40: latent_patch_dim:: command not found\r\nsample.py: line 41: num_patch_latents:: command not found\r\nsample.py: line 42: patch_size:: command not found\r\nsample.py: line 43: tokenizer_num_blocks:: command not found\r\nsample.py: line 44: tokenizer_num_heads:: command not found\r\nsample.py: line 46: lam_co_train:: command not found\r\nsample.py: line 47: lam_dim:: command not found\r\nsample.py: line 48: lam_ffn_dim:: command not found\r\nsample.py: line 49: latent_action_dim:: command not found\r\nsample.py: line 50: num_latent_actions:: command not found\r\nsample.py: line 51: lam_patch_size:: command not found\r\nsample.py: line 52: lam_num_blocks:: command not found\r\nsample.py: line 53: lam_num_heads:: command not found\r\nsample.py: line 55: dyna_dim:: command not found\r\nsample.py: line 56: dyna_ffn_dim:: command not found\r\nsample.py: line 57: dyna_num_blocks:: command not found\r\nsample.py: line 58: dyna_num_heads:: command not found\r\nsample.py: line 59: param_dtype: command not found\r\nsample.py: line 60: dtype: command not found\r\nsample.py: line 61: use_flash_attention:: command not found\r\nsample.py: line 62: dynamics_type:: command not found\r\nsample.py: line 65: syntax error near unexpected token `('\r\nsample.py: line 65: `args = tyro.cli(Args)'\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +2928,11526585,"TERMINAL",0,0,"b",,terminal_output +2929,11526670,"TERMINAL",0,0,"a",,terminal_output +2930,11526727,"TERMINAL",0,0,"s",,terminal_output +2931,11526815,"TERMINAL",0,0,"h",,terminal_output +2932,11526936,"TERMINAL",0,0," ",,terminal_output +2933,11527268,"TERMINAL",0,0,"e",,terminal_output +2934,11527675,"TERMINAL",0,0,"x",,terminal_output +2935,11528021,"TERMINAL",0,0,"p",,terminal_output +2936,11528117,"TERMINAL",0,0,"eriments/",,terminal_output +2937,11528719,"TERMINAL",0,0,"s",,terminal_output +2938,11528899,"TERMINAL",0,0,"a",,terminal_output +2939,11529019,"TERMINAL",0,0,"mp",,terminal_output +2940,11529152,"TERMINAL",0,0,"le.sh ",,terminal_output +2941,11529712,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output 
+2942,11543099,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +2943,11587929,"TERMINAL",0,0,"bash",,terminal_focus +2944,11589591,"TERMINAL",0,0,"git stash",,terminal_command +2945,11589642,"TERMINAL",0,0,"]633;C",,terminal_output +2946,11589832,"TERMINAL",0,0,"Saved working directory and index state WIP on causal-transformer-nnx: cf36115 feat: refactor; only send single token to attn\r\n",,terminal_output +2947,11589899,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar",,terminal_output +2948,11590116,"utils/nn.py",2133,7080," self.use_flash_attention, is_causal=self.spatial_causal\n ),\n rngs=rngs,\n # decode=self.decode,\n decode=False,\n )\n\n self.temporal_pos_enc = PositionalEncoding(self.dim)\n self.temporal_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.temporal_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=self.decode,\n )\n\n self.ffn_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense1 = nnx.Linear(\n in_features=self.dim,\n out_features=self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense2 = nnx.Linear(\n in_features=self.ffn_dim,\n out_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n @nnx.remat\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = self.spatial_pos_enc(x)\n z = self.spatial_norm(z)\n z = self.spatial_attention(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = self.temporal_pos_enc(x)\n z = self.temporal_norm(z)\n # FIXME (f.srambical): no need to pass mask if is_causal=True\n causal_mask = jnp.tri(z.shape[-2])\n z = self.temporal_attention(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = self.ffn_norm(x)\n z = self.ffn_dense1(z)\n z = jax.nn.gelu(z)\n z = self.ffn_dense2(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nnx.Module):\n def __init__(\n self,\n input_dim: int,\n model_dim: int,\n ffn_dim: int,\n out_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n spatial_causal: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.input_dim = input_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.out_dim = out_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.spatial_causal = spatial_causal\n self.decode = decode\n\n self.input_norm1 = nnx.LayerNorm(\n num_features=self.input_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_dense = nnx.Linear(\n 
in_features=self.input_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_norm2 = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n self.blocks: list[STBlock] = []\n for _ in range(self.num_blocks):\n self.blocks.append(\n STBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n spatial_causal=self.spatial_causal,\n decode=self.decode,\n rngs=rngs,\n )\n )\n\n self.output_dense = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(self, x: jax.Array) -> jax.Array:\n x = self.input_norm1(x)\n x = self.input_dense(x)\n x = self.input_norm2(x)\n\n for block in self.blocks:\n x = block(x)\n\n x = self.output_dense(x)\n return x # (B, T, E)\n\n\ndef normalize(x: jax.Array) -> jax.Array:\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nnx.Module):\n def __init__(\n self, latent_dim: int, num_latents: int, dropout: float, rngs: nnx.Rngs\n ):\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.dropout = dropout\n\n self.codebook = nnx.Param(\n normalize(\n nnx.initializers.lecun_uniform()(\n rngs.params(), (self.num_latents, self.latent_dim)\n )\n )\n )\n self.drop = nnx.Dropout(self.dropout, rngs=rngs)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n normalized_codebook = normalize(self.codebook.value)\n distance = -jnp.matmul(x, normalized_codebook.T)\n if training:\n distance = self.drop(distance)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array) -> jax.Array:\n return self.codebook[indices]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool) -> Callable:\n """"""\n Create an attention function that uses flash attention if enabled.\n\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\n\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\n requires a sequence length that is a multiple of 4. 
We pad the sequence length to the nearest\n multiple of 4 and mask accordingly.\n """"""\n\n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\n",python,content +2949,11592877,"utils/nn.py",12,10750,"from typing import Tuple\n\nfrom flax import linen as nn\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\nclass PositionalEncoding(nn.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n d_model: int # Hidden dimensionality of the input.\n max_len: int = 5000 # Maximum length of a sequence to expect.\n\n def setup(self):\n # Create matrix of [SeqLen, HiddenDim] representing the positional encoding for max_len inputs\n self.pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def __call__(self, x):\n x = x + self.pe[: x.shape[2]]\n return x\n\n\nclass STBlock(nn.Module):\n dim: int\n ffn_dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=False\n ),\n )(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n # FIXME (f.srambical): check whether we should still pass the mask if we set is_causal=True\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n z = nn.Dense(\n self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nn.Module):\n model_dim: int\n ffn_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = STBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n 
)(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\ndef normalize(x):\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nn.Module):\n latent_dim: int\n num_latents: int\n dropout: float\n\n def setup(self):\n self.codebook = normalize(\n self.param(\n ""codebook"",\n nn.initializers.lecun_uniform(),\n (self.num_latents, self.latent_dim),\n )\n )\n self.drop = nn.Dropout(self.dropout, deterministic=False)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n codebook = normalize(self.codebook)\n distance = -jnp.matmul(x, codebook.T)\n if training:\n dropout_key = self.make_rng(""dropout"")\n distance = self.drop(distance, rng=dropout_key)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array):\n return self.codebook[indices]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool):\n """"""\n Create an attention function that uses flash attention if enabled.\n\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\n\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\n multiple of 4 and mask accordingly.\n """"""\n\n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\n implementation = ""cudnn"" if use_flash_attention else None\n\n def _rearrange(x):\n return einops.rearrange(x, ""... l h d -> (...) l h d"")\n\n def _pad(x):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )\n return jnp.logical_and(attention_mask, expanded_mask)\n\n original_shape = query.shape\n original_seq_len = query.shape[-3]\n\n # Pad to nearest multiple of 4\n target_seq_len = ((original_seq_len + 3) // 4) * 4\n pad_size = target_seq_len - original_seq_len\n\n query_4d = _pad(_rearrange(query))\n key_4d = _pad(_rearrange(key))\n value_4d = _pad(_rearrange(value))\n\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n mask_4d = (\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n )\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n\n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\n\n output_4d = jax.nn.dot_product_attention(\n query=query_4d,\n key=key_4d,\n value=value_4d,\n bias=bias_4d,\n mask=mask_4d,\n implementation=implementation,\n is_causal=is_causal,\n **kwargs\n",python,content +2950,11596390,"utils/nn.py",0,0,"Switched from branch 'causal-transformer-nnx' to 'main'",python,git_branch_checkout +2951,11606391,"utils/nn.py",0,0,"Switched from branch 'main' to 'readme-typo-2'",python,git_branch_checkout +2952,11607917,"README.md",0,0,"

🧞‍♀️ Jasmine: A simple, performant and scalable JAX-based world modeling codebase 🧞‍♀️

\n\n

\n \n \n \n \n

\n\nJasmine is a production-ready JAX-based world modeling codebase. It currently implements the high-level architecture of [Genie: Generative Interactive Environments](https://arxiv.org/abs/2402.15391) (Bruce et al., 2024) with [MaskGIT](https://arxiv.org/abs/2202.04200) (Chang et al., 2022), as well as an autoregressive (causal) baseline. A diffusion baseline is coming soon.\n\nJasmine scales from single hosts to hundreds of xPUs thanks to XLA and strives to be an easily hackable, batteries-included foundation for world modeling research.\n\n

Overview

\n\n- Asynchronous & distributed checkpointing thanks to [orbax.checkpoint](https://github.com/google/orbax)\n - Jasmine also supports mixing and matching hardware topologies (e.g. train on four nodes, load the checkpoint on a single node)\n- Optimized dataloading thanks to [Grain](https://github.com/google/grain)\n - Dataloading scales with the number of processes (i.e. nodes/xPUs)\n- Checkpointing of model weights, optimizer and dataloader states\n- Full reproducibility with **exact** training curves (thanks to seeded dataloading and training, and [JAX' approach to pseudo random numbers](https://docs.jax.dev/en/latest/random-numbers.html))\n- Automatic checkpoint deletion/retention according to specified retention policy thanks to `orbax.checkpoint.CheckpointManager`\n- Mixed precision training using `bfloat16`\n - `int8` training is on the roadmap via [aqt](https://github.com/google/aqt)\n- FlashAttention thanks to [cuDNN SDPA](https://github.com/jax-ml/jax/blob/a155c5a9997924170e0067d552351a9833c12c11/jax/_src/cudnn/fused_attention_stablehlo.py#L842)\n- Frame-level KV cache resets for accelerated spatiotemporal attention in causal baseline (still in PR)\n- Activation checkpointing (even onto host memory if desired)\n- DDP (changing to FSDP requires changing **a single line of code**)\n- WSD learning rate schedule\n - No need to retrain from scratch if you want to train for longer\n- Index-shuffling during dataloading\n- Google-native stack\n - https://github.com/google/orbax for checkpointing\n - https://github.com/google/grain for dataloading\n - https://github.com/google-deepmind/dm_pix for image manipulation\n - https://github.com/google/array_record as the data format\n- Easy model inspection thanks to [treescope](https://github.com/google-deepmind/treescope)\n- Easy model surgery thanks to the new [flax.nnx](https://flax.readthedocs.io/en/latest/guides/linen_to_nnx.html) API\n\n

Setup 🧗

\n\nJasmine requires `python 3.10`, `jax 0.6.2` and `flax 0.10.7`. To install the requirements, run:\n\n```bash\npip install -r requirements.txt\npre-commit install\n```\n\nDownload OpenAI's VPT dataset by running:\n\n```bash\nbash input_pipeline/download/openai/download_index_files.sh\npython input_pipeline/download/openai/download_videos.py\n```\n\nNote: this is a large dataset and may take a while to download.\n\nFor performant distributed training, we additionally preprocess the dataset into `arrayrecords`:\n\n```bash\npython input_pipeline/preprocess/video_to_array_records.py\n```\n\n

Quick Start 🚀

\n\nGenie has three components: a [video tokenizer](models/tokenizer.py), a [latent action model](models/lam.py), and a [dynamics model](models/dynamics.py). Each of these components are trained separately, however, the dynamics model requires a pre-trained video tokenizer (and latent action model).\n\nTo train the video tokenizer (similar for the LAM), run:\n\n```bash\npython train_tokenizer.py --ckpt_dir \n```\n\nOnce the tokenizer and LAM are trained, the dynamics model can be trained with:\n\n```bash\npython train_dynamics.py --tokenizer_checkpoint --lam_checkpoint \n```\n\nLogging with `wandb` is supported. To enable logging, set the `WANDB_API_KEY` environment variable or run:\n\n```bash\nwandb login\n```\n\nTraining can then be logged by setting the `--log` flag:\n\n```bash\npython train_tokenizer.py --log --entity --project \n```\n\n

Citing Jafar 📜

\n\nJasmine was built by [Mihir Mahajan](https://maharajamihir.github.io/), [Alfred Nguyen](https://avocadoali.github.io/) and [Franz Srambical](https://srambical.fr/), but started as a fork of [Jafar](https://github.com/flairox/jafar), built by [Matthew Jackson](https://matthewtjackson.com) and [Timon Willi](https://www.timonwilli.com).\n\nIf you use Jasmine in your work, please cite us, Jafar, and the original Genie paper as follows:\n\n```\n@article{\n mahajan2025jasmine,\n title={Jasmine: A simple, performant and scalable JAX-based world modeling codebase},\n author={Mihir Mahajan and Alfred Nguyen and Franz Srambical and Stefan Bauer},\n journal = {p(doom) blog},\n year={2025},\n url={https://pdoom.org/jasmine.html}\n note = {https://pdoom.org/blog.html}\n}\n```\n```\n@inproceedings{\n willi2024jafar,\n title={Jafar: An Open-Source Genie Reimplemention in Jax},\n author={Timon Willi and Matthew Thomas Jackson and Jakob Nicolaus Foerster},\n booktitle={First Workshop on Controllable Video Generation @ ICML 2024},\n year={2024},\n url={https://openreview.net/forum?id=ZZGaQHs9Jb}\n}\n```\n```\n@inproceedings{\n bruce2024genie,\n title={Genie: Generative Interactive Environments},\n author={Jake Bruce and Michael D Dennis and Ashley Edwards and Jack Parker-Holder and Yuge Shi and Edward Hughes and Matthew Lai and Aditi Mavalankar and Richie Steigerwald and Chris Apps and Yusuf Aytar and Sarah Maria Elisabeth Bechtle and Feryal Behbahani and Stephanie C.Y. Chan and Nicolas Heess and Lucy Gonzalez and Simon Osindero and Sherjil Ozair and Scott Reed and Jingwei Zhang and Konrad Zolna and Jeff Clune and Nando de Freitas and Satinder Singh and Tim Rockt{\""a}schel},\n booktitle={Forty-first International Conference on Machine Learning},\n year={2024},\n url={https://openreview.net/forum?id=bJbSbJskOS}\n}\n```\n",markdown,tab +2953,11609084,"README.md",4488,0,"",markdown,selection_command +2954,11609781,"README.md",4495,0,"",markdown,selection_command +2955,11614094,"README.md",4494,0,"",markdown,selection_command +2956,11614122,"README.md",4494,1," ",markdown,selection_command +2957,11614178,"README.md",4494,6," Jafar",markdown,selection_command +2958,11615776,"TERMINAL",0,0,"2025-07-27 12:32:15.573547: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2959,11615999,"README.md",4494,7," Jafar ",markdown,selection_command +2960,11616048,"README.md",4494,7,"",markdown,content +2961,11616696,"README.md",4494,0," ",markdown,content +2962,11616696,"README.md",4495,0,"",markdown,selection_keyboard +2963,11617297,"README.md",4494,0,"",markdown,selection_command +2964,11617327,"TERMINAL",0,0,"2025-07-27 12:32:17.127390: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2965,11621048,"TERMINAL",0,0,"2025-07-27 12:32:20.847762: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2966,11621972,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n d.text((2, y_offset), f""{action}"", fill=255)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n video_comparison = np.zeros((2, *vid.shape), dtype=np.uint8)\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 231, in __call__\r\n value_4d = _pad(_rearrange(value))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 134, in __call__\r\n for _ in range(self.num_blocks):\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 329, in attention_fn\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 308, in _fuse_masks\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 4352, in pad\r\n pad_width = _broadcast_to_pairs(pad_width, np.ndim(array), ""pad_width"")\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 3937, in _broadcast_to_pairs\r\n raise ValueError(f""jnp.pad: {name} with {nd=} has unsupported shape {nvals.shape}. ""\r\nValueError: jnp.pad: pad_width with nd=5 has unsupported shape (2, 2). 
Valid shapes are (5, 2), (1, 2), (2,), (1,), or ().\r\n",,terminal_output +2967,11623011,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +2968,11623243,"README.md",0,0,"",markdown,tab +2969,11623243,"README.md",4462,0,"",markdown,selection_command +2970,11638109,"TERMINAL",0,0,"git reset --soft HEAD~1",,terminal_command +2971,11638154,"TERMINAL",0,0,"]633;C]0;franz.srambical@hai-login2:~/jafar",,terminal_output +2972,11652082,"TERMINAL",0,0,"\r[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +2973,11657179,"README.md",0,5286,"

Jafar: A JAX-based Genie Implementation 🧞

\n\n

\n \n \n \n \n

\n\nJafar is a JAX-based implementation of the DeepMind paper ""[Genie: Generative Interactive Environments](https://arxiv.org/abs/2402.15391)"" (Bruce et al., 2024).\n\nJafar supports training of all Genie components and can complete the CoinRun reproducibility experiment (Appendix F) on a single L40S GPU in under a week.\n\n

Setup 🧗

\n\nJafar was built with `python 3.10` and `jax 0.4.30`. To install requirements, run:\n\n```bash\npip install -r requirements.txt\npre-commit install\n```\n\nBefore training the models, generate the CoinRun dataset by running:\n\n```bash\npython generate_dataset.py --num_episodes 10000\n```\n\nNote: this is a large dataset (around 100GB) and may take a while to generate.\n\nFor performant distributed training, we additionally preprocess the dataset into `TFRecord`s:\n\n```bash\npython preprocess_dataset.py\n```\n\n

Quick Start 🚀

\n\nGenie has three components: a [video tokenizer](models/tokenizer.py), a [latent action model](models/lam.py), and a [dynamics model](models/dynamics.py). Each of these components are trained separately, however, the dynamics model requires a pre-trained video tokenizer and latent action model.\n\nTo train the video tokenizer (similar for the LAM), run:\n\n```bash\npython train_tokenizer.py --ckpt_dir \n```\n\nOnce the tokenizer and LAM are trained, the dynamics model can be trained with:\n\n```bash\npython train_dynamics.py --tokenizer_checkpoint --lam_checkpoint \n```\n\nLogging with `wandb` is supported. To enable logging, set the `WANDB_API_KEY` environment variable or run:\n\n```bash\nwandb login\n```\n\nTraining can then be logged by setting the `--log` flag:\n\n```bash\npython train_tokenizer.py --log --entity --project \n```\n\n

Citing Jafar 📜

\n\nJafar was built by [Matthew Jackson](https://matthewtjackson.com) and [Timon Willi](https://www.timonwilli.com).\n\nIf you use Jafar in your work, please cite us and the original Genie paper as follows:\n\n",markdown,content +2974,11657823,"README.md",0,0,"",markdown,tab +2975,11661088,"TERMINAL",0,0,"git stash pop",,terminal_command +2976,11661138,"TERMINAL",0,0,"]633;C",,terminal_output +2977,11661230,"TERMINAL",0,0,"On branch causal-transformer-nnx\r\nYour branch is up to date with 'origin/causal-transformer-nnx'.\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: .gitignore\r\n\tmodified: jasmine.py\r\n\tmodified: models/dynamics_causal.py\r\n\tmodified: sample.py\r\n\tmodified: utils/nn.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tbatch_size_benchmark_tokenizer_flash_attention.json\r\n\tbatch_size_benchmark_tokenizer_no_flash_attention.json\r\n\tbenchmark_batch_sizes_tokenizer.py\r\n\tgenerate_arrayrecord_dataset.py\r\n\tsalient_restore_failing_minimal_example.py\r\n\tslurm/\r\n\ttest.ipynb\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\nDropped refs/stash@{0} (fa8078ed596d9e9482341ba6c405f16d3d5e3cbf)\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +2978,11661393,"README.md",0,0,"Switched from branch 'readme-typo-2' to 'causal-transformer-nnx'",markdown,git_branch_checkout +2979,11662941,"TERMINAL",0,0,"srun",,terminal_focus +2980,11670195,"utils/nn.py",0,0,"import math\nfrom typing import Tuple, Callable\n\nfrom flax import nnx\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\nclass PositionalEncoding(nnx.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n def __init__(self, d_model: int, max_len: int = 5000):\n self.d_model = d_model\n self.max_len = max_len\n\n pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n pe = pe.at[:, 0::2].set(jnp.sin(position * div_term))\n pe = pe.at[:, 1::2].set(jnp.cos(position * div_term))\n self.pe = nnx.Variable(pe)\n\n def __call__(self, x: jax.Array) -> jax.Array:\n x = x + self.pe[: x.shape[2]]\n return x\n\n\nclass STBlock(nnx.Module):\n def __init__(\n self,\n dim: int,\n ffn_dim: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n spatial_causal: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.dim = dim\n self.ffn_dim = ffn_dim\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.spatial_causal = spatial_causal\n self.decode = decode\n\n self.spatial_pos_enc = PositionalEncoding(self.dim)\n self.spatial_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.spatial_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention,\n is_causal=self.spatial_causal,\n ),\n rngs=rngs,\n # decode=self.decode,\n decode=False,\n 
)\n\n self.temporal_pos_enc = PositionalEncoding(self.dim)\n self.temporal_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.temporal_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=self.decode,\n )\n\n self.ffn_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense1 = nnx.Linear(\n in_features=self.dim,\n out_features=self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense2 = nnx.Linear(\n in_features=self.ffn_dim,\n out_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n @nnx.remat\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = self.spatial_pos_enc(x)\n z = self.spatial_norm(z)\n # z.shape (1, 1, 921, 512)\n z = self.spatial_attention(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = self.temporal_pos_enc(x)\n z = self.temporal_norm(z)\n # z.shape (1, 921, 1, 512)\n z = self.temporal_attention(z)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = self.ffn_norm(x)\n z = self.ffn_dense1(z)\n z = jax.nn.gelu(z)\n z = self.ffn_dense2(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nnx.Module):\n def __init__(\n self,\n input_dim: int,\n model_dim: int,\n ffn_dim: int,\n out_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n spatial_causal: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.input_dim = input_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.out_dim = out_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.spatial_causal = spatial_causal\n self.decode = decode\n\n self.input_norm1 = nnx.LayerNorm(\n num_features=self.input_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_dense = nnx.Linear(\n in_features=self.input_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_norm2 = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n self.blocks: list[STBlock] = []\n for _ in range(self.num_blocks):\n self.blocks.append(\n STBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n spatial_causal=self.spatial_causal,\n decode=self.decode,\n rngs=rngs,\n )\n )\n\n self.output_dense = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(self, x: jax.Array) -> jax.Array:\n # x.shape (1, 1, 921, 512)\n x = self.input_norm1(x)\n x = self.input_dense(x)\n x = self.input_norm2(x)\n\n for block in self.blocks:\n # x.shape (1, 1, 921, 512)\n x = block(x)\n\n x = self.output_dense(x)\n return x # (B, T, E)\n\n\ndef normalize(x: jax.Array) -> jax.Array:\n return 
x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nnx.Module):\n def __init__(\n self, latent_dim: int, num_latents: int, dropout: float, rngs: nnx.Rngs\n ):\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.dropout = dropout\n\n self.codebook = nnx.Param(\n normalize(\n nnx.initializers.lecun_uniform()(\n rngs.params(), (self.num_latents, self.latent_dim)\n )\n )\n )\n self.drop = nnx.Dropout(self.dropout, rngs=rngs)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n normalized_codebook = normalize(self.codebook.value)\n distance = -jnp.matmul(x, normalized_codebook.T)\n if training:\n distance = self.drop(distance)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array) -> jax.Array:\n return self.codebook[indices]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool) -> Callable:\n """"""\n Create an attention function that uses flash attention if enabled.\n\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\n\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\n multiple of 4 and mask accordingly.\n """"""\n\n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\n # for temporal attention (using kv cache)\n # FIRST PASS: qkv.shape (1, 921, 1, 8, 64)\n # SECOND PASS: qkv.shape \n if query.shape == (1, 921, 1, 512):\n breakpoint()\n implementation = ""cudnn"" if use_flash_attention else None\n\n def _rearrange(x):\n return einops.rearrange(x, ""... l h d -> (...) 
l h d"")\n\n def _pad(x):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )\n return jnp.logical_and(attention_mask, expanded_mask)\n\n original_shape = query.shape\n original_seq_len = query.shape[-3]\n\n # Pad to nearest multiple of 4\n target_seq_len = ((original_seq_len + 3) // 4) * 4\n pad_size = target_seq_len - original_seq_len\n\n query_4d = _pad(_rearrange(query))\n key_4d = _pad(_rearrange(key))\n value_4d = _pad(_rearrange(value))\n\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n mask_4d = (\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n )\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n\n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\n\n # NOTE: jax.nn.dot_product_attention does not support dropout\n output_4d = jax.nn.dot_product_attention(\n query=query_4d,\n key=key_4d,\n value=value_4d,\n bias=bias_4d,\n mask=mask_4d,\n implementation=implementation,\n is_causal=is_causal,\n )\n return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n\n return attention_fn\n",python,tab +2981,11732436,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"import math\nfrom typing import Tuple, Callable\n\nfrom flax import nnx\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\nclass PositionalEncoding(nnx.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n def __init__(self, d_model: int, max_len: int = 5000):\n self.d_model = d_model\n self.max_len = max_len\n\n pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n pe = pe.at[:, 0::2].set(jnp.sin(position * div_term))\n pe = pe.at[:, 1::2].set(jnp.cos(position * div_term))\n self.pe = nnx.Variable(pe)\n\n def __call__(self, x: jax.Array) -> jax.Array:\n x = x + self.pe[: x.shape[2]]\n return x\n\n\nclass STBlock(nnx.Module):\n def __init__(\n self,\n dim: int,\n ffn_dim: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n spatial_causal: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.dim = dim\n self.ffn_dim = ffn_dim\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.spatial_causal = spatial_causal\n self.decode = decode\n\n self.spatial_pos_enc = PositionalEncoding(self.dim)\n self.spatial_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.spatial_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention,\n is_causal=self.spatial_causal,\n ),\n rngs=rngs,\n # decode=self.decode,\n decode=False,\n )\n\n self.temporal_pos_enc = 
PositionalEncoding(self.dim)\n self.temporal_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.temporal_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=self.decode,\n )\n\n self.ffn_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense1 = nnx.Linear(\n in_features=self.dim,\n out_features=self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense2 = nnx.Linear(\n in_features=self.ffn_dim,\n out_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n @nnx.remat\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = self.spatial_pos_enc(x)\n z = self.spatial_norm(z)\n # z.shape (1, 1, 921, 512)\n z = self.spatial_attention(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = self.temporal_pos_enc(x)\n z = self.temporal_norm(z)\n # z.shape (1, 921, 1, 512)\n z = self.temporal_attention(z)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = self.ffn_norm(x)\n z = self.ffn_dense1(z)\n z = jax.nn.gelu(z)\n z = self.ffn_dense2(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nnx.Module):\n def __init__(\n self,\n input_dim: int,\n model_dim: int,\n ffn_dim: int,\n out_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n spatial_causal: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.input_dim = input_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.out_dim = out_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.spatial_causal = spatial_causal\n self.decode = decode\n\n self.input_norm1 = nnx.LayerNorm(\n num_features=self.input_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_dense = nnx.Linear(\n in_features=self.input_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_norm2 = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n self.blocks: list[STBlock] = []\n for _ in range(self.num_blocks):\n self.blocks.append(\n STBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n spatial_causal=self.spatial_causal,\n decode=self.decode,\n rngs=rngs,\n )\n )\n\n self.output_dense = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(self, x: jax.Array) -> jax.Array:\n # x.shape (1, 1, 921, 512)\n x = self.input_norm1(x)\n x = self.input_dense(x)\n x = self.input_norm2(x)\n\n for block in self.blocks:\n # x.shape (1, 1, 921, 512)\n x = block(x)\n\n x = self.output_dense(x)\n return x # (B, T, E)\n\n\ndef normalize(x: jax.Array) -> jax.Array:\n return x / (jnp.linalg.norm(x, ord=2, 
axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nnx.Module):\n def __init__(\n self, latent_dim: int, num_latents: int, dropout: float, rngs: nnx.Rngs\n ):\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.dropout = dropout\n\n self.codebook = nnx.Param(\n normalize(\n nnx.initializers.lecun_uniform()(\n rngs.params(), (self.num_latents, self.latent_dim)\n )\n )\n )\n self.drop = nnx.Dropout(self.dropout, rngs=rngs)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n normalized_codebook = normalize(self.codebook.value)\n distance = -jnp.matmul(x, normalized_codebook.T)\n if training:\n distance = self.drop(distance)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array) -> jax.Array:\n return self.codebook[indices]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool) -> Callable:\n """"""\n Create an attention function that uses flash attention if enabled.\n\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\n\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\n multiple of 4 and mask accordingly.\n """"""\n\n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\n # for temporal attention (using kv cache)\n # FIRST PASS: qkv.shape (1, 921, 1, 8, 64)\n # SECOND PASS: qkv.shape \n if query.shape == (1, 921, 1, 512):\n breakpoint()\n implementation = ""cudnn"" if use_flash_attention else None\n\n def _rearrange(x):\n return einops.rearrange(x, ""... l h d -> (...) 
l h d"")\n\n def _pad(x):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )\n return jnp.logical_and(attention_mask, expanded_mask)\n\n original_shape = query.shape\n original_seq_len = query.shape[-3]\n\n # Pad to nearest multiple of 4\n target_seq_len = ((original_seq_len + 3) // 4) * 4\n pad_size = target_seq_len - original_seq_len\n\n query_4d = _pad(_rearrange(query))\n key_4d = _pad(_rearrange(key))\n value_4d = _pad(_rearrange(value))\n\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n mask_4d = (\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n )\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n\n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\n\n # NOTE: jax.nn.dot_product_attention does not support dropout\n output_4d = jax.nn.dot_product_attention(\n query=query_4d,\n key=key_4d,\n value=value_4d,\n bias=bias_4d,\n mask=mask_4d,\n implementation=implementation,\n is_causal=is_causal,\n )\n return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n\n return attention_fn\n",python,tab +2982,11732437,"/fast/home/franz.srambical/jafar/utils/nn.py",10413,0,"",python,selection_command +2983,11736304,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py",0,0,"# Copyright 2018 The JAX Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# pytype: skip-file\n""""""\nImplements the NumPy API, using the primitives in :mod:`jax.lax`.\n\nNumPy operations are implemented in Python in terms of the primitive operations\nin :mod:`jax.lax`. Since NumPy operations are not primitive and instead are\nimplemented in terms of :mod:`jax.lax` operations, we do not need to define\ntransformation rules such as gradient or batching rules. 
Instead,\ntransformations for NumPy primitives can be derived from the transformation\nrules for the underlying :code:`lax` primitives.\n""""""\nfrom __future__ import annotations\n\nimport builtins\nfrom collections.abc import Callable, Sequence\nfrom functools import partial\nimport math\nimport operator\nimport os\nfrom typing import (Any, IO, Literal, Protocol, TypeVar, Union, overload)\nimport warnings\n\nfrom jax import lax\nfrom jax._src.api import jit\nfrom jax._src import api\nfrom jax._src import config\nfrom jax._src import core\nfrom jax._src import deprecations\nfrom jax._src import dtypes\nfrom jax._src.api_util import _ensure_index_tuple\nfrom jax._src.custom_derivatives import custom_jvp\nfrom jax._src.lax import lax as lax_internal\nfrom jax._src.lax.lax import (PrecisionLike,_array_copy,\n _sort_le_comparator, _sort_lt_comparator)\nfrom jax._src.lib import xla_client as xc\nfrom jax._src.numpy.array import array, asarray\nfrom jax._src.numpy import indexing\nfrom jax._src.numpy import reductions\nfrom jax._src.numpy import tensor_contractions\nfrom jax._src.numpy import ufuncs\nfrom jax._src.numpy import util\nfrom jax._src.numpy.array_creation import (empty, empty_like, full, linspace,\n ones, ones_like, zeros, zeros_like)\nfrom jax._src.numpy.sorting import argsort, sort\nfrom jax._src.numpy.vectorize import vectorize\nfrom jax._src.typing import (\n Array, ArrayLike, DType, DTypeLike, DeprecatedArg, DimSize, Shape, SupportsShape\n)\nfrom jax._src.util import (\n canonicalize_axis as _canonicalize_axis,\n ceil_of_ratio, safe_zip, set_module, unzip2)\nfrom jax.sharding import Sharding\nfrom jax._src.sharding_impls import NamedSharding, PartitionSpec as P\nfrom jax._src.mesh import get_abstract_mesh\nfrom jax._src.pjit import auto_axes\nfrom jax.tree_util import tree_map\nimport numpy as np\n\nexport = set_module('jax.numpy')\n\nT = TypeVar('T')\n\n# Wrappers for NumPy printoptions\n\ndef get_printoptions():\n """"""Alias of :func:`numpy.get_printoptions`.\n\n JAX arrays are printed via NumPy, so NumPy's `printoptions`\n configurations will apply to printed JAX arrays.\n\n See the :func:`numpy.set_printoptions` documentation for details\n on the available options and their meanings.\n """"""\n return np.get_printoptions()\n\ndef printoptions(*args, **kwargs):\n """"""Alias of :func:`numpy.printoptions`.\n\n JAX arrays are printed via NumPy, so NumPy's `printoptions`\n configurations will apply to printed JAX arrays.\n\n See the :func:`numpy.set_printoptions` documentation for details\n on the available options and their meanings.\n """"""\n return np.printoptions(*args, **kwargs)\n\ndef set_printoptions(*args, **kwargs):\n """"""Alias of :func:`numpy.set_printoptions`.\n\n JAX arrays are printed via NumPy, so NumPy's `printoptions`\n configurations will apply to printed JAX arrays.\n\n See the :func:`numpy.set_printoptions` documentation for details\n on the available options and their meanings.\n """"""\n return np.set_printoptions(*args, **kwargs)\n\n@export\ndef iscomplexobj(x: Any) -> bool:\n """"""Check if the input is a complex number or an array containing complex elements.\n\n JAX implementation of :func:`numpy.iscomplexobj`.\n\n The function evaluates based on input type rather than value.\n Inputs with zero imaginary parts are still considered complex.\n\n Args:\n x: input object to check.\n\n Returns:\n True if ``x`` is a complex number or an array containing at least one complex element,\n False otherwise.\n\n See Also:\n - :func:`jax.numpy.isrealobj`\n - 
:func:`jax.numpy.iscomplex`\n\n Examples:\n >>> jnp.iscomplexobj(True)\n False\n >>> jnp.iscomplexobj(0)\n False\n >>> jnp.iscomplexobj(jnp.array([1, 2]))\n False\n >>> jnp.iscomplexobj(1+2j)\n True\n >>> jnp.iscomplexobj(jnp.array([0, 1+2j]))\n True\n """"""\n if x is None:\n return False\n try:\n typ = x.dtype.type\n except AttributeError:\n typ = asarray(x).dtype.type\n return issubdtype(typ, np.complexfloating)\n\n\ndef _dtype(x: Any) -> DType:\n return dtypes.dtype(x, canonicalize=True)\n\n# Dtype-related functions\niinfo = dtypes.iinfo\nfinfo = dtypes.finfo\n\ncan_cast = dtypes.can_cast\npromote_types = dtypes.promote_types\n\nComplexWarning = np.exceptions.ComplexWarning\n\n_lax_const = lax_internal._const\n\n\ndef _convert_and_clip_integer(val: ArrayLike, dtype: DType) -> Array:\n """"""\n Convert integer-typed val to specified integer dtype, clipping to dtype\n range rather than wrapping.\n\n Args:\n val: value to be converted\n dtype: dtype of output\n\n Returns:\n equivalent of val in new dtype\n\n Examples\n --------\n Normal integer type conversion will wrap:\n\n >>> val = jnp.uint32(0xFFFFFFFF)\n >>> val.astype('int32')\n Array(-1, dtype=int32)\n\n This function clips to the values representable in the new type:\n\n >>> _convert_and_clip_integer(val, 'int32')\n Array(2147483647, dtype=int32)\n """"""\n val = val if isinstance(val, Array) else asarray(val)\n dtype = dtypes.canonicalize_dtype(dtype)\n if not (issubdtype(dtype, np.integer) and issubdtype(val.dtype, np.integer)):\n raise TypeError(""_convert_and_clip_integer only accepts integer dtypes."")\n\n val_dtype = dtypes.canonicalize_dtype(val.dtype)\n if val_dtype != val.dtype:\n # TODO(jakevdp): this is a weird corner case; need to figure out how to handle it.\n # This happens in X32 mode and can either come from a jax value created in another\n # context, or a Python integer converted to int64.\n pass\n min_val = _lax_const(val, max(iinfo(dtype).min, iinfo(val_dtype).min))\n max_val = _lax_const(val, min(iinfo(dtype).max, iinfo(val_dtype).max))\n return clip(val, min_val, max_val).astype(dtype)\n\n\n@export\ndef load(file: IO[bytes] | str | os.PathLike[Any], *args: Any, **kwargs: Any) -> Array:\n """"""Load JAX arrays from npy files.\n\n JAX wrapper of :func:`numpy.load`.\n\n This function is a simple wrapper of :func:`numpy.load`, but in the case of\n ``.npy`` files created with :func:`numpy.save` or :func:`jax.numpy.save`,\n the output will be returned as a :class:`jax.Array`, and ``bfloat16`` data\n types will be restored. 
For ``.npz`` files, results will be returned as\n normal NumPy arrays.\n\n This function requires concrete array inputs, and is not compatible with\n transformations like :func:`jax.jit` or :func:`jax.vmap`.\n\n Args:\n file: string, bytes, or path-like object containing the array data.\n args, kwargs: for additional arguments, see :func:`numpy.load`\n\n Returns:\n the array stored in the file.\n\n See also:\n - :func:`jax.numpy.save`: save an array to a file.\n\n Examples:\n >>> import io\n >>> f = io.BytesIO() # use an in-memory file-like object.\n >>> x = jnp.array([2, 4, 6, 8], dtype='bfloat16')\n >>> jnp.save(f, x)\n >>> f.seek(0)\n 0\n >>> jnp.load(f)\n Array([2, 4, 6, 8], dtype=bfloat16)\n """"""\n # The main purpose of this wrapper is to recover bfloat16 data types.\n # Note: this will only work for files created via np.save(), not np.savez().\n out = np.load(file, *args, **kwargs)\n if isinstance(out, np.ndarray):\n # numpy does not recognize bfloat16, so arrays are serialized as void16\n if out.dtype == 'V2':\n out = out.view(dtypes.bfloat16)\n try:\n out = asarray(out)\n except (TypeError, AssertionError): # Unsupported dtype\n pass\n return out\n\n### implementations of numpy functions in terms of lax\n\n@export\n@jit\ndef fmin(x1: ArrayLike, x2: ArrayLike) -> Array:\n """"""Return element-wise minimum of the input arrays.\n\n JAX implementation of :func:`numpy.fmin`.\n\n Args:\n x1: input array or scalar.\n x2: input array or scalar. x1 and x2 must either have same shape or be\n broadcast compatible.\n\n Returns:\n An array containing the element-wise minimum of x1 and x2.\n\n Note:\n For each pair of elements, ``jnp.fmin`` returns:\n - the smaller of the two if both elements are finite numbers.\n - finite number if one element is ``nan``.\n - ``-inf`` if one element is ``-inf`` and the other is finite or ``nan``.\n - ``inf`` if one element is ``inf`` and the other is ``nan``.\n - ``nan`` if both elements are ``nan``.\n\n Examples:\n >>> jnp.fmin(2, 3)\n Array(2, dtype=int32, weak_type=True)\n >>> jnp.fmin(2, jnp.array([1, 4, 2, -1]))\n Array([ 1, 2, 2, -1], dtype=int32)\n\n >>> x1 = jnp.array([1, 3, 2])\n >>> x2 = jnp.array([2, 1, 4])\n >>> jnp.fmin(x1, x2)\n Array([1, 1, 2], dtype=int32)\n\n >>> x3 = jnp.array([1, 5, 3])\n >>> x4 = jnp.array([[2, 3, 1],\n ... [5, 6, 7]])\n >>> jnp.fmin(x3, x4)\n Array([[1, 3, 1],\n [1, 5, 3]], dtype=int32)\n\n >>> nan = jnp.nan\n >>> x5 = jnp.array([jnp.inf, 5, nan])\n >>> x6 = jnp.array([[2, 3, nan],\n ... [nan, 6, 7]])\n >>> jnp.fmin(x5, x6)\n Array([[ 2., 3., nan],\n [inf, 5., 7.]], dtype=float32)\n """"""\n return where(ufuncs.less(x1, x2) | ufuncs.isnan(x2), x1, x2)\n\n\n@export\n@jit\ndef fmax(x1: ArrayLike, x2: ArrayLike) -> Array:\n """"""Return element-wise maximum of the input arrays.\n\n JAX implementation of :func:`numpy.fmax`.\n\n Args:\n x1: input array or scalar\n x2: input array or scalar. 
x1 and x1 must either have same shape or be\n broadcast compatible.\n\n Returns:\n An array containing the element-wise maximum of x1 and x2.\n\n Note:\n For each pair of elements, ``jnp.fmax`` returns:\n - the larger of the two if both elements are finite numbers.\n - finite number if one element is ``nan``.\n - ``nan`` if both elements are ``nan``.\n - ``inf`` if one element is ``inf`` and the other is finite or ``nan``.\n - ``-inf`` if one element is ``-inf`` and the other is ``nan``.\n\n Examples:\n >>> jnp.fmax(3, 7)\n Array(7, dtype=int32, weak_type=True)\n >>> jnp.fmax(5, jnp.array([1, 7, 9, 4]))\n Array([5, 7, 9, 5], dtype=int32)\n\n >>> x1 = jnp.array([1, 3, 7, 8])\n >>> x2 = jnp.array([-1, 4, 6, 9])\n >>> jnp.fmax(x1, x2)\n Array([1, 4, 7, 9], dtype=int32)\n\n >>> x3 = jnp.array([[2, 3, 5, 10],\n ... [11, 9, 7, 5]])\n >>> jnp.fmax(x1, x3)\n Array([[ 2, 3, 7, 10],\n [11, 9, 7, 8]], dtype=int32)\n\n >>> x4 = jnp.array([jnp.inf, 6, -jnp.inf, nan])\n >>> x5 = jnp.array([[3, 5, 7, nan],\n ... [nan, 9, nan, -1]])\n >>> jnp.fmax(x4, x5)\n Array([[ inf, 6., 7., nan],\n [ inf, 9., -inf, -1.]], dtype=float32)\n """"""\n return where(ufuncs.greater(x1, x2) | ufuncs.isnan(x2), x1, x2)\n\n\n@export\ndef issubdtype(arg1: DTypeLike, arg2: DTypeLike) -> bool:\n """"""Return True if arg1 is equal or lower than arg2 in the type hierarchy.\n\n JAX implementation of :func:`numpy.issubdtype`.\n\n The main difference in JAX's implementation is that it properly handles\n dtype extensions such as :code:`bfloat16`.\n\n Args:\n arg1: dtype-like object. In typical usage, this will be a dtype specifier,\n such as ``""float32""`` (i.e. a string), ``np.dtype('int32')`` (i.e. an\n instance of :class:`numpy.dtype`), ``jnp.complex64`` (i.e. a JAX scalar\n constructor), or ``np.uint8`` (i.e. a NumPy scalar type).\n arg2: dtype-like object. In typical usage, this will be a generic scalar\n type, such as ``jnp.integer``, ``jnp.floating``, or ``jnp.complexfloating``.\n\n Returns:\n True if arg1 represents a dtype that is equal or lower in the type\n hierarchy than arg2.\n\n See also:\n - :func:`jax.numpy.isdtype`: similar function aligning with the array API standard.\n\n Examples:\n >>> jnp.issubdtype('uint32', jnp.unsignedinteger)\n True\n >>> jnp.issubdtype(np.int32, jnp.integer)\n True\n >>> jnp.issubdtype(jnp.bfloat16, jnp.floating)\n True\n >>> jnp.issubdtype(np.dtype('complex64'), jnp.complexfloating)\n True\n >>> jnp.issubdtype('complex64', jnp.integer)\n False\n\n Be aware that while this is very similar to :func:`numpy.issubdtype`, the\n results of these differ in the case of JAX's custom floating point types:\n\n >>> np.issubdtype('bfloat16', np.floating)\n False\n >>> jnp.issubdtype('bfloat16', jnp.floating)\n True\n """"""\n return dtypes.issubdtype(arg1, arg2)\n\n\n@export\ndef isscalar(element: Any) -> bool:\n """"""Return True if the input is a scalar.\n\n JAX implementation of :func:`numpy.isscalar`. JAX's implementation differs\n from NumPy's in that it considers zero-dimensional arrays to be scalars; see\n the *Note* below for more details.\n\n Args:\n element: input object to check; any type is valid input.\n\n Returns:\n True if ``element`` is a scalar value or an array-like object with zero\n dimensions, False otherwise.\n\n Note:\n JAX and NumPy differ in their representation of scalar values. NumPy has\n special scalar objects (e.g. ``np.int32(0)``) which are distinct from\n zero-dimensional arrays (e.g. 
``np.array(0)``), and :func:`numpy.isscalar`\n returns ``True`` for the former and ``False`` for the latter.\n\n JAX does not define special scalar objects, but rather represents scalars as\n zero-dimensional arrays. As such, :func:`jax.numpy.isscalar` returns ``True``\n for both scalar objects (e.g. ``0.0`` or ``np.float32(0.0)``) and array-like\n objects with zero dimensions (e.g. ``jnp.array(0.0)``, ``np.array(0.0)``).\n\n One reason for the different conventions in ``isscalar`` is to maintain\n JIT-invariance: i.e. the property that the result of a function should not\n change when it is JIT-compiled. Because scalar inputs are cast to\n zero-dimensional JAX arrays at JIT boundaries, the semantics of\n :func:`numpy.isscalar` are such that the result changes under JIT:\n\n >>> np.isscalar(1.0)\n True\n >>> jax.jit(np.isscalar)(1.0)\n Array(False, dtype=bool)\n\n By treating zero-dimensional arrays as scalars, :func:`jax.numpy.isscalar`\n avoids this issue:\n\n >>> jnp.isscalar(1.0)\n True\n >>> jax.jit(jnp.isscalar)(1.0)\n Array(True, dtype=bool)\n\n Examples:\n In JAX, both scalars and zero-dimensional array-like objects are considered\n scalars:\n\n >>> jnp.isscalar(1.0)\n True\n >>> jnp.isscalar(1 + 1j)\n True\n >>> jnp.isscalar(jnp.array(1)) # zero-dimensional JAX array\n True\n >>> jnp.isscalar(jnp.int32(1)) # JAX scalar constructor\n True\n >>> jnp.isscalar(np.array(1.0)) # zero-dimensional NumPy array\n True\n >>> jnp.isscalar(np.int32(1)) # NumPy scalar type\n True\n\n Arrays with one or more dimension are not considered scalars:\n\n >>> jnp.isscalar(jnp.array([1]))\n False\n >>> jnp.isscalar(np.array([1]))\n False\n\n Compare this to :func:`numpy.isscalar`, which returns ``True`` for\n scalar-typed objects, and ``False`` for *all* arrays, even those with\n zero dimensions:\n\n >>> np.isscalar(np.int32(1)) # scalar object\n True\n >>> np.isscalar(np.array(1)) # zero-dimensional array\n False\n\n In JAX, as in NumPy, objects which are not array-like are not considered\n scalars:\n\n >>> jnp.isscalar(None)\n False\n >>> jnp.isscalar([1])\n False\n >>> jnp.isscalar(tuple())\n False\n >>> jnp.isscalar(slice(10))\n False\n """"""\n if np.isscalar(element):\n return True\n elif isinstance(element, (np.ndarray, Array)):\n return element.ndim == 0\n elif hasattr(element, '__jax_array__'):\n return asarray(element).ndim == 0\n return False\n\n\n@export\ndef result_type(*args: Any) -> DType:\n """"""Return the result of applying JAX promotion rules to the inputs.\n\n JAX implementation of :func:`numpy.result_type`.\n\n JAX's dtype promotion behavior is described in :ref:`type-promotion`.\n\n Args:\n args: one or more arrays or dtype-like objects.\n\n Returns:\n A :class:`numpy.dtype` instance representing the result of type\n promotion for the inputs.\n\n Examples:\n Inputs can be dtype specifiers:\n\n >>> jnp.result_type('int32', 'float32')\n dtype('float32')\n >>> jnp.result_type(np.uint16, np.dtype('int32'))\n dtype('int32')\n\n Inputs may also be scalars or arrays:\n\n >>> jnp.result_type(1.0, jnp.bfloat16(2))\n dtype(bfloat16)\n >>> jnp.result_type(jnp.arange(4), jnp.zeros(4))\n dtype('float32')\n\n Be aware that the result type will be canonicalized based on the state\n of the ``jax_enable_x64`` configuration flag, meaning that 64-bit types\n may be downcast to 32-bit:\n\n >>> jnp.result_type('float64')\n dtype('float32')\n\n For details on 64-bit values, refer to `Sharp bits - double precision`_:\n\n .. 
_Sharp bits - double precision: https://docs.jax.dev/en/latest/notebooks/Common_Gotchas_in_JAX.html#double-64bit-precision\n """"""\n return dtypes.result_type(*args)\n\n\n@export\n@jit\ndef trunc(x: ArrayLike) -> Array:\n """"""Round input to the nearest integer towards zero.\n\n JAX implementation of :func:`numpy.trunc`.\n\n Args:\n x: input array or scalar.\n\n Returns:\n An array with same shape and dtype as ``x`` containing the rounded values.\n\n See also:\n - :func:`jax.numpy.fix`: Rounds the input to the nearest integer towards zero.\n - :func:`jax.numpy.ceil`: Rounds the input up to the nearest integer.\n - :func:`jax.numpy.floor`: Rounds the input down to the nearest integer.\n\n Examples:\n >>> key = jax.random.key(42)\n >>> x = jax.random.uniform(key, (3, 3), minval=-10, maxval=10)\n >>> with jnp.printoptions(precision=2, suppress=True):\n ... print(x)\n [[-0.23 3.6 2.33]\n [ 1.22 -0.99 1.72]\n [-8.5 5.5 3.98]]\n >>> jnp.trunc(x)\n Array([[-0., 3., 2.],\n [ 1., -0., 1.],\n [-8., 5., 3.]], dtype=float32)\n """"""\n x = util.ensure_arraylike('trunc', x)\n if dtypes.isdtype(dtypes.dtype(x), ('integral', 'bool')):\n return x\n return where(lax.lt(x, _lax_const(x, 0)), ufuncs.ceil(x), ufuncs.floor(x))\n\n\n@partial(jit, static_argnames=['mode', 'op', 'precision', 'preferred_element_type'])\ndef _conv(x: Array, y: Array, mode: str, op: str, precision: PrecisionLike,\n preferred_element_type: DTypeLike | None = None) -> Array:\n if np.ndim(x) != 1 or np.ndim(y) != 1:\n raise ValueError(f""{op}() only support 1-dimensional inputs."")\n if preferred_element_type is None:\n # if unspecified, promote to inexact following NumPy's default for convolutions.\n x, y = util.promote_dtypes_inexact(x, y)\n else:\n # otherwise cast to same type but otherwise preserve input dtypes\n x, y = util.promote_dtypes(x, y)\n if len(x) == 0 or len(y) == 0:\n raise ValueError(f""{op}: inputs cannot be empty, got shapes {x.shape} and {y.shape}."")\n\n out_order = slice(None)\n if op == 'correlate':\n y = ufuncs.conj(y)\n if len(x) < len(y):\n x, y = y, x\n out_order = slice(None, None, -1)\n elif op == 'convolve':\n if len(x) < len(y):\n x, y = y, x\n y = flip(y)\n\n if mode == 'valid':\n padding = [(0, 0)]\n elif mode == 'same':\n padding = [(y.shape[0] // 2, y.shape[0] - y.shape[0] // 2 - 1)]\n elif mode == 'full':\n padding = [(y.shape[0] - 1, y.shape[0] - 1)]\n else:\n raise ValueError(""mode must be one of ['full', 'same', 'valid']"")\n\n result = lax.conv_general_dilated(x[None, None, :], y[None, None, :], (1,),\n padding, precision=precision,\n preferred_element_type=preferred_element_type)\n return result[0, 0, out_order]\n\n\n@export\n@partial(jit, static_argnames=('mode', 'precision', 'preferred_element_type'))\ndef convolve(a: ArrayLike, v: ArrayLike, mode: str = 'full', *,\n precision: PrecisionLike = None,\n preferred_element_type: DTypeLike | None = None) -> Array:\n r""""""Convolution of two one dimensional arrays.\n\n JAX implementation of :func:`numpy.convolve`.\n\n Convolution of one dimensional arrays is defined as:\n\n .. math::\n\n c_k = \sum_j a_{k - j} v_j\n\n Args:\n a: left-hand input to the convolution. Must have ``a.ndim == 1``.\n v: right-hand input to the convolution. Must have ``v.ndim == 1``.\n mode: controls the size of the output. 
Available operations are:\n\n * ``""full""``: (default) output the full convolution of the inputs.\n * ``""same""``: return a centered portion of the ``""full""`` output which\n is the same size as ``a``.\n * ``""valid""``: return the portion of the ``""full""`` output which do not\n depend on padding at the array edges.\n\n precision: Specify the precision of the computation. Refer to\n :class:`jax.lax.Precision` for a description of available values.\n\n preferred_element_type: A datatype, indicating to accumulate results to and\n return a result with that datatype. Default is ``None``, which means the\n default accumulation type for the input types.\n\n Returns:\n Array containing the convolved result.\n\n See Also:\n - :func:`jax.scipy.signal.convolve`: ND convolution\n - :func:`jax.numpy.correlate`: 1D correlation\n\n Examples:\n A few 1D convolution examples:\n\n >>> x = jnp.array([1, 2, 3, 2, 1])\n >>> y = jnp.array([4, 1, 2])\n\n ``jax.numpy.convolve``, by default, returns full convolution using implicit\n zero-padding at the edges:\n\n >>> jnp.convolve(x, y)\n Array([ 4., 9., 16., 15., 12., 5., 2.], dtype=float32)\n\n Specifying ``mode = 'same'`` returns a centered convolution the same size\n as the first input:\n\n >>> jnp.convolve(x, y, mode='same')\n Array([ 9., 16., 15., 12., 5.], dtype=float32)\n\n Specifying ``mode = 'valid'`` returns only the portion where the two arrays\n fully overlap:\n\n >>> jnp.convolve(x, y, mode='valid')\n Array([16., 15., 12.], dtype=float32)\n\n For complex-valued inputs:\n\n >>> x1 = jnp.array([3+1j, 2, 4-3j])\n >>> y1 = jnp.array([1, 2-3j, 4+5j])\n >>> jnp.convolve(x1, y1)\n Array([ 3. +1.j, 11. -7.j, 15.+10.j, 7. -8.j, 31. +8.j], dtype=complex64)\n """"""\n a, v = util.ensure_arraylike(""convolve"", a, v)\n return _conv(a, v, mode=mode, op='convolve',\n precision=precision, preferred_element_type=preferred_element_type)\n\n\n@export\n@partial(jit, static_argnames=('mode', 'precision', 'preferred_element_type'))\ndef correlate(a: ArrayLike, v: ArrayLike, mode: str = 'valid', *,\n precision: PrecisionLike = None,\n preferred_element_type: DTypeLike | None = None) -> Array:\n r""""""Correlation of two one dimensional arrays.\n\n JAX implementation of :func:`numpy.correlate`.\n\n Correlation of one dimensional arrays is defined as:\n\n .. math::\n\n c_k = \sum_j a_{k + j} \overline{v_j}\n\n where :math:`\overline{v_j}` is the complex conjugate of :math:`v_j`.\n\n Args:\n a: left-hand input to the correlation. Must have ``a.ndim == 1``.\n v: right-hand input to the correlation. Must have ``v.ndim == 1``.\n mode: controls the size of the output. Available operations are:\n\n * ``""full""``: output the full correlation of the inputs.\n * ``""same""``: return a centered portion of the ``""full""`` output which\n is the same size as ``a``.\n * ``""valid""``: (default) return the portion of the ``""full""`` output which do not\n depend on padding at the array edges.\n\n precision: Specify the precision of the computation. Refer to\n :class:`jax.lax.Precision` for a description of available values.\n\n preferred_element_type: A datatype, indicating to accumulate results to and\n return a result with that datatype. 
Default is ``None``, which means the\n default accumulation type for the input types.\n\n Returns:\n Array containing the cross-correlation result.\n\n See Also:\n - :func:`jax.scipy.signal.correlate`: ND correlation\n - :func:`jax.numpy.convolve`: 1D convolution\n\n Examples:\n >>> x = jnp.array([1, 2, 3, 2, 1])\n >>> y = jnp.array([4, 5, 6])\n\n Since default ``mode = 'valid'``, ``jax.numpy.correlate`` returns only the\n portion of correlation where the two arrays fully overlap:\n\n >>> jnp.correlate(x, y)\n Array([32., 35., 28.], dtype=float32)\n\n Specifying ``mode = 'full'`` returns full correlation using implicit\n zero-padding at the edges.\n\n >>> jnp.correlate(x, y, mode='full')\n Array([ 6., 17., 32., 35., 28., 13., 4.], dtype=float32)\n\n Specifying ``mode = 'same'`` returns a centered correlation the same size\n as the first input:\n\n >>> jnp.correlate(x, y, mode='same')\n Array([17., 32., 35., 28., 13.], dtype=float32)\n\n If both the inputs arrays are real-valued and symmetric then the result will\n also be symmetric and will be equal to the result of ``jax.numpy.convolve``.\n\n >>> x1 = jnp.array([1, 2, 3, 2, 1])\n >>> y1 = jnp.array([4, 5, 4])\n >>> jnp.correlate(x1, y1, mode='full')\n Array([ 4., 13., 26., 31., 26., 13., 4.], dtype=float32)\n >>> jnp.convolve(x1, y1, mode='full')\n Array([ 4., 13., 26., 31., 26., 13., 4.], dtype=float32)\n\n For complex-valued inputs:\n\n >>> x2 = jnp.array([3+1j, 2, 2-3j])\n >>> y2 = jnp.array([4, 2-5j, 1])\n >>> jnp.correlate(x2, y2, mode='full')\n Array([ 3. +1.j, 3.+17.j, 18.+11.j, 27. +4.j, 8.-12.j], dtype=complex64)\n """"""\n a, v = util.ensure_arraylike(""correlate"", a, v)\n return _conv(a, v, mode=mode, op='correlate',\n precision=precision, preferred_element_type=preferred_element_type)\n\n\n@export\ndef histogram_bin_edges(a: ArrayLike, bins: ArrayLike = 10,\n range: None | Array | Sequence[ArrayLike] = None,\n weights: ArrayLike | None = None) -> Array:\n """"""Compute the bin edges for a histogram.\n\n JAX implementation of :func:`numpy.histogram_bin_edges`.\n\n Args:\n a: array of values to be binned\n bins: Specify the number of bins in the histogram (default: 10).\n range: tuple of scalars. Specifies the range of the data. 
If not specified,\n the range is inferred from the data.\n weights: unused by JAX.\n\n Returns:\n An array of bin edges for the histogram.\n\n See also:\n - :func:`jax.numpy.histogram`: compute a 1D histogram.\n - :func:`jax.numpy.histogram2d`: compute a 2D histogram.\n - :func:`jax.numpy.histogramdd`: compute an N-dimensional histogram.\n\n Examples:\n >>> a = jnp.array([2, 5, 3, 6, 4, 1])\n >>> jnp.histogram_bin_edges(a, bins=5)\n Array([1., 2., 3., 4., 5., 6.], dtype=float32)\n >>> jnp.histogram_bin_edges(a, bins=5, range=(-10, 10)) # doctest: +SKIP\n Array([-10., -6., -2., 2., 6., 10.], dtype=float32)\n """"""\n del weights # unused, because string bins is not supported.\n if isinstance(bins, str):\n raise NotImplementedError(""string values for `bins` not implemented."")\n util.check_arraylike(""histogram_bin_edges"", a, bins)\n arr = asarray(a)\n dtype = dtypes.to_inexact_dtype(arr.dtype)\n if np.ndim(bins) == 1:\n return asarray(bins, dtype=dtype)\n\n bins_int = core.concrete_or_error(operator.index, bins,\n ""bins argument of histogram_bin_edges"")\n if range is None:\n range = [arr.min(), arr.max()]\n range = asarray(range, dtype=dtype)\n if np.shape(range) != (2,):\n raise ValueError(f""`range` must be either None or a sequence of scalars, got {range}"")\n range = (where(reductions.ptp(range) == 0, range[0] - 0.5, range[0]),\n where(reductions.ptp(range) == 0, range[1] + 0.5, range[1]))\n assert range is not None\n return linspace(range[0], range[1], bins_int + 1, dtype=dtype)\n\n\n@export\ndef histogram(a: ArrayLike, bins: ArrayLike = 10,\n range: Sequence[ArrayLike] | None = None,\n weights: ArrayLike | None = None,\n density: bool | None = None) -> tuple[Array, Array]:\n """"""Compute a 1-dimensional histogram.\n\n JAX implementation of :func:`numpy.histogram`.\n\n Args:\n a: array of values to be binned. May be any size or dimension.\n bins: Specify the number of bins in the histogram (default: 10). ``bins``\n may also be an array specifying the locations of the bin edges.\n range: tuple of scalars. Specifies the range of the data. If not specified,\n the range is inferred from the data.\n weights: An optional array specifying the weights of the data points.\n Should be broadcast-compatible with ``a``. If not specified, each\n data point is weighted equally.\n density: If True, return the normalized histogram in units of counts\n per unit length. If False (default) return the (weighted) counts per bin.\n\n Returns:\n A tuple of arrays ``(histogram, bin_edges)``, where ``histogram`` contains\n the aggregated data, and ``bin_edges`` specifies the boundaries of the bins.\n\n See Also:\n - :func:`jax.numpy.bincount`: Count the number of occurrences of each value in an array.\n - :func:`jax.numpy.histogram2d`: Compute the histogram of a 2D array.\n - :func:`jax.numpy.histogramdd`: Compute the histogram of an N-dimensional array.\n - :func:`jax.numpy.histogram_bin_edges`: Compute the bin edges for a histogram.\n\n Examples:\n >>> a = jnp.array([1, 2, 3, 10, 11, 15, 19, 25])\n >>> counts, bin_edges = jnp.histogram(a, bins=8)\n >>> print(counts)\n [3. 0. 0. 2. 1. 0. 1. 1.]\n >>> print(bin_edges)\n [ 1. 4. 7. 10. 13. 16. 19. 22. 25.]\n\n Specifying the bin range:\n\n >>> counts, bin_edges = jnp.histogram(a, range=(0, 25), bins=5)\n >>> print(counts)\n [3. 0. 2. 2. 1.]\n >>> print(bin_edges)\n [ 0. 5. 10. 15. 20. 25.]\n\n Specifying the bin edges explicitly:\n\n >>> bin_edges = jnp.array([0, 10, 20, 30])\n >>> counts, _ = jnp.histogram(a, bins=bin_edges)\n >>> print(counts)\n [3. 4. 
1.]\n\n Using ``density=True`` returns a normalized histogram:\n\n >>> density, bin_edges = jnp.histogram(a, density=True)\n >>> dx = jnp.diff(bin_edges)\n >>> normed_sum = jnp.sum(density * dx)\n >>> jnp.allclose(normed_sum, 1.0)\n Array(True, dtype=bool)\n """"""\n if weights is None:\n a, _ = util.ensure_arraylike(""histogram"", a, bins)\n a, = util.promote_dtypes_inexact(a)\n weights = ones_like(a)\n else:\n a, _, weights = util.ensure_arraylike(""histogram"", a, bins, weights)\n if np.shape(a) != np.shape(weights):\n raise ValueError(""weights should have the same shape as a."")\n a, weights = util.promote_dtypes_inexact(a, weights)\n\n bin_edges = histogram_bin_edges(a, bins, range, weights)\n bin_idx = searchsorted(bin_edges, a, side='right')\n bin_idx = where(a == bin_edges[-1], len(bin_edges) - 1, bin_idx)\n counts = zeros(len(bin_edges), weights.dtype).at[bin_idx].add(weights)[1:]\n if density:\n bin_widths = diff(bin_edges)\n counts = counts / bin_widths / counts.sum()\n return counts, bin_edges\n\n\n@export\ndef histogram2d(x: ArrayLike, y: ArrayLike, bins: ArrayLike | list[ArrayLike] = 10,\n range: Sequence[None | Array | Sequence[ArrayLike]] | None = None,\n weights: ArrayLike | None = None,\n density: bool | None = None) -> tuple[Array, Array, Array]:\n """"""Compute a 2-dimensional histogram.\n\n JAX implementation of :func:`numpy.histogram2d`.\n\n Args:\n x: one-dimensional array of x-values for points to be binned.\n y: one-dimensional array of y-values for points to be binned.\n bins: Specify the number of bins in the histogram (default: 10). ``bins``\n may also be an array specifying the locations of the bin edges, or a pair\n of integers or pair of arrays specifying the number of bins in each\n dimension.\n range: Pair of arrays or lists of the form ``[[xmin, xmax], [ymin, ymax]]``\n specifying the range of the data in each dimension. If not specified, the\n range is inferred from the data.\n weights: An optional array specifying the weights of the data points.\n Should be the same shape as ``x`` and ``y``. If not specified, each\n data point is weighted equally.\n density: If True, return the normalized histogram in units of counts\n per unit area. 
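# Illustrative sketch (not from the library source): the counts returned by jnp.histogram can be reproduced with searchsorted plus a scatter-add, mirroring the implementation above; values match the docstring example.\nimport jax.numpy as jnp\na = jnp.array([1., 2., 3., 10., 11., 15., 19., 25.])\nedges = jnp.histogram_bin_edges(a, bins=8)\nidx = jnp.searchsorted(edges, a, side='right')\nidx = jnp.where(a == edges[-1], len(edges) - 1, idx)  # fold the right edge into the last bin\ncounts = jnp.zeros(len(edges)).at[idx].add(1.0)[1:]\nassert jnp.array_equal(counts, jnp.histogram(a, bins=8)[0])\n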
If False (default) return the (weighted) counts per bin.\n\n Returns:\n A tuple of arrays ``(histogram, x_edges, y_edges)``, where ``histogram``\n contains the aggregated data, and ``x_edges`` and ``y_edges`` specify the\n boundaries of the bins.\n\n See Also:\n - :func:`jax.numpy.histogram`: Compute the histogram of a 1D array.\n - :func:`jax.numpy.histogramdd`: Compute the histogram of an N-dimensional array.\n - :func:`jax.numpy.histogram_bin_edges`: Compute the bin edges for a histogram.\n\n Examples:\n >>> x = jnp.array([1, 2, 3, 10, 11, 15, 19, 25])\n >>> y = jnp.array([2, 5, 6, 8, 13, 16, 17, 18])\n >>> counts, x_edges, y_edges = jnp.histogram2d(x, y, bins=8)\n >>> counts.shape\n (8, 8)\n >>> x_edges\n Array([ 1., 4., 7., 10., 13., 16., 19., 22., 25.], dtype=float32)\n >>> y_edges\n Array([ 2., 4., 6., 8., 10., 12., 14., 16., 18.], dtype=float32)\n\n Specifying the bin range:\n\n >>> counts, x_edges, y_edges = jnp.histogram2d(x, y, range=[(0, 25), (0, 25)], bins=5)\n >>> counts.shape\n (5, 5)\n >>> x_edges\n Array([ 0., 5., 10., 15., 20., 25.], dtype=float32)\n >>> y_edges\n Array([ 0., 5., 10., 15., 20., 25.], dtype=float32)\n\n Specifying the bin edges explicitly:\n\n >>> x_edges = jnp.array([0, 10, 20, 30])\n >>> y_edges = jnp.array([0, 10, 20, 30])\n >>> counts, _, _ = jnp.histogram2d(x, y, bins=[x_edges, y_edges])\n >>> counts\n Array([[3, 0, 0],\n [1, 3, 0],\n [0, 1, 0]], dtype=int32)\n\n Using ``density=True`` returns a normalized histogram:\n\n >>> density, x_edges, y_edges = jnp.histogram2d(x, y, density=True)\n >>> dx = jnp.diff(x_edges)\n >>> dy = jnp.diff(y_edges)\n >>> normed_sum = jnp.sum(density * dx[:, None] * dy[None, :])\n >>> jnp.allclose(normed_sum, 1.0)\n Array(True, dtype=bool)\n """"""\n x, y = util.ensure_arraylike(""histogram2d"", x, y)\n try:\n N = len(bins) # type: ignore[arg-type]\n except TypeError:\n N = 1\n\n if N != 1 and N != 2:\n x_edges = y_edges = asarray(bins)\n bins = [x_edges, y_edges]\n\n sample = transpose(asarray([x, y]))\n hist, edges = histogramdd(sample, bins, range, weights, density)\n return hist, edges[0], edges[1]\n\n\n@export\ndef histogramdd(sample: ArrayLike, bins: ArrayLike | list[ArrayLike] = 10,\n range: Sequence[None | Array | Sequence[ArrayLike]] | None = None,\n weights: ArrayLike | None = None,\n density: bool | None = None) -> tuple[Array, list[Array]]:\n """"""Compute an N-dimensional histogram.\n\n JAX implementation of :func:`numpy.histogramdd`.\n\n Args:\n sample: input array of shape ``(N, D)`` representing ``N`` points in\n ``D`` dimensions.\n bins: Specify the number of bins in each dimension of the histogram.\n (default: 10). May also be a length-D sequence of integers or arrays\n of bin edges.\n range: Length-D sequence of pairs specifying the range for each dimension.\n If not specified, the range is inferred from the data.\n weights: An optional shape ``(N,)`` array specifying the weights of the\n data points.\n Should be the same shape as ``sample``. If not specified, each\n data point is weighted equally.\n density: If True, return the normalized histogram in units of counts\n per unit volume. 
If False (default) return the (weighted) counts per bin.\n\n Returns:\n A tuple of arrays ``(histogram, bin_edges)``, where ``histogram`` contains\n the aggregated data, and ``bin_edges`` specifies the boundaries of the bins.\n\n See Also:\n - :func:`jax.numpy.histogram`: Compute the histogram of a 1D array.\n - :func:`jax.numpy.histogram2d`: Compute the histogram of a 2D array.\n - :func:`jax.numpy.histogram_bin_edges`: Compute the bin edges for a histogram.\n\n Examples:\n A histogram over 100 points in three dimensions\n\n >>> key = jax.random.key(42)\n >>> a = jax.random.normal(key, (100, 3))\n >>> counts, bin_edges = jnp.histogramdd(a, bins=6,\n ... range=[(-3, 3), (-3, 3), (-3, 3)])\n >>> counts.shape\n (6, 6, 6)\n >>> bin_edges # doctest: +SKIP\n [Array([-3., -2., -1., 0., 1., 2., 3.], dtype=float32),\n Array([-3., -2., -1., 0., 1., 2., 3.], dtype=float32),\n Array([-3., -2., -1., 0., 1., 2., 3.], dtype=float32)]\n\n Using ``density=True`` returns a normalized histogram:\n\n >>> density, bin_edges = jnp.histogramdd(a, density=True)\n >>> bin_widths = map(jnp.diff, bin_edges)\n >>> dx, dy, dz = jnp.meshgrid(*bin_widths, indexing='ij')\n >>> normed = jnp.sum(density * dx * dy * dz)\n >>> jnp.allclose(normed, 1.0)\n Array(True, dtype=bool)\n """"""\n if weights is None:\n sample = util.ensure_arraylike(""histogramdd"", sample)\n sample, = util.promote_dtypes_inexact(sample)\n else:\n sample, weights = util.ensure_arraylike(""histogramdd"", sample, weights)\n if np.shape(weights) != np.shape(sample)[:1]:\n raise ValueError(""should have one weight for each sample."")\n sample, weights = util.promote_dtypes_inexact(sample, weights)\n N, D = np.shape(sample)\n\n if range is not None and (\n len(range) != D or any(r is not None and np.shape(r)[0] != 2 for r in range)): # type: ignore[arg-type]\n raise ValueError(f""For sample.shape={(N, D)}, range must be a sequence ""\n f""of {D} pairs or Nones; got {range=}"")\n\n try:\n num_bins = len(bins) # type: ignore[arg-type]\n except TypeError:\n # when bin_size is integer, the same bin is used for each dimension\n bins_per_dimension: list[ArrayLike] = D * [bins] # type: ignore[assignment]\n else:\n if num_bins != D:\n raise ValueError(""should be a bin for each dimension."")\n bins_per_dimension = list(bins) # type: ignore[arg-type]\n\n bin_idx_by_dim: list[Array] = []\n bin_edges_by_dim: list[Array] = []\n\n for i in builtins.range(D):\n range_i = None if range is None else range[i]\n bin_edges = histogram_bin_edges(sample[:, i], bins_per_dimension[i], range_i, weights)\n bin_idx = searchsorted(bin_edges, sample[:, i], side='right')\n bin_idx = where(sample[:, i] == bin_edges[-1], bin_idx - 1, bin_idx)\n bin_idx_by_dim.append(bin_idx)\n bin_edges_by_dim.append(bin_edges)\n\n nbins = tuple(len(bin_edges) + 1 for bin_edges in bin_edges_by_dim)\n dedges = [diff(bin_edges) for bin_edges in bin_edges_by_dim]\n\n xy = ravel_multi_index(tuple(bin_idx_by_dim), nbins, mode='clip')\n hist = bincount(xy, weights, length=math.prod(nbins))\n hist = reshape(hist, nbins)\n core = D*(slice(1, -1),)\n hist = hist[core]\n\n if density:\n hist = hist.astype(sample.dtype)\n hist /= hist.sum()\n for norm in ix_(*dedges):\n hist /= norm\n\n return hist, bin_edges_by_dim\n\n\n@export\ndef transpose(a: ArrayLike, axes: Sequence[int] | None = None) -> Array:\n """"""Return a transposed version of an N-dimensional array.\n\n JAX implementation of :func:`numpy.transpose`, implemented in terms of\n :func:`jax.lax.transpose`.\n\n Args:\n a: input array\n axes: optionally 
specify the permutation using a length-`a.ndim` sequence of integers\n ``i`` satisfying ``0 <= i < a.ndim``. Defaults to ``range(a.ndim)[::-1]``, i.e.\n reverses the order of all axes.\n\n Returns:\n transposed copy of the array.\n\n See Also:\n - :func:`jax.Array.transpose`: equivalent function via an :class:`~jax.Array` method.\n - :attr:`jax.Array.T`: equivalent function via an :class:`~jax.Array` property.\n - :func:`jax.numpy.matrix_transpose`: transpose the last two axes of an array. This is\n suitable for working with batched 2D matrices.\n - :func:`jax.numpy.swapaxes`: swap any two axes in an array.\n - :func:`jax.numpy.moveaxis`: move an axis to another position in the array.\n\n Note:\n Unlike :func:`numpy.transpose`, :func:`jax.numpy.transpose` will return a copy rather\n than a view of the input array. However, under JIT, the compiler will optimize-away\n such copies when possible, so this doesn't have performance impacts in practice.\n\n Examples:\n For a 1D array, the transpose is the identity:\n\n >>> x = jnp.array([1, 2, 3, 4])\n >>> jnp.transpose(x)\n Array([1, 2, 3, 4], dtype=int32)\n\n For a 2D array, the transpose is a matrix transpose:\n\n >>> x = jnp.array([[1, 2],\n ... [3, 4]])\n >>> jnp.transpose(x)\n Array([[1, 3],\n [2, 4]], dtype=int32)\n\n For an N-dimensional array, the transpose reverses the order of the axes:\n\n >>> x = jnp.zeros(shape=(3, 4, 5))\n >>> jnp.transpose(x).shape\n (5, 4, 3)\n\n The ``axes`` argument can be specified to change this default behavior:\n\n >>> jnp.transpose(x, (0, 2, 1)).shape\n (3, 5, 4)\n\n Since swapping the last two axes is a common operation, it can be done\n via its own API, :func:`jax.numpy.matrix_transpose`:\n\n >>> jnp.matrix_transpose(x).shape\n (3, 5, 4)\n\n For convenience, transposes may also be performed using the :meth:`jax.Array.transpose`\n method or the :attr:`jax.Array.T` property:\n\n >>> x = jnp.array([[1, 2],\n ... [3, 4]])\n >>> x.transpose()\n Array([[1, 3],\n [2, 4]], dtype=int32)\n >>> x.T\n Array([[1, 3],\n [2, 4]], dtype=int32)\n """"""\n a = util.ensure_arraylike(""transpose"", a)\n axes_ = list(range(a.ndim)[::-1]) if axes is None else axes\n axes_ = [_canonicalize_axis(i, np.ndim(a)) for i in axes_]\n return lax.transpose(a, axes_)\n\n\n@export\ndef permute_dims(a: ArrayLike, /, axes: tuple[int, ...]) -> Array:\n """"""Permute the axes/dimensions of an array.\n\n JAX implementation of :func:`array_api.permute_dims`.\n\n Args:\n a: input array\n axes: tuple of integers in range ``[0, a.ndim)`` specifying the\n axes permutation.\n\n Returns:\n a copy of ``a`` with axes permuted.\n\n See also:\n - :func:`jax.numpy.transpose`\n - :func:`jax.numpy.matrix_transpose`\n\n Examples:\n >>> a = jnp.array([[1, 2, 3],\n ... 
[4, 5, 6]])\n >>> jnp.permute_dims(a, (1, 0))\n Array([[1, 4],\n [2, 5],\n [3, 6]], dtype=int32)\n """"""\n a = util.ensure_arraylike(""permute_dims"", a)\n return lax.transpose(a, axes)\n\n\n@export\ndef matrix_transpose(x: ArrayLike, /) -> Array:\n """"""Transpose the last two dimensions of an array.\n\n JAX implementation of :func:`numpy.matrix_transpose`, implemented in terms of\n :func:`jax.lax.transpose`.\n\n Args:\n x: input array, Must have ``x.ndim >= 2``\n\n Returns:\n matrix-transposed copy of the array.\n\n See Also:\n - :attr:`jax.Array.mT`: same operation accessed via an :func:`~jax.Array` property.\n - :func:`jax.numpy.transpose`: general multi-axis transpose\n\n Note:\n Unlike :func:`numpy.matrix_transpose`, :func:`jax.numpy.matrix_transpose` will return a\n copy rather than a view of the input array. However, under JIT, the compiler will\n optimize-away such copies when possible, so this doesn't have performance impacts in practice.\n\n Examples:\n Here is a 2x2x2 matrix representing a batched 2x2 matrix:\n\n >>> x = jnp.array([[[1, 2],\n ... [3, 4]],\n ... [[5, 6],\n ... [7, 8]]])\n >>> jnp.matrix_transpose(x)\n Array([[[1, 3],\n [2, 4]],\n \n [[5, 7],\n [6, 8]]], dtype=int32)\n\n For convenience, you can perform the same transpose via the :attr:`~jax.Array.mT`\n property of :class:`jax.Array`:\n\n >>> x.mT\n Array([[[1, 3],\n [2, 4]],\n \n [[5, 7],\n [6, 8]]], dtype=int32)\n """"""\n x = util.ensure_arraylike(""matrix_transpose"", x)\n ndim = x.ndim\n if ndim < 2:\n raise ValueError(f""x must be at least two-dimensional for matrix_transpose; got {ndim=}"")\n axes = (*range(ndim - 2), ndim - 1, ndim - 2)\n return lax.transpose(x, axes)\n\n\n@export\n@partial(jit, static_argnames=('k', 'axes'))\ndef rot90(m: ArrayLike, k: int = 1, axes: tuple[int, int] = (0, 1)) -> Array:\n """"""Rotate an array by 90 degrees counterclockwise in the plane specified by axes.\n\n JAX implementation of :func:`numpy.rot90`.\n\n Args:\n m: input array. Must have ``m.ndim >= 2``.\n k: int, optional, default=1. Specifies the number of times the array is rotated.\n For negative values of ``k``, the array is rotated in clockwise direction.\n axes: tuple of 2 integers, optional, default= (0, 1). The axes define the plane\n in which the array is rotated. Both the axes must be different.\n\n Returns:\n An array containing the copy of the input, ``m`` rotated by 90 degrees.\n\n See also:\n - :func:`jax.numpy.flip`: reverse the order along the given axis\n - :func:`jax.numpy.fliplr`: reverse the order along axis 1 (left/right)\n - :func:`jax.numpy.flipud`: reverse the order along axis 0 (up/down)\n\n Examples:\n >>> m = jnp.array([[1, 2, 3],\n ... [4, 5, 6]])\n >>> jnp.rot90(m)\n Array([[3, 6],\n [2, 5],\n [1, 4]], dtype=int32)\n >>> jnp.rot90(m, k=2)\n Array([[6, 5, 4],\n [3, 2, 1]], dtype=int32)\n\n ``jnp.rot90(m, k=1, axes=(1, 0))`` is equivalent to\n ``jnp.rot90(m, k=-1, axes(0,1))``.\n\n >>> jnp.rot90(m, axes=(1, 0))\n Array([[4, 1],\n [5, 2],\n [6, 3]], dtype=int32)\n >>> jnp.rot90(m, k=-1, axes=(0, 1))\n Array([[4, 1],\n [5, 2],\n [6, 3]], dtype=int32)\n\n when input array has ``ndim>2``:\n\n >>> m1 = jnp.array([[[1, 2, 3],\n ... [4, 5, 6]],\n ... [[7, 8, 9],\n ... 
[10, 11, 12]]])\n >>> jnp.rot90(m1, k=1, axes=(2, 1))\n Array([[[ 4, 1],\n [ 5, 2],\n [ 6, 3]],\n \n [[10, 7],\n [11, 8],\n [12, 9]]], dtype=int32)\n """"""\n m = util.ensure_arraylike(""rot90"", m)\n if np.ndim(m) < 2:\n raise ValueError(""rot90 requires its first argument to have ndim at least ""\n f""two, but got first argument of shape {np.shape(m)}, ""\n f""which has ndim {np.ndim(m)}"")\n ax1, ax2 = axes\n ax1 = _canonicalize_axis(ax1, np.ndim(m))\n ax2 = _canonicalize_axis(ax2, np.ndim(m))\n if ax1 == ax2:\n raise ValueError(""Axes must be different"") # same as numpy error\n k = k % 4\n if k == 0:\n return asarray(m)\n elif k == 2:\n return flip(flip(m, ax1), ax2)\n else:\n perm = list(range(np.ndim(m)))\n perm[ax1], perm[ax2] = perm[ax2], perm[ax1]\n if k == 1:\n return transpose(flip(m, ax2), perm)\n else:\n return flip(transpose(m, perm), ax2)\n\n\n@export\ndef flip(m: ArrayLike, axis: int | Sequence[int] | None = None) -> Array:\n """"""Reverse the order of elements of an array along the given axis.\n\n JAX implementation of :func:`numpy.flip`.\n\n Args:\n m: Array.\n axis: integer or sequence of integers. Specifies along which axis or axes\n should the array elements be reversed. Default is ``None``, which flips\n along all axes.\n\n Returns:\n An array with the elements in reverse order along ``axis``.\n\n See Also:\n - :func:`jax.numpy.fliplr`: reverse the order along axis 1 (left/right)\n - :func:`jax.numpy.flipud`: reverse the order along axis 0 (up/down)\n\n Examples:\n >>> x1 = jnp.array([[1, 2],\n ... [3, 4]])\n >>> jnp.flip(x1)\n Array([[4, 3],\n [2, 1]], dtype=int32)\n\n If ``axis`` is specified with an integer, then ``jax.numpy.flip`` reverses\n the array along that particular axis only.\n\n >>> jnp.flip(x1, axis=1)\n Array([[2, 1],\n [4, 3]], dtype=int32)\n\n >>> x2 = jnp.arange(1, 9).reshape(2, 2, 2)\n >>> x2\n Array([[[1, 2],\n [3, 4]],\n \n [[5, 6],\n [7, 8]]], dtype=int32)\n >>> jnp.flip(x2)\n Array([[[8, 7],\n [6, 5]],\n \n [[4, 3],\n [2, 1]]], dtype=int32)\n\n When ``axis`` is specified with a sequence of integers, then\n ``jax.numpy.flip`` reverses the array along the specified axes.\n\n >>> jnp.flip(x2, axis=[1, 2])\n Array([[[4, 3],\n [2, 1]],\n \n [[8, 7],\n [6, 5]]], dtype=int32)\n """"""\n arr = util.ensure_arraylike(""flip"", m)\n return _flip(arr, reductions._ensure_optional_axes(axis))\n\n@partial(jit, static_argnames=('axis',))\ndef _flip(m: Array, axis: int | tuple[int, ...] | None = None) -> Array:\n if axis is None:\n return lax.rev(m, list(range(len(np.shape(m)))))\n axis = _ensure_index_tuple(axis)\n return lax.rev(m, [_canonicalize_axis(ax, np.ndim(m)) for ax in axis])\n\n\n@export\ndef fliplr(m: ArrayLike) -> Array:\n """"""Reverse the order of elements of an array along axis 1.\n\n JAX implementation of :func:`numpy.fliplr`.\n\n Args:\n m: Array with at least two dimensions.\n\n Returns:\n An array with the elements in reverse order along axis 1.\n\n See Also:\n - :func:`jax.numpy.flip`: reverse the order along the given axis\n - :func:`jax.numpy.flipud`: reverse the order along axis 0\n\n Examples:\n >>> x = jnp.array([[1, 2],\n ... 
[3, 4]])\n >>> jnp.fliplr(x)\n Array([[2, 1],\n [4, 3]], dtype=int32)\n """"""\n arr = util.ensure_arraylike(""fliplr"", m)\n return _flip(arr, 1)\n\n\n@export\ndef flipud(m: ArrayLike) -> Array:\n """"""Reverse the order of elements of an array along axis 0.\n\n JAX implementation of :func:`numpy.flipud`.\n\n Args:\n m: Array with at least one dimension.\n\n Returns:\n An array with the elements in reverse order along axis 0.\n\n See Also:\n - :func:`jax.numpy.flip`: reverse the order along the given axis\n - :func:`jax.numpy.fliplr`: reverse the order along axis 1\n\n Examples:\n >>> x = jnp.array([[1, 2],\n ... [3, 4]])\n >>> jnp.flipud(x)\n Array([[3, 4],\n [1, 2]], dtype=int32)\n """"""\n arr = util.ensure_arraylike(""flipud"", m)\n return _flip(arr, 0)\n\n\n@export\n@jit\ndef iscomplex(x: ArrayLike) -> Array:\n """"""Return boolean array showing where the input is complex.\n\n JAX implementation of :func:`numpy.iscomplex`.\n\n Args:\n x: Input array to check.\n\n Returns:\n A new array containing boolean values indicating complex elements.\n\n See Also:\n - :func:`jax.numpy.iscomplexobj`\n - :func:`jax.numpy.isrealobj`\n\n Examples:\n >>> jnp.iscomplex(jnp.array([True, 0, 1, 2j, 1+2j]))\n Array([False, False, False, True, True], dtype=bool)\n """"""\n i = ufuncs.imag(x)\n return lax.ne(i, _lax_const(i, 0))\n\n\n@export\n@jit\ndef isreal(x: ArrayLike) -> Array:\n """"""Return boolean array showing where the input is real.\n\n JAX implementation of :func:`numpy.isreal`.\n\n Args:\n x: input array to check.\n\n Returns:\n A new array containing boolean values indicating real elements.\n\n See Also:\n - :func:`jax.numpy.iscomplex`\n - :func:`jax.numpy.isrealobj`\n\n Examples:\n >>> jnp.isreal(jnp.array([False, 0j, 1, 2.1, 1+2j]))\n Array([ True, True, True, True, False], dtype=bool)\n """"""\n i = ufuncs.imag(x)\n return lax.eq(i, _lax_const(i, 0))\n\n\n@export\n@partial(jit, static_argnames=['deg'])\ndef angle(z: ArrayLike, deg: bool = False) -> Array:\n """"""Return the angle of a complex valued number or array.\n\n JAX implementation of :func:`numpy.angle`.\n\n Args:\n z: A complex number or an array of complex numbers.\n deg: Boolean. If ``True``, returns the result in degrees else returns\n in radians. Default is ``False``.\n\n Returns:\n An array of counterclockwise angle of each element of ``z``, with the same\n shape as ``z`` of dtype float.\n\n Examples:\n\n If ``z`` is a number\n\n >>> z1 = 2+3j\n >>> jnp.angle(z1)\n Array(0.98279375, dtype=float32, weak_type=True)\n\n If ``z`` is an array\n\n >>> z2 = jnp.array([[1+3j, 2-5j],\n ... [4-3j, 3+2j]])\n >>> with jnp.printoptions(precision=2, suppress=True):\n ... print(jnp.angle(z2))\n [[ 1.25 -1.19]\n [-0.64 0.59]]\n\n If ``deg=True``.\n\n >>> with jnp.printoptions(precision=2, suppress=True):\n ... 
print(jnp.angle(z2, deg=True))\n [[ 71.57 -68.2 ]\n [-36.87 33.69]]\n """"""\n z = util.ensure_arraylike('angle', z)\n re = ufuncs.real(z)\n im = ufuncs.imag(z)\n dtype = _dtype(re)\n if not issubdtype(dtype, np.inexact) or (\n issubdtype(_dtype(z), np.floating) and np.ndim(z) == 0):\n dtype = dtypes.canonicalize_dtype(dtypes.float_)\n re = lax.convert_element_type(re, dtype)\n im = lax.convert_element_type(im, dtype)\n result = lax.atan2(im, re)\n return ufuncs.degrees(result) if deg else result\n\n\n@export\n@partial(jit, static_argnames=('n', 'axis'))\ndef diff(a: ArrayLike, n: int = 1, axis: int = -1,\n prepend: ArrayLike | None = None,\n append: ArrayLike | None = None) -> Array:\n """"""Calculate n-th order difference between array elements along a given axis.\n\n JAX implementation of :func:`numpy.diff`.\n\n The first order difference is computed by ``a[i+1] - a[i]``, and the n-th order\n difference is computed ``n`` times recursively.\n\n Args:\n a: input array. Must have ``a.ndim >= 1``.\n n: int, optional, default=1. Order of the difference. Specifies the number\n of times the difference is computed. If n=0, no difference is computed and\n input is returned as is.\n axis: int, optional, default=-1. Specifies the axis along which the difference\n is computed. The difference is computed along ``axis -1`` by default.\n prepend: scalar or array, optional, default=None. Specifies the values to be\n prepended along ``axis`` before computing the difference.\n append: scalar or array, optional, default=None. Specifies the values to be\n appended along ``axis`` before computing the difference.\n\n Returns:\n An array containing the n-th order difference between the elements of ``a``.\n\n See also:\n - :func:`jax.numpy.ediff1d`: Computes the differences between consecutive\n elements of an array.\n - :func:`jax.numpy.cumsum`: Computes the cumulative sum of the elements of\n the array along a given axis.\n - :func:`jax.numpy.gradient`: Computes the gradient of an N-dimensional array.\n\n Examples:\n ``jnp.diff`` computes the first order difference along ``axis``, by default.\n\n >>> a = jnp.array([[1, 5, 2, 9],\n ... 
[3, 8, 7, 4]])\n >>> jnp.diff(a)\n Array([[ 4, -3, 7],\n [ 5, -1, -3]], dtype=int32)\n\n When ``n = 2``, second order difference is computed along ``axis``.\n\n >>> jnp.diff(a, n=2)\n Array([[-7, 10],\n [-6, -2]], dtype=int32)\n\n When ``prepend = 2``, it is prepended to ``a`` along ``axis`` before computing\n the difference.\n\n >>> jnp.diff(a, prepend=2)\n Array([[-1, 4, -3, 7],\n [ 1, 5, -1, -3]], dtype=int32)\n\n When ``append = jnp.array([[3],[1]])``, it is appended to ``a`` along ``axis``\n before computing the difference.\n\n >>> jnp.diff(a, append=jnp.array([[3],[1]]))\n Array([[ 4, -3, 7, -6],\n [ 5, -1, -3, -3]], dtype=int32)\n """"""\n arr = util.ensure_arraylike(""diff"", a)\n n = core.concrete_or_error(operator.index, n, ""'n' argument of jnp.diff"")\n axis = core.concrete_or_error(operator.index, axis, ""'axis' argument of jnp.diff"")\n if n == 0:\n return arr\n if n < 0:\n raise ValueError(f""order must be non-negative but got {n}"")\n if arr.ndim == 0:\n raise ValueError(f""diff requires input that is at least one dimensional; got {a}"")\n\n nd = arr.ndim\n axis = _canonicalize_axis(axis, nd)\n\n combined: list[Array] = []\n if prepend is not None:\n prepend = util.ensure_arraylike(""diff"", prepend)\n if not np.ndim(prepend):\n shape = list(arr.shape)\n shape[axis] = 1\n prepend = broadcast_to(prepend, tuple(shape))\n combined.append(prepend)\n\n combined.append(arr)\n\n if append is not None:\n append = util.ensure_arraylike(""diff"", append)\n if not np.ndim(append):\n shape = list(arr.shape)\n shape[axis] = 1\n append = broadcast_to(append, tuple(shape))\n combined.append(append)\n\n if len(combined) > 1:\n arr = concatenate(combined, axis)\n\n slice1 = [slice(None)] * nd\n slice2 = [slice(None)] * nd\n slice1[axis] = slice(1, None)\n slice2[axis] = slice(None, -1)\n slice1_tuple = tuple(slice1)\n slice2_tuple = tuple(slice2)\n\n op = operator.ne if arr.dtype == np.bool_ else operator.sub\n for _ in range(n):\n arr = op(arr[slice1_tuple], arr[slice2_tuple])\n\n return arr\n\n\n@export\n@jit\ndef ediff1d(ary: ArrayLike, to_end: ArrayLike | None = None,\n to_begin: ArrayLike | None = None) -> Array:\n """"""Compute the differences of the elements of the flattened array.\n\n JAX implementation of :func:`numpy.ediff1d`.\n\n Args:\n ary: input array or scalar.\n to_end: scalar or array, optional, default=None. Specifies the numbers to\n append to the resulting array.\n to_begin: scalar or array, optional, default=None. 
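# Illustrative sketch (not from the library source): an n-th order jnp.diff is the first-order sliced difference applied n times, and jnp.ediff1d is the first-order difference of the flattened input.\nimport jax.numpy as jnp\na = jnp.array([2, 3, 5, 9, 1, 4])\nfirst = a[1:] - a[:-1]\nassert jnp.array_equal(first, jnp.diff(a))\nassert jnp.array_equal(first[1:] - first[:-1], jnp.diff(a, n=2))\nassert jnp.array_equal(jnp.ediff1d(a.reshape(2, 3)), jnp.diff(a.reshape(2, 3).ravel()))\n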
Specifies the numbers to\n prepend to the resulting array.\n\n Returns:\n An array containing the differences between the elements of the input array.\n\n Note:\n Unlike NumPy's implementation of ediff1d, :py:func:`jax.numpy.ediff1d` will\n not issue an error if casting ``to_end`` or ``to_begin`` to the type of\n ``ary`` loses precision.\n\n See also:\n - :func:`jax.numpy.diff`: Computes the n-th order difference between elements\n of the array along a given axis.\n - :func:`jax.numpy.cumsum`: Computes the cumulative sum of the elements of\n the array along a given axis.\n - :func:`jax.numpy.gradient`: Computes the gradient of an N-dimensional array.\n\n Examples:\n >>> a = jnp.array([2, 3, 5, 9, 1, 4])\n >>> jnp.ediff1d(a)\n Array([ 1, 2, 4, -8, 3], dtype=int32)\n >>> jnp.ediff1d(a, to_begin=-10)\n Array([-10, 1, 2, 4, -8, 3], dtype=int32)\n >>> jnp.ediff1d(a, to_end=jnp.array([20, 30]))\n Array([ 1, 2, 4, -8, 3, 20, 30], dtype=int32)\n >>> jnp.ediff1d(a, to_begin=-10, to_end=jnp.array([20, 30]))\n Array([-10, 1, 2, 4, -8, 3, 20, 30], dtype=int32)\n\n For array with ``ndim > 1``, the differences are computed after flattening\n the input array.\n\n >>> a1 = jnp.array([[2, -1, 4, 7],\n ... [3, 5, -6, 9]])\n >>> jnp.ediff1d(a1)\n Array([ -3, 5, 3, -4, 2, -11, 15], dtype=int32)\n >>> a2 = jnp.array([2, -1, 4, 7, 3, 5, -6, 9])\n >>> jnp.ediff1d(a2)\n Array([ -3, 5, 3, -4, 2, -11, 15], dtype=int32)\n """"""\n arr = util.ensure_arraylike(""ediff1d"", ary).ravel()\n result = lax.sub(arr[1:], arr[:-1])\n if to_begin is not None:\n to_begin = util.ensure_arraylike(""ediff1d"", to_begin)\n result = concatenate((ravel(to_begin.astype(arr.dtype)), result))\n if to_end is not None:\n to_end = util.ensure_arraylike(""ediff1d"", to_end)\n result = concatenate((result, ravel(to_end.astype(arr.dtype))))\n return result\n\n\n@export\n@partial(jit, static_argnames=(""axis"", ""edge_order""))\ndef gradient(\n f: ArrayLike,\n *varargs: ArrayLike,\n axis: int | Sequence[int] | None = None,\n edge_order: int | None = None,\n) -> Array | list[Array]:\n """"""Compute the numerical gradient of a sampled function.\n\n JAX implementation of :func:`numpy.gradient`.\n\n The gradient in ``jnp.gradient`` is computed using second-order finite\n differences across the array of sampled function values. This should not\n be confused with :func:`jax.grad`, which computes a precise gradient of\n a callable function via :ref:`automatic differentiation `.\n\n Args:\n f: *N*-dimensional array of function values.\n varargs: optional list of scalars or arrays specifying spacing of\n function evaluations. Options are:\n\n - not specified: unit spacing in all dimensions.\n - a single scalar: constant spacing in all dimensions.\n - *N* values: specify different spacing in each dimension:\n\n - scalar values indicate constant spacing in that dimension.\n - array values must match the length of the corresponding dimension,\n and specify the coordinates at which ``f`` is evaluated.\n\n edge_order: not implemented in JAX\n axis: integer or tuple of integers specifying the axis along which\n to compute the gradient. If None (default) calculates the gradient\n along all axes.\n\n Returns:\n an array or tuple of arrays containing the numerical gradient along\n each specified axis.\n\n See also:\n - :func:`jax.grad`: automatic differentiation of a function with a single output.\n\n Examples:\n Comparing numerical and automatic differentiation of a simple function:\n\n >>> def f(x):\n ... 
return jnp.sin(x) * jnp.exp(-x / 4)\n ...\n >>> def gradf_exact(x):\n ... # exact analytical gradient of f(x)\n ... return -f(x) / 4 + jnp.cos(x) * jnp.exp(-x / 4)\n ...\n >>> x = jnp.linspace(0, 5, 10)\n\n >>> with jnp.printoptions(precision=2, suppress=True):\n ... print(""numerical gradient:"", jnp.gradient(f(x), x))\n ... print(""automatic gradient:"", jax.vmap(jax.grad(f))(x))\n ... print(""exact gradient: "", gradf_exact(x))\n ...\n numerical gradient: [ 0.83 0.61 0.18 -0.2 -0.43 -0.49 -0.39 -0.21 -0.02 0.08]\n automatic gradient: [ 1. 0.62 0.17 -0.23 -0.46 -0.51 -0.41 -0.21 -0.01 0.15]\n exact gradient: [ 1. 0.62 0.17 -0.23 -0.46 -0.51 -0.41 -0.21 -0.01 0.15]\n\n Notice that, as expected, the numerical gradient has some approximation error\n compared to the automatic gradient computed via :func:`jax.grad`.\n """"""\n\n if edge_order is not None:\n raise NotImplementedError(\n ""The 'edge_order' argument to jnp.gradient is not supported.""\n )\n a, *spacing = util.promote_dtypes_inexact(f, *varargs)\n\n def gradient_along_axis(a, h, axis):\n sliced = partial(lax.slice_in_dim, a, axis=axis)\n upper_edge = sliced(1, 2) - sliced(0, 1)\n lower_edge = sliced(-1, None) - sliced(-2, -1)\n\n if np.ndim(h) == 0:\n inner = (sliced(2, None) - sliced(None, -2)) * 0.5 / h\n lower_edge /= h\n upper_edge /= h\n\n elif np.ndim(h) == 1:\n if len(h) != a.shape[axis]:\n raise ValueError(\n ""Spacing arrays must have the same length as the ""\n ""dimension along which the gradient is calculated.""\n )\n h_shape = [1] * a.ndim\n h_shape[axis] = len(h)\n h = h.reshape(h_shape)\n sliced_x = partial(lax.slice_in_dim, h, axis=axis)\n\n upper_edge /= sliced_x(1, 2) - sliced_x(0, 1)\n lower_edge /= sliced_x(-1, None) - sliced_x(-2, -1)\n dx1 = sliced_x(1, -1) - sliced_x(0, -2)\n dx2 = sliced_x(2, None) - sliced_x(1, -1)\n a = -(dx2) / (dx1 * (dx1 + dx2))\n b = (dx2 - dx1) / (dx1 * dx2)\n c = dx1 / (dx2 * (dx1 + dx2))\n inner = a * sliced(0, -2) + b * sliced(1, -1) + c * sliced(2, None)\n else:\n raise ValueError(""Spacing arrays must be 1D arrays or scalars."")\n\n return concatenate((upper_edge, inner, lower_edge), axis=axis)\n\n if axis is None:\n axis_tuple = tuple(range(a.ndim))\n else:\n axis_tuple = tuple(_canonicalize_axis(i, a.ndim) for i in _ensure_index_tuple(axis))\n if len(axis_tuple) == 0:\n return []\n\n if min(s for i, s in enumerate(a.shape) if i in axis_tuple) < 2:\n raise ValueError(""Shape of array too small to calculate ""\n ""a numerical gradient, ""\n ""at least 2 elements are required."")\n if len(spacing) == 0:\n dx: Sequence[ArrayLike] = [1.0] * len(axis_tuple)\n elif len(spacing) == 1:\n dx = list(spacing) * len(axis_tuple)\n elif len(spacing) == len(axis_tuple):\n dx = list(spacing)\n else:\n TypeError(f""Invalid number of spacing arguments {len(spacing)} for {axis=}"")\n\n a_grad = [gradient_along_axis(a, h, ax) for ax, h in zip(axis_tuple, dx)]\n return a_grad[0] if len(axis_tuple) == 1 else a_grad\n\n\n@export\ndef isrealobj(x: Any) -> bool:\n """"""Check if the input is not a complex number or an array containing complex elements.\n\n JAX implementation of :func:`numpy.isrealobj`.\n\n The function evaluates based on input type rather than value.\n Inputs with zero imaginary parts are still considered complex.\n\n Args:\n x: input object to check.\n\n Returns:\n False if ``x`` is a complex number or an array containing at least one complex element,\n True otherwise.\n\n See Also:\n - :func:`jax.numpy.iscomplexobj`\n - :func:`jax.numpy.isreal`\n\n Examples:\n >>> jnp.isrealobj(0)\n 
True\n >>> jnp.isrealobj(1.2)\n True\n >>> jnp.isrealobj(jnp.array([1, 2]))\n True\n >>> jnp.isrealobj(1+2j)\n False\n >>> jnp.isrealobj(jnp.array([0, 1+2j]))\n False\n """"""\n return not iscomplexobj(x)\n\n\n@export\ndef reshape(\n a: ArrayLike, shape: DimSize | Shape, order: str = ""C"", *,\n copy: bool | None = None, out_sharding=None) -> Array:\n """"""Return a reshaped copy of an array.\n\n JAX implementation of :func:`numpy.reshape`, implemented in terms of\n :func:`jax.lax.reshape`.\n\n Args:\n a: input array to reshape\n shape: integer or sequence of integers giving the new shape, which must match the\n size of the input array. If any single dimension is given size ``-1``, it will be\n replaced with a value such that the output has the correct size.\n order: ``'F'`` or ``'C'``, specifies whether the reshape should apply column-major\n (fortran-style, ``""F""``) or row-major (C-style, ``""C""``) order; default is ``""C""``.\n JAX does not support ``order=""A""``.\n copy: unused by JAX; JAX always returns a copy, though under JIT the compiler\n may optimize such copies away.\n\n Returns:\n reshaped copy of input array with the specified shape.\n\n Notes:\n Unlike :func:`numpy.reshape`, :func:`jax.numpy.reshape` will return a copy rather\n than a view of the input array. However, under JIT, the compiler will optimize-away\n such copies when possible, so this doesn't have performance impacts in practice.\n\n See Also:\n - :meth:`jax.Array.reshape`: equivalent functionality via an array method.\n - :func:`jax.numpy.ravel`: flatten an array into a 1D shape.\n - :func:`jax.numpy.squeeze`: remove one or more length-1 axes from an array's shape.\n\n Examples:\n >>> x = jnp.array([[1, 2, 3],\n ... [4, 5, 6]])\n >>> jnp.reshape(x, 6)\n Array([1, 2, 3, 4, 5, 6], dtype=int32)\n >>> jnp.reshape(x, (3, 2))\n Array([[1, 2],\n [3, 4],\n [5, 6]], dtype=int32)\n\n You can use ``-1`` to automatically compute a shape that is consistent with\n the input size:\n\n >>> jnp.reshape(x, -1) # -1 is inferred to be 6\n Array([1, 2, 3, 4, 5, 6], dtype=int32)\n >>> jnp.reshape(x, (-1, 2)) # -1 is inferred to be 3\n Array([[1, 2],\n [3, 4],\n [5, 6]], dtype=int32)\n\n The default ordering of axes in the reshape is C-style row-major ordering.\n To use Fortran-style column-major ordering, specify ``order='F'``:\n\n >>> jnp.reshape(x, 6, order='F')\n Array([1, 4, 2, 5, 3, 6], dtype=int32)\n >>> jnp.reshape(x, (3, 2), order='F')\n Array([[1, 5],\n [4, 3],\n [2, 6]], dtype=int32)\n\n For convenience, this functionality is also available via the\n :meth:`jax.Array.reshape` method:\n\n >>> x.reshape(3, 2)\n Array([[1, 2],\n [3, 4],\n [5, 6]], dtype=int32)\n """"""\n del copy # unused\n\n __tracebackhide__ = True\n util.check_arraylike(""reshape"", a)\n\n try:\n if out_sharding is None:\n # forward to method for ndarrays\n return a.reshape(shape, order=order) # type: ignore[call-overload,union-attr]\n except AttributeError:\n pass\n return asarray(a).reshape(shape, order=order, out_sharding=out_sharding)\n\n\n@export\n@partial(jit, static_argnames=('order', 'out_sharding'), inline=True)\ndef ravel(a: ArrayLike, order: str = ""C"", *, out_sharding=None) -> Array:\n """"""Flatten array into a 1-dimensional shape.\n\n JAX implementation of :func:`numpy.ravel`, implemented in terms of\n :func:`jax.lax.reshape`.\n\n ``ravel(arr, order=order)`` is equivalent to ``reshape(arr, -1, order=order)``.\n\n Args:\n a: array to be flattened.\n order: ``'F'`` or ``'C'``, specifies whether the reshape should apply column-major\n 
(fortran-style, ``""F""``) or row-major (C-style, ``""C""``) order; default is ``""C""``.\n JAX does not support `order=""A""` or `order=""K""`.\n\n Returns:\n flattened copy of input array.\n\n Notes:\n Unlike :func:`numpy.ravel`, :func:`jax.numpy.ravel` will return a copy rather\n than a view of the input array. However, under JIT, the compiler will optimize-away\n such copies when possible, so this doesn't have performance impacts in practice.\n\n See Also:\n - :meth:`jax.Array.ravel`: equivalent functionality via an array method.\n - :func:`jax.numpy.reshape`: general array reshape.\n\n Examples:\n >>> x = jnp.array([[1, 2, 3],\n ... [4, 5, 6]])\n\n By default, ravel in C-style, row-major order\n\n >>> jnp.ravel(x)\n Array([1, 2, 3, 4, 5, 6], dtype=int32)\n\n Optionally ravel in Fortran-style, column-major:\n\n >>> jnp.ravel(x, order='F')\n Array([1, 4, 2, 5, 3, 6], dtype=int32)\n\n For convenience, the same functionality is available via the :meth:`jax.Array.ravel`\n method:\n\n >>> x.ravel()\n Array([1, 2, 3, 4, 5, 6], dtype=int32)\n """"""\n a = util.ensure_arraylike(""ravel"", a)\n if order == ""K"":\n raise NotImplementedError(""Ravel not implemented for order='K'."")\n return reshape(a, (np.size(a),), order, out_sharding=out_sharding)\n\n\n@export\ndef ravel_multi_index(multi_index: Sequence[ArrayLike], dims: Sequence[int],\n mode: str = 'raise', order: str = 'C') -> Array:\n """"""Convert multi-dimensional indices into flat indices.\n\n JAX implementation of :func:`numpy.ravel_multi_index`\n\n Args:\n multi_index: sequence of integer arrays containing indices in each dimension.\n dims: sequence of integer sizes; must have ``len(dims) == len(multi_index)``\n mode: how to handle out-of bound indices. Options are\n\n - ``""raise""`` (default): raise a ValueError. This mode is incompatible\n with :func:`~jax.jit` or other JAX transformations.\n - ``""clip""``: clip out-of-bound indices to valid range.\n - ``""wrap""``: wrap out-of-bound indices to valid range.\n\n order: ``""C""`` (default) or ``""F""``, specify whether to assume C-style\n row-major order or Fortran-style column-major order.\n\n Returns:\n array of flattened indices\n\n See also:\n :func:`jax.numpy.unravel_index`: inverse of this function.\n\n Examples:\n Define a 2-dimensional array and a sequence of indices of even values:\n\n >>> x = jnp.array([[2., 3., 4.],\n ... 
[5., 6., 7.]])\n >>> indices = jnp.where(x % 2 == 0)\n >>> indices\n (Array([0, 0, 1], dtype=int32), Array([0, 2, 1], dtype=int32))\n >>> x[indices]\n Array([2., 4., 6.], dtype=float32)\n\n Compute the flattened indices:\n\n >>> indices_flat = jnp.ravel_multi_index(indices, x.shape)\n >>> indices_flat\n Array([0, 2, 4], dtype=int32)\n\n These flattened indices can be used to extract the same values from the\n flattened ``x`` array:\n\n >>> x_flat = x.ravel()\n >>> x_flat\n Array([2., 3., 4., 5., 6., 7.], dtype=float32)\n >>> x_flat[indices_flat]\n Array([2., 4., 6.], dtype=float32)\n\n The original indices can be recovered with :func:`~jax.numpy.unravel_index`:\n\n >>> jnp.unravel_index(indices_flat, x.shape)\n (Array([0, 0, 1], dtype=int32), Array([0, 2, 1], dtype=int32))\n """"""\n assert len(multi_index) == len(dims), f""len(multi_index)={len(multi_index)} != len(dims)={len(dims)}""\n dims = tuple(core.concrete_or_error(operator.index, d, ""in `dims` argument of ravel_multi_index()."") for d in dims)\n multi_index_arr = list(util.ensure_arraylike_tuple(""ravel_multi_index"", multi_index))\n for index in multi_index_arr:\n if mode == 'raise':\n core.concrete_or_error(array, index,\n ""The error occurred because ravel_multi_index was jit-compiled""\n "" with mode='raise'. Use mode='wrap' or mode='clip' instead."")\n if not issubdtype(_dtype(index), np.integer):\n raise TypeError(""only int indices permitted"")\n if mode == ""raise"":\n if any(reductions.any((i < 0) | (i >= d)) for i, d in zip(multi_index_arr, dims)):\n raise ValueError(""invalid entry in coordinates array"")\n elif mode == ""clip"":\n multi_index_arr = [clip(i, 0, d - 1) for i, d in zip(multi_index_arr, dims)]\n elif mode == ""wrap"":\n multi_index_arr = [i % d for i, d in zip(multi_index_arr, dims)]\n else:\n raise ValueError(f""invalid mode={mode!r}. Expected 'raise', 'wrap', or 'clip'"")\n\n if order == ""F"":\n strides = np.cumprod((1,) + dims[:-1])\n elif order == ""C"":\n strides = np.cumprod((1,) + dims[1:][::-1])[::-1]\n else:\n raise ValueError(f""invalid order={order!r}. Expected 'C' or 'F'"")\n\n result = array(0, dtype=(multi_index_arr[0].dtype if multi_index_arr\n else dtypes.canonicalize_dtype(dtypes.int_)))\n for i, s in zip(multi_index_arr, strides):\n result = result + i * int(s)\n return result\n\n\n@export\ndef unravel_index(indices: ArrayLike, shape: Shape) -> tuple[Array, ...]:\n """"""Convert flat indices into multi-dimensional indices.\n\n JAX implementation of :func:`numpy.unravel_index`. The JAX version differs in\n its treatment of out-of-bound indices: unlike NumPy, negative indices are\n supported, and out-of-bound indices are clipped to the nearest valid value.\n\n Args:\n indices: integer array of flat indices\n shape: shape of multidimensional array to index into\n\n Returns:\n Tuple of unraveled indices\n\n See also:\n :func:`jax.numpy.ravel_multi_index`: Inverse of this function.\n\n Examples:\n Start with a 1D array values and indices:\n\n >>> x = jnp.array([2., 3., 4., 5., 6., 7.])\n >>> indices = jnp.array([1, 3, 5])\n >>> print(x[indices])\n [3. 5. 7.]\n\n Now if ``x`` is reshaped, ``unravel_indices`` can be used to convert\n the flat indices into a tuple of indices that access the same entries:\n\n >>> shape = (2, 3)\n >>> x_2D = x.reshape(shape)\n >>> indices_2D = jnp.unravel_index(indices, shape)\n >>> indices_2D\n (Array([0, 1, 1], dtype=int32), Array([1, 0, 2], dtype=int32))\n >>> print(x_2D[indices_2D])\n [3. 5. 
7.]\n\n The inverse function, ``ravel_multi_index``, can be used to obtain the\n original indices:\n\n >>> jnp.ravel_multi_index(indices_2D, shape)\n Array([1, 3, 5], dtype=int32)\n """"""\n indices_arr = util.ensure_arraylike(""unravel_index"", indices)\n # Note: we do not convert shape to an array, because it may be passed as a\n # tuple of weakly-typed values, and asarray() would strip these weak types.\n try:\n shape = list(shape)\n except TypeError:\n # TODO: Consider warning here since shape is supposed to be a sequence, so\n # this should not happen.\n shape = [shape]\n if any(np.ndim(s) != 0 for s in shape):\n raise ValueError(""unravel_index: shape should be a scalar or 1D sequence."")\n out_indices: list[ArrayLike] = [0] * len(shape)\n for i, s in reversed(list(enumerate(shape))):\n indices_arr, out_indices[i] = ufuncs.divmod(indices_arr, s)\n oob_pos = indices_arr > 0\n oob_neg = indices_arr < -1\n return tuple(where(oob_pos, s - 1, where(oob_neg, 0, i))\n for s, i in safe_zip(shape, out_indices))\n\n\n@export\n@partial(jit, static_argnames=('new_shape',))\ndef resize(a: ArrayLike, new_shape: Shape) -> Array:\n """"""Return a new array with specified shape.\n\n JAX implementation of :func:`numpy.resize`.\n\n Args:\n a: input array or scalar.\n new_shape: int or tuple of ints. Specifies the shape of the resized array.\n\n Returns:\n A resized array with specified shape. The elements of ``a`` are repeated in\n the resized array, if the resized array is larger than the original array.\n\n See also:\n - :func:`jax.numpy.reshape`: Returns a reshaped copy of an array.\n - :func:`jax.numpy.repeat`: Constructs an array from repeated elements.\n\n Examples:\n >>> x = jnp.array([1, 2, 3, 4, 5, 6, 7, 8, 9])\n >>> jnp.resize(x, (3, 3))\n Array([[1, 2, 3],\n [4, 5, 6],\n [7, 8, 9]], dtype=int32)\n >>> jnp.resize(x, (3, 4))\n Array([[1, 2, 3, 4],\n [5, 6, 7, 8],\n [9, 1, 2, 3]], dtype=int32)\n >>> jnp.resize(4, (3, 2))\n Array([[4, 4],\n [4, 4],\n [4, 4]], dtype=int32, weak_type=True)\n """"""\n util.check_arraylike(""resize"", a)\n new_shape = _ensure_index_tuple(new_shape)\n\n if any(dim_length < 0 for dim_length in new_shape):\n raise ValueError(""all elements of `new_shape` must be non-negative"")\n\n arr = ravel(a)\n\n new_size = math.prod(new_shape)\n if arr.size == 0 or new_size == 0:\n return zeros_like(arr, shape=new_shape)\n\n repeats = ceil_of_ratio(new_size, arr.size)\n arr = tile(arr, repeats)[:new_size]\n\n return reshape(arr, new_shape)\n\n\n@export\ndef squeeze(a: ArrayLike, axis: int | Sequence[int] | None = None) -> Array:\n """"""Remove one or more length-1 axes from array\n\n JAX implementation of :func:`numpy.sqeeze`, implemented via :func:`jax.lax.squeeze`.\n\n Args:\n a: input array\n axis: integer or sequence of integers specifying axes to remove. If any specified\n axis does not have a length of 1, an error is raised. If not specified, squeeze\n all length-1 axes in ``a``.\n\n Returns:\n copy of ``a`` with length-1 axes removed.\n\n Notes:\n Unlike :func:`numpy.squeeze`, :func:`jax.numpy.squeeze` will return a copy rather\n than a view of the input array. 
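# Illustrative sketch (not from the library source): jnp.resize fills the requested shape by repeating the flattened input, equivalent to the tile-and-trim step in the implementation above; values match the docstring example.\nimport jax.numpy as jnp\nx = jnp.arange(1, 10)\nmanual = jnp.tile(x.ravel(), 2)[:12].reshape(3, 4)\nassert jnp.array_equal(manual, jnp.resize(x, (3, 4)))\n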
However, under JIT, the compiler will optimize-away\n such copies when possible, so this doesn't have performance impacts in practice.\n\n See Also:\n - :func:`jax.numpy.expand_dims`: the inverse of ``squeeze``: add dimensions of length 1.\n - :meth:`jax.Array.squeeze`: equivalent functionality via an array method.\n - :func:`jax.lax.squeeze`: equivalent XLA API.\n - :func:`jax.numpy.ravel`: flatten an array into a 1D shape.\n - :func:`jax.numpy.reshape`: general array reshape.\n\n Examples:\n >>> x = jnp.array([[[0]], [[1]], [[2]]])\n >>> x.shape\n (3, 1, 1)\n\n Squeeze all length-1 dimensions:\n\n >>> jnp.squeeze(x)\n Array([0, 1, 2], dtype=int32)\n >>> _.shape\n (3,)\n\n Equivalent while specifying the axes explicitly:\n\n >>> jnp.squeeze(x, axis=(1, 2))\n Array([0, 1, 2], dtype=int32)\n\n Attempting to squeeze a non-unit axis results in an error:\n\n >>> jnp.squeeze(x, axis=0) # doctest: +IGNORE_EXCEPTION_DETAIL\n Traceback (most recent call last):\n ...\n ValueError: cannot select an axis to squeeze out which has size not equal to one, got shape=(3, 1, 1) and dimensions=(0,)\n\n For convenience, this functionality is also available via the\n :meth:`jax.Array.squeeze` method:\n\n >>> x.squeeze()\n Array([0, 1, 2], dtype=int32)\n """"""\n arr = util.ensure_arraylike(""squeeze"", a)\n return _squeeze(arr, _ensure_index_tuple(axis) if axis is not None else None)\n\n@partial(jit, static_argnames=('axis',), inline=True)\ndef _squeeze(a: Array, axis: tuple[int, ...]) -> Array:\n if axis is None:\n a_shape = np.shape(a)\n if not core.is_constant_shape(a_shape):\n # We do not even know the rank of the output if the input shape is not known\n raise ValueError(""jnp.squeeze with axis=None is not supported with shape polymorphism"")\n axis = tuple(i for i, d in enumerate(a_shape) if d == 1)\n return lax.squeeze(a, axis)\n\n\n@export\ndef expand_dims(a: ArrayLike, axis: int | Sequence[int]) -> Array:\n """"""Insert dimensions of length 1 into array\n\n JAX implementation of :func:`numpy.expand_dims`, implemented via\n :func:`jax.lax.expand_dims`.\n\n Args:\n a: input array\n axis: integer or sequence of integers specifying positions of axes to add.\n\n Returns:\n Copy of ``a`` with added dimensions.\n\n Notes:\n Unlike :func:`numpy.expand_dims`, :func:`jax.numpy.expand_dims` will return a copy\n rather than a view of the input array. However, under JIT, the compiler will optimize\n away such copies when possible, so this doesn't have performance impacts in practice.\n\n See Also:\n - :func:`jax.numpy.squeeze`: inverse of this operation, i.e. 
remove length-1 dimensions.\n - :func:`jax.lax.expand_dims`: XLA version of this functionality.\n\n Examples:\n >>> x = jnp.array([1, 2, 3])\n >>> x.shape\n (3,)\n\n Expand the leading dimension:\n\n >>> jnp.expand_dims(x, 0)\n Array([[1, 2, 3]], dtype=int32)\n >>> _.shape\n (1, 3)\n\n Expand the trailing dimension:\n\n >>> jnp.expand_dims(x, 1)\n Array([[1],\n [2],\n [3]], dtype=int32)\n >>> _.shape\n (3, 1)\n\n Expand multiple dimensions:\n\n >>> jnp.expand_dims(x, (0, 1, 3))\n Array([[[[1],\n [2],\n [3]]]], dtype=int32)\n >>> _.shape\n (1, 1, 3, 1)\n\n Dimensions can also be expanded more succinctly by indexing with ``None``:\n\n >>> x[None] # equivalent to jnp.expand_dims(x, 0)\n Array([[1, 2, 3]], dtype=int32)\n >>> x[:, None] # equivalent to jnp.expand_dims(x, 1)\n Array([[1],\n [2],\n [3]], dtype=int32)\n >>> x[None, None, :, None] # equivalent to jnp.expand_dims(x, (0, 1, 3))\n Array([[[[1],\n [2],\n [3]]]], dtype=int32)\n """"""\n a = util.ensure_arraylike(""expand_dims"", a)\n axis = _ensure_index_tuple(axis)\n return lax.expand_dims(a, axis)\n\n\n@export\n@partial(jit, static_argnames=('axis1', 'axis2'), inline=True)\ndef swapaxes(a: ArrayLike, axis1: int, axis2: int) -> Array:\n """"""Swap two axes of an array.\n\n JAX implementation of :func:`numpy.swapaxes`, implemented in terms of\n :func:`jax.lax.transpose`.\n\n Args:\n a: input array\n axis1: index of first axis\n axis2: index of second axis\n\n Returns:\n Copy of ``a`` with specified axes swapped.\n\n Notes:\n Unlike :func:`numpy.swapaxes`, :func:`jax.numpy.swapaxes` will return a copy rather\n than a view of the input array. However, under JIT, the compiler will optimize away\n such copies when possible, so this doesn't have performance impacts in practice.\n\n See Also:\n - :func:`jax.numpy.moveaxis`: move a single axis of an array.\n - :func:`jax.numpy.rollaxis`: older API for ``moveaxis``.\n - :func:`jax.lax.transpose`: more general axes permutations.\n - :meth:`jax.Array.swapaxes`: same functionality via an array method.\n\n Examples:\n >>> a = jnp.ones((2, 3, 4, 5))\n >>> jnp.swapaxes(a, 1, 3).shape\n (2, 5, 4, 3)\n\n Equivalent output via the ``swapaxes`` array method:\n\n >>> a.swapaxes(1, 3).shape\n (2, 5, 4, 3)\n\n Equivalent output via :func:`~jax.numpy.transpose`:\n\n >>> a.transpose(0, 3, 2, 1).shape\n (2, 5, 4, 3)\n """"""\n a = util.ensure_arraylike(""swapaxes"", a)\n perm = np.arange(np.ndim(a))\n perm[axis1], perm[axis2] = perm[axis2], perm[axis1]\n return lax.transpose(a, list(perm))\n\n\n@export\ndef moveaxis(a: ArrayLike, source: int | Sequence[int],\n destination: int | Sequence[int]) -> Array:\n """"""Move an array axis to a new position\n\n JAX implementation of :func:`numpy.moveaxis`, implemented in terms of\n :func:`jax.lax.transpose`.\n\n Args:\n a: input array\n source: index or indices of the axes to move.\n destination: index or indices of the axes destinations\n\n Returns:\n Copy of ``a`` with axes moved from ``source`` to ``destination``.\n\n Notes:\n Unlike :func:`numpy.moveaxis`, :func:`jax.numpy.moveaxis` will return a copy rather\n than a view of the input array. 
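# Illustrative sketch (not from the library source): jnp.moveaxis reduces to a single transpose with a computed permutation; moving axis 1 to the end of a rank-4 array is the permutation (0, 2, 3, 1).\nimport jax.numpy as jnp\na = jnp.arange(2 * 3 * 4 * 5).reshape(2, 3, 4, 5)\nassert jnp.array_equal(jnp.moveaxis(a, 1, -1), jnp.transpose(a, (0, 2, 3, 1)))\nassert jnp.moveaxis(a, 1, -1).shape == (2, 4, 5, 3)\n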
However, under JIT, the compiler will optimize away\n such copies when possible, so this doesn't have performance impacts in practice.\n\n See also:\n - :func:`jax.numpy.swapaxes`: swap two axes.\n - :func:`jax.numpy.rollaxis`: older API for moving an axis.\n - :func:`jax.numpy.transpose`: general axes permutation.\n\n Examples:\n >>> a = jnp.ones((2, 3, 4, 5))\n\n Move axis ``1`` to the end of the array:\n\n >>> jnp.moveaxis(a, 1, -1).shape\n (2, 4, 5, 3)\n\n Move the last axis to position 1:\n\n >>> jnp.moveaxis(a, -1, 1).shape\n (2, 5, 3, 4)\n\n Move multiple axes:\n\n >>> jnp.moveaxis(a, (0, 1), (-1, -2)).shape\n (4, 5, 3, 2)\n\n This can also be accomplished via :func:`~jax.numpy.transpose`:\n\n >>> a.transpose(2, 3, 1, 0).shape\n (4, 5, 3, 2)\n """"""\n arr = util.ensure_arraylike(""moveaxis"", a)\n return _moveaxis(arr, _ensure_index_tuple(source),\n _ensure_index_tuple(destination))\n\n@partial(jit, static_argnames=('source', 'destination'), inline=True)\ndef _moveaxis(a: Array, source: tuple[int, ...], destination: tuple[int, ...]) -> Array:\n source = tuple(_canonicalize_axis(i, np.ndim(a)) for i in source)\n destination = tuple(_canonicalize_axis(i, np.ndim(a)) for i in destination)\n if len(source) != len(destination):\n raise ValueError(""Inconsistent number of elements: {} vs {}""\n .format(len(source), len(destination)))\n perm = [i for i in range(np.ndim(a)) if i not in source]\n for dest, src in sorted(zip(destination, source)):\n perm.insert(dest, src)\n return lax.transpose(a, perm)\n\n\n@export\n@partial(jit, static_argnames=('equal_nan',))\ndef isclose(a: ArrayLike, b: ArrayLike, rtol: ArrayLike = 1e-05, atol: ArrayLike = 1e-08,\n equal_nan: bool = False) -> Array:\n r""""""Check if the elements of two arrays are approximately equal within a tolerance.\n\n JAX implementation of :func:`numpy.allclose`.\n\n Essentially this function evaluates the following condition:\n\n .. math::\n\n |a - b| \le \mathtt{atol} + \mathtt{rtol} * |b|\n\n ``jnp.inf`` in ``a`` will be considered equal to ``jnp.inf`` in ``b``.\n\n Args:\n a: first input array to compare.\n b: second input array to compare.\n rtol: relative tolerance used for approximate equality. Default = 1e-05.\n atol: absolute tolerance used for approximate equality. Default = 1e-08.\n equal_nan: Boolean. If ``True``, NaNs in ``a`` will be considered\n equal to NaNs in ``b``. Default is ``False``.\n\n Returns:\n A new array containing boolean values indicating whether the input arrays\n are element-wise approximately equal within the specified tolerances.\n\n See Also:\n - :func:`jax.numpy.allclose`\n - :func:`jax.numpy.equal`\n\n Examples:\n >>> jnp.isclose(jnp.array([1e6, 2e6, jnp.inf]), jnp.array([1e6, 2e7, jnp.inf]))\n Array([ True, False, True], dtype=bool)\n >>> jnp.isclose(jnp.array([1e6, 2e6, 3e6]),\n ... jnp.array([1.00008e6, 2.00008e7, 3.00008e8]), rtol=1e3)\n Array([ True, True, True], dtype=bool)\n >>> jnp.isclose(jnp.array([1e6, 2e6, 3e6]),\n ... jnp.array([1.00001e6, 2.00002e6, 3.00009e6]), atol=1e3)\n Array([ True, True, True], dtype=bool)\n >>> jnp.isclose(jnp.array([jnp.nan, 1, 2]),\n ... 
jnp.array([jnp.nan, 1, 2]), equal_nan=True)\n Array([ True, True, True], dtype=bool)\n """"""\n a, b = util.promote_args(""isclose"", a, b)\n dtype = _dtype(a)\n if dtypes.issubdtype(dtype, dtypes.extended):\n return lax.eq(a, b)\n\n a, b = util.promote_args_inexact(""isclose"", a, b)\n dtype = _dtype(a)\n if issubdtype(dtype, np.complexfloating):\n dtype = np.array(0, dtype).real.dtype\n rtol = lax.convert_element_type(rtol, dtype)\n atol = lax.convert_element_type(atol, dtype)\n both_nan = ufuncs.logical_and(ufuncs.isnan(a), ufuncs.isnan(b))\n check_fin = ufuncs.isfinite(b)\n in_range = lax.le(\n lax.abs(lax.sub(a, b)),\n lax.add(atol, lax.mul(rtol, lax.abs(b))))\n out = ufuncs.logical_or(lax.eq(a, b), ufuncs.logical_and(check_fin, in_range))\n return ufuncs.logical_or(out, both_nan) if equal_nan else out\n\n\ndef _interp(x: ArrayLike, xp: ArrayLike, fp: ArrayLike,\n left: ArrayLike | str | None = None,\n right: ArrayLike | str | None = None,\n period: ArrayLike | None = None) -> Array:\n x, xp, fp = util.ensure_arraylike(""interp"", x, xp, fp)\n if np.shape(xp) != np.shape(fp) or np.ndim(xp) != 1:\n raise ValueError(""xp and fp must be one-dimensional arrays of equal size"")\n x_arr, xp_arr = util.promote_dtypes_inexact(x, xp)\n fp_arr, = util.promote_dtypes_inexact(fp)\n del x, xp, fp\n\n if isinstance(left, str):\n if left != 'extrapolate':\n raise ValueError(""the only valid string value of `left` is ""\n f""'extrapolate', but got: {left!r}"")\n extrapolate_left = True\n else:\n extrapolate_left = False\n if isinstance(right, str):\n if right != 'extrapolate':\n raise ValueError(""the only valid string value of `right` is ""\n f""'extrapolate', but got: {right!r}"")\n extrapolate_right = True\n else:\n extrapolate_right = False\n\n if dtypes.issubdtype(x_arr.dtype, np.complexfloating):\n raise ValueError(""jnp.interp: complex x values not supported."")\n\n if period is not None:\n if np.ndim(period) != 0:\n raise ValueError(f""period must be a scalar; got {period}"")\n period = ufuncs.abs(period)\n x_arr = x_arr % period\n xp_arr = xp_arr % period\n xp_arr, fp_arr = lax.sort_key_val(xp_arr, fp_arr)\n xp_arr = concatenate([xp_arr[-1:] - period, xp_arr, xp_arr[:1] + period])\n fp_arr = concatenate([fp_arr[-1:], fp_arr, fp_arr[:1]])\n\n i = clip(searchsorted(xp_arr, x_arr, side='right'), 1, len(xp_arr) - 1)\n df = fp_arr[i] - fp_arr[i - 1]\n dx = xp_arr[i] - xp_arr[i - 1]\n delta = x_arr - xp_arr[i - 1]\n\n epsilon = np.spacing(np.finfo(xp_arr.dtype).eps)\n dx0 = lax.abs(dx) <= epsilon # Prevent NaN gradients when `dx` is small.\n f = where(dx0, fp_arr[i - 1], fp_arr[i - 1] + (delta / where(dx0, 1, dx)) * df)\n\n if not extrapolate_left:\n assert not isinstance(left, str)\n left_arr: ArrayLike = fp_arr[0] if left is None else left\n if period is None:\n f = where(x_arr < xp_arr[0], left_arr, f)\n if not extrapolate_right:\n assert not isinstance(right, str)\n right_arr: ArrayLike = fp_arr[-1] if right is None else right\n if period is None:\n f = where(x_arr > xp_arr[-1], right_arr, f)\n\n return f\n\n\n@export\ndef interp(x: ArrayLike, xp: ArrayLike, fp: ArrayLike,\n left: ArrayLike | str | None = None,\n right: ArrayLike | str | None = None,\n period: ArrayLike | None = None) -> Array:\n """"""One-dimensional linear interpolation.\n\n JAX implementation of :func:`numpy.interp`.\n\n Args:\n x: N-dimensional array of x coordinates at which to evaluate the interpolation.\n xp: one-dimensional sorted array of points to be interpolated.\n fp: array of shape ``xp.shape`` containing the 
function values associated with ``xp``.\n left: specify how to handle points ``x < xp[0]``. Default is to return ``fp[0]``.\n If ``left`` is a scalar value, it will return this value. if ``left`` is the string\n ``""extrapolate""``, then the value will be determined by linear extrapolation.\n ``left`` is ignored if ``period`` is specified.\n right: specify how to handle points ``x > xp[-1]``. Default is to return ``fp[-1]``.\n If ``right`` is a scalar value, it will return this value. if ``right`` is the string\n ``""extrapolate""``, then the value will be determined by linear extrapolation.\n ``right`` is ignored if ``period`` is specified.\n period: optionally specify the period for the *x* coordinates, for e.g. interpolation\n in angular space.\n\n Returns:\n an array of shape ``x.shape`` containing the interpolated function at values ``x``.\n\n Examples:\n >>> xp = jnp.arange(10)\n >>> fp = 2 * xp\n >>> x = jnp.array([0.5, 2.0, 3.5])\n >>> interp(x, xp, fp)\n Array([1., 4., 7.], dtype=float32)\n\n Unless otherwise specified, extrapolation will be constant:\n\n >>> x = jnp.array([-10., 10.])\n >>> interp(x, xp, fp)\n Array([ 0., 18.], dtype=float32)\n\n Use ``""extrapolate""`` mode for linear extrapolation:\n\n >>> interp(x, xp, fp, left='extrapolate', right='extrapolate')\n Array([-20., 20.], dtype=float32)\n\n For periodic interpolation, specify the ``period``:\n\n >>> xp = jnp.array([0, jnp.pi / 2, jnp.pi, 3 * jnp.pi / 2])\n >>> fp = jnp.sin(xp)\n >>> x = 2 * jnp.pi # note: not in input array\n >>> jnp.interp(x, xp, fp, period=2 * jnp.pi)\n Array(0., dtype=float32)\n """"""\n static_argnames = []\n if isinstance(left, str) or left is None:\n static_argnames.append('left')\n if isinstance(right, str) or right is None:\n static_argnames.append('right')\n if period is None:\n static_argnames.append('period')\n jitted_interp = jit(_interp, static_argnames=static_argnames)\n return jitted_interp(x, xp, fp, left, right, period)\n\n\n@overload\ndef where(condition: ArrayLike, x: Literal[None] = None,\n y: Literal[None] = None, /, *, size: int | None = None,\n fill_value: None | ArrayLike | tuple[ArrayLike, ...] = None\n ) -> tuple[Array, ...]: ...\n\n@overload\ndef where(condition: ArrayLike, x: ArrayLike, y: ArrayLike, / ,*,\n size: int | None = None,\n fill_value: None | ArrayLike | tuple[ArrayLike, ...] = None\n ) -> Array: ...\n\n@overload\ndef where(condition: ArrayLike, x: ArrayLike | None = None,\n y: ArrayLike | None = None, /, *, size: int | None = None,\n fill_value: None | ArrayLike | tuple[ArrayLike, ...] = None\n ) -> Array | tuple[Array, ...]: ...\n\n\n@export\ndef where(condition, x=None, y=None, /, *, size=None, fill_value=None):\n """"""Select elements from two arrays based on a condition.\n\n JAX implementation of :func:`numpy.where`.\n\n .. note::\n when only ``condition`` is provided, ``jnp.where(condition)`` is equivalent\n to ``jnp.nonzero(condition)``. For that case, refer to the documentation of\n :func:`jax.numpy.nonzero`. The docstring below focuses on the case where\n ``x`` and ``y`` are specified.\n\n The three-term version of ``jnp.where`` lowers to :func:`jax.lax.select`.\n\n Args:\n condition: boolean array. Must be broadcast-compatible with ``x`` and ``y`` when\n they are specified.\n x: arraylike. Should be broadcast-compatible with ``condition`` and ``y``, and\n typecast-compatible with ``y``.\n y: arraylike. 
Should be broadcast-compatible with ``condition`` and ``x``, and\n typecast-compatible with ``x``.\n size: integer, only referenced when ``x`` and ``y`` are ``None``. For details,\n see :func:`jax.numpy.nonzero`.\n fill_value: only referenced when ``x`` and ``y`` are ``None``. For details,\n see :func:`jax.numpy.nonzero`.\n\n Returns:\n An array of dtype ``jnp.result_type(x, y)`` with values drawn from ``x`` where ``condition``\n is True, and from ``y`` where condition is ``False``. If ``x`` and ``y`` are ``None``, the\n function behaves differently; see :func:`jax.numpy.nonzero` for a description of the return\n type.\n\n See Also:\n - :func:`jax.numpy.nonzero`\n - :func:`jax.numpy.argwhere`\n - :func:`jax.lax.select`\n\n Notes:\n Special care is needed when the ``x`` or ``y`` input to :func:`jax.numpy.where` could\n have a value of NaN. Specifically, when a gradient is taken with :func:`jax.grad`\n (reverse-mode differentiation), a NaN in either ``x`` or ``y`` will propagate into the\n gradient, regardless of the value of ``condition``. More information on this behavior\n and workarounds is available in the `JAX FAQ\n `_.\n\n Examples:\n When ``x`` and ``y`` are not provided, ``where`` behaves equivalently to\n :func:`jax.numpy.nonzero`:\n\n >>> x = jnp.arange(10)\n >>> jnp.where(x > 4)\n (Array([5, 6, 7, 8, 9], dtype=int32),)\n >>> jnp.nonzero(x > 4)\n (Array([5, 6, 7, 8, 9], dtype=int32),)\n\n When ``x`` and ``y`` are provided, ``where`` selects between them based on\n the specified condition:\n\n >>> jnp.where(x > 4, x, 0)\n Array([0, 0, 0, 0, 0, 5, 6, 7, 8, 9], dtype=int32)\n """"""\n if x is None and y is None:\n util.check_arraylike(""where"", condition)\n return nonzero(condition, size=size, fill_value=fill_value)\n else:\n util.check_arraylike(""where"", condition, x, y)\n if size is not None or fill_value is not None:\n raise ValueError(""size and fill_value arguments cannot be used in ""\n ""three-term where function."")\n if x is None or y is None:\n raise ValueError(""Either both or neither of the x and y arguments ""\n ""should be provided to jax.numpy.where, got ""\n f""{x} and {y}."")\n return util._where(condition, x, y)\n\n\n@export\ndef select(\n condlist: Sequence[ArrayLike],\n choicelist: Sequence[ArrayLike],\n default: ArrayLike = 0,\n) -> Array:\n """"""Select values based on a series of conditions.\n\n JAX implementation of :func:`numpy.select`, implemented in terms\n of :func:`jax.lax.select_n`\n\n Args:\n condlist: sequence of array-like conditions. All entries must be mutually\n broadcast-compatible.\n choicelist: sequence of array-like values to choose. Must have the same length\n as ``condlist``, and all entries must be broadcast-compatible with entries\n of ``condlist``.\n default: value to return when every condition is False (default: 0).\n\n Returns:\n Array of selected values from ``choicelist`` corresponding to the first\n ``True`` entry in ``condlist`` at each location.\n\n See also:\n - :func:`jax.numpy.where`: select between two values based on a single condition.\n - :func:`jax.lax.select_n`: select between *N* values based on an index.\n\n Examples:\n >>> condlist = [\n ... jnp.array([False, True, False, False]),\n ... jnp.array([True, False, False, False]),\n ... jnp.array([False, True, True, False]),\n ... ]\n >>> choicelist = [\n ... jnp.array([1, 2, 3, 4]),\n ... jnp.array([10, 20, 30, 40]),\n ... jnp.array([100, 200, 300, 400]),\n ... 
]\n >>> jnp.select(condlist, choicelist, default=0)\n Array([ 10, 2, 300, 0], dtype=int32)\n\n This is logically equivalent to the following nested ``where`` statement:\n\n >>> default = 0\n >>> jnp.where(condlist[0],\n ... choicelist[0],\n ... jnp.where(condlist[1],\n ... choicelist[1],\n ... jnp.where(condlist[2],\n ... choicelist[2],\n ... default)))\n Array([ 10, 2, 300, 0], dtype=int32)\n\n However, for efficiency it is implemented in terms of :func:`jax.lax.select_n`.\n """"""\n if len(condlist) != len(choicelist):\n msg = ""condlist must have length equal to choicelist ({} vs {})""\n raise ValueError(msg.format(len(condlist), len(choicelist)))\n if len(condlist) == 0:\n raise ValueError(""condlist must be non-empty"")\n\n util.check_arraylike(""select"", *condlist, *choicelist, default)\n condlist = [asarray(cond) for cond in condlist]\n choicelist = [asarray(choice) for choice in choicelist]\n default = asarray(default)\n\n # Put the default at front with condition False because\n # argmax returns zero for an array of False values.\n choicelist = util.promote_dtypes(default, *choicelist)\n conditions = stack(broadcast_arrays(False, *condlist))\n idx = argmax(conditions.astype(bool), axis=0)\n return lax.select_n(*broadcast_arrays(idx, *choicelist))\n\n\n@export\ndef bincount(x: ArrayLike, weights: ArrayLike | None = None,\n minlength: int = 0, *, length: int | None = None\n ) -> Array:\n """"""Count the number of occurrences of each value in an integer array.\n\n JAX implementation of :func:`numpy.bincount`.\n\n For an array of non-negative integers ``x``, this function returns an array ``counts``\n of size ``x.max() + 1``, such that ``counts[i]`` contains the number of occurrences\n of the value ``i`` in ``x``.\n\n The JAX version has a few differences from the NumPy version:\n\n - In NumPy, passing an array ``x`` with negative entries will result in an error.\n In JAX, negative values are clipped to zero.\n - JAX adds an optional ``length`` parameter which can be used to statically specify\n the length of the output array so that this function can be used with transformations\n like :func:`jax.jit`. In this case, items larger than `length + 1` will be dropped.\n\n Args:\n x : 1-dimensional array of non-negative integers\n weights: optional array of weights associated with ``x``. If not specified, the\n weight for each entry will be ``1``.\n minlength: the minimum length of the output counts array.\n length: the length of the output counts array. 
Must be specified statically for\n ``bincount`` to be used with :func:`jax.jit` and other JAX transformations.\n\n Returns:\n An array of counts or summed weights reflecting the number of occurrences of values\n in ``x``.\n\n See Also:\n - :func:`jax.numpy.histogram`\n - :func:`jax.numpy.digitize`\n - :func:`jax.numpy.unique_counts`\n\n Examples:\n Basic bincount:\n\n >>> x = jnp.array([1, 1, 2, 3, 3, 3])\n >>> jnp.bincount(x)\n Array([0, 2, 1, 3], dtype=int32)\n\n Weighted bincount:\n\n >>> weights = jnp.array([1, 2, 3, 4, 5, 6])\n >>> jnp.bincount(x, weights)\n Array([ 0, 3, 3, 15], dtype=int32)\n\n Specifying a static ``length`` makes this jit-compatible:\n\n >>> jit_bincount = jax.jit(jnp.bincount, static_argnames=['length'])\n >>> jit_bincount(x, length=5)\n Array([0, 2, 1, 3, 0], dtype=int32)\n\n Any negative numbers are clipped to the first bin, and numbers beyond the\n specified ``length`` are dropped:\n\n >>> x = jnp.array([-1, -1, 1, 3, 10])\n >>> jnp.bincount(x, length=5)\n Array([2, 1, 0, 1, 0], dtype=int32)\n """"""\n x = util.ensure_arraylike(""bincount"", x)\n if _dtype(x) == bool:\n x = lax.convert_element_type(x, 'int32')\n if not issubdtype(_dtype(x), np.integer):\n raise TypeError(f""x argument to bincount must have an integer type; got {_dtype(x)}"")\n if np.ndim(x) != 1:\n raise ValueError(""only 1-dimensional input supported."")\n minlength = core.concrete_or_error(operator.index, minlength,\n ""The error occurred because of argument 'minlength' of jnp.bincount."")\n if length is None:\n x_arr = core.concrete_or_error(asarray, x,\n ""The error occurred because of argument 'x' of jnp.bincount. ""\n ""To avoid this error, pass a static `length` argument."")\n length = max(minlength, x_arr.size and int(max(0, x_arr.max())) + 1)\n else:\n length = core.concrete_dim_or_error(length,\n ""The error occurred because of argument 'length' of jnp.bincount."")\n if weights is None:\n weights = np.array(1, dtype=dtypes.int_)\n elif np.shape(x) != np.shape(weights):\n raise ValueError(""shape of weights must match shape of x."")\n return zeros(length, _dtype(weights)).at[clip(x, 0)].add(weights, mode='drop')\n\n@overload\ndef broadcast_shapes(*shapes: Sequence[int]) -> tuple[int, ...]: ...\n\n@overload\ndef broadcast_shapes(*shapes: Sequence[int | core.Tracer]\n ) -> tuple[int | core.Tracer, ...]: ...\n\n@export\ndef broadcast_shapes(*shapes):\n """"""Broadcast input shapes to a common output shape.\n\n JAX implementation of :func:`numpy.broadcast_shapes`. JAX uses NumPy-style\n broadcasting rules, which you can read more about at `NumPy broadcasting`_.\n\n Args:\n shapes: 0 or more shapes specified as sequences of integers\n\n Returns:\n The broadcasted shape as a tuple of integers.\n\n See Also:\n - :func:`jax.numpy.broadcast_arrays`: broadcast arrays to a common shape.\n - :func:`jax.numpy.broadcast_to`: broadcast an array to a specified shape.\n\n Examples:\n Some compatible shapes:\n\n >>> jnp.broadcast_shapes((1,), (4,))\n (4,)\n >>> jnp.broadcast_shapes((3, 1), (4,))\n (3, 4)\n >>> jnp.broadcast_shapes((3, 1), (1, 4), (5, 1, 1))\n (5, 3, 4)\n\n Incompatible shapes:\n\n >>> jnp.broadcast_shapes((3, 1), (4, 1)) # doctest: +IGNORE_EXCEPTION_DETAIL\n Traceback (most recent call last):\n ValueError: Incompatible shapes for broadcasting: shapes=[(3, 1), (4, 1)]\n\n .. 
_NumPy broadcasting: https://numpy.org/doc/stable/user/basics.broadcasting.html\n """"""\n if not shapes:\n return ()\n shapes = [(shape,) if np.ndim(shape) == 0 else tuple(shape) for shape in shapes]\n return lax.broadcast_shapes(*shapes)\n\n\n@export\ndef broadcast_arrays(*args: ArrayLike) -> list[Array]:\n """"""Broadcast arrays to a common shape.\n\n JAX implementation of :func:`numpy.broadcast_arrays`. JAX uses NumPy-style\n broadcasting rules, which you can read more about at `NumPy broadcasting`_.\n\n Args:\n args: zero or more array-like objects to be broadcasted.\n\n Returns:\n a list of arrays containing broadcasted copies of the inputs.\n\n See also:\n - :func:`jax.numpy.broadcast_shapes`: broadcast input shapes to a common shape.\n - :func:`jax.numpy.broadcast_to`: broadcast an array to a specified shape.\n\n Examples:\n\n >>> x = jnp.arange(3)\n >>> y = jnp.int32(1)\n >>> jnp.broadcast_arrays(x, y)\n [Array([0, 1, 2], dtype=int32), Array([1, 1, 1], dtype=int32)]\n\n >>> x = jnp.array([[1, 2, 3]])\n >>> y = jnp.array([[10],\n ... [20]])\n >>> x2, y2 = jnp.broadcast_arrays(x, y)\n >>> x2\n Array([[1, 2, 3],\n [1, 2, 3]], dtype=int32)\n >>> y2\n Array([[10, 10, 10],\n [20, 20, 20]], dtype=int32)\n\n .. _NumPy broadcasting: https://numpy.org/doc/stable/user/basics.broadcasting.html\n """"""\n args = util.ensure_arraylike_tuple(""broadcast_arrays"", args)\n return util._broadcast_arrays(*args)\n\n\n@export\ndef broadcast_to(array: ArrayLike, shape: DimSize | Shape,\n *, out_sharding: NamedSharding | P | None = None) -> Array:\n """"""Broadcast an array to a specified shape.\n\n JAX implementation of :func:`numpy.broadcast_to`. JAX uses NumPy-style\n broadcasting rules, which you can read more about at `NumPy broadcasting`_.\n\n Args:\n array: array to be broadcast.\n shape: shape to which the array will be broadcast.\n\n Returns:\n a copy of array broadcast to the specified shape.\n\n See also:\n - :func:`jax.numpy.broadcast_arrays`: broadcast arrays to a common shape.\n - :func:`jax.numpy.broadcast_shapes`: broadcast input shapes to a common shape.\n\n Examples:\n >>> x = jnp.int32(1)\n >>> jnp.broadcast_to(x, (1, 4))\n Array([[1, 1, 1, 1]], dtype=int32)\n\n >>> x = jnp.array([1, 2, 3])\n >>> jnp.broadcast_to(x, (2, 3))\n Array([[1, 2, 3],\n [1, 2, 3]], dtype=int32)\n\n >>> x = jnp.array([[2], [4]])\n >>> jnp.broadcast_to(x, (2, 4))\n Array([[2, 2, 2, 2],\n [4, 4, 4, 4]], dtype=int32)\n\n .. 
_NumPy broadcasting: https://numpy.org/doc/stable/user/basics.broadcasting.html\n """"""\n return util._broadcast_to(array, shape, sharding=out_sharding)\n\n\ndef _split(op: str, ary: ArrayLike,\n indices_or_sections: int | Sequence[int] | ArrayLike,\n axis: int = 0) -> list[Array]:\n ary = util.ensure_arraylike(op, ary)\n axis = core.concrete_or_error(operator.index, axis, f""in jax.numpy.{op} argument `axis`"")\n size = ary.shape[axis]\n if (isinstance(indices_or_sections, (tuple, list)) or\n isinstance(indices_or_sections, (np.ndarray, Array)) and\n indices_or_sections.ndim > 0):\n split_indices = np.asarray([0] + [\n core.concrete_dim_or_error(i_s, f""in jax.numpy.{op} argument 1"")\n for i_s in indices_or_sections] + [size])\n sizes = list(np.diff(split_indices))\n else:\n if core.is_symbolic_dim(indices_or_sections):\n raise ValueError(f""jax.numpy.{op} with a symbolic number of sections is ""\n ""not supported"")\n num_sections: int = core.concrete_or_error(int, indices_or_sections,\n f""in jax.numpy.{op} argument 1"")\n part_size, r = divmod(size, num_sections)\n if r == 0:\n sizes = [part_size] * num_sections\n elif op == ""array_split"":\n sizes = [(part_size + 1)] * r + [part_size] * (num_sections - r)\n else:\n raise ValueError(f""array split does not result in an equal division: rest is {r}"")\n sizes = [i if core.is_symbolic_dim(i) else np.int64(i)\n for i in sizes]\n return list(lax.split(ary, sizes, axis=axis))\n\n\n@export\ndef split(ary: ArrayLike, indices_or_sections: int | Sequence[int] | ArrayLike,\n axis: int = 0) -> list[Array]:\n """"""Split an array into sub-arrays.\n\n JAX implementation of :func:`numpy.split`.\n\n Args:\n ary: N-dimensional array-like object to split\n indices_or_sections: either a single integer or a sequence of indices.\n\n - if ``indices_or_sections`` is an integer *N*, then *N* must evenly divide\n ``ary.shape[axis]`` and ``ary`` will be divided into *N* equally-sized\n chunks along ``axis``.\n - if ``indices_or_sections`` is a sequence of integers, then these integers\n specify the boundary between unevenly-sized chunks along ``axis``; see\n examples below.\n\n axis: the axis along which to split; defaults to 0.\n\n Returns:\n A list of arrays. If ``indices_or_sections`` is an integer *N*, then the list is\n of length *N*. If ``indices_or_sections`` is a sequence *seq*, then the list is\n of length *len(seq) + 1*.\n\n Examples:\n Splitting a 1-dimensional array:\n\n >>> x = jnp.array([1, 2, 3, 4, 5, 6, 7, 8, 9])\n\n Split into three equal sections:\n\n >>> chunks = jnp.split(x, 3)\n >>> print(*chunks)\n [1 2 3] [4 5 6] [7 8 9]\n\n Split into sections by index:\n\n >>> chunks = jnp.split(x, [2, 7]) # [x[0:2], x[2:7], x[7:]]\n >>> print(*chunks)\n [1 2] [3 4 5 6 7] [8 9]\n\n Splitting a two-dimensional array along axis 1:\n\n >>> x = jnp.array([[1, 2, 3, 4],\n ... [5, 6, 7, 8]])\n >>> x1, x2 = jnp.split(x, 2, axis=1)\n >>> print(x1)\n [[1 2]\n [5 6]]\n >>> print(x2)\n [[3 4]\n [7 8]]\n\n See also:\n - :func:`jax.numpy.array_split`: like ``split``, but allows ``indices_or_sections``\n to be an integer that does not evenly divide the size of the array.\n - :func:`jax.numpy.vsplit`: split vertically, i.e. along axis=0\n - :func:`jax.numpy.hsplit`: split horizontally, i.e. along axis=1\n - :func:`jax.numpy.dsplit`: split depth-wise, i.e. 
along axis=2\n """"""\n return _split(""split"", ary, indices_or_sections, axis=axis)\n\n\n@export\ndef vsplit(ary: ArrayLike, indices_or_sections: int | Sequence[int] | ArrayLike) -> list[Array]:\n """"""Split an array into sub-arrays vertically.\n\n JAX implementation of :func:`numpy.vsplit`.\n\n Refer to the documentation of :func:`jax.numpy.split` for details; ``vsplit`` is\n equivalent to ``split`` with ``axis=0``.\n\n Examples:\n 1D array:\n\n >>> x = jnp.array([1, 2, 3, 4, 5, 6])\n >>> x1, x2 = jnp.vsplit(x, 2)\n >>> print(x1, x2)\n [1 2 3] [4 5 6]\n\n 2D array:\n\n >>> x = jnp.array([[1, 2, 3, 4],\n ... [5, 6, 7, 8]])\n >>> x1, x2 = jnp.vsplit(x, 2)\n >>> print(x1, x2)\n [[1 2 3 4]] [[5 6 7 8]]\n\n See also:\n - :func:`jax.numpy.split`: split an array along any axis.\n - :func:`jax.numpy.hsplit`: split horizontally, i.e. along axis=1\n - :func:`jax.numpy.dsplit`: split depth-wise, i.e. along axis=2\n - :func:`jax.numpy.array_split`: like ``split``, but allows ``indices_or_sections``\n to be an integer that does not evenly divide the size of the array.\n """"""\n return _split(""vsplit"", ary, indices_or_sections, axis=0)\n\n\n@export\ndef hsplit(ary: ArrayLike, indices_or_sections: int | Sequence[int] | ArrayLike) -> list[Array]:\n """"""Split an array into sub-arrays horizontally.\n\n JAX implementation of :func:`numpy.hsplit`.\n\n Refer to the documentation of :func:`jax.numpy.split` for details. ``hsplit`` is\n equivalent to ``split`` with ``axis=1``, or ``axis=0`` for one-dimensional arrays.\n\n Examples:\n 1D array:\n\n >>> x = jnp.array([1, 2, 3, 4, 5, 6])\n >>> x1, x2 = jnp.hsplit(x, 2)\n >>> print(x1, x2)\n [1 2 3] [4 5 6]\n\n 2D array:\n\n >>> x = jnp.array([[1, 2, 3, 4],\n ... [5, 6, 7, 8]])\n >>> x1, x2 = jnp.hsplit(x, 2)\n >>> print(x1)\n [[1 2]\n [5 6]]\n >>> print(x2)\n [[3 4]\n [7 8]]\n\n See also:\n - :func:`jax.numpy.split`: split an array along any axis.\n - :func:`jax.numpy.vsplit`: split vertically, i.e. along axis=0\n - :func:`jax.numpy.dsplit`: split depth-wise, i.e. along axis=2\n - :func:`jax.numpy.array_split`: like ``split``, but allows ``indices_or_sections``\n to be an integer that does not evenly divide the size of the array.\n """"""\n a = util.ensure_arraylike(""hsplit"", ary)\n return _split(""hsplit"", a, indices_or_sections, axis=0 if a.ndim == 1 else 1)\n\n\n@export\ndef dsplit(ary: ArrayLike, indices_or_sections: int | Sequence[int] | ArrayLike) -> list[Array]:\n """"""Split an array into sub-arrays depth-wise.\n\n JAX implementation of :func:`numpy.dsplit`.\n\n Refer to the documentation of :func:`jax.numpy.split` for details. ``dsplit`` is\n equivalent to ``split`` with ``axis=2``.\n\n Examples:\n\n >>> x = jnp.arange(12).reshape(3, 1, 4)\n >>> print(x)\n [[[ 0 1 2 3]]\n \n [[ 4 5 6 7]]\n \n [[ 8 9 10 11]]]\n >>> x1, x2 = jnp.dsplit(x, 2)\n >>> print(x1)\n [[[0 1]]\n \n [[4 5]]\n \n [[8 9]]]\n >>> print(x2)\n [[[ 2 3]]\n \n [[ 6 7]]\n \n [[10 11]]]\n\n See also:\n - :func:`jax.numpy.split`: split an array along any axis.\n - :func:`jax.numpy.vsplit`: split vertically, i.e. along axis=0\n - :func:`jax.numpy.hsplit`: split horizontally, i.e. 
along axis=1\n - :func:`jax.numpy.array_split`: like ``split``, but allows ``indices_or_sections``\n to be an integer that does not evenly divide the size of the array.\n """"""\n return _split(""dsplit"", ary, indices_or_sections, axis=2)\n\n\n@export\ndef array_split(ary: ArrayLike, indices_or_sections: int | Sequence[int] | ArrayLike,\n axis: int = 0) -> list[Array]:\n """"""Split an array into sub-arrays.\n\n JAX implementation of :func:`numpy.array_split`.\n\n Refer to the documentation of :func:`jax.numpy.split` for details; ``array_split``\n is equivalent to ``split``, but allows integer ``indices_or_sections`` which does\n not evenly divide the split axis.\n\n Examples:\n >>> x = jnp.array([1, 2, 3, 4, 5, 6, 7, 8, 9])\n >>> chunks = jnp.array_split(x, 4)\n >>> print(*chunks)\n [1 2 3] [4 5] [6 7] [8 9]\n\n See also:\n - :func:`jax.numpy.split`: split an array along any axis.\n - :func:`jax.numpy.vsplit`: split vertically, i.e. along axis=0\n - :func:`jax.numpy.hsplit`: split horizontally, i.e. along axis=1\n - :func:`jax.numpy.dsplit`: split depth-wise, i.e. along axis=2\n """"""\n return _split(""array_split"", ary, indices_or_sections, axis=axis)\n\n\n@export\n@jit\ndef clip(\n arr: ArrayLike | None = None,\n /,\n min: ArrayLike | None = None,\n max: ArrayLike | None = None,\n *,\n a: ArrayLike | DeprecatedArg = DeprecatedArg(),\n a_min: ArrayLike | None | DeprecatedArg = DeprecatedArg(),\n a_max: ArrayLike | None | DeprecatedArg = DeprecatedArg()\n) -> Array:\n """"""Clip array values to a specified range.\n\n JAX implementation of :func:`numpy.clip`.\n\n Args:\n arr: N-dimensional array to be clipped.\n min: optional minimum value of the clipped range; if ``None`` (default) then\n result will not be clipped to any minimum value. If specified, it should be\n broadcast-compatible with ``arr`` and ``max``.\n max: optional maximum value of the clipped range; if ``None`` (default) then\n result will not be clipped to any maximum value. If specified, it should be\n broadcast-compatible with ``arr`` and ``min``.\n a: deprecated alias of the ``arr`` argument. Will result in a\n :class:`DeprecationWarning` if used.\n a_min: deprecated alias of the ``min`` argument. Will result in a\n :class:`DeprecationWarning` if used.\n a_max: deprecated alias of the ``max`` argument. Will result in a\n :class:`DeprecationWarning` if used.\n\n Returns:\n An array containing values from ``arr``, with values smaller than ``min`` set\n to ``min``, and values larger than ``max`` set to ``max``.\n Wherever ``min`` is larger than ``max``, the value of ``max`` is returned.\n\n See also:\n - :func:`jax.numpy.minimum`: Compute the element-wise minimum value of two arrays.\n - :func:`jax.numpy.maximum`: Compute the element-wise maximum value of two arrays.\n\n Examples:\n >>> arr = jnp.array([0, 1, 2, 3, 4, 5, 6, 7])\n >>> jnp.clip(arr, 2, 5)\n Array([2, 2, 2, 3, 4, 5, 5, 5], dtype=int32)\n """"""\n # TODO(micky774): deprecated 2024-4-2, remove after deprecation expires.\n arr = a if not isinstance(a, DeprecatedArg) else arr\n if arr is None:\n raise ValueError(""No input was provided to the clip function."")\n min = a_min if not isinstance(a_min, DeprecatedArg) else min\n max = a_max if not isinstance(a_max, DeprecatedArg) else max\n if any(not isinstance(t, DeprecatedArg) for t in (a, a_min, a_max)):\n deprecations.warn(\n ""jax-numpy-clip-args"",\n (""Passing arguments 'a', 'a_min' or 'a_max' to jax.numpy.clip is ""\n ""deprecated. 
Please use 'arr', 'min' or 'max' respectively instead.""),\n stacklevel=2,\n )\n\n util.check_arraylike(""clip"", arr)\n if any(iscomplexobj(t) for t in (arr, min, max)):\n raise ValueError(\n ""Clip received a complex value either through the input or the min/max ""\n ""keywords. Complex values have no ordering and cannot be clipped. ""\n ""Please convert to a real value or array by taking the real or ""\n ""imaginary components via jax.numpy.real/imag respectively."")\n if min is not None:\n arr = ufuncs.maximum(min, arr)\n if max is not None:\n arr = ufuncs.minimum(max, arr) # type: ignore\n return asarray(arr)\n\n\n@export\n@partial(jit, static_argnames=('decimals',))\ndef round(a: ArrayLike, decimals: int = 0, out: None = None) -> Array:\n """"""Round input evenly to the given number of decimals.\n\n JAX implementation of :func:`numpy.round`.\n\n Args:\n a: input array or scalar.\n decimals: int, default=0. Number of decimal points to which the input needs\n to be rounded. It must be specified statically. Not implemented for\n ``decimals < 0``.\n out: Unused by JAX.\n\n Returns:\n An array containing the rounded values to the specified ``decimals`` with\n same shape and dtype as ``a``.\n\n Note:\n ``jnp.round`` rounds to the nearest even integer for the values exactly halfway\n between rounded decimal values.\n\n See also:\n - :func:`jax.numpy.floor`: Rounds the input to the nearest integer downwards.\n - :func:`jax.numpy.ceil`: Rounds the input to the nearest integer upwards.\n - :func:`jax.numpy.fix` and :func:`jax.numpy.trunc`: Rounds the input to the\n nearest integer towards zero.\n\n Examples:\n >>> x = jnp.array([1.532, 3.267, 6.149])\n >>> jnp.round(x)\n Array([2., 3., 6.], dtype=float32)\n >>> jnp.round(x, decimals=2)\n Array([1.53, 3.27, 6.15], dtype=float32)\n\n For values exactly halfway between rounded values:\n\n >>> x1 = jnp.array([10.5, 21.5, 12.5, 31.5])\n >>> jnp.round(x1)\n Array([10., 22., 12., 32.], dtype=float32)\n """"""\n a = util.ensure_arraylike(""round"", a)\n decimals = core.concrete_or_error(operator.index, decimals, ""'decimals' argument of jnp.round"")\n if out is not None:\n raise NotImplementedError(""The 'out' argument to jnp.round is not supported."")\n dtype = _dtype(a)\n if issubdtype(dtype, np.integer):\n if decimals < 0:\n raise NotImplementedError(\n ""integer np.round not implemented for decimals < 0"")\n return a # no-op on integer types\n\n def _round_float(x: ArrayLike) -> Array:\n if decimals == 0:\n return lax.round(x, lax.RoundingMethod.TO_NEAREST_EVEN)\n\n # TODO(phawkins): the strategy of rescaling the value isn't necessarily a\n # good one since we may be left with an incorrectly rounded value at the\n # end due to precision problems. 
As a workaround for float16, convert to\n # float32,\n x = lax.convert_element_type(x, np.float32) if dtype == np.float16 else x\n factor = _lax_const(x, 10 ** decimals)\n out = lax.div(lax.round(lax.mul(x, factor),\n lax.RoundingMethod.TO_NEAREST_EVEN), factor)\n return lax.convert_element_type(out, dtype) if dtype == np.float16 else out\n\n if issubdtype(dtype, np.complexfloating):\n return lax.complex(_round_float(lax.real(a)), _round_float(lax.imag(a)))\n else:\n return _round_float(a)\n\n\n@export\n@partial(jit, static_argnames=('decimals',))\ndef around(a: ArrayLike, decimals: int = 0, out: None = None) -> Array:\n """"""Alias of :func:`jax.numpy.round`""""""\n return round(a, decimals, out)\n\n\n@export\n@jit\ndef fix(x: ArrayLike, out: None = None) -> Array:\n """"""Round input to the nearest integer towards zero.\n\n JAX implementation of :func:`numpy.fix`.\n\n Args:\n x: input array.\n out: unused by JAX.\n\n Returns:\n An array with same shape and dtype as ``x`` containing the rounded values.\n\n See also:\n - :func:`jax.numpy.trunc`: Rounds the input to nearest integer towards zero.\n - :func:`jax.numpy.ceil`: Rounds the input up to the nearest integer.\n - :func:`jax.numpy.floor`: Rounds the input down to the nearest integer.\n\n Examples:\n >>> key = jax.random.key(0)\n >>> x = jax.random.uniform(key, (3, 3), minval=-5, maxval=5)\n >>> with jnp.printoptions(precision=2, suppress=True):\n ... print(x)\n [[ 4.48 4.79 -1.68]\n [-0.31 0.7 -3.34]\n [-1.9 1.89 2.47]]\n >>> jnp.fix(x)\n Array([[ 4., 4., -1.],\n [-0., 0., -3.],\n [-1., 1., 2.]], dtype=float32)\n """"""\n x = util.ensure_arraylike(""fix"", x)\n if out is not None:\n raise NotImplementedError(""The 'out' argument to jnp.fix is not supported."")\n zero = _lax_const(x, 0)\n return where(lax.ge(x, zero), ufuncs.floor(x), ufuncs.ceil(x))\n\n\n@export\n@jit\ndef nan_to_num(x: ArrayLike, copy: bool = True, nan: ArrayLike = 0.0,\n posinf: ArrayLike | None = None,\n neginf: ArrayLike | None = None) -> Array:\n """"""Replace NaN and infinite entries in an array.\n\n JAX implementation of :func:`numpy.nan_to_num`.\n\n Args:\n x: array of values to be replaced. If it does not have an inexact\n dtype it will be returned unmodified.\n copy: unused by JAX\n nan: value to substitute for NaN entries. 
Defaults to 0.0.\n posinf: value to substitute for positive infinite entries.\n Defaults to the maximum representable value.\n neginf: value to substitute for negative infinite entries.\n Defaults to the minimum representable value.\n\n Returns:\n A copy of ``x`` with the requested substitutions.\n\n See also:\n - :func:`jax.numpy.isnan`: return True where the array contains NaN\n - :func:`jax.numpy.isposinf`: return True where the array contains +inf\n - :func:`jax.numpy.isneginf`: return True where the array contains -inf\n\n Examples:\n >>> x = jnp.array([0, jnp.nan, 1, jnp.inf, 2, -jnp.inf])\n\n Default substitution values:\n\n >>> jnp.nan_to_num(x)\n Array([ 0.0000000e+00, 0.0000000e+00, 1.0000000e+00, 3.4028235e+38,\n 2.0000000e+00, -3.4028235e+38], dtype=float32)\n\n Overriding substitutions for ``-inf`` and ``+inf``:\n\n >>> jnp.nan_to_num(x, posinf=999, neginf=-999)\n Array([ 0., 0., 1., 999., 2., -999.], dtype=float32)\n\n If you only wish to substitute for NaN values while leaving ``inf`` values\n untouched, using :func:`~jax.numpy.where` with :func:`jax.numpy.isnan` is\n a better option:\n\n >>> jnp.where(jnp.isnan(x), 0, x)\n Array([ 0., 0., 1., inf, 2., -inf], dtype=float32)\n """"""\n del copy\n x = util.ensure_arraylike(""nan_to_num"", x)\n dtype = _dtype(x)\n if not issubdtype(dtype, np.inexact):\n return x\n if issubdtype(dtype, np.complexfloating):\n return lax.complex(\n nan_to_num(lax.real(x), nan=nan, posinf=posinf, neginf=neginf),\n nan_to_num(lax.imag(x), nan=nan, posinf=posinf, neginf=neginf))\n info = finfo(dtypes.canonicalize_dtype(dtype))\n posinf = info.max if posinf is None else posinf\n neginf = info.min if neginf is None else neginf\n out = where(ufuncs.isnan(x), asarray(nan, dtype=dtype), x)\n out = where(ufuncs.isposinf(out), asarray(posinf, dtype=dtype), out)\n out = where(ufuncs.isneginf(out), asarray(neginf, dtype=dtype), out)\n return out\n\n\n@export\n@partial(jit, static_argnames=('equal_nan',))\ndef allclose(a: ArrayLike, b: ArrayLike, rtol: ArrayLike = 1e-05,\n atol: ArrayLike = 1e-08, equal_nan: bool = False) -> Array:\n r""""""Check if two arrays are element-wise approximately equal within a tolerance.\n\n JAX implementation of :func:`numpy.allclose`.\n\n Essentially this function evaluates the following condition:\n\n .. math::\n\n |a - b| \le \mathtt{atol} + \mathtt{rtol} * |b|\n\n ``jnp.inf`` in ``a`` will be considered equal to ``jnp.inf`` in ``b``.\n\n Args:\n a: first input array to compare.\n b: second input array to compare.\n rtol: relative tolerance used for approximate equality. Default = 1e-05.\n atol: absolute tolerance used for approximate equality. Default = 1e-08.\n equal_nan: Boolean. If ``True``, NaNs in ``a`` will be considered\n equal to NaNs in ``b``. Default is ``False``.\n\n Returns:\n Boolean scalar array indicating whether the input arrays are element-wise\n approximately equal within the specified tolerances.\n\n See Also:\n - :func:`jax.numpy.isclose`\n - :func:`jax.numpy.equal`\n\n Examples:\n >>> jnp.allclose(jnp.array([1e6, 2e6, 3e6]), jnp.array([1e6, 2e6, 3e7]))\n Array(False, dtype=bool)\n >>> jnp.allclose(jnp.array([1e6, 2e6, 3e6]),\n ... jnp.array([1.00008e6, 2.00008e7, 3.00008e8]), rtol=1e3)\n Array(True, dtype=bool)\n >>> jnp.allclose(jnp.array([1e6, 2e6, 3e6]),\n ... jnp.array([1.00001e6, 2.00002e6, 3.00009e6]), atol=1e3)\n Array(True, dtype=bool)\n >>> jnp.allclose(jnp.array([jnp.nan, 1, 2]),\n ... 
jnp.array([jnp.nan, 1, 2]), equal_nan=True)\n Array(True, dtype=bool)\n """"""\n util.check_arraylike(""allclose"", a, b)\n return reductions.all(isclose(a, b, rtol, atol, equal_nan))\n\n\n@export\ndef nonzero(a: ArrayLike, *, size: int | None = None,\n fill_value: None | ArrayLike | tuple[ArrayLike, ...] = None\n ) -> tuple[Array, ...]:\n """"""Return indices of nonzero elements of an array.\n\n JAX implementation of :func:`numpy.nonzero`.\n\n Because the size of the output of ``nonzero`` is data-dependent, the function\n is not compatible with JIT and other transformations. The JAX version adds\n the optional ``size`` argument which must be specified statically for\n ``jnp.nonzero`` to be used within JAX's transformations.\n\n Args:\n a: N-dimensional array.\n size: optional static integer specifying the number of nonzero entries to\n return. If there are more nonzero elements than the specified ``size``,\n then indices will be truncated at the end. If there are fewer nonzero\n elements than the specified size, then indices will be padded with\n ``fill_value``, which defaults to zero.\n fill_value: optional padding value when ``size`` is specified. Defaults to 0.\n\n Returns:\n Tuple of JAX Arrays of length ``a.ndim``, containing the indices of each\n nonzero value.\n\n See also:\n - :func:`jax.numpy.flatnonzero`\n - :func:`jax.numpy.where`\n\n Examples:\n\n One-dimensional array returns a length-1 tuple of indices:\n\n >>> x = jnp.array([0, 5, 0, 6, 0, 7])\n >>> jnp.nonzero(x)\n (Array([1, 3, 5], dtype=int32),)\n\n Two-dimensional array returns a length-2 tuple of indices:\n\n >>> x = jnp.array([[0, 5, 0],\n ... [6, 0, 7]])\n >>> jnp.nonzero(x)\n (Array([0, 1, 1], dtype=int32), Array([1, 0, 2], dtype=int32))\n\n In either case, the resulting tuple of indices can be used directly to extract\n the nonzero values:\n\n >>> indices = jnp.nonzero(x)\n >>> x[indices]\n Array([5, 6, 7], dtype=int32)\n\n The output of ``nonzero`` has a dynamic shape, because the number of returned\n indices depends on the contents of the input array. As such, it is incompatible\n with JIT and other JAX transformations:\n\n >>> x = jnp.array([0, 5, 0, 6, 0, 7])\n >>> jax.jit(jnp.nonzero)(x) # doctest: +IGNORE_EXCEPTION_DETAIL\n Traceback (most recent call last):\n ...\n ConcretizationTypeError: Abstract tracer value encountered where concrete value is expected: traced array with shape int32[].\n The size argument of jnp.nonzero must be statically specified to use jnp.nonzero within JAX transformations.\n\n This can be addressed by passing a static ``size`` parameter to specify the\n desired output shape:\n\n >>> nonzero_jit = jax.jit(jnp.nonzero, static_argnames='size')\n >>> nonzero_jit(x, size=3)\n (Array([1, 3, 5], dtype=int32),)\n\n If ``size`` does not match the true size, the result will be either truncated or padded:\n\n >>> nonzero_jit(x, size=2) # size < 3: indices are truncated\n (Array([1, 3], dtype=int32),)\n >>> nonzero_jit(x, size=5) # size > 3: indices are padded with zeros.\n (Array([1, 3, 5, 0, 0], dtype=int32),)\n\n You can specify a custom fill value for the padding using the ``fill_value`` argument:\n\n >>> nonzero_jit(x, size=5, fill_value=len(x))\n (Array([1, 3, 5, 6, 6], dtype=int32),)\n """"""\n arr = util.ensure_arraylike(""nonzero"", a)\n del a\n if np.ndim(arr) == 0:\n raise ValueError(""Calling nonzero on 0d arrays is not allowed. 
""\n ""Use jnp.atleast_1d(scalar).nonzero() instead."")\n mask = arr if arr.dtype == bool else (arr != 0)\n calculated_size_ = mask.sum() if size is None else size\n calculated_size: int = core.concrete_dim_or_error(calculated_size_,\n ""The size argument of jnp.nonzero must be statically specified ""\n ""to use jnp.nonzero within JAX transformations."")\n if arr.size == 0 or calculated_size == 0:\n return tuple(zeros(calculated_size, int) for dim in arr.shape)\n flat_indices = reductions.cumsum(\n bincount(reductions.cumsum(mask), length=calculated_size))\n strides: np.ndarray = (np.cumprod(arr.shape[::-1])[::-1] // arr.shape).astype(flat_indices.dtype)\n out = tuple((flat_indices // stride) % size for stride, size in zip(strides, arr.shape))\n if fill_value is not None:\n fill_value_tup = fill_value if isinstance(fill_value, tuple) else arr.ndim * (fill_value,)\n if any(np.shape(val) != () for val in fill_value_tup):\n raise ValueError(f""fill_value must be a scalar or a tuple of length {arr.ndim}; got {fill_value}"")\n fill_mask = arange(calculated_size) >= mask.sum()\n out = tuple(where(fill_mask, fval, entry) for fval, entry in safe_zip(fill_value_tup, out))\n return out\n\n\n@export\ndef flatnonzero(a: ArrayLike, *, size: int | None = None,\n fill_value: None | ArrayLike | tuple[ArrayLike, ...] = None) -> Array:\n """"""Return indices of nonzero elements in a flattened array\n\n JAX implementation of :func:`numpy.flatnonzero`.\n\n ``jnp.flatnonzero(x)`` is equivalent to ``nonzero(ravel(a))[0]``. For a full\n discussion of the parameters to this function, refer to :func:`jax.numpy.nonzero`.\n\n Args:\n a: N-dimensional array.\n size: optional static integer specifying the number of nonzero entries to\n return. See :func:`jax.numpy.nonzero` for more discussion of this parameter.\n fill_value: optional padding value when ``size`` is specified. Defaults to 0.\n See :func:`jax.numpy.nonzero` for more discussion of this parameter.\n\n Returns:\n Array containing the indices of each nonzero value in the flattened array.\n\n See Also:\n - :func:`jax.numpy.nonzero`\n - :func:`jax.numpy.where`\n\n Examples:\n >>> x = jnp.array([[0, 5, 0],\n ... [6, 0, 8]])\n >>> jnp.flatnonzero(x)\n Array([1, 3, 5], dtype=int32)\n\n This is equivalent to calling :func:`~jax.numpy.nonzero` on the flattened\n array, and extracting the first entry in the resulting tuple:\n\n >>> jnp.nonzero(x.ravel())[0]\n Array([1, 3, 5], dtype=int32)\n\n The returned indices can be used to extract nonzero entries from the\n flattened array:\n\n >>> indices = jnp.flatnonzero(x)\n >>> x.ravel()[indices]\n Array([5, 6, 8], dtype=int32)\n """"""\n return nonzero(ravel(a), size=size, fill_value=fill_value)[0]\n\n\n@export\n@partial(jit, static_argnames=('axis',))\ndef unwrap(p: ArrayLike, discont: ArrayLike | None = None,\n axis: int = -1, period: ArrayLike = 2 * np.pi) -> Array:\n """"""Unwrap a periodic signal.\n\n JAX implementation of :func:`numpy.unwrap`.\n\n Args:\n p: input array\n discont: the maximum allowable discontinuity in the sequence. 
The\n default is ``period / 2``\n axis: the axis along which to unwrap; defaults to -1\n period: the period of the signal, which defaults to :math:`2\\pi`\n\n Returns:\n An unwrapped copy of ``p``.\n\n Examples:\n Consider a situation in which you are making measurements of the position of\n a rotating disk via the ``x`` and ``y`` locations of some point on that disk.\n The underlying variable is an always-increasing angle which we'll generate\n this way, using degrees for ease of representation:\n\n >>> rng = np.random.default_rng(0)\n >>> theta = rng.integers(0, 90, size=(20,)).cumsum()\n >>> theta\n array([ 76, 133, 179, 203, 230, 233, 239, 240, 255, 328, 386, 468, 513,\n 567, 654, 719, 775, 823, 873, 957])\n\n Our observations of this angle are the ``x`` and ``y`` coordinates, given by\n the sine and cosine of this underlying angle:\n\n >>> x, y = jnp.sin(jnp.deg2rad(theta)), jnp.cos(jnp.deg2rad(theta))\n\n Now, say that given these ``x`` and ``y`` coordinates, we wish to recover\n the original angle ``theta``. We might do this via the :func:`atan2` function:\n\n >>> theta_out = jnp.rad2deg(jnp.atan2(x, y)).round()\n >>> theta_out\n Array([ 76., 133., 179., -157., -130., -127., -121., -120., -105.,\n -32., 26., 108., 153., -153., -66., -1., 55., 103.,\n 153., -123.], dtype=float32)\n\n The first few values match the input angle ``theta`` above, but after this the\n values are wrapped because the ``sin`` and ``cos`` observations obscure the phase\n information. The purpose of the :func:`unwrap` function is to recover the original\n signal from this wrapped view of it:\n\n >>> jnp.unwrap(theta_out, period=360)\n Array([ 76., 133., 179., 203., 230., 233., 239., 240., 255., 328., 386.,\n 468., 513., 567., 654., 719., 775., 823., 873., 957.], dtype=float32)\n\n It does this by assuming that the true underlying sequence does not differ by more than\n ``discont`` (which defaults to ``period / 2``) within a single step, and when it encounters\n a larger discontinuity it adds factors of the period to the data. 
For periodic signals\n that satisfy this assumption, :func:`unwrap` can recover the original phased signal.\n """"""\n p = util.ensure_arraylike(""unwrap"", p)\n if issubdtype(p.dtype, np.complexfloating):\n raise ValueError(""jnp.unwrap does not support complex inputs."")\n if p.shape[axis] == 0:\n return util.promote_dtypes_inexact(p)[0]\n if discont is None:\n discont = period / 2\n interval = period / 2\n dd = diff(p, axis=axis)\n ddmod = ufuncs.mod(dd + interval, period) - interval\n ddmod = where((ddmod == -interval) & (dd > 0), interval, ddmod)\n\n ph_correct = where(ufuncs.abs(dd) < discont, 0, ddmod - dd)\n\n up = concatenate((\n lax.slice_in_dim(p, 0, 1, axis=axis),\n lax.slice_in_dim(p, 1, None, axis=axis) + reductions.cumsum(ph_correct, axis=axis)\n ), axis=axis)\n\n return up\n\n\n### Padding\n\nPadValueLike = Union[T, Sequence[T], Sequence[Sequence[T]]]\nPadValue = tuple[tuple[T, T], ...]\n\nclass PadStatFunc(Protocol):\n def __call__(self, array: ArrayLike, /, *,\n axis: int | None = None,\n keepdims: bool = False) -> Array: ...\n\n\ndef _broadcast_to_pairs(nvals: PadValueLike, nd: int, name: str) -> PadValue:\n try:\n nvals = np.asarray(tree_map(\n lambda x: core.concrete_or_error(None, x, context=f""{name} argument of jnp.pad""),\n nvals))\n except ValueError as e:\n # In numpy 1.24\n if ""array has an inhomogeneous shape"" in str(e):\n raise TypeError(f'`{name}` entries must be the same shape: {nvals}') from e\n raise\n\n def as_scalar_dim(v):\n if core.is_dim(v) or not np.shape(v):\n return v\n else:\n raise TypeError(f'`{name}` entries must be the same shape: {nvals}')\n\n if nvals.shape == (nd, 2):\n # ((before_1, after_1), ..., (before_N, after_N))\n return tuple((as_scalar_dim(nval[0]), as_scalar_dim(nval[1])) for nval in nvals)\n elif nvals.shape == (1, 2):\n # ((before, after),)\n v1_2 = as_scalar_dim(nvals[0, 0]), as_scalar_dim(nvals[0, 1])\n return tuple(v1_2 for i in range(nd))\n elif nvals.shape == (2,):\n # (before, after) (not in the numpy docstring but works anyway)\n v1_2 = as_scalar_dim(nvals[0]), as_scalar_dim(nvals[1])\n return tuple(v1_2 for i in range(nd))\n elif nvals.shape == (1,):\n # (pad,)\n v = as_scalar_dim(nvals[0])\n return tuple((v, v) for i in range(nd))\n elif nvals.shape == ():\n # pad\n v = as_scalar_dim(nvals.flat[0])\n return tuple((v, v) for i in range(nd))\n else:\n raise ValueError(f""jnp.pad: {name} with {nd=} has unsupported shape {nvals.shape}. ""\n f""Valid shapes are ({nd}, 2), (1, 2), (2,), (1,), or ()."")\n\n\ndef _check_no_padding(axis_padding: tuple[Any, Any], mode: str):\n if (axis_padding[0] > 0 or axis_padding[1] > 0):\n msg = ""Cannot apply '{}' padding to empty axis""\n raise ValueError(msg.format(mode))\n\n\ndef _pad_constant(array: Array, pad_width: PadValue[int], constant_values: Array) -> Array:\n nd = np.ndim(array)\n constant_values = lax_internal._convert_element_type(\n constant_values, array.dtype, dtypes.is_weakly_typed(array))\n constant_values_nd = np.ndim(constant_values)\n\n if constant_values_nd == 0:\n widths = [(low, high, 0) for (low, high) in pad_width]\n return lax.pad(array, constant_values, widths)\n\n if constant_values_nd == 1:\n if constant_values.shape[-1] == 1:\n widths = [(low, high, 0) for (low, high) in pad_width]\n return lax.pad(array, squeeze(constant_values), widths)\n elif constant_values.shape[-1] != 2:\n raise ValueError(""jnp.pad: constant_values has unsupported shape ""\n f""{constant_values.shape}. 
If the shape is 1D or 2D, the ""\n ""last dimension must be of size 1 or 2."")\n\n constant_values = broadcast_to(constant_values, (nd, 2))\n for i in range(nd):\n widths = [(0, 0, 0)] * nd\n if pad_width[i][0] != 0:\n widths[i] = (pad_width[i][0], 0, 0)\n array = lax.pad(array, constant_values[i, 0], widths)\n if pad_width[i][1] != 0:\n widths[i] = (0, pad_width[i][1], 0)\n array = lax.pad(array, constant_values[i, 1], widths)\n return array\n\n\ndef _pad_wrap(array: Array, pad_width: PadValue[int]) -> Array:\n for i in range(np.ndim(array)):\n if array.shape[i] == 0:\n _check_no_padding(pad_width[i], ""wrap"")\n continue\n size = array.shape[i]\n left_repeats, left_remainder = divmod(pad_width[i][0], size)\n right_repeats, right_remainder = divmod(pad_width[i][1], size)\n total_repeats = left_repeats + right_repeats + 1\n parts = []\n if left_remainder > 0:\n parts += [lax.slice_in_dim(array, size - left_remainder, size, axis=i)]\n parts += total_repeats * [array]\n if right_remainder > 0:\n parts += [lax.slice_in_dim(array, 0, right_remainder, axis=i)]\n array = lax.concatenate(parts, dimension=i)\n return array\n\n\ndef _pad_symmetric_or_reflect(array: Array, pad_width: PadValue[int],\n mode: str, reflect_type: str) -> Array:\n assert mode in (""symmetric"", ""reflect"")\n assert reflect_type in (""even"", ""odd"")\n\n for i in range(np.ndim(array)):\n if array.shape[i] == 0:\n _check_no_padding(pad_width[i], mode)\n continue\n\n axis_size = array.shape[i]\n\n def build_padding(array, padding, before):\n if before:\n edge = lax.slice_in_dim(array, 0, 1, axis=i)\n else:\n edge = lax.slice_in_dim(array, -1, None, axis=i)\n\n # Try to give nicer error messages for unsupported shape polymorphic uses\n shape_poly_error_msg = lambda: (\n ""Shape polymorphism is supported for jnp.pad with 'reflect' or ""\n ""'symmetric' padding mode only when it is possible to determine ""\n f""at lowering time that the axis size (= {axis_size}) is larger than 1 ""\n f""and larger or equal than the padding length (= {padding}). 
""\n f""Error while handling {'left' if before else 'right'} padding on axis {i}."")\n try:\n # We check that we can determine all comparisons.\n offset = 1 if (mode == ""reflect"" and axis_size > 1) else 0\n has_poly_dim = not core.is_constant_shape((axis_size, padding))\n # For shape polymorphism, ensure the loop below ends after 1 iteration\n if has_poly_dim and not (axis_size > 1 and axis_size - offset >= padding):\n raise ValueError(shape_poly_error_msg())\n except core.InconclusiveDimensionOperation as e:\n raise ValueError(shape_poly_error_msg()) from e\n\n while padding > 0:\n curr_pad = min(padding, axis_size - offset)\n padding -= curr_pad\n if has_poly_dim: assert padding == 0\n\n if before:\n start = offset\n stop = offset + curr_pad\n else:\n start = -(curr_pad + offset)\n stop = None if (mode == ""symmetric"" or axis_size == 1) else -1\n\n x = lax.slice_in_dim(array, start, stop, axis=i)\n x = flip(x, axis=i)\n\n if reflect_type == 'odd':\n x = 2 * edge - x\n if axis_size > 1:\n if before:\n edge = lax.slice_in_dim(x, 0, 1, axis=i)\n else:\n edge = lax.slice_in_dim(x, -1, None, axis=i)\n\n if before:\n array = lax.concatenate([x, array], dimension=i)\n else:\n array = lax.concatenate([array, x], dimension=i)\n return array\n\n array = build_padding(array, pad_width[i][0], before=True)\n array = build_padding(array, pad_width[i][1], before=False)\n return array\n\n\ndef _pad_edge(array: Array, pad_width: PadValue[int]) -> Array:\n nd = np.ndim(array)\n for i in range(nd):\n if array.shape[i] == 0:\n _check_no_padding(pad_width[i], ""edge"")\n continue\n\n n = array.shape[i]\n npad_before, npad_after = pad_width[i]\n\n edge_before = lax.slice_in_dim(array, 0, 1, axis=i)\n pad_before = repeat(edge_before, npad_before, axis=i)\n\n edge_after = lax.slice_in_dim(array, n-1, n, axis=i)\n pad_after = repeat(edge_after, npad_after, axis=i)\n\n array = lax.concatenate([pad_before, array, pad_after], dimension=i)\n return array\n\n\ndef _pad_linear_ramp(array: Array, pad_width: PadValue[int],\n end_values: PadValue[ArrayLike]) -> Array:\n for axis in range(np.ndim(array)):\n edge_before = lax.slice_in_dim(array, 0, 1, axis=axis)\n edge_after = lax.slice_in_dim(array, -1, None, axis=axis)\n ramp_before = linspace(\n start=end_values[axis][0],\n stop=edge_before.squeeze(axis), # Dimension is replaced by linspace\n num=pad_width[axis][0],\n endpoint=False,\n dtype=array.dtype,\n axis=axis\n )\n ramp_before = lax_internal._convert_element_type(\n ramp_before, weak_type=dtypes.is_weakly_typed(array))\n ramp_after = linspace(\n start=end_values[axis][1],\n stop=edge_after.squeeze(axis), # Dimension is replaced by linspace\n num=pad_width[axis][1],\n endpoint=False,\n dtype=array.dtype,\n axis=axis\n )\n ramp_after = lax_internal._convert_element_type(\n ramp_after, weak_type=dtypes.is_weakly_typed(array))\n\n # Reverse linear space in appropriate dimension\n ramp_after = flip(ramp_after, axis)\n\n array = lax.concatenate([ramp_before, array, ramp_after], dimension=axis)\n return array\n\n\ndef _pad_stats(array: Array, pad_width: PadValue[int],\n stat_length: PadValue[int] | None,\n stat_func: PadStatFunc) -> Array:\n nd = np.ndim(array)\n for i in range(nd):\n if stat_length is None:\n stat_before = stat_func(array, axis=i, keepdims=True)\n stat_after = stat_before\n else:\n array_length = array.shape[i]\n length_before, length_after = stat_length[i]\n if length_before == 0 or length_after == 0:\n raise ValueError(""stat_length of 0 yields no value for padding"")\n\n # Limit stat_length to 
length of array.\n length_before = min(length_before, array_length)\n length_after = min(length_after, array_length)\n\n slice_before = lax.slice_in_dim(array, 0, length_before, axis=i)\n slice_after = lax.slice_in_dim(array, -length_after, None, axis=i)\n stat_before = stat_func(slice_before, axis=i, keepdims=True)\n stat_after = stat_func(slice_after, axis=i, keepdims=True)\n\n if np.issubdtype(array.dtype, np.integer):\n stat_before = round(stat_before)\n stat_after = round(stat_after)\n\n stat_before = lax_internal._convert_element_type(\n stat_before, array.dtype, dtypes.is_weakly_typed(array))\n stat_after = lax_internal._convert_element_type(\n stat_after, array.dtype, dtypes.is_weakly_typed(array))\n\n npad_before, npad_after = pad_width[i]\n pad_before = repeat(stat_before, npad_before, axis=i)\n pad_after = repeat(stat_after, npad_after, axis=i)\n\n array = lax.concatenate([pad_before, array, pad_after], dimension=i)\n return array\n\n\ndef _pad_empty(array: Array, pad_width: PadValue[int]) -> Array:\n # Note: jax.numpy.empty = jax.numpy.zeros\n for i in range(np.ndim(array)):\n shape_before = array.shape[:i] + (pad_width[i][0],) + array.shape[i + 1:]\n pad_before = empty_like(array, shape=shape_before)\n\n shape_after = array.shape[:i] + (pad_width[i][1],) + array.shape[i + 1:]\n pad_after = empty_like(array, shape=shape_after)\n array = lax.concatenate([pad_before, array, pad_after], dimension=i)\n return array\n\n\ndef _pad_func(array: Array, pad_width: PadValue[int], func: Callable[..., Any], **kwargs) -> Array:\n pad_width = _broadcast_to_pairs(pad_width, np.ndim(array), ""pad_width"")\n padded = _pad_constant(array, pad_width, asarray(0))\n for axis in range(np.ndim(padded)):\n padded = apply_along_axis(func, axis, padded, pad_width[axis], axis, kwargs)\n return padded\n\n\n@partial(jit, static_argnums=(1, 2, 4, 5, 6))\ndef _pad(array: ArrayLike, pad_width: PadValueLike[int], mode: str,\n constant_values: ArrayLike, stat_length: PadValueLike[int],\n end_values: PadValueLike[ArrayLike], reflect_type: str):\n array = asarray(array)\n nd = np.ndim(array)\n\n if nd == 0:\n return array\n\n stat_funcs: dict[str, PadStatFunc] = {\n ""maximum"": reductions.amax,\n ""minimum"": reductions.amin,\n ""mean"": reductions.mean,\n ""median"": reductions.median\n }\n\n pad_width = _broadcast_to_pairs(pad_width, nd, ""pad_width"")\n pad_width_arr = np.array(pad_width)\n if pad_width_arr.shape != (nd, 2):\n raise ValueError(f""Expected pad_width to have shape {(nd, 2)}; got {pad_width_arr.shape}."")\n\n if np.any(pad_width_arr < 0):\n raise ValueError(""index can't contain negative values"")\n\n if mode == ""constant"":\n return _pad_constant(array, pad_width, asarray(constant_values))\n\n elif mode == ""wrap"":\n return _pad_wrap(array, pad_width)\n\n elif mode in (""symmetric"", ""reflect""):\n return _pad_symmetric_or_reflect(array, pad_width, str(mode), reflect_type)\n\n elif mode == ""edge"":\n return _pad_edge(array, pad_width)\n\n elif mode == ""linear_ramp"":\n end_values = _broadcast_to_pairs(end_values, nd, ""end_values"")\n return _pad_linear_ramp(array, pad_width, end_values)\n\n elif mode in stat_funcs:\n if stat_length is not None:\n stat_length = _broadcast_to_pairs(stat_length, nd, ""stat_length"")\n return _pad_stats(array, pad_width, stat_length, stat_funcs[str(mode)])\n\n elif mode == ""empty"":\n return _pad_empty(array, pad_width)\n\n else:\n assert False, (""Should not be reached since pad already handled unsupported and""\n ""not implemented modes"")\n\n\n@export\ndef 
pad(array: ArrayLike, pad_width: PadValueLike[int | Array | np.ndarray],\n        mode: str | Callable[..., Any] = ""constant"", **kwargs) -> Array:\n  """"""Add padding to an array.\n\n  JAX implementation of :func:`numpy.pad`.\n\n  Args:\n    array: array to pad.\n    pad_width: specify the pad width for each dimension of an array. Padding widths\n      may be separately specified for *before* and *after* the array. Options are:\n\n      - ``int`` or ``(int,)``: pad each array dimension with the same number of values\n        both before and after.\n      - ``(before, after)``: pad each array with ``before`` elements before, and ``after``\n        elements after\n      - ``((before_1, after_1), (before_2, after_2), ... (before_N, after_N))``: specify\n        distinct ``before`` and ``after`` values for each array dimension.\n\n    mode: a string or callable. Supported pad modes are:\n\n      - ``'constant'`` (default): pad with a constant value, which defaults to zero.\n      - ``'empty'``: pad with empty values (i.e. zero)\n      - ``'edge'``: pad with the edge values of the array.\n      - ``'wrap'``: pad by wrapping the array.\n      - ``'linear_ramp'``: pad with a linear ramp to specified ``end_values``.\n      - ``'maximum'``: pad with the maximum value.\n      - ``'mean'``: pad with the mean value.\n      - ``'median'``: pad with the median value.\n      - ``'minimum'``: pad with the minimum value.\n      - ``'reflect'``: pad by reflection.\n      - ``'symmetric'``: pad by symmetric reflection.\n      - a callable function: see Notes below.\n\n    constant_values: referenced for ``mode = 'constant'``. Specify the constant value\n      to pad with.\n    stat_length: referenced for ``mode in ['maximum', 'mean', 'median', 'minimum']``.\n      An integer or tuple specifying the number of edge values to use when calculating\n      the statistic.\n    end_values: referenced for ``mode = 'linear_ramp'``. Specify the end values to\n      ramp the padding values to.\n    reflect_type: referenced for ``mode in ['reflect', 'symmetric']``. Specify whether\n      to use even or odd reflection.\n\n  Returns:\n    A padded copy of ``array``.\n\n  Notes:\n    When ``mode`` is callable, it should have the following signature::\n\n      def pad_func(row: Array, pad_width: tuple[int, int],\n                   iaxis: int, kwargs: dict) -> Array:\n        ...\n\n    Here ``row`` is a 1D slice of the padded array along axis ``iaxis``, with the pad\n    values filled with zeros. ``pad_width`` is a tuple specifying the ``(before, after)``\n    padding sizes, and ``kwargs`` are any additional keyword arguments passed to the\n    :func:`jax.numpy.pad` function.\n\n    Note that while in NumPy, the function should modify ``row`` in-place, in JAX the\n    function should return the modified ``row``. 
In JAX, the custom padding function\n will be mapped across the padded axis using the :func:`jax.vmap` transformation.\n\n See also:\n - :func:`jax.numpy.resize`: resize an array\n - :func:`jax.numpy.tile`: create a larger array by tiling a smaller array.\n - :func:`jax.numpy.repeat`: create a larger array by repeating values of a smaller array.\n\n Examples:\n\n Pad a 1-dimensional array with zeros:\n\n >>> x = jnp.array([10, 20, 30, 40])\n >>> jnp.pad(x, 2)\n Array([ 0, 0, 10, 20, 30, 40, 0, 0], dtype=int32)\n >>> jnp.pad(x, (2, 4))\n Array([ 0, 0, 10, 20, 30, 40, 0, 0, 0, 0], dtype=int32)\n\n Pad a 1-dimensional array with specified values:\n\n >>> jnp.pad(x, 2, constant_values=99)\n Array([99, 99, 10, 20, 30, 40, 99, 99], dtype=int32)\n\n Pad a 1-dimensional array with the mean array value:\n\n >>> jnp.pad(x, 2, mode='mean')\n Array([25, 25, 10, 20, 30, 40, 25, 25], dtype=int32)\n\n Pad a 1-dimensional array with reflected values:\n\n >>> jnp.pad(x, 2, mode='reflect')\n Array([30, 20, 10, 20, 30, 40, 30, 20], dtype=int32)\n\n Pad a 2-dimensional array with different paddings in each dimension:\n\n >>> x = jnp.array([[1, 2, 3],\n ... [4, 5, 6]])\n >>> jnp.pad(x, ((1, 2), (3, 0)))\n Array([[0, 0, 0, 0, 0, 0],\n [0, 0, 0, 1, 2, 3],\n [0, 0, 0, 4, 5, 6],\n [0, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 0, 0]], dtype=int32)\n\n Pad a 1-dimensional array with a custom padding function:\n\n >>> def custom_pad(row, pad_width, iaxis, kwargs):\n ... # row represents a 1D slice of the zero-padded array.\n ... before, after = pad_width\n ... before_value = kwargs.get('before_value', 0)\n ... after_value = kwargs.get('after_value', 0)\n ... row = row.at[:before].set(before_value)\n ... return row.at[len(row) - after:].set(after_value)\n >>> x = jnp.array([2, 3, 4])\n >>> jnp.pad(x, 2, custom_pad, before_value=-10, after_value=10)\n Array([-10, -10, 2, 3, 4, 10, 10], dtype=int32)\n """"""\n\n array = util.ensure_arraylike(""pad"", array)\n pad_width = _broadcast_to_pairs(pad_width, np.ndim(array), ""pad_width"")\n if pad_width and not all(core.is_dim(p[0]) and core.is_dim(p[1])\n for p in pad_width):\n raise TypeError('`pad_width` must be of integral type.')\n\n if callable(mode):\n return _pad_func(asarray(array), pad_width, mode, **kwargs)\n\n allowed_kwargs = {\n 'empty': [], 'edge': [], 'wrap': [],\n 'constant': ['constant_values'],\n 'linear_ramp': ['end_values'],\n 'maximum': ['stat_length'],\n 'mean': ['stat_length'],\n 'median': ['stat_length'],\n 'minimum': ['stat_length'],\n 'reflect': ['reflect_type'],\n 'symmetric': ['reflect_type'],\n }\n try:\n unsupported_kwargs = set(kwargs) - set(allowed_kwargs[mode])\n except KeyError:\n msg = ""Unimplemented padding mode '{}' for np.pad.""\n raise NotImplementedError(msg.format(mode))\n if unsupported_kwargs:\n raise ValueError(""unsupported keyword arguments for mode '{}': {}""\n .format(mode, unsupported_kwargs))\n # Set default value if not given.\n constant_values = kwargs.get('constant_values', 0)\n stat_length = kwargs.get('stat_length', None)\n end_values = kwargs.get('end_values', 0)\n reflect_type = kwargs.get('reflect_type', ""even"")\n\n return _pad(array, pad_width, mode, constant_values, stat_length, end_values, reflect_type)\n\n### Array-creation functions\n\n\n@export\ndef stack(arrays: np.ndarray | Array | Sequence[ArrayLike],\n axis: int = 0, out: None = None, dtype: DTypeLike | None = None) -> Array:\n """"""Join arrays along a new axis.\n\n JAX implementation of :func:`numpy.stack`.\n\n Args:\n arrays: a sequence of arrays to stack; each 
must have the same shape. If a\n single array is given it will be treated equivalently to\n `arrays = unstack(arrays)`, but the implementation will avoid explicit\n unstacking.\n axis: specify the axis along which to stack.\n out: unused by JAX\n dtype: optional dtype of the resulting array. If not specified, the dtype\n will be determined via type promotion rules described in :ref:`type-promotion`.\n\n Returns:\n the stacked result.\n\n See also:\n - :func:`jax.numpy.unstack`: inverse of ``stack``.\n - :func:`jax.numpy.concatenate`: concatenation along existing axes.\n - :func:`jax.numpy.vstack`: stack vertically, i.e. along axis 0.\n - :func:`jax.numpy.hstack`: stack horizontally, i.e. along axis 1.\n - :func:`jax.numpy.dstack`: stack depth-wise, i.e. along axis 2.\n - :func:`jax.numpy.column_stack`: stack columns.\n\n Examples:\n >>> x = jnp.array([1, 2, 3])\n >>> y = jnp.array([4, 5, 6])\n >>> jnp.stack([x, y])\n Array([[1, 2, 3],\n [4, 5, 6]], dtype=int32)\n >>> jnp.stack([x, y], axis=1)\n Array([[1, 4],\n [2, 5],\n [3, 6]], dtype=int32)\n\n :func:`~jax.numpy.unstack` performs the inverse operation:\n\n >>> arr = jnp.stack([x, y], axis=1)\n >>> x, y = jnp.unstack(arr, axis=1)\n >>> x\n Array([1, 2, 3], dtype=int32)\n >>> y\n Array([4, 5, 6], dtype=int32)\n """"""\n if not len(arrays):\n raise ValueError(""Need at least one array to stack."")\n if out is not None:\n raise NotImplementedError(""The 'out' argument to jnp.stack is not supported."")\n if isinstance(arrays, (np.ndarray, Array)):\n axis = _canonicalize_axis(axis, arrays.ndim)\n return concatenate(expand_dims(arrays, axis + 1), axis=axis, dtype=dtype)\n else:\n arrays = util.ensure_arraylike_tuple(""stack"", arrays)\n shape0 = np.shape(arrays[0])\n axis = _canonicalize_axis(axis, len(shape0) + 1)\n new_arrays = []\n for a in arrays:\n if np.shape(a) != shape0:\n raise ValueError(""All input arrays must have the same shape."")\n new_arrays.append(expand_dims(a, axis))\n return concatenate(new_arrays, axis=axis, dtype=dtype)\n\n\n@export\n@partial(jit, static_argnames=""axis"")\ndef unstack(x: ArrayLike, /, *, axis: int = 0) -> tuple[Array, ...]:\n """"""Unstack an array along an axis.\n\n JAX implementation of :func:`array_api.unstack`.\n\n Args:\n x: array to unstack. Must have ``x.ndim >= 1``.\n axis: integer axis along which to unstack. Must satisfy\n ``-x.ndim <= axis < x.ndim``.\n\n Returns:\n tuple of unstacked arrays.\n\n See also:\n - :func:`jax.numpy.stack`: inverse of ``unstack``\n - :func:`jax.numpy.split`: split array into batches along an axis.\n\n Examples:\n >>> arr = jnp.array([[1, 2, 3],\n ... 
[4, 5, 6]])\n >>> arrs = jnp.unstack(arr)\n >>> print(*arrs)\n [1 2 3] [4 5 6]\n\n :func:`~jax.numpy.stack` provides the inverse of this:\n\n >>> jnp.stack(arrs)\n Array([[1, 2, 3],\n [4, 5, 6]], dtype=int32)\n """"""\n x = util.ensure_arraylike(""unstack"", x)\n if x.ndim == 0:\n raise ValueError(\n ""Unstack requires arrays with rank > 0, however a scalar array was ""\n ""passed.""\n )\n dimensions = (axis,)\n return tuple(\n lax.squeeze(t, dimensions)\n for t in lax.split(x, (1,) * x.shape[axis], axis=axis)\n )\n\n\n@export\ndef tile(A: ArrayLike, reps: DimSize | Sequence[DimSize]) -> Array:\n """"""Construct an array by repeating ``A`` along specified dimensions.\n\n JAX implementation of :func:`numpy.tile`.\n\n If ``A`` is an array of shape ``(d1, d2, ..., dn)`` and ``reps`` is a sequence of integers,\n the resulting array will have a shape of ``(reps[0] * d1, reps[1] * d2, ..., reps[n] * dn)``,\n with ``A`` tiled along each dimension.\n\n Args:\n A: input array to be repeated. Can be of any shape or dimension.\n reps: specifies the number of repetitions along each axis.\n\n Returns:\n a new array where the input array has been repeated according to ``reps``.\n\n See also:\n - :func:`jax.numpy.repeat`: Construct an array from repeated elements.\n - :func:`jax.numpy.broadcast_to`: Broadcast an array to a specified shape.\n\n Examples:\n >>> arr = jnp.array([1, 2])\n >>> jnp.tile(arr, 2)\n Array([1, 2, 1, 2], dtype=int32)\n >>> arr = jnp.array([[1, 2],\n ... [3, 4,]])\n >>> jnp.tile(arr, (2, 1))\n Array([[1, 2],\n [3, 4],\n [1, 2],\n [3, 4]], dtype=int32)\n """"""\n A = util.ensure_arraylike(""tile"", A)\n try:\n iter(reps) # type: ignore[arg-type]\n except TypeError:\n reps_tup: tuple[DimSize, ...] = (reps,)\n else:\n reps_tup = tuple(reps) # type: ignore[arg-type]\n reps_tup = tuple(operator.index(rep) if core.is_constant_dim(rep) else rep\n for rep in reps_tup)\n A_shape = (1,) * (len(reps_tup) - np.ndim(A)) + np.shape(A)\n reps_tup = (1,) * (len(A_shape) - len(reps_tup)) + reps_tup\n result = broadcast_to(reshape(A, [j for i in A_shape for j in [1, i]]),\n [k for pair in zip(reps_tup, A_shape) for k in pair])\n return reshape(result, tuple(np.multiply(A_shape, reps_tup)))\n\ndef _concatenate_array(arr: ArrayLike, axis: int | None,\n dtype: DTypeLike | None = None) -> Array:\n # Fast path for concatenation when the input is an ndarray rather than a list.\n arr = asarray(arr, dtype=dtype)\n if arr.ndim == 0 or arr.shape[0] == 0:\n raise ValueError(""Need at least one array to concatenate."")\n if axis is None:\n return lax.reshape(arr, (arr.size,))\n if arr.ndim == 1:\n raise ValueError(""Zero-dimensional arrays cannot be concatenated."")\n axis = _canonicalize_axis(axis, arr.ndim - 1)\n shape = arr.shape[1:axis + 1] + (arr.shape[0] * arr.shape[axis + 1],) + arr.shape[axis + 2:]\n dimensions = [*range(1, axis + 1), 0, *range(axis + 1, arr.ndim)]\n return lax.reshape(arr, shape, dimensions)\n\n\n@export\ndef concatenate(arrays: np.ndarray | Array | Sequence[ArrayLike],\n axis: int | None = 0, dtype: DTypeLike | None = None) -> Array:\n """"""Join arrays along an existing axis.\n\n JAX implementation of :func:`numpy.concatenate`.\n\n Args:\n arrays: a sequence of arrays to concatenate; each must have the same shape\n except along the specified axis. 
If a single array is given it will be\n treated equivalently to `arrays = unstack(arrays)`, but the implementation\n will avoid explicit unstacking.\n axis: specify the axis along which to concatenate.\n dtype: optional dtype of the resulting array. If not specified, the dtype\n will be determined via type promotion rules described in :ref:`type-promotion`.\n\n Returns:\n the concatenated result.\n\n See also:\n - :func:`jax.lax.concatenate`: XLA concatenation API.\n - :func:`jax.numpy.concat`: Array API version of this function.\n - :func:`jax.numpy.stack`: concatenate arrays along a new axis.\n\n Examples:\n One-dimensional concatenation:\n\n >>> x = jnp.arange(3)\n >>> y = jnp.zeros(3, dtype=int)\n >>> jnp.concatenate([x, y])\n Array([0, 1, 2, 0, 0, 0], dtype=int32)\n\n Two-dimensional concatenation:\n\n >>> x = jnp.ones((2, 3))\n >>> y = jnp.zeros((2, 1))\n >>> jnp.concatenate([x, y], axis=1)\n Array([[1., 1., 1., 0.],\n [1., 1., 1., 0.]], dtype=float32)\n """"""\n if isinstance(arrays, (np.ndarray, Array)):\n return _concatenate_array(arrays, axis, dtype=dtype)\n arrays = util.ensure_arraylike_tuple(""concatenate"", arrays)\n if not len(arrays):\n raise ValueError(""Need at least one array to concatenate."")\n if axis is None:\n return concatenate([ravel(a) for a in arrays], axis=0, dtype=dtype)\n if np.ndim(arrays[0]) == 0:\n raise ValueError(""Zero-dimensional arrays cannot be concatenated."")\n axis = _canonicalize_axis(axis, np.ndim(arrays[0]))\n if dtype is None:\n arrays_out = util.promote_dtypes(*arrays)\n else:\n arrays_out = [asarray(arr, dtype=dtype) for arr in arrays]\n # lax.concatenate can be slow to compile for wide concatenations, so form a\n # tree of concatenations as a workaround especially for op-by-op mode.\n # (https://github.com/jax-ml/jax/issues/653).\n k = 16\n while len(arrays_out) > 1:\n arrays_out = [lax.concatenate(arrays_out[i:i+k], axis)\n for i in range(0, len(arrays_out), k)]\n return arrays_out[0]\n\n\n@export\ndef concat(arrays: Sequence[ArrayLike], /, *, axis: int | None = 0) -> Array:\n """"""Join arrays along an existing axis.\n\n JAX implementation of :func:`array_api.concat`.\n\n Args:\n arrays: a sequence of arrays to concatenate; each must have the same shape\n except along the specified axis. 
If a single array is given it will be\n treated equivalently to `arrays = unstack(arrays)`, but the implementation\n will avoid explicit unstacking.\n axis: specify the axis along which to concatenate.\n\n Returns:\n the concatenated result.\n\n See also:\n - :func:`jax.lax.concatenate`: XLA concatenation API.\n - :func:`jax.numpy.concatenate`: NumPy version of this function.\n - :func:`jax.numpy.stack`: concatenate arrays along a new axis.\n\n Examples:\n One-dimensional concatenation:\n\n >>> x = jnp.arange(3)\n >>> y = jnp.zeros(3, dtype=int)\n >>> jnp.concat([x, y])\n Array([0, 1, 2, 0, 0, 0], dtype=int32)\n\n Two-dimensional concatenation:\n\n >>> x = jnp.ones((2, 3))\n >>> y = jnp.zeros((2, 1))\n >>> jnp.concat([x, y], axis=1)\n Array([[1., 1., 1., 0.],\n [1., 1., 1., 0.]], dtype=float32)\n """"""\n util.check_arraylike(""concat"", *arrays)\n return concatenate(arrays, axis=axis)\n\n\n@export\ndef vstack(tup: np.ndarray | Array | Sequence[ArrayLike],\n dtype: DTypeLike | None = None) -> Array:\n """"""Vertically stack arrays.\n\n JAX implementation of :func:`numpy.vstack`.\n\n For arrays of two or more dimensions, this is equivalent to\n :func:`jax.numpy.concatenate` with ``axis=0``.\n\n Args:\n tup: a sequence of arrays to stack; each must have the same shape along all\n but the first axis. If a single array is given it will be treated\n equivalently to `tup = unstack(tup)`, but the implementation will avoid\n explicit unstacking.\n dtype: optional dtype of the resulting array. If not specified, the dtype\n will be determined via type promotion rules described in :ref:`type-promotion`.\n\n Returns:\n the stacked result.\n\n See also:\n - :func:`jax.numpy.stack`: stack along arbitrary axes\n - :func:`jax.numpy.concatenate`: concatenation along existing axes.\n - :func:`jax.numpy.hstack`: stack horizontally, i.e. along axis 1.\n - :func:`jax.numpy.dstack`: stack depth-wise, i.e. along axis 2.\n\n Examples:\n Scalar values:\n\n >>> jnp.vstack([1, 2, 3])\n Array([[1],\n [2],\n [3]], dtype=int32, weak_type=True)\n\n 1D arrays:\n\n >>> x = jnp.arange(4)\n >>> y = jnp.ones(4)\n >>> jnp.vstack([x, y])\n Array([[0., 1., 2., 3.],\n [1., 1., 1., 1.]], dtype=float32)\n\n 2D arrays:\n\n >>> x = x.reshape(1, 4)\n >>> y = y.reshape(1, 4)\n >>> jnp.vstack([x, y])\n Array([[0., 1., 2., 3.],\n [1., 1., 1., 1.]], dtype=float32)\n """"""\n arrs: Array | list[Array]\n if isinstance(tup, (np.ndarray, Array)):\n arrs = api.vmap(atleast_2d)(tup)\n else:\n # TODO(jakevdp): Non-array input deprecated 2023-09-22; change to error.\n util.check_arraylike(""vstack"", *tup, emit_warning=True)\n arrs = [atleast_2d(m) for m in tup]\n return concatenate(arrs, axis=0, dtype=dtype)\n\n\n@export\ndef hstack(tup: np.ndarray | Array | Sequence[ArrayLike],\n dtype: DTypeLike | None = None) -> Array:\n """"""Horizontally stack arrays.\n\n JAX implementation of :func:`numpy.hstack`.\n\n For arrays of one or more dimensions, this is equivalent to\n :func:`jax.numpy.concatenate` with ``axis=1``.\n\n Args:\n tup: a sequence of arrays to stack; each must have the same shape along all\n but the second axis. Input arrays will be promoted to at least rank 1.\n If a single array is given it will be treated equivalently to\n `tup = unstack(tup)`, but the implementation will avoid explicit unstacking.\n dtype: optional dtype of the resulting array. 
If not specified, the dtype\n will be determined via type promotion rules described in :ref:`type-promotion`.\n\n Returns:\n the stacked result.\n\n See also:\n - :func:`jax.numpy.stack`: stack along arbitrary axes\n - :func:`jax.numpy.concatenate`: concatenation along existing axes.\n - :func:`jax.numpy.vstack`: stack vertically, i.e. along axis 0.\n - :func:`jax.numpy.dstack`: stack depth-wise, i.e. along axis 2.\n\n Examples:\n Scalar values:\n\n >>> jnp.hstack([1, 2, 3])\n Array([1, 2, 3], dtype=int32, weak_type=True)\n\n 1D arrays:\n\n >>> x = jnp.arange(3)\n >>> y = jnp.ones(3)\n >>> jnp.hstack([x, y])\n Array([0., 1., 2., 1., 1., 1.], dtype=float32)\n\n 2D arrays:\n\n >>> x = x.reshape(3, 1)\n >>> y = y.reshape(3, 1)\n >>> jnp.hstack([x, y])\n Array([[0., 1.],\n [1., 1.],\n [2., 1.]], dtype=float32)\n """"""\n arrs: Array | list[Array]\n if isinstance(tup, (np.ndarray, Array)):\n arrs = api.vmap(atleast_1d)(tup)\n arr0_ndim = arrs.ndim - 1\n else:\n # TODO(jakevdp): Non-array input deprecated 2023-09-22; change to error.\n util.check_arraylike(""hstack"", *tup, emit_warning=True)\n arrs = [atleast_1d(m) for m in tup]\n arr0_ndim = arrs[0].ndim\n return concatenate(arrs, axis=0 if arr0_ndim == 1 else 1, dtype=dtype)\n\n\n@export\ndef dstack(tup: np.ndarray | Array | Sequence[ArrayLike],\n dtype: DTypeLike | None = None) -> Array:\n """"""Stack arrays depth-wise.\n\n JAX implementation of :func:`numpy.dstack`.\n\n For arrays of three or more dimensions, this is equivalent to\n :func:`jax.numpy.concatenate` with ``axis=2``.\n\n Args:\n tup: a sequence of arrays to stack; each must have the same shape along all\n but the third axis. Input arrays will be promoted to at least rank 3. If a\n single array is given it will be treated equivalently to `tup = unstack(tup)`,\n but the implementation will avoid explicit unstacking.\n dtype: optional dtype of the resulting array. If not specified, the dtype\n will be determined via type promotion rules described in :ref:`type-promotion`.\n\n Returns:\n the stacked result.\n\n See also:\n - :func:`jax.numpy.stack`: stack along arbitrary axes\n - :func:`jax.numpy.concatenate`: concatenation along existing axes.\n - :func:`jax.numpy.vstack`: stack vertically, i.e. along axis 0.\n - :func:`jax.numpy.hstack`: stack horizontally, i.e. 
along axis 1.\n\n  Examples:\n    Scalar values:\n\n    >>> jnp.dstack([1, 2, 3])\n    Array([[[1, 2, 3]]], dtype=int32, weak_type=True)\n\n    1D arrays:\n\n    >>> x = jnp.arange(3)\n    >>> y = jnp.ones(3)\n    >>> jnp.dstack([x, y])\n    Array([[[0., 1.],\n            [1., 1.],\n            [2., 1.]]], dtype=float32)\n\n    2D arrays:\n\n    >>> x = x.reshape(1, 3)\n    >>> y = y.reshape(1, 3)\n    >>> jnp.dstack([x, y])\n    Array([[[0., 1.],\n            [1., 1.],\n            [2., 1.]]], dtype=float32)\n  """"""\n  arrs: Array | list[Array]\n  if isinstance(tup, (np.ndarray, Array)):\n    arrs = api.vmap(atleast_3d)(tup)\n  else:\n    # TODO(jakevdp): Non-array input deprecated 2023-09-22; change to error.\n    util.check_arraylike(""dstack"", *tup, emit_warning=True)\n    tup = util.ensure_arraylike_tuple(""dstack"", tup)\n    arrs = [atleast_3d(m) for m in tup]\n  return concatenate(arrs, axis=2, dtype=dtype)\n\n\n@export\ndef column_stack(tup: np.ndarray | Array | Sequence[ArrayLike]) -> Array:\n  """"""Stack arrays column-wise.\n\n  JAX implementation of :func:`numpy.column_stack`.\n\n  For arrays of two or more dimensions, this is equivalent to\n  :func:`jax.numpy.concatenate` with ``axis=1``.\n\n  Args:\n    tup: a sequence of arrays to stack; each must have the same leading dimension.\n      Input arrays will be promoted to at least rank 2. If a single array is given\n      it will be treated equivalently to `tup = unstack(tup)`, but the implementation\n      will avoid explicit unstacking.\n    dtype: optional dtype of the resulting array. If not specified, the dtype\n      will be determined via type promotion rules described in :ref:`type-promotion`.\n\n  Returns:\n    the stacked result.\n\n  See also:\n    - :func:`jax.numpy.stack`: stack along arbitrary axes\n    - :func:`jax.numpy.concatenate`: concatenation along existing axes.\n    - :func:`jax.numpy.vstack`: stack vertically, i.e. along axis 0.\n    - :func:`jax.numpy.hstack`: stack horizontally, i.e. along axis 1.\n    - :func:`jax.numpy.dstack`: stack depth-wise, i.e. along axis 2.\n\n  Examples:\n    Scalar values:\n\n    >>> jnp.column_stack([1, 2, 3])\n    Array([[1, 2, 3]], dtype=int32, weak_type=True)\n\n    1D arrays:\n\n    >>> x = jnp.arange(3)\n    >>> y = jnp.ones(3)\n    >>> jnp.column_stack([x, y])\n    Array([[0., 1.],\n           [1., 1.],\n           [2., 1.]], dtype=float32)\n\n    2D arrays:\n\n    >>> x = x.reshape(3, 1)\n    >>> y = y.reshape(3, 1)\n    >>> jnp.column_stack([x, y])\n    Array([[0., 1.],\n           [1., 1.],\n           [2., 1.]], dtype=float32)\n  """"""\n  arrs: Array | list[Array] | np.ndarray\n  if isinstance(tup, (np.ndarray, Array)):\n    arrs = api.vmap(lambda x: atleast_2d(x).T)(tup) if tup.ndim < 3 else tup\n  else:\n    # TODO(jakevdp): Non-array input deprecated 2023-09-22; change to error.\n    util.check_arraylike(""column_stack"", *tup, emit_warning=True)\n    arrs = [atleast_2d(arr).T if arr.ndim < 2 else arr for arr in map(asarray, tup)]\n  return concatenate(arrs, axis=1)\n\n\n@export\ndef choose(a: ArrayLike, choices: Array | np.ndarray | Sequence[ArrayLike],\n           out: None = None, mode: str = 'raise') -> Array:\n  """"""Construct an array by stacking slices of choice arrays.\n\n  JAX implementation of :func:`numpy.choose`.\n\n  The semantics of this function can be confusing, but in the simplest case where\n  ``a`` is a one-dimensional array, ``choices`` is a two-dimensional array, and\n  all entries of ``a`` are in-bounds (i.e. 
``0 <= a_i < len(choices)``), then the\n function is equivalent to the following::\n\n def choose(a, choices):\n return jnp.array([choices[a_i, i] for i, a_i in enumerate(a)])\n\n In the more general case, ``a`` may have any number of dimensions and ``choices``\n may be an arbitrary sequence of broadcast-compatible arrays. In this case, again\n for in-bound indices, the logic is equivalent to::\n\n def choose(a, choices):\n a, *choices = jnp.broadcast_arrays(a, *choices)\n choices = jnp.array(choices)\n return jnp.array([choices[a[idx], *idx] for idx in np.ndindex(a.shape)])\n\n The only additional complexity comes from the ``mode`` argument, which controls\n the behavior for out-of-bound indices in ``a`` as described below.\n\n Args:\n a: an N-dimensional array of integer indices.\n choices: an array or sequence of arrays. All arrays in the sequence must be\n mutually broadcast compatible with ``a``.\n out: unused by JAX\n mode: specify the out-of-bounds indexing mode; one of ``'raise'`` (default),\n ``'wrap'``, or ``'clip'``. Note that the default mode of ``'raise'`` is\n not compatible with JAX transformations.\n\n Returns:\n an array containing stacked slices from ``choices`` at the indices\n specified by ``a``. The shape of the result is\n ``broadcast_shapes(a.shape, *(c.shape for c in choices))``.\n\n See also:\n - :func:`jax.lax.switch`: choose between N functions based on an index.\n\n Examples:\n Here is the simplest case of a 1D index array with a 2D choice array,\n in which case this chooses the indexed value from each column:\n\n >>> choices = jnp.array([[ 1, 2, 3, 4],\n ... [ 5, 6, 7, 8],\n ... [ 9, 10, 11, 12]])\n >>> a = jnp.array([2, 0, 1, 0])\n >>> jnp.choose(a, choices)\n Array([9, 2, 7, 4], dtype=int32)\n\n The ``mode`` argument specifies what to do with out-of-bound indices;\n options are to either ``wrap`` or ``clip``:\n\n >>> a2 = jnp.array([2, 0, 1, 4]) # last index out-of-bound\n >>> jnp.choose(a2, choices, mode='clip')\n Array([ 9, 2, 7, 12], dtype=int32)\n >>> jnp.choose(a2, choices, mode='wrap')\n Array([9, 2, 7, 8], dtype=int32)\n\n In the more general case, ``choices`` may be a sequence of array-like\n objects with any broadcast-compatible shapes.\n\n >>> choice_1 = jnp.array([1, 2, 3, 4])\n >>> choice_2 = 99\n >>> choice_3 = jnp.array([[10],\n ... [20],\n ... [30]])\n >>> a = jnp.array([[0, 1, 2, 0],\n ... [1, 2, 0, 1],\n ... [2, 0, 1, 2]])\n >>> jnp.choose(a, [choice_1, choice_2, choice_3], mode='wrap')\n Array([[ 1, 99, 10, 4],\n [99, 20, 3, 99],\n [30, 2, 99, 30]], dtype=int32)\n """"""\n if out is not None:\n raise NotImplementedError(""The 'out' argument to jnp.choose is not supported."")\n a, *choices = util.ensure_arraylike_tuple('choose', (a, *choices))\n if not issubdtype(_dtype(a), np.integer):\n raise ValueError(""`a` array must be integer typed"")\n N = len(choices)\n\n if mode == 'raise':\n arr: Array = core.concrete_or_error(asarray, a,\n ""The error occurred because jnp.choose was jit-compiled""\n "" with mode='raise'. Use mode='wrap' or mode='clip' instead."")\n if reductions.any((arr < 0) | (arr >= N)):\n raise ValueError(""invalid entry in choice array"")\n elif mode == 'wrap':\n arr = asarray(a) % N\n elif mode == 'clip':\n arr = clip(a, 0, N - 1)\n else:\n raise ValueError(f""mode={mode!r} not understood. 
Must be 'raise', 'wrap', or 'clip'"")\n\n arr, *choices = broadcast_arrays(arr, *choices)\n return array(choices)[(arr,) + indices(arr.shape, sparse=True)]\n\n\ndef _atleast_nd(x: ArrayLike, n: int) -> Array:\n m = np.ndim(x)\n return lax.broadcast(x, (1,) * (n - m)) if m < n else asarray(x)\n\ndef _block(xs: ArrayLike | list[ArrayLike]) -> tuple[Array, int]:\n if isinstance(xs, tuple):\n raise ValueError(""jax.numpy.block does not allow tuples, got {}""\n .format(xs))\n elif isinstance(xs, list):\n if len(xs) == 0:\n raise ValueError(""jax.numpy.block does not allow empty list arguments"")\n xs_tup, depths = unzip2([_block(x) for x in xs])\n if any(d != depths[0] for d in depths[1:]):\n raise ValueError(""Mismatched list depths in jax.numpy.block"")\n rank = max(depths[0], max(np.ndim(x) for x in xs_tup))\n xs_tup = tuple(_atleast_nd(x, rank) for x in xs_tup)\n return concatenate(xs_tup, axis=-depths[0]), depths[0] + 1\n else:\n return asarray(xs), 1\n\n\n@export\n@jit\ndef block(arrays: ArrayLike | list[ArrayLike]) -> Array:\n """"""Create an array from a list of blocks.\n\n JAX implementation of :func:`numpy.block`.\n\n Args:\n arrays: an array, or nested list of arrays which will be concatenated\n together to form the final array.\n\n Returns:\n a single array constructed from the inputs.\n\n See also:\n - :func:`concatenate`, :func:`concat`: concatenate arrays along an existing axis.\n - :func:`stack`, :func:`vstack`, :func:`hstack`, :func:`dstack` concatenate\n arrays along a new axis.\n\n Examples:\n consider these blocks:\n\n >>> zeros = jnp.zeros((2, 2))\n >>> ones = jnp.ones((2, 2))\n >>> twos = jnp.full((2, 2), 2)\n >>> threes = jnp.full((2, 2), 3)\n\n Passing a single array to :func:`block` returns the array:\n\n >>> jnp.block(zeros)\n Array([[0., 0.],\n [0., 0.]], dtype=float32)\n\n Passing a simple list of arrays concatenates them along the last axis:\n\n >>> jnp.block([zeros, ones])\n Array([[0., 0., 1., 1.],\n [0., 0., 1., 1.]], dtype=float32)\n\n Passing a doubly-nested list of arrays concatenates the inner list along\n the last axis, and the outer list along the second-to-last axis:\n\n >>> jnp.block([[zeros, ones],\n ... [twos, threes]])\n Array([[0., 0., 1., 1.],\n [0., 0., 1., 1.],\n [2., 2., 3., 3.],\n [2., 2., 3., 3.]], dtype=float32)\n\n Note that blocks need not align in all dimensions, though the size along the axis\n of concatenation must match. 
For example, this is valid because after the inner,\n horizontal concatenation, the resulting blocks have a valid shape for the outer,\n vertical concatenation.\n\n >>> a = jnp.zeros((2, 1))\n >>> b = jnp.ones((2, 3))\n >>> c = jnp.full((1, 2), 2)\n >>> d = jnp.full((1, 2), 3)\n >>> jnp.block([[a, b], [c, d]])\n Array([[0., 1., 1., 1.],\n [0., 1., 1., 1.],\n [2., 2., 3., 3.]], dtype=float32)\n\n Note also that this logic generalizes to blocks in 3 or more dimensions.\n Here's a 3-dimensional block-wise array:\n\n >>> x = jnp.arange(6).reshape((1, 2, 3))\n >>> blocks = [[[x for i in range(3)] for j in range(4)] for k in range(5)]\n >>> jnp.block(blocks).shape\n (5, 8, 9)\n """"""\n out, _ = _block(arrays)\n return out\n\n\n@overload\ndef atleast_1d() -> list[Array]:\n ...\n@overload\ndef atleast_1d(x: ArrayLike, /) -> Array:\n ...\n@overload\ndef atleast_1d(x: ArrayLike, y: ArrayLike, /, *arys: ArrayLike) -> list[Array]:\n ...\n@export\n@jit\ndef atleast_1d(*arys: ArrayLike) -> Array | list[Array]:\n """"""Convert inputs to arrays with at least 1 dimension.\n\n JAX implementation of :func:`numpy.atleast_1d`.\n\n Args:\n zero or more arraylike arguments.\n\n Returns:\n an array or list of arrays corresponding to the input values. Arrays\n of shape ``()`` are converted to shape ``(1,)``, and arrays with other\n shapes are returned unchanged.\n\n See also:\n - :func:`jax.numpy.asarray`\n - :func:`jax.numpy.atleast_2d`\n - :func:`jax.numpy.atleast_3d`\n\n Examples:\n Scalar arguments are converted to 1D, length-1 arrays:\n\n >>> x = jnp.float32(1.0)\n >>> jnp.atleast_1d(x)\n Array([1.], dtype=float32)\n\n Higher dimensional inputs are returned unchanged:\n\n >>> y = jnp.arange(4)\n >>> jnp.atleast_1d(y)\n Array([0, 1, 2, 3], dtype=int32)\n\n Multiple arguments can be passed to the function at once, in which\n case a list of results is returned:\n\n >>> jnp.atleast_1d(x, y)\n [Array([1.], dtype=float32), Array([0, 1, 2, 3], dtype=int32)]\n """"""\n util.check_arraylike(""atleast_1d"", *arys, emit_warning=True)\n if len(arys) == 1:\n return array(arys[0], copy=False, ndmin=1)\n else:\n return [array(arr, copy=False, ndmin=1) for arr in arys]\n\n\n@overload\ndef atleast_2d() -> list[Array]:\n ...\n@overload\ndef atleast_2d(x: ArrayLike, /) -> Array:\n ...\n@overload\ndef atleast_2d(x: ArrayLike, y: ArrayLike, /, *arys: ArrayLike) -> list[Array]:\n ...\n@export\n@jit\ndef atleast_2d(*arys: ArrayLike) -> Array | list[Array]:\n """"""Convert inputs to arrays with at least 2 dimensions.\n\n JAX implementation of :func:`numpy.atleast_2d`.\n\n Args:\n zero or more arraylike arguments.\n\n Returns:\n an array or list of arrays corresponding to the input values. 
Arrays\n of shape ``()`` are converted to shape ``(1, 1)``, 1D arrays of shape\n ``(N,)`` are converted to shape ``(1, N)``, and arrays of all other\n shapes are returned unchanged.\n\n See also:\n - :func:`jax.numpy.asarray`\n - :func:`jax.numpy.atleast_1d`\n - :func:`jax.numpy.atleast_3d`\n\n Examples:\n Scalar arguments are converted to 2D, size-1 arrays:\n\n >>> x = jnp.float32(1.0)\n >>> jnp.atleast_2d(x)\n Array([[1.]], dtype=float32)\n\n One-dimensional arguments have a unit dimension prepended to the shape:\n\n >>> y = jnp.arange(4)\n >>> jnp.atleast_2d(y)\n Array([[0, 1, 2, 3]], dtype=int32)\n\n Higher dimensional inputs are returned unchanged:\n\n >>> z = jnp.ones((2, 3))\n >>> jnp.atleast_2d(z)\n Array([[1., 1., 1.],\n [1., 1., 1.]], dtype=float32)\n\n Multiple arguments can be passed to the function at once, in which\n case a list of results is returned:\n\n >>> jnp.atleast_2d(x, y)\n [Array([[1.]], dtype=float32), Array([[0, 1, 2, 3]], dtype=int32)]\n """"""\n # TODO(jakevdp): Non-array input deprecated 2023-09-22; change to error.\n util.check_arraylike(""atleast_2d"", *arys, emit_warning=True)\n if len(arys) == 1:\n return array(arys[0], copy=False, ndmin=2)\n else:\n return [array(arr, copy=False, ndmin=2) for arr in arys]\n\n\n@overload\ndef atleast_3d() -> list[Array]:\n ...\n@overload\ndef atleast_3d(x: ArrayLike, /) -> Array:\n ...\n@overload\ndef atleast_3d(x: ArrayLike, y: ArrayLike, /, *arys: ArrayLike) -> list[Array]:\n ...\n@export\n@jit\ndef atleast_3d(*arys: ArrayLike) -> Array | list[Array]:\n """"""Convert inputs to arrays with at least 3 dimensions.\n\n JAX implementation of :func:`numpy.atleast_3d`.\n\n Args:\n zero or more arraylike arguments.\n\n Returns:\n an array or list of arrays corresponding to the input values. 
Arrays\n of shape ``()`` are converted to shape ``(1, 1, 1)``, 1D arrays of\n shape ``(N,)`` are converted to shape ``(1, N, 1)``, 2D arrays of\n shape ``(M, N)`` are converted to shape ``(M, N, 1)``, and arrays\n of all other shapes are returned unchanged.\n\n See also:\n - :func:`jax.numpy.asarray`\n - :func:`jax.numpy.atleast_1d`\n - :func:`jax.numpy.atleast_2d`\n\n Examples:\n Scalar arguments are converted to 3D, size-1 arrays:\n\n >>> x = jnp.float32(1.0)\n >>> jnp.atleast_3d(x)\n Array([[[1.]]], dtype=float32)\n\n 1D arrays have a unit dimension prepended and appended:\n\n >>> y = jnp.arange(4)\n >>> jnp.atleast_3d(y).shape\n (1, 4, 1)\n\n 2D arrays have a unit dimension appended:\n\n >>> z = jnp.ones((2, 3))\n >>> jnp.atleast_3d(z).shape\n (2, 3, 1)\n\n Multiple arguments can be passed to the function at once, in which\n case a list of results is returned:\n\n >>> x3, y3 = jnp.atleast_3d(x, y)\n >>> print(x3)\n [[[1.]]]\n >>> print(y3)\n [[[0]\n [1]\n [2]\n [3]]]\n """"""\n # TODO(jakevdp): Non-array input deprecated 2023-09-22; change to error.\n util.check_arraylike(""atleast_3d"", *arys, emit_warning=True)\n if len(arys) == 1:\n arr = asarray(arys[0])\n if arr.ndim == 0:\n arr = lax.expand_dims(arr, dimensions=(0, 1, 2))\n elif arr.ndim == 1:\n arr = lax.expand_dims(arr, dimensions=(0, 2))\n elif arr.ndim == 2:\n arr = lax.expand_dims(arr, dimensions=(2,))\n return arr\n else:\n return [atleast_3d(arr) for arr in arys]\n\n\n@export\ndef astype(x: ArrayLike, dtype: DTypeLike | None,\n /, *, copy: bool = False,\n device: xc.Device | Sharding | None = None) -> Array:\n """"""Convert an array to a specified dtype.\n\n JAX implementation of :func:`numpy.astype`.\n\n This is implemented via :func:`jax.lax.convert_element_type`, which may\n have slightly different behavior than :func:`numpy.astype` in some cases.\n In particular, the details of float-to-int and int-to-float casts are\n implementation dependent.\n\n Args:\n x: input array to convert\n dtype: output dtype\n copy: if True, then always return a copy. If False (default) then only\n return a copy if necessary.\n device: optionally specify the device to which the output will be committed.\n\n Returns:\n An array with the same shape as ``x``, containing values of the specified\n dtype.\n\n See Also:\n - :func:`jax.lax.convert_element_type`: lower-level function for XLA-style\n dtype conversions.\n\n Examples:\n >>> x = jnp.array([0, 1, 2, 3])\n >>> x\n Array([0, 1, 2, 3], dtype=int32)\n >>> x.astype('float32')\n Array([0.0, 1.0, 2.0, 3.0], dtype=float32)\n\n >>> y = jnp.array([0.0, 0.5, 1.0])\n >>> y.astype(int) # truncates fractional values\n Array([0, 0, 1], dtype=int32)\n """"""\n x_arr = util.ensure_arraylike(""astype"", x)\n\n if dtype is None:\n dtype = dtypes.canonicalize_dtype(dtypes.float_)\n dtypes.check_user_dtype_supported(dtype, ""astype"")\n if issubdtype(x_arr.dtype, np.complexfloating):\n if dtypes.isdtype(dtype, (""integral"", ""real floating"")):\n deprecations.warn(\n ""jax-numpy-astype-complex-to-real"",\n ""Casting from complex to real dtypes will soon raise a ValueError. 
""\n ""Please first use jnp.real or jnp.imag to take the real/imaginary ""\n ""component of your input."",\n stacklevel=2)\n elif np.dtype(dtype) == bool:\n # convert_element_type(complex, bool) has the wrong semantics.\n x_arr = (x_arr != _lax_const(x_arr, 0))\n\n # We offer a more specific warning than the usual ComplexWarning so we prefer\n # to issue our warning.\n result = lax_internal._convert_element_type(\n x_arr, dtype, sharding=util.normalize_device_to_sharding(device),\n warn_on_complex_to_real_cast=False)\n return _array_copy(result) if copy else result\n\n\n@export\ndef copy(a: ArrayLike, order: str | None = None) -> Array:\n """"""Return a copy of the array.\n\n JAX implementation of :func:`numpy.copy`.\n\n Args:\n a: arraylike object to copy\n order: not implemented in JAX\n\n Returns:\n a copy of the input array ``a``.\n\n See Also:\n - :func:`jax.numpy.array`: create an array with or without a copy.\n - :meth:`jax.Array.copy`: same function accessed as an array method.\n\n Examples:\n Since JAX arrays are immutable, in most cases explicit array copies\n are not necessary. One exception is when using a function with donated\n arguments (see the ``donate_argnums`` argument to :func:`jax.jit`).\n\n >>> f = jax.jit(lambda x: 2 * x, donate_argnums=0)\n >>> x = jnp.arange(4)\n >>> y = f(x)\n >>> print(y)\n [0 2 4 6]\n\n Because we marked ``x`` as being donated, the original array is no longer\n available:\n\n >>> print(x) # doctest: +IGNORE_EXCEPTION_DETAIL\n Traceback (most recent call last):\n RuntimeError: Array has been deleted with shape=int32[4].\n\n In situations like this, an explicit copy will let you keep access to the\n original buffer:\n\n >>> x = jnp.arange(4)\n >>> y = f(x.copy())\n >>> print(y)\n [0 2 4 6]\n >>> print(x)\n [0 1 2 3]\n """"""\n util.check_arraylike(""copy"", a)\n return array(a, copy=True, order=order)\n\n\n@export\ndef array_equal(a1: ArrayLike, a2: ArrayLike, equal_nan: bool = False) -> Array:\n """"""Check if two arrays are element-wise equal.\n\n JAX implementation of :func:`numpy.array_equal`.\n\n Args:\n a1: first input array to compare.\n a2: second input array to compare.\n equal_nan: Boolean. If ``True``, NaNs in ``a1`` will be considered\n equal to NaNs in ``a2``. Default is ``False``.\n\n Returns:\n Boolean scalar array indicating whether the input arrays are element-wise equal.\n\n See Also:\n - :func:`jax.numpy.allclose`\n - :func:`jax.numpy.array_equiv`\n\n Examples:\n >>> jnp.array_equal(jnp.array([1, 2, 3]), jnp.array([1, 2, 3]))\n Array(True, dtype=bool)\n >>> jnp.array_equal(jnp.array([1, 2, 3]), jnp.array([1, 2]))\n Array(False, dtype=bool)\n >>> jnp.array_equal(jnp.array([1, 2, 3]), jnp.array([1, 2, 4]))\n Array(False, dtype=bool)\n >>> jnp.array_equal(jnp.array([1, 2, float('nan')]),\n ... jnp.array([1, 2, float('nan')]))\n Array(False, dtype=bool)\n >>> jnp.array_equal(jnp.array([1, 2, float('nan')]),\n ... 
jnp.array([1, 2, float('nan')]), equal_nan=True)\n Array(True, dtype=bool)\n """"""\n a1, a2 = asarray(a1), asarray(a2)\n if np.shape(a1) != np.shape(a2):\n return array(False, dtype=bool)\n eq = asarray(a1 == a2)\n if equal_nan:\n eq = ufuncs.logical_or(eq, ufuncs.logical_and(ufuncs.isnan(a1), ufuncs.isnan(a2)))\n return reductions.all(eq)\n\n\n@export\ndef array_equiv(a1: ArrayLike, a2: ArrayLike) -> Array:\n """"""Check if two arrays are element-wise equal.\n\n JAX implementation of :func:`numpy.array_equiv`.\n\n This function will return ``False`` if the input arrays cannot be broadcasted\n to the same shape.\n\n Args:\n a1: first input array to compare.\n a2: second input array to compare.\n\n Returns:\n Boolean scalar array indicating whether the input arrays are\n element-wise equal after broadcasting.\n\n See Also:\n - :func:`jax.numpy.allclose`\n - :func:`jax.numpy.array_equal`\n\n Examples:\n >>> jnp.array_equiv(jnp.array([1, 2, 3]), jnp.array([1, 2, 3]))\n Array(True, dtype=bool)\n >>> jnp.array_equiv(jnp.array([1, 2, 3]), jnp.array([1, 2, 4]))\n Array(False, dtype=bool)\n >>> jnp.array_equiv(jnp.array([[1, 2, 3], [1, 2, 3]]),\n ... jnp.array([1, 2, 3]))\n Array(True, dtype=bool)\n """"""\n a1, a2 = asarray(a1), asarray(a2)\n try:\n eq = ufuncs.equal(a1, a2)\n except ValueError:\n # shapes are not broadcastable\n return array(False)\n return reductions.all(eq)\n\n\n# General np.from* style functions mostly delegate to numpy.\n\n@export\ndef frombuffer(buffer: bytes | Any, dtype: DTypeLike = float,\n count: int = -1, offset: int = 0) -> Array:\n r""""""Convert a buffer into a 1-D JAX array.\n\n JAX implementation of :func:`numpy.frombuffer`.\n\n Args:\n buffer: an object containing the data. It must be either a bytes object with\n a length that is an integer multiple of the dtype element size, or\n it must be an object exporting the `Python buffer interface`_.\n dtype: optional. Desired data type for the array. Default is ``float64``.\n This specifies the dtype used to parse the buffer, but note that after parsing,\n 64-bit values will be cast to 32-bit JAX arrays if the ``jax_enable_x64``\n flag is set to ``False``.\n count: optional integer specifying the number of items to read from the buffer.\n If -1 (default), all items from the buffer are read.\n offset: optional integer specifying the number of bytes to skip at the beginning\n of the buffer. Default is 0.\n\n Returns:\n A 1-D JAX array representing the interpreted data from the buffer.\n\n See also:\n - :func:`jax.numpy.fromstring`: convert a string of text into 1-D JAX array.\n\n Examples:\n Using a bytes buffer:\n\n >>> buf = b""\x00\x01\x02\x03\x04""\n >>> jnp.frombuffer(buf, dtype=jnp.uint8)\n Array([0, 1, 2, 3, 4], dtype=uint8)\n >>> jnp.frombuffer(buf, dtype=jnp.uint8, offset=1)\n Array([1, 2, 3, 4], dtype=uint8)\n\n Constructing a JAX array via the Python buffer interface, using Python's\n built-in :mod:`array` module.\n\n >>> from array import array\n >>> pybuffer = array('i', [0, 1, 2, 3, 4])\n >>> jnp.frombuffer(pybuffer, dtype=jnp.int32)\n Array([0, 1, 2, 3, 4], dtype=int32)\n\n .. _Python buffer interface: https://docs.python.org/3/c-api/buffer.html\n """"""\n return asarray(np.frombuffer(buffer=buffer, dtype=dtype, count=count, offset=offset))\n\n\n@export\ndef fromfile(*args, **kwargs):\n """"""Unimplemented JAX wrapper for jnp.fromfile.\n\n This function is left deliberately unimplemented because it may be non-pure and thus\n unsafe for use with JIT and other JAX transformations. 
Consider using\n  ``jnp.asarray(np.fromfile(...))`` instead, although care should be taken if ``np.fromfile``\n  is used within jax transformations because of its potential side-effect of consuming the\n  file object; for more information see `Common Gotchas: Pure Functions\n  <https://docs.jax.dev/en/latest/notebooks/Common_Gotchas_in_JAX.html#pure-functions>`_.\n  """"""\n  raise NotImplementedError(\n    ""jnp.fromfile() is not implemented because it may be non-pure and thus unsafe for use ""\n    ""with JIT and other JAX transformations. Consider using jnp.asarray(np.fromfile(...)) ""\n    ""instead, although care should be taken if np.fromfile is used within jax transformations ""\n    ""because of its potential side-effect of consuming the file object; for more information see ""\n    ""https://docs.jax.dev/en/latest/notebooks/Common_Gotchas_in_JAX.html#pure-functions"")\n\n\n@export\ndef fromiter(*args, **kwargs):\n  """"""Unimplemented JAX wrapper for jnp.fromiter.\n\n  This function is left deliberately unimplemented because it may be non-pure and thus\n  unsafe for use with JIT and other JAX transformations. Consider using\n  ``jnp.asarray(np.fromiter(...))`` instead, although care should be taken if ``np.fromiter``\n  is used within jax transformations because of its potential side-effect of consuming the\n  iterable object; for more information see `Common Gotchas: Pure Functions\n  <https://docs.jax.dev/en/latest/notebooks/Common_Gotchas_in_JAX.html#pure-functions>`_.\n  """"""\n  raise NotImplementedError(\n    ""jnp.fromiter() is not implemented because it may be non-pure and thus unsafe for use ""\n    ""with JIT and other JAX transformations. Consider using jnp.asarray(np.fromiter(...)) ""\n    ""instead, although care should be taken if np.fromiter is used within jax transformations ""\n    ""because of its potential side-effect of consuming the iterable object; for more information see ""\n    ""https://docs.jax.dev/en/latest/notebooks/Common_Gotchas_in_JAX.html#pure-functions"")\n\n\n@export\ndef from_dlpack(x: Any, /, *, device: xc.Device | Sharding | None = None,\n                copy: bool | None = None) -> Array:\n  """"""Construct a JAX array via DLPack.\n\n  JAX implementation of :func:`numpy.from_dlpack`.\n\n  Args:\n    x: An object that implements the DLPack_ protocol via the ``__dlpack__``\n      and ``__dlpack_device__`` methods, or a legacy DLPack tensor on either\n      CPU or GPU.\n    device: An optional :class:`~jax.Device` or :class:`~jax.sharding.Sharding`,\n      representing the single device onto which the returned array should be placed.\n      If given, then the result is committed to the device. If unspecified,\n      the resulting array will be unpacked onto the same device it originated from.\n      Setting ``device`` to a device different from the source of ``external_array``\n      will require a copy, meaning ``copy`` must be set to either ``True`` or ``None``.\n    copy: An optional boolean, controlling whether or not a copy is performed.\n      If ``copy=True`` then a copy is always performed, even if unpacked onto the\n      same device. If ``copy=False`` then the copy is never performed and will raise\n      an error if necessary. When ``copy=None`` (default) then a copy may be performed\n      if needed for a device transfer.\n\n  Returns:\n    A JAX array of the input buffer.\n\n  Note:\n    While JAX arrays are always immutable, dlpack buffers cannot be marked as\n    immutable, and it is possible for processes external to JAX to mutate them\n    in-place. 
If a JAX Array is constructed from a dlpack buffer without copying\n and the source buffer is later modified in-place, it may lead to undefined\n behavior when using the associated JAX array.\n\n Examples:\n Passing data between NumPy and JAX via DLPack_:\n\n >>> import numpy as np\n >>> rng = np.random.default_rng(42)\n >>> x_numpy = rng.random(4, dtype='float32')\n >>> print(x_numpy)\n [0.08925092 0.773956 0.6545715 0.43887842]\n >>> hasattr(x_numpy, ""__dlpack__"") # NumPy supports the DLPack interface\n True\n\n >>> import jax.numpy as jnp\n >>> x_jax = jnp.from_dlpack(x_numpy)\n >>> print(x_jax)\n [0.08925092 0.773956 0.6545715 0.43887842]\n >>> hasattr(x_jax, ""__dlpack__"") # JAX supports the DLPack interface\n True\n\n >>> x_numpy_round_trip = np.from_dlpack(x_jax)\n >>> print(x_numpy_round_trip)\n [0.08925092 0.773956 0.6545715 0.43887842]\n\n .. _DLPack: https://dmlc.github.io/dlpack\n """"""\n from jax.dlpack import from_dlpack # pylint: disable=g-import-not-at-top\n return from_dlpack(x, device=device, copy=copy)\n\n\n@export\ndef fromfunction(function: Callable[..., Array], shape: Any,\n *, dtype: DTypeLike = float, **kwargs) -> Array:\n """"""Create an array from a function applied over indices.\n\n JAX implementation of :func:`numpy.fromfunction`. The JAX implementation\n differs in that it dispatches via :func:`jax.vmap`, and so unlike in NumPy\n the function logically operates on scalar inputs, and need not explicitly\n handle broadcasted inputs (See *Examples* below).\n\n Args:\n function: a function that takes *N* dynamic scalars and outputs a scalar.\n shape: a length-*N* tuple of integers specifying the output shape.\n dtype: optionally specify the dtype of the inputs. Defaults to floating-point.\n kwargs: additional keyword arguments are passed statically to ``function``.\n\n Returns:\n An array of shape ``shape`` if ``function`` returns a scalar, or in general\n a pytree of arrays with leading dimensions ``shape``, as determined by the\n output of ``function``.\n\n See also:\n - :func:`jax.vmap`: the core transformation that the :func:`fromfunction`\n API is built on.\n\n Examples:\n Generate a multiplication table of a given shape:\n\n >>> jnp.fromfunction(jnp.multiply, shape=(3, 6), dtype=int)\n Array([[ 0, 0, 0, 0, 0, 0],\n [ 0, 1, 2, 3, 4, 5],\n [ 0, 2, 4, 6, 8, 10]], dtype=int32)\n\n When ``function`` returns a non-scalar the output will have leading\n dimension of ``shape``:\n\n >>> def f(x):\n ... return (x + 1) * jnp.arange(3)\n >>> jnp.fromfunction(f, shape=(2,))\n Array([[0., 1., 2.],\n [0., 2., 4.]], dtype=float32)\n\n ``function`` may return multiple results, in which case each is mapped\n independently:\n\n >>> def f(x, y):\n ... return x + y, x * y\n >>> x_plus_y, x_times_y = jnp.fromfunction(f, shape=(3, 5))\n >>> print(x_plus_y)\n [[0. 1. 2. 3. 4.]\n [1. 2. 3. 4. 5.]\n [2. 3. 4. 5. 6.]]\n >>> print(x_times_y)\n [[0. 0. 0. 0. 0.]\n [0. 1. 2. 3. 4.]\n [0. 2. 4. 6. 8.]]\n\n The JAX implementation differs slightly from NumPy's implementation. In\n :func:`numpy.fromfunction`, the function is expected to explicitly operate\n element-wise on the full grid of input values:\n\n >>> def f(x, y):\n ... print(f""{x.shape = }\\n{y.shape = }"")\n ... 
return x + y\n ...\n >>> np.fromfunction(f, (2, 3))\n x.shape = (2, 3)\n y.shape = (2, 3)\n array([[0., 1., 2.],\n [1., 2., 3.]])\n\n In :func:`jax.numpy.fromfunction`, the function is vectorized via\n :func:`jax.vmap`, and so is expected to operate on scalar values:\n\n >>> jnp.fromfunction(f, (2, 3))\n x.shape = ()\n y.shape = ()\n Array([[0., 1., 2.],\n [1., 2., 3.]], dtype=float32)\n """"""\n shape = core.canonicalize_shape(shape, context=""shape argument of jnp.fromfunction()"")\n for i in range(len(shape)):\n in_axes = [0 if i == j else None for j in range(len(shape))]\n function = api.vmap(function, in_axes=tuple(in_axes[::-1]))\n return function(*(arange(s, dtype=dtype) for s in shape), **kwargs)\n\n\n@export\ndef fromstring(string: str, dtype: DTypeLike = float, count: int = -1, *, sep: str) -> Array:\n """"""Convert a string of text into 1-D JAX array.\n\n JAX implementation of :func:`numpy.fromstring`.\n\n Args:\n string: input string containing the data.\n dtype: optional. Desired data type for the array. Default is ``float``.\n count: optional integer specifying the number of items to read from the string.\n If -1 (default), all items are read.\n sep: the string used to separate values in the input string.\n\n Returns:\n A 1-D JAX array containing the parsed data from the input string.\n\n See also:\n - :func:`jax.numpy.frombuffer`: construct a JAX array from an object\n that implements the buffer interface.\n\n Examples:\n >>> jnp.fromstring(""1 2 3"", dtype=int, sep="" "")\n Array([1, 2, 3], dtype=int32)\n >>> jnp.fromstring(""0.1, 0.2, 0.3"", dtype=float, count=2, sep="","")\n Array([0.1, 0.2], dtype=float32)\n """"""\n return asarray(np.fromstring(string=string, dtype=dtype, count=count, sep=sep))\n\n\n@export\ndef eye(N: DimSize, M: DimSize | None = None,\n k: int | ArrayLike = 0,\n dtype: DTypeLike | None = None,\n *, device: xc.Device | Sharding | None = None) -> Array:\n """"""Create a square or rectangular identity matrix\n\n JAX implementation of :func:`numpy.eye`.\n\n Args:\n N: integer specifying the first dimension of the array.\n M: optional integer specifying the second dimension of the array;\n defaults to the same value as ``N``.\n k: optional integer specifying the offset of the diagonal. 
Use positive\n values for upper diagonals, and negative values for lower diagonals.\n Default is zero.\n dtype: optional dtype; defaults to floating point.\n device: optional :class:`~jax.Device` or :class:`~jax.sharding.Sharding`\n to which the created array will be committed.\n\n Returns:\n Identity array of shape ``(N, M)``, or ``(N, N)`` if ``M`` is not specified.\n\n See also:\n :func:`jax.numpy.identity`: Simpler API for generating square identity matrices.\n\n Examples:\n A simple 3x3 identity matrix:\n\n >>> jnp.eye(3)\n Array([[1., 0., 0.],\n [0., 1., 0.],\n [0., 0., 1.]], dtype=float32)\n\n Integer identity matrices with offset diagonals:\n\n >>> jnp.eye(3, k=1, dtype=int)\n Array([[0, 1, 0],\n [0, 0, 1],\n [0, 0, 0]], dtype=int32)\n >>> jnp.eye(3, k=-1, dtype=int)\n Array([[0, 0, 0],\n [1, 0, 0],\n [0, 1, 0]], dtype=int32)\n\n Non-square identity matrix:\n\n >>> jnp.eye(3, 5, k=1)\n Array([[0., 1., 0., 0., 0.],\n [0., 0., 1., 0., 0.],\n [0., 0., 0., 1., 0.]], dtype=float32)\n """"""\n # TODO(vfdev-5): optimize putting the array directly on the device specified\n # instead of putting it on default device and then on the specific device\n output = _eye(N, M=M, k=k, dtype=dtype)\n if device is not None:\n return api.device_put(output, device=device)\n return output\n\n\ndef _eye(N: DimSize, M: DimSize | None = None,\n k: int | ArrayLike = 0,\n dtype: DTypeLike | None = None) -> Array:\n dtypes.check_user_dtype_supported(dtype, ""eye"")\n if isinstance(k, int):\n k = lax_internal._clip_int_to_valid_range(k, np.int32,\n ""`argument `k` of jax.numpy.eye"")\n offset = util.ensure_arraylike(""eye"", k)\n if not (offset.shape == () and dtypes.issubdtype(offset.dtype, np.integer)):\n raise ValueError(f""k must be a scalar integer; got {k}"")\n N_int = core.canonicalize_dim(N, ""argument of 'N' jnp.eye()"")\n M_int = N_int if M is None else core.canonicalize_dim(M, ""argument 'M' of jnp.eye()"")\n if N_int < 0 or M_int < 0:\n raise ValueError(f""negative dimensions are not allowed, got {N} and {M}"")\n i = lax.broadcasted_iota(offset.dtype, (N_int, M_int), 0)\n j = lax.broadcasted_iota(offset.dtype, (N_int, M_int), 1)\n return (i + offset == j).astype(dtype)\n\n\n@export\ndef identity(n: DimSize, dtype: DTypeLike | None = None) -> Array:\n """"""Create a square identity matrix\n\n JAX implementation of :func:`numpy.identity`.\n\n Args:\n n: integer specifying the size of each array dimension.\n dtype: optional dtype; defaults to floating point.\n\n Returns:\n Identity array of shape ``(n, n)``.\n\n See also:\n :func:`jax.numpy.eye`: non-square and/or offset identity matrices.\n\n Examples:\n A simple 3x3 identity matrix:\n\n >>> jnp.identity(3)\n Array([[1., 0., 0.],\n [0., 1., 0.],\n [0., 0., 1.]], dtype=float32)\n\n A 2x2 integer identity matrix:\n\n >>> jnp.identity(2, dtype=int)\n Array([[1, 0],\n [0, 1]], dtype=int32)\n """"""\n dtypes.check_user_dtype_supported(dtype, ""identity"")\n return eye(n, dtype=dtype)\n\n\n@export\ndef arange(start: ArrayLike | DimSize, stop: ArrayLike | DimSize | None = None,\n step: ArrayLike | None = None, dtype: DTypeLike | None = None,\n *, device: xc.Device | Sharding | None = None) -> Array:\n """"""Create an array of evenly-spaced values.\n\n JAX implementation of :func:`numpy.arange`, implemented in terms of\n :func:`jax.lax.iota`.\n\n Similar to Python's :func:`range` function, this can be called with a few\n different positional signatures:\n\n - ``jnp.arange(stop)``: generate values from 0 to ``stop``, stepping by 1.\n - ``jnp.arange(start, 
stop)``: generate values from ``start`` to ``stop``,\n stepping by 1.\n - ``jnp.arange(start, stop, step)``: generate values from ``start`` to ``stop``,\n stepping by ``step``.\n\n Like with Python's :func:`range` function, the starting value is inclusive,\n and the stop value is exclusive.\n\n Args:\n start: start of the interval, inclusive.\n stop: optional end of the interval, exclusive. If not specified, then\n ``(start, stop) = (0, start)``\n step: optional step size for the interval. Default = 1.\n dtype: optional dtype for the returned array; if not specified it will\n be determined via type promotion of `start`, `stop`, and `step`.\n device: (optional) :class:`~jax.Device` or :class:`~jax.sharding.Sharding`\n to which the created array will be committed.\n\n Returns:\n Array of evenly-spaced values from ``start`` to ``stop``, separated by ``step``.\n\n Note:\n Using ``arange`` with a floating-point ``step`` argument can lead to unexpected\n results due to accumulation of floating-point errors, especially with\n lower-precision data types like ``float8_*`` and ``bfloat16``.\n To avoid precision errors, consider generating a range of integers, and scaling\n it to the desired range. For example, instead of this::\n\n jnp.arange(-1, 1, 0.01, dtype='bfloat16')\n\n it can be more accurate to generate a sequence of integers, and scale them::\n\n (jnp.arange(-100, 100) * 0.01).astype('bfloat16')\n\n Examples:\n Single-argument version specifies only the ``stop`` value:\n\n >>> jnp.arange(4)\n Array([0, 1, 2, 3], dtype=int32)\n\n Passing a floating-point ``stop`` value leads to a floating-point result:\n\n >>> jnp.arange(4.0)\n Array([0., 1., 2., 3.], dtype=float32)\n\n Two-argument version specifies ``start`` and ``stop``, with ``step=1``:\n\n >>> jnp.arange(1, 6)\n Array([1, 2, 3, 4, 5], dtype=int32)\n\n Three-argument version specifies ``start``, ``stop``, and ``step``:\n\n >>> jnp.arange(0, 2, 0.5)\n Array([0. , 0.5, 1. 
, 1.5], dtype=float32)\n\n See Also:\n - :func:`jax.numpy.linspace`: generate a fixed number of evenly-spaced values.\n - :func:`jax.lax.iota`: directly generate integer sequences in XLA.\n """"""\n # TODO(vfdev-5): optimize putting the array directly on the device specified\n # instead of putting it on default device and then on the specific device\n output = _arange(start, stop=stop, step=step, dtype=dtype)\n if device is not None:\n return api.device_put(output, device=device)\n return output\n\n\ndef _arange(start: ArrayLike | DimSize, stop: ArrayLike | DimSize | None = None,\n step: ArrayLike | None = None, dtype: DTypeLike | None = None) -> Array:\n dtypes.check_user_dtype_supported(dtype, ""arange"")\n if not config.dynamic_shapes.value:\n util.check_arraylike(""arange"", start)\n if stop is None and step is None:\n start = core.concrete_or_error(None, start, ""It arose in the jnp.arange argument 'stop'"")\n else:\n start = core.concrete_or_error(None, start, ""It arose in the jnp.arange argument 'start'"")\n util.check_arraylike_or_none(""arange"", None, stop, step)\n stop = core.concrete_or_error(None, stop, ""It arose in the jnp.arange argument 'stop'"")\n step = core.concrete_or_error(None, step, ""It arose in the jnp.arange argument 'step'"")\n start_name = ""stop"" if stop is None and step is None else ""start""\n for name, val in [(start_name, start), (""stop"", stop), (""step"", step)]:\n if val is not None and np.ndim(val) != 0:\n raise ValueError(f""jax.numpy.arange: arguments must be scalars; got {name}={val}"")\n if any(core.is_symbolic_dim(v) for v in (start, stop, step)):\n # Some dynamic shapes\n if stop is None and step is None:\n stop = start\n start = 0\n step = 1\n elif stop is not None and step is None:\n step = 1\n return _arange_dynamic(start, stop, step, dtype or dtypes.canonicalize_dtype(np.int64))\n if dtype is None:\n dtype = result_type(start, *(x for x in [stop, step] if x is not None))\n dtype = dtypes.jax_dtype(dtype)\n if stop is None and step is None:\n start_dtype = _dtype(start)\n if (not dtypes.issubdtype(start_dtype, np.integer) and\n not dtypes.issubdtype(start_dtype, dtypes.extended)):\n ceil_ = ufuncs.ceil if isinstance(start, core.Tracer) else np.ceil\n start = ceil_(start).astype(int)\n return lax.iota(dtype, start) # type: ignore[arg-type]\n else:\n if step is None and start == 0 and stop is not None:\n return lax.iota(dtype, np.ceil(stop).astype(int))\n return array(np.arange(start, stop=stop, step=step, dtype=dtype))\n\n\ndef _arange_dynamic(\n start: DimSize, stop: DimSize, step: DimSize, dtype: DTypeLike) -> Array:\n # Here if at least one of start, stop, step are dynamic.\n if any(not core.is_dim(v) for v in (start, stop, step)):\n raise ValueError(\n ""In arange with non-constant arguments all of start, stop, and step ""\n f""must be either dimension expressions or integers: start={start}, ""\n f""stop={stop}, step={step}"")\n # Must resolve statically if step is {<0, ==0, >0}\n try:\n if step == 0:\n raise ValueError(""arange has step == 0"")\n step_gt_0 = (step > 0)\n except core.InconclusiveDimensionOperation as e:\n raise core.InconclusiveDimensionOperation(\n f""In arange with non-constant arguments the step ({step}) must "" +\n f""be resolved statically if it is > 0 or < 0.\nDetails: {e}"")\n gap = step if step_gt_0 else - step\n distance = (stop - start) if step_gt_0 else (start - stop)\n size = core.max_dim(0, distance + gap - 1) // gap\n return (array(start, dtype=dtype) +\n array(step, dtype=dtype) * lax.iota(dtype, 
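# --- Sketch of the size computation in `_arange_dynamic` above, specialised to
# concrete Python ints (the helper name `arange_size` is illustrative): the number
# of samples is ceil(distance / |step|), clipped at zero.
def arange_size(start, stop, step):
    assert step != 0
    gap = step if step > 0 else -step
    distance = (stop - start) if step > 0 else (start - stop)
    return max(0, distance + gap - 1) // gap

assert arange_size(0, 10, 3) == 4    # 0, 3, 6, 9
assert arange_size(10, 0, -3) == 4   # 10, 7, 4, 1
assert arange_size(5, 5, 1) == 0     # empty range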
size))\n\n\n@export\ndef meshgrid(*xi: ArrayLike, copy: bool = True, sparse: bool = False,\n indexing: str = 'xy') -> list[Array]:\n """"""Construct N-dimensional grid arrays from N 1-dimensional vectors.\n\n JAX implementation of :func:`numpy.meshgrid`.\n\n Args:\n xi: N arrays to convert to a grid.\n copy: whether to copy the input arrays. JAX supports only ``copy=True``,\n though under JIT compilation the compiler may opt to avoid copies.\n sparse: if False (default), then each returned arrays will be of shape\n ``[len(x1), len(x2), ..., len(xN)]``. If False, then returned arrays\n will be of shape ``[1, 1, ..., len(xi), ..., 1, 1]``.\n indexing: options are ``'xy'`` for cartesian indexing (default) or ``'ij'``\n for matrix indexing.\n\n Returns:\n A length-N list of grid arrays.\n\n See also:\n - :func:`jax.numpy.indices`: generate a grid of indices.\n - :obj:`jax.numpy.mgrid`: create a meshgrid using indexing syntax.\n - :obj:`jax.numpy.ogrid`: create an open meshgrid using indexing syntax.\n\n Examples:\n For the following examples, we'll use these 1D arrays as inputs:\n\n >>> x = jnp.array([1, 2])\n >>> y = jnp.array([10, 20, 30])\n\n 2D cartesian mesh grid:\n\n >>> x_grid, y_grid = jnp.meshgrid(x, y)\n >>> print(x_grid)\n [[1 2]\n [1 2]\n [1 2]]\n >>> print(y_grid)\n [[10 10]\n [20 20]\n [30 30]]\n\n 2D sparse cartesian mesh grid:\n\n >>> x_grid, y_grid = jnp.meshgrid(x, y, sparse=True)\n >>> print(x_grid)\n [[1 2]]\n >>> print(y_grid)\n [[10]\n [20]\n [30]]\n\n 2D matrix-index mesh grid:\n\n >>> x_grid, y_grid = jnp.meshgrid(x, y, indexing='ij')\n >>> print(x_grid)\n [[1 1 1]\n [2 2 2]]\n >>> print(y_grid)\n [[10 20 30]\n [10 20 30]]\n """"""\n args = list(util.ensure_arraylike_tuple(""meshgrid"", tuple(xi)))\n if not copy:\n raise ValueError(""jax.numpy.meshgrid only supports copy=True"")\n if indexing not in [""xy"", ""ij""]:\n raise ValueError(f""Valid values for indexing are 'xy' and 'ij', got {indexing}"")\n if any(a.ndim != 1 for a in args):\n raise ValueError(""Arguments to jax.numpy.meshgrid must be 1D, got shapes ""\n f""{[a.shape for a in args]}"")\n if indexing == ""xy"" and len(args) >= 2:\n args[0], args[1] = args[1], args[0]\n shape = [1 if sparse else a.shape[0] for a in args]\n _a_shape = lambda i, a: [*shape[:i], a.shape[0], *shape[i + 1:]] if sparse else shape\n output = [lax.broadcast_in_dim(a, _a_shape(i, a), (i,)) for i, a, in enumerate(args)]\n if indexing == ""xy"" and len(args) >= 2:\n output[0], output[1] = output[1], output[0]\n return output\n\n\n@export\n@jit\ndef i0(x: ArrayLike) -> Array:\n r""""""Calculate modified Bessel function of first kind, zeroth order.\n\n JAX implementation of :func:`numpy.i0`.\n\n Modified Bessel function of first kind, zeroth order is defined by:\n\n .. math::\n\n \mathrm{i0}(x) = I_0(x) = \sum_{k=0}^{\infty} \frac{(x^2/4)^k}{(k!)^2}\n\n Args:\n x: scalar or array. Specifies the argument of Bessel function. 
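# --- Sketch relating the two `meshgrid` modes documented above: the dense grids
# are just the sparse ones broadcast against each other (xy indexing assumed).
import jax.numpy as jnp

x = jnp.array([1, 2])
y = jnp.array([10, 20, 30])
xs, ys = jnp.meshgrid(x, y, sparse=True)   # shapes (1, 2) and (3, 1)
xd, yd = jnp.meshgrid(x, y)                # both shape (3, 2)
assert (xd == jnp.broadcast_to(xs, (3, 2))).all()
assert (yd == jnp.broadcast_to(ys, (3, 2))).all()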
Complex inputs\n are not supported.\n\n Returns:\n An array containing the corresponding values of the modified Bessel function\n of ``x``.\n\n See also:\n - :func:`jax.scipy.special.i0`: Calculates the modified Bessel function of\n zeroth order.\n - :func:`jax.scipy.special.i1`: Calculates the modified Bessel function of\n first order.\n - :func:`jax.scipy.special.i0e`: Calculates the exponentially scaled modified\n Bessel function of zeroth order.\n\n Examples:\n >>> x = jnp.array([-2, -1, 0, 1, 2])\n >>> jnp.i0(x)\n Array([2.2795851, 1.266066 , 1.0000001, 1.266066 , 2.2795851], dtype=float32)\n """"""\n x_arr, = util.promote_args_inexact(""i0"", x)\n if not issubdtype(x_arr.dtype, np.floating):\n raise ValueError(f""Unsupported input type to jax.numpy.i0: {_dtype(x)}"")\n return _i0(x_arr)\n\n\n@custom_jvp\ndef _i0(x):\n abs_x = lax.abs(x)\n return lax.mul(lax.exp(abs_x), lax.bessel_i0e(abs_x))\n\n@_i0.defjvp\ndef _i0_jvp(primals, tangents):\n primal_out, tangent_out = api.jvp(_i0.fun, primals, tangents)\n return primal_out, where(primals[0] == 0, 0.0, tangent_out)\n\n@export\ndef ix_(*args: ArrayLike) -> tuple[Array, ...]:\n """"""Return a multi-dimensional grid (open mesh) from N one-dimensional sequences.\n\n JAX implementation of :func:`numpy.ix_`.\n\n Args:\n *args: N one-dimensional arrays\n\n Returns:\n Tuple of Jax arrays forming an open mesh, each with N dimensions.\n\n See Also:\n - :obj:`jax.numpy.ogrid`\n - :obj:`jax.numpy.mgrid`\n - :func:`jax.numpy.meshgrid`\n\n Examples:\n >>> rows = jnp.array([0, 2])\n >>> cols = jnp.array([1, 3])\n >>> open_mesh = jnp.ix_(rows, cols)\n >>> open_mesh\n (Array([[0],\n [2]], dtype=int32), Array([[1, 3]], dtype=int32))\n >>> [grid.shape for grid in open_mesh]\n [(2, 1), (1, 2)]\n >>> x = jnp.array([[10, 20, 30, 40],\n ... [50, 60, 70, 80],\n ... [90, 100, 110, 120],\n ... [130, 140, 150, 160]])\n >>> x[open_mesh]\n Array([[ 20, 40],\n [100, 120]], dtype=int32)\n """"""\n args = util.ensure_arraylike_tuple(""ix"", args)\n n = len(args)\n output = []\n for i, a in enumerate(args):\n if len(a.shape) != 1:\n msg = ""Arguments to jax.numpy.ix_ must be 1-dimensional, got shape {}""\n raise ValueError(msg.format(a.shape))\n if _dtype(a) == bool:\n raise NotImplementedError(\n ""Boolean arguments to jax.numpy.ix_ are not implemented"")\n shape = [1] * n\n shape[i] = a.shape[0]\n if a.size == 0:\n # Numpy uses an integer index type for empty arrays.\n output.append(lax.full(shape, np.zeros((), np.intp)))\n else:\n output.append(lax.broadcast_in_dim(a, shape, (i,)))\n return tuple(output)\n\n\n@overload\ndef indices(dimensions: Sequence[int], dtype: DTypeLike | None = None,\n sparse: Literal[False] = False) -> Array: ...\n@overload\ndef indices(dimensions: Sequence[int], dtype: DTypeLike | None = None,\n *, sparse: Literal[True]) -> tuple[Array, ...]: ...\n@overload\ndef indices(dimensions: Sequence[int], dtype: DTypeLike | None = None,\n sparse: bool = False) -> Array | tuple[Array, ...]: ...\n@export\ndef indices(dimensions: Sequence[int], dtype: DTypeLike | None = None,\n sparse: bool = False) -> Array | tuple[Array, ...]:\n """"""Generate arrays of grid indices.\n\n JAX implementation of :func:`numpy.indices`.\n\n Args:\n dimensions: the shape of the grid.\n dtype: the dtype of the indices (defaults to integer).\n sparse: if True, then return sparse indices. 
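# --- Sketch of the identity used by `_i0` above: I0(x) = exp(|x|) * i0e(|x|),
# where i0e is the exponentially scaled modified Bessel function.
import jax.numpy as jnp
from jax.scipy.special import i0e

x = jnp.array([-2.0, -1.0, 0.0, 1.0, 2.0])
manual = jnp.exp(jnp.abs(x)) * i0e(jnp.abs(x))
assert jnp.allclose(manual, jnp.i0(x))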
Default is False, which\n returns dense indices.\n\n Returns:\n An array of shape ``(len(dimensions), *dimensions)`` If ``sparse`` is False,\n or a sequence of arrays of the same length as ``dimensions`` if ``sparse`` is True.\n\n See also:\n - :func:`jax.numpy.meshgrid`: generate a grid from arbitrary input arrays.\n - :obj:`jax.numpy.mgrid`: generate dense indices using a slicing syntax.\n - :obj:`jax.numpy.ogrid`: generate sparse indices using a slicing syntax.\n\n Examples:\n >>> jnp.indices((2, 3))\n Array([[[0, 0, 0],\n [1, 1, 1]],\n \n [[0, 1, 2],\n [0, 1, 2]]], dtype=int32)\n >>> jnp.indices((2, 3), sparse=True)\n (Array([[0],\n [1]], dtype=int32), Array([[0, 1, 2]], dtype=int32))\n """"""\n dtypes.check_user_dtype_supported(dtype, ""indices"")\n dtype = dtype or dtypes.canonicalize_dtype(dtypes.int_)\n dimensions = tuple(\n core.concrete_or_error(operator.index, d, ""dimensions argument of jnp.indices"")\n for d in dimensions)\n N = len(dimensions)\n output = []\n s = dimensions\n for i, dim in enumerate(dimensions):\n idx = lax.iota(dtype, dim)\n if sparse:\n s = (1,)*i + (dim,) + (1,)*(N - i - 1)\n output.append(lax.broadcast_in_dim(idx, s, (i,)))\n if sparse:\n return tuple(output)\n return stack(output, 0) if output else array([], dtype=dtype)\n\n\n@export\ndef repeat(a: ArrayLike, repeats: ArrayLike, axis: int | None = None, *,\n total_repeat_length: int | None = None,\n out_sharding: NamedSharding | P | None = None) -> Array:\n """"""Construct an array from repeated elements.\n\n JAX implementation of :func:`numpy.repeat`.\n\n Args:\n a: N-dimensional array\n repeats: 1D integer array specifying the number of repeats. Must match the\n length of the repeated axis.\n axis: integer specifying the axis of ``a`` along which to construct the\n repeated array. If None (default) then ``a`` is first flattened.\n total_repeat_length: this must be specified statically for ``jnp.repeat``\n to be compatible with :func:`~jax.jit` and other JAX transformations.\n If ``sum(repeats)`` is larger than the specified ``total_repeat_length``,\n the remaining values will be discarded. If ``sum(repeats)`` is smaller\n than ``total_repeat_length``, the final value will be repeated.\n\n Returns:\n an array constructed from repeated values of ``a``.\n\n See Also:\n - :func:`jax.numpy.tile`: repeat a full array rather than individual values.\n\n Examples:\n Repeat each value twice along the last axis:\n\n >>> a = jnp.array([[1, 2],\n ... 
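# --- Sketch of the dense/sparse relationship in `indices` above: broadcasting the
# sparse index arrays reproduces the dense grid.
import jax.numpy as jnp

dense = jnp.indices((2, 3))                       # shape (2, 2, 3)
rows, cols = jnp.indices((2, 3), sparse=True)     # shapes (2, 1) and (1, 3)
assert (dense[0] == jnp.broadcast_to(rows, (2, 3))).all()
assert (dense[1] == jnp.broadcast_to(cols, (2, 3))).all()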
[3, 4]])\n >>> jnp.repeat(a, 2, axis=-1)\n Array([[1, 1, 2, 2],\n [3, 3, 4, 4]], dtype=int32)\n\n If ``axis`` is not specified, the input array will be flattened:\n\n >>> jnp.repeat(a, 2)\n Array([1, 1, 2, 2, 3, 3, 4, 4], dtype=int32)\n\n Pass an array to ``repeats`` to repeat each value a different number of times:\n\n >>> repeats = jnp.array([2, 3])\n >>> jnp.repeat(a, repeats, axis=1)\n Array([[1, 1, 2, 2, 2],\n [3, 3, 4, 4, 4]], dtype=int32)\n\n In order to use ``repeat`` within ``jit`` and other JAX transformations, the\n size of the output must be specified statically using ``total_repeat_length``:\n\n >>> jit_repeat = jax.jit(jnp.repeat, static_argnames=['axis', 'total_repeat_length'])\n >>> jit_repeat(a, repeats, axis=1, total_repeat_length=5)\n Array([[1, 1, 2, 2, 2],\n [3, 3, 4, 4, 4]], dtype=int32)\n\n If `total_repeat_length` is smaller than ``sum(repeats)``, the result will be truncated:\n\n >>> jit_repeat(a, repeats, axis=1, total_repeat_length=4)\n Array([[1, 1, 2, 2],\n [3, 3, 4, 4]], dtype=int32)\n\n If it is larger, then the additional entries will be filled with the final value:\n\n >>> jit_repeat(a, repeats, axis=1, total_repeat_length=7)\n Array([[1, 1, 2, 2, 2, 2, 2],\n [3, 3, 4, 4, 4, 4, 4]], dtype=int32)\n """"""\n if out_sharding is not None:\n return _auto_repeat(_repeat, a, repeats, axis, total_repeat_length,\n out_sharding)\n ctx_mesh = get_abstract_mesh()\n if ctx_mesh._are_all_axes_explicit:\n aval = core.typeof(a)\n if axis is None or aval.sharding.spec[axis] is not None:\n raise ValueError(\n ""Please pass sharding to `jnp.repeat` via `out_sharding` parameter."")\n assert axis is not None and aval.sharding.spec[axis] is None\n out_sharding = (NamedSharding(ctx_mesh, P())\n if aval.sharding.mesh.empty else aval.sharding)\n return _auto_repeat(_repeat, a, repeats, axis, total_repeat_length,\n out_sharding)\n try:\n return _repeat(a, repeats=repeats, axis=axis,\n total_repeat_length=total_repeat_length)\n except core.ShardingTypeError as e:\n raise ValueError(\n ""Please pass sharding to `jnp.repeat` via `out_sharding` parameter."")\n\ndef _auto_repeat(fun, a, repeats, axis, total_repeat_length, out_sharding):\n if total_repeat_length is None:\n return auto_axes(partial(fun, repeats=repeats, axis=axis,\n total_repeat_length=total_repeat_length),\n out_sharding=out_sharding)(a)\n else:\n return auto_axes(\n partial(fun, axis=axis, total_repeat_length=total_repeat_length),\n out_sharding=out_sharding)(a, repeats=repeats)\n\ndef _repeat(a: ArrayLike, *, repeats: ArrayLike, axis: int | None = None,\n total_repeat_length: int | None = None) -> Array:\n if core.is_dim(repeats):\n util.check_arraylike(""repeat"", a)\n else:\n util.check_arraylike(""repeat"", a, repeats)\n arr = asarray(a)\n\n if axis is None:\n arr = arr.ravel()\n axis = 0\n\n axis = core.concrete_or_error(operator.index, axis, ""'axis' argument of jnp.repeat()"")\n assert isinstance(axis, int) # to appease mypy\n\n if core.is_symbolic_dim(repeats):\n if total_repeat_length is not None:\n raise ValueError(""jnp.repeat with a non-constant `repeats` is supported only ""\n f""when `total_repeat_length` is None. ({repeats=} {total_repeat_length=})"")\n\n # If total_repeat_length is not given, use a default.\n if total_repeat_length is None:\n repeats = core.concrete_or_error(None, repeats,\n ""When jit-compiling jnp.repeat, the total number of repeats must be static. 
""\n ""To fix this, either specify a static value for `repeats`, or pass a static ""\n ""value to `total_repeat_length`."")\n\n # Fast path for when repeats is a scalar.\n if np.ndim(repeats) == 0 and np.ndim(arr) != 0:\n input_shape = arr.shape\n axis = _canonicalize_axis(axis, len(input_shape))\n aux_axis = axis + 1\n aux_shape: list[DimSize] = list(input_shape)\n aux_shape.insert(aux_axis, operator.index(repeats) if core.is_constant_dim(repeats) else repeats) # type: ignore\n arr = lax.broadcast_in_dim(\n arr, aux_shape, [i for i in range(len(aux_shape)) if i != aux_axis])\n result_shape: list[DimSize] = list(input_shape)\n result_shape[axis] *= repeats\n return arr.reshape(result_shape)\n\n repeats = np.ravel(repeats)\n if arr.ndim != 0:\n repeats = np.broadcast_to(repeats, [arr.shape[axis]])\n total_repeat_length = np.sum(repeats)\n else:\n repeats = ravel(repeats)\n if arr.ndim != 0:\n repeats = broadcast_to(repeats, [arr.shape[axis]])\n\n # Special case when a is a scalar.\n if arr.ndim == 0:\n if np.shape(repeats) == (1,):\n return full([total_repeat_length], arr)\n else:\n raise ValueError('`repeat` with a scalar parameter `a` is only '\n 'implemented for scalar values of the parameter `repeats`.')\n\n # Special case if total_repeat_length is zero.\n if total_repeat_length == 0:\n result_shape = list(arr.shape)\n result_shape[axis] = 0\n return reshape(array([], dtype=arr.dtype), result_shape)\n\n # If repeats is on a zero sized axis, then return the array.\n if arr.shape[axis] == 0:\n return arr\n\n # This implementation of repeat avoid having to instantiate a large.\n # intermediate tensor.\n\n # Modify repeats from e.g. [1,2,0,5] -> [0,1,2,0] for exclusive repeat.\n exclusive_repeats = roll(repeats, shift=1).at[0].set(0)\n # Cumsum to get indices of new number in repeated tensor, e.g. [0, 1, 3, 3]\n scatter_indices = reductions.cumsum(exclusive_repeats)\n # Scatter these onto a zero buffer, e.g. [1,1,0,2,0,0,0,0]\n block_split_indicators = zeros([total_repeat_length], dtype='int32')\n block_split_indicators = block_split_indicators.at[scatter_indices].add(1)\n # Cumsum again to get scatter indices for repeat, e.g. [0,1,1,3,3,3,3,3]\n gather_indices = reductions.cumsum(block_split_indicators) - 1\n return indexing.take(arr, gather_indices, axis=axis)\n\n\n@export\n@partial(jit, static_argnames=('axis',))\ndef trapezoid(y: ArrayLike, x: ArrayLike | None = None, dx: ArrayLike = 1.0,\n axis: int = -1) -> Array:\n r""""""\n Integrate along the given axis using the composite trapezoidal rule.\n\n JAX implementation of :func:`numpy.trapezoid`\n\n The trapezoidal rule approximates the integral under a curve by summing the\n areas of trapezoids formed between adjacent data points.\n\n Args:\n y: array of data to integrate.\n x: optional array of sample points corresponding to the ``y`` values. 
If not\n provided, ``x`` defaults to equally spaced with spacing given by ``dx``.\n dx: The spacing between sample points when `x` is None (default: 1.0).\n axis: The axis along which to integrate (default: -1)\n\n Returns:\n The definite integral approximated by the trapezoidal rule.\n\n Examples:\n Integrate over a regular grid, with spacing 1.0:\n\n >>> y = jnp.array([1, 2, 3, 2, 3, 2, 1])\n >>> jnp.trapezoid(y, dx=1.0)\n Array(13., dtype=float32)\n\n Integrate over an irregular grid:\n\n >>> x = jnp.array([0, 2, 5, 7, 10, 15, 20])\n >>> jnp.trapezoid(y, x)\n Array(43., dtype=float32)\n\n Approximate :math:`\int_0^{2\pi} \sin^2(x)dx`, which equals :math:`\pi`:\n\n >>> x = jnp.linspace(0, 2 * jnp.pi, 1000)\n >>> y = jnp.sin(x) ** 2\n >>> result = jnp.trapezoid(y, x)\n >>> jnp.allclose(result, jnp.pi)\n Array(True, dtype=bool)\n """"""\n # TODO(phawkins): remove this annotation after fixing jnp types.\n dx_array: Array\n if x is None:\n y = util.ensure_arraylike('trapezoid', y)\n y_arr, = util.promote_dtypes_inexact(y)\n dx_array = asarray(dx)\n else:\n y, x = util.ensure_arraylike('trapezoid', y, x)\n y_arr, x_arr = util.promote_dtypes_inexact(y, x)\n if x_arr.ndim == 1:\n dx_array = diff(x_arr)\n else:\n dx_array = moveaxis(diff(x_arr, axis=axis), axis, -1)\n y_arr = moveaxis(y_arr, axis, -1)\n return 0.5 * (dx_array * (y_arr[..., 1:] + y_arr[..., :-1])).sum(-1)\n\n\n@export\ndef tri(N: int, M: int | None = None, k: int = 0, dtype: DTypeLike | None = None) -> Array:\n r""""""Return an array with ones on and below the diagonal and zeros elsewhere.\n\n JAX implementation of :func:`numpy.tri`\n\n Args:\n N: int. Dimension of the rows of the returned array.\n M: optional, int. Dimension of the columns of the returned array. If not\n specified, then ``M = N``.\n k: optional, int, default=0. Specifies the sub-diagonal on and below which\n the array is filled with ones. ``k=0`` refers to main diagonal, ``k<0``\n refers to sub-diagonal below the main diagonal and ``k>0`` refers to\n sub-diagonal above the main diagonal.\n dtype: optional, data type of the returned array. The default type is float.\n\n Returns:\n An array of shape ``(N, M)`` containing the lower triangle with elements\n below the sub-diagonal specified by ``k`` are set to one and zero elsewhere.\n\n See also:\n - :func:`jax.numpy.tril`: Returns a lower triangle of an array.\n - :func:`jax.numpy.triu`: Returns an upper triangle of an array.\n\n Examples:\n >>> jnp.tri(3)\n Array([[1., 0., 0.],\n [1., 1., 0.],\n [1., 1., 1.]], dtype=float32)\n\n When ``M`` is not equal to ``N``:\n\n >>> jnp.tri(3, 4)\n Array([[1., 0., 0., 0.],\n [1., 1., 0., 0.],\n [1., 1., 1., 0.]], dtype=float32)\n\n when ``k>0``:\n\n >>> jnp.tri(3, k=1)\n Array([[1., 1., 0.],\n [1., 1., 1.],\n [1., 1., 1.]], dtype=float32)\n\n When ``k<0``:\n\n >>> jnp.tri(3, 4, k=-1)\n Array([[0., 0., 0., 0.],\n [1., 0., 0., 0.],\n [1., 1., 0., 0.]], dtype=float32)\n """"""\n dtypes.check_user_dtype_supported(dtype, ""tri"")\n M = M if M is not None else N\n dtype = dtype or np.dtype('float32')\n return lax_internal._tri(dtype, (N, M), k)\n\n\n@export\n@partial(jit, static_argnames=('k',))\ndef tril(m: ArrayLike, k: int = 0) -> Array:\n r""""""Return lower triangle of an array.\n\n JAX implementation of :func:`numpy.tril`\n\n Args:\n m: input array. Must have ``m.ndim >= 2``.\n k: k: optional, int, default=0. Specifies the sub-diagonal above which the\n elements of the array are set to zero. 
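# --- Sketch of the reduction performed by `trapezoid` above:
# 0.5 * sum(dx_i * (y_i + y_{i+1})) over consecutive sample pairs.
import jax.numpy as jnp

x = jnp.array([0., 2., 5., 7., 10., 15., 20.])
y = jnp.array([1., 2., 3., 2., 3., 2., 1.])
manual = 0.5 * (jnp.diff(x) * (y[1:] + y[:-1])).sum()
assert jnp.allclose(manual, jnp.trapezoid(y, x))   # both equal 43.0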
``k=0`` refers to main diagonal,\n ``k<0`` refers to sub-diagonal below the main diagonal and ``k>0`` refers\n to sub-diagonal above the main diagonal.\n\n Returns:\n An array with same shape as input containing the lower triangle of the given\n array with elements above the sub-diagonal specified by ``k`` are set to\n zero.\n\n See also:\n - :func:`jax.numpy.triu`: Returns an upper triangle of an array.\n - :func:`jax.numpy.tri`: Returns an array with ones on and below the\n diagonal and zeros elsewhere.\n\n Examples:\n >>> x = jnp.array([[1, 2, 3, 4],\n ... [5, 6, 7, 8],\n ... [9, 10, 11, 12]])\n >>> jnp.tril(x)\n Array([[ 1, 0, 0, 0],\n [ 5, 6, 0, 0],\n [ 9, 10, 11, 0]], dtype=int32)\n >>> jnp.tril(x, k=1)\n Array([[ 1, 2, 0, 0],\n [ 5, 6, 7, 0],\n [ 9, 10, 11, 12]], dtype=int32)\n >>> jnp.tril(x, k=-1)\n Array([[ 0, 0, 0, 0],\n [ 5, 0, 0, 0],\n [ 9, 10, 0, 0]], dtype=int32)\n\n When ``m.ndim > 2``, ``jnp.tril`` operates batch-wise on the trailing axes.\n\n >>> x1 = jnp.array([[[1, 2],\n ... [3, 4]],\n ... [[5, 6],\n ... [7, 8]]])\n >>> jnp.tril(x1)\n Array([[[1, 0],\n [3, 4]],\n \n [[5, 0],\n [7, 8]]], dtype=int32)\n """"""\n m = util.ensure_arraylike(""tril"", m)\n m_shape = np.shape(m)\n if len(m_shape) < 2:\n raise ValueError(""Argument to jax.numpy.tril must be at least 2D"")\n N, M = m_shape[-2:]\n mask = tri(N, M, k=k, dtype=bool)\n return lax.select(lax.broadcast(mask, m_shape[:-2]), m, zeros_like(m))\n\n\n@export\n@partial(jit, static_argnames=('k',))\ndef triu(m: ArrayLike, k: int = 0) -> Array:\n r""""""Return upper triangle of an array.\n\n JAX implementation of :func:`numpy.triu`\n\n Args:\n m: input array. Must have ``m.ndim >= 2``.\n k: optional, int, default=0. Specifies the sub-diagonal below which the\n elements of the array are set to zero. ``k=0`` refers to main diagonal,\n ``k<0`` refers to sub-diagonal below the main diagonal and ``k>0`` refers\n to sub-diagonal above the main diagonal.\n\n Returns:\n An array with same shape as input containing the upper triangle of the given\n array with elements below the sub-diagonal specified by ``k`` are set to\n zero.\n\n See also:\n - :func:`jax.numpy.tril`: Returns a lower triangle of an array.\n - :func:`jax.numpy.tri`: Returns an array with ones on and below the\n diagonal and zeros elsewhere.\n\n Examples:\n >>> x = jnp.array([[1, 2, 3],\n ... [4, 5, 6],\n ... [7, 8, 9],\n ... [10, 11, 12]])\n >>> jnp.triu(x)\n Array([[1, 2, 3],\n [0, 5, 6],\n [0, 0, 9],\n [0, 0, 0]], dtype=int32)\n >>> jnp.triu(x, k=1)\n Array([[0, 2, 3],\n [0, 0, 6],\n [0, 0, 0],\n [0, 0, 0]], dtype=int32)\n >>> jnp.triu(x, k=-1)\n Array([[ 1, 2, 3],\n [ 4, 5, 6],\n [ 0, 8, 9],\n [ 0, 0, 12]], dtype=int32)\n\n When ``m.ndim > 2``, ``jnp.triu`` operates batch-wise on the trailing axes.\n\n >>> x1 = jnp.array([[[1, 2],\n ... [3, 4]],\n ... [[5, 6],\n ... 
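# --- Sketch of the mask-and-select step used by `tril` above: build a boolean
# lower-triangular mask with `tri` and zero out everything above it.
import jax.numpy as jnp

x = jnp.arange(1, 13).reshape(3, 4)
mask = jnp.tri(3, 4, k=0, dtype=bool)
assert (jnp.where(mask, x, 0) == jnp.tril(x)).all()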
[7, 8]]])\n >>> jnp.triu(x1)\n Array([[[1, 2],\n [0, 4]],\n \n [[5, 6],\n [0, 8]]], dtype=int32)\n """"""\n m = util.ensure_arraylike(""triu"", m)\n m_shape = np.shape(m)\n if len(m_shape) < 2:\n raise ValueError(""Argument to jax.numpy.triu must be at least 2D"")\n N, M = m_shape[-2:]\n mask = tri(N, M, k=k - 1, dtype=bool)\n return lax.select(lax.broadcast(mask, m_shape[:-2]), zeros_like(m), m)\n\n\n@export\n@partial(jit, static_argnames=('axis1', 'axis2', 'dtype'))\ndef trace(a: ArrayLike, offset: int | ArrayLike = 0, axis1: int = 0, axis2: int = 1,\n dtype: DTypeLike | None = None, out: None = None) -> Array:\n """"""Calculate sum of the diagonal of input along the given axes.\n\n JAX implementation of :func:`numpy.trace`.\n\n Args:\n a: input array. Must have ``a.ndim >= 2``.\n offset: optional, int, default=0. Diagonal offset from the main diagonal.\n Can be positive or negative.\n axis1: optional, default=0. The first axis along which to take the sum of\n diagonal. Must be a static integer value.\n axis2: optional, default=1. The second axis along which to take the sum of\n diagonal. Must be a static integer value.\n dtype: optional. The dtype of the output array. Should be provided as static\n argument in JIT compilation.\n out: Not used by JAX.\n\n Returns:\n An array of dimension x.ndim-2 containing the sum of the diagonal elements\n along axes (axis1, axis2)\n\n See also:\n - :func:`jax.numpy.diag`: Returns the specified diagonal or constructs a diagonal\n array\n - :func:`jax.numpy.diagonal`: Returns the specified diagonal of an array.\n - :func:`jax.numpy.diagflat`: Returns a 2-D array with the flattened input array\n laid out on the diagonal.\n\n Examples:\n >>> x = jnp.arange(1, 9).reshape(2, 2, 2)\n >>> x\n Array([[[1, 2],\n [3, 4]],\n \n [[5, 6],\n [7, 8]]], dtype=int32)\n >>> jnp.trace(x)\n Array([ 8, 10], dtype=int32)\n >>> jnp.trace(x, offset=1)\n Array([3, 4], dtype=int32)\n >>> jnp.trace(x, axis1=1, axis2=2)\n Array([ 5, 13], dtype=int32)\n >>> jnp.trace(x, offset=1, axis1=1, axis2=2)\n Array([2, 6], dtype=int32)\n """"""\n a = util.ensure_arraylike(""trace"", a)\n if out is not None:\n raise NotImplementedError(""The 'out' argument to jnp.trace is not supported."")\n\n if _canonicalize_axis(axis1, np.ndim(a)) == _canonicalize_axis(axis2, np.ndim(a)):\n raise ValueError(f""axis1 and axis2 can not be same. 
axis1={axis1} and axis2={axis2}"")\n\n dtypes.check_user_dtype_supported(dtype, ""trace"")\n\n a_shape = np.shape(a)\n a = moveaxis(a, (axis1, axis2), (-2, -1))\n\n # Mask out the diagonal and reduce.\n a = where(eye(a_shape[axis1], a_shape[axis2], k=offset, dtype=bool),\n a, zeros_like(a))\n return reductions.sum(a, axis=(-2, -1), dtype=dtype)\n\n\n@export\ndef mask_indices(n: int,\n mask_func: Callable[[ArrayLike, int], Array],\n k: int = 0, *, size: int | None = None) -> tuple[Array, Array]:\n """"""Return indices of a mask of an (n, n) array.\n\n Args:\n n: static integer array dimension.\n mask_func: a function that takes a shape ``(n, n)`` array and\n an optional offset ``k``, and returns a shape ``(n, n)`` mask.\n Examples of functions with this signature are\n :func:`~jax.numpy.triu` and :func:`~jax.numpy.tril`.\n k: a scalar value passed to ``mask_func``.\n size: optional argument specifying the static size of the output arrays.\n This is passed to :func:`~jax.numpy.nonzero` when generating the indices\n from the mask.\n\n Returns:\n a tuple of indices where ``mask_func`` is nonzero.\n\n See also:\n - :func:`jax.numpy.triu_indices`: compute ``mask_indices`` for :func:`~jax.numpy.triu`.\n - :func:`jax.numpy.tril_indices`: compute ``mask_indices`` for :func:`~jax.numpy.tril`.\n\n Examples:\n Calling ``mask_indices`` on built-in masking functions:\n\n >>> jnp.mask_indices(3, jnp.triu)\n (Array([0, 0, 0, 1, 1, 2], dtype=int32), Array([0, 1, 2, 1, 2, 2], dtype=int32))\n\n >>> jnp.mask_indices(3, jnp.tril)\n (Array([0, 1, 1, 2, 2, 2], dtype=int32), Array([0, 0, 1, 0, 1, 2], dtype=int32))\n\n Calling ``mask_indices`` on a custom masking function:\n\n >>> def mask_func(x, k=0):\n ... i = jnp.arange(x.shape[0])[:, None]\n ... j = jnp.arange(x.shape[1])\n ... return (i + 1) % (j + 1 + k) == 0\n >>> mask_func(jnp.ones((3, 3)))\n Array([[ True, False, False],\n [ True, True, False],\n [ True, False, True]], dtype=bool)\n >>> jnp.mask_indices(3, mask_func)\n (Array([0, 1, 1, 2, 2], dtype=int32), Array([0, 0, 1, 0, 2], dtype=int32))\n """"""\n i, j = nonzero(mask_func(ones((n, n)), k), size=size)\n return (i, j)\n\n\ndef _triu_size(n, m, k):\n if k < 0:\n return n * m - _triu_size(m, n, (1 - k))\n elif k >= m:\n return 0\n else:\n mk = min(n, m - k)\n return mk * (mk + 1) // 2 + mk * (m - k - mk)\n\n\n@export\ndef triu_indices(n: int, k: int = 0, m: int | None = None) -> tuple[Array, Array]:\n """"""Return the indices of upper triangle of an array of size ``(n, m)``.\n\n JAX implementation of :func:`numpy.triu_indices`.\n\n Args:\n n: int. Number of rows of the array for which the indices are returned.\n k: optional, int, default=0. Specifies the sub-diagonal on and above which\n the indices of upper triangle are returned. ``k=0`` refers to main diagonal,\n ``k<0`` refers to sub-diagonal below the main diagonal and ``k>0`` refers\n to sub-diagonal above the main diagonal.\n m: optional, int. Number of columns of the array for which the indices are\n returned. 
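# --- Sketch of the mask-and-sum used by `trace` above: select the offset-k
# diagonal with a boolean `eye` mask, zero the rest, and reduce.
import jax.numpy as jnp

x = jnp.arange(1, 10).reshape(3, 3)
manual = jnp.where(jnp.eye(3, 3, k=1, dtype=bool), x, 0).sum()
assert manual == jnp.trace(x, offset=1)   # 2 + 6 == 8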
If not specified, then ``m = n``.\n\n Returns:\n A tuple of two arrays containing the indices of the upper triangle, one along\n each axis.\n\n See also:\n - :func:`jax.numpy.tril_indices`: Returns the indices of lower triangle of an\n array of size ``(n, m)``.\n - :func:`jax.numpy.triu_indices_from`: Returns the indices of upper triangle\n of a given array.\n - :func:`jax.numpy.tril_indices_from`: Returns the indices of lower triangle\n of a given array.\n\n Examples:\n If only ``n`` is provided in input, the indices of upper triangle of an array\n of size ``(n, n)`` array are returned.\n\n >>> jnp.triu_indices(3)\n (Array([0, 0, 0, 1, 1, 2], dtype=int32), Array([0, 1, 2, 1, 2, 2], dtype=int32))\n\n If both ``n`` and ``m`` are provided in input, the indices of upper triangle\n of an ``(n, m)`` array are returned.\n\n >>> jnp.triu_indices(3, m=2)\n (Array([0, 0, 1], dtype=int32), Array([0, 1, 1], dtype=int32))\n\n If ``k = 1``, the indices on and above the first sub-diagonal above the main\n diagonal are returned.\n\n >>> jnp.triu_indices(3, k=1)\n (Array([0, 0, 1], dtype=int32), Array([1, 2, 2], dtype=int32))\n\n If ``k = -1``, the indices on and above the first sub-diagonal below the main\n diagonal are returned.\n\n >>> jnp.triu_indices(3, k=-1)\n (Array([0, 0, 0, 1, 1, 1, 2, 2], dtype=int32), Array([0, 1, 2, 0, 1, 2, 1, 2], dtype=int32))\n """"""\n n = core.concrete_or_error(operator.index, n, ""n argument of jnp.triu_indices"")\n k = core.concrete_or_error(operator.index, k, ""k argument of jnp.triu_indices"")\n m = n if m is None else core.concrete_or_error(operator.index, m, ""m argument of jnp.triu_indices"")\n i, j = nonzero(triu(ones((n, m)), k=k), size=_triu_size(n, m, k))\n return i, j\n\n\n@export\ndef tril_indices(n: int, k: int = 0, m: int | None = None) -> tuple[Array, Array]:\n """"""Return the indices of lower triangle of an array of size ``(n, m)``.\n\n JAX implementation of :func:`numpy.tril_indices`.\n\n Args:\n n: int. Number of rows of the array for which the indices are returned.\n k: optional, int, default=0. Specifies the sub-diagonal on and below which\n the indices of lower triangle are returned. ``k=0`` refers to main diagonal,\n ``k<0`` refers to sub-diagonal below the main diagonal and ``k>0`` refers\n to sub-diagonal above the main diagonal.\n m: optional, int. Number of columns of the array for which the indices are\n returned. 
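# --- Sketch checking the closed-form count used as the static `size` above
# (mirrors the private helper `_triu_size`; the name `triu_size` is illustrative).
import jax.numpy as jnp

def triu_size(n, m, k):
    if k < 0:
        return n * m - triu_size(m, n, 1 - k)
    if k >= m:
        return 0
    mk = min(n, m - k)
    return mk * (mk + 1) // 2 + mk * (m - k - mk)

for n, m, k in [(3, 3, 0), (3, 5, 1), (4, 2, -1)]:
    assert triu_size(n, m, k) == jnp.triu_indices(n, k=k, m=m)[0].size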
If not specified, then ``m = n``.\n\n Returns:\n A tuple of two arrays containing the indices of the lower triangle, one along\n each axis.\n\n See also:\n - :func:`jax.numpy.triu_indices`: Returns the indices of upper triangle of an\n array of size ``(n, m)``.\n - :func:`jax.numpy.triu_indices_from`: Returns the indices of upper triangle\n of a given array.\n - :func:`jax.numpy.tril_indices_from`: Returns the indices of lower triangle\n of a given array.\n\n Examples:\n If only ``n`` is provided in input, the indices of lower triangle of an array\n of size ``(n, n)`` array are returned.\n\n >>> jnp.tril_indices(3)\n (Array([0, 1, 1, 2, 2, 2], dtype=int32), Array([0, 0, 1, 0, 1, 2], dtype=int32))\n\n If both ``n`` and ``m`` are provided in input, the indices of lower triangle\n of an ``(n, m)`` array are returned.\n\n >>> jnp.tril_indices(3, m=2)\n (Array([0, 1, 1, 2, 2], dtype=int32), Array([0, 0, 1, 0, 1], dtype=int32))\n\n If ``k = 1``, the indices on and below the first sub-diagonal above the main\n diagonal are returned.\n\n >>> jnp.tril_indices(3, k=1)\n (Array([0, 0, 1, 1, 1, 2, 2, 2], dtype=int32), Array([0, 1, 0, 1, 2, 0, 1, 2], dtype=int32))\n\n If ``k = -1``, the indices on and below the first sub-diagonal below the main\n diagonal are returned.\n\n >>> jnp.tril_indices(3, k=-1)\n (Array([1, 2, 2], dtype=int32), Array([0, 0, 1], dtype=int32))\n """"""\n n = core.concrete_or_error(operator.index, n, ""n argument of jnp.triu_indices"")\n k = core.concrete_or_error(operator.index, k, ""k argument of jnp.triu_indices"")\n m = n if m is None else core.concrete_or_error(operator.index, m, ""m argument of jnp.triu_indices"")\n i, j = nonzero(tril(ones((n, m)), k=k), size=_triu_size(m, n, -k))\n return i, j\n\n\n@export\ndef triu_indices_from(arr: ArrayLike | SupportsShape, k: int = 0) -> tuple[Array, Array]:\n """"""Return the indices of upper triangle of a given array.\n\n JAX implementation of :func:`numpy.triu_indices_from`.\n\n Args:\n arr: input array. Must have ``arr.ndim == 2``.\n k: optional, int, default=0. Specifies the sub-diagonal on and above which\n the indices of upper triangle are returned. ``k=0`` refers to main diagonal,\n ``k<0`` refers to sub-diagonal below the main diagonal and ``k>0`` refers\n to sub-diagonal above the main diagonal.\n\n Returns:\n A tuple of two arrays containing the indices of the upper triangle, one along\n each axis.\n\n See also:\n - :func:`jax.numpy.tril_indices_from`: Returns the indices of lower triangle\n of a given array.\n - :func:`jax.numpy.triu_indices`: Returns the indices of upper triangle of an\n array of size ``(n, m)``.\n - :func:`jax.numpy.triu`: Return an upper triangle of an array.\n\n Examples:\n >>> arr = jnp.array([[1, 2, 3],\n ... [4, 5, 6],\n ... 
[7, 8, 9]])\n >>> jnp.triu_indices_from(arr)\n (Array([0, 0, 0, 1, 1, 2], dtype=int32), Array([0, 1, 2, 1, 2, 2], dtype=int32))\n\n Elements indexed by ``jnp.triu_indices_from`` correspond to those in the\n output of ``jnp.triu``.\n\n >>> ind = jnp.triu_indices_from(arr)\n >>> arr[ind]\n Array([1, 2, 3, 5, 6, 9], dtype=int32)\n >>> jnp.triu(arr)\n Array([[1, 2, 3],\n [0, 5, 6],\n [0, 0, 9]], dtype=int32)\n\n When ``k > 0``:\n\n >>> jnp.triu_indices_from(arr, k=1)\n (Array([0, 0, 1], dtype=int32), Array([1, 2, 2], dtype=int32))\n\n When ``k < 0``:\n\n >>> jnp.triu_indices_from(arr, k=-1)\n (Array([0, 0, 0, 1, 1, 1, 2, 2], dtype=int32), Array([0, 1, 2, 0, 1, 2, 1, 2], dtype=int32))\n """"""\n if hasattr(arr, ""shape""):\n arr_shape = arr.shape\n else:\n arr = util.ensure_arraylike(""triu_indices_from"", arr)\n arr_shape = arr.shape\n if len(arr_shape) != 2:\n raise ValueError(""Only 2-D inputs are accepted"")\n return triu_indices(arr_shape[0], k=k, m=arr_shape[1])\n\n\n@export\ndef tril_indices_from(arr: ArrayLike | SupportsShape, k: int = 0) -> tuple[Array, Array]:\n """"""Return the indices of lower triangle of a given array.\n\n JAX implementation of :func:`numpy.tril_indices_from`.\n\n Args:\n arr: input array. Must have ``arr.ndim == 2``.\n k: optional, int, default=0. Specifies the sub-diagonal on and below which\n the indices of upper triangle are returned. ``k=0`` refers to main diagonal,\n ``k<0`` refers to sub-diagonal below the main diagonal and ``k>0`` refers\n to sub-diagonal above the main diagonal.\n\n Returns:\n A tuple of two arrays containing the indices of the lower triangle, one along\n each axis.\n\n See also:\n - :func:`jax.numpy.triu_indices_from`: Returns the indices of upper triangle\n of a given array.\n - :func:`jax.numpy.tril_indices`: Returns the indices of lower triangle of an\n array of size ``(n, m)``.\n - :func:`jax.numpy.tril`: Returns a lower triangle of an array\n\n Examples:\n >>> arr = jnp.array([[1, 2, 3],\n ... [4, 5, 6],\n ... [7, 8, 9]])\n >>> jnp.tril_indices_from(arr)\n (Array([0, 1, 1, 2, 2, 2], dtype=int32), Array([0, 0, 1, 0, 1, 2], dtype=int32))\n\n Elements indexed by ``jnp.tril_indices_from`` correspond to those in the\n output of ``jnp.tril``.\n\n >>> ind = jnp.tril_indices_from(arr)\n >>> arr[ind]\n Array([1, 4, 5, 7, 8, 9], dtype=int32)\n >>> jnp.tril(arr)\n Array([[1, 0, 0],\n [4, 5, 0],\n [7, 8, 9]], dtype=int32)\n\n When ``k > 0``:\n\n >>> jnp.tril_indices_from(arr, k=1)\n (Array([0, 0, 1, 1, 1, 2, 2, 2], dtype=int32), Array([0, 1, 0, 1, 2, 0, 1, 2], dtype=int32))\n\n When ``k < 0``:\n\n >>> jnp.tril_indices_from(arr, k=-1)\n (Array([1, 2, 2], dtype=int32), Array([0, 0, 1], dtype=int32))\n """"""\n if hasattr(arr, ""shape""):\n arr_shape = arr.shape\n else:\n arr = util.ensure_arraylike(""tril_indices_from"", arr)\n arr_shape = arr.shape\n if len(arr_shape) != 2:\n raise ValueError(""Only 2-D inputs are accepted"")\n return tril_indices(arr_shape[0], k=k, m=arr_shape[1])\n\n\n@export\ndef fill_diagonal(a: ArrayLike, val: ArrayLike, wrap: bool = False, *,\n inplace: bool = True) -> Array:\n """"""Return a copy of the array with the diagonal overwritten.\n\n JAX implementation of :func:`numpy.fill_diagonal`.\n\n The semantics of :func:`numpy.fill_diagonal` are to modify arrays in-place, which\n is not possible for JAX's immutable arrays. The JAX version returns a modified\n copy of the input, and adds the ``inplace`` parameter which must be set to\n `False`` by the user as a reminder of this API difference.\n\n Args:\n a: input array. 
Must have ``a.ndim >= 2``. If ``a.ndim >= 3``, then all\n dimensions must be the same size.\n val: scalar or array with which to fill the diagonal. If an array, it will\n be flattened and repeated to fill the diagonal entries.\n inplace: must be set to False to indicate that the input is not modified\n in-place, but rather a modified copy is returned.\n\n Returns:\n A copy of ``a`` with the diagonal set to ``val``.\n\n Examples:\n >>> x = jnp.zeros((3, 3), dtype=int)\n >>> jnp.fill_diagonal(x, jnp.array([1, 2, 3]), inplace=False)\n Array([[1, 0, 0],\n [0, 2, 0],\n [0, 0, 3]], dtype=int32)\n\n Unlike :func:`numpy.fill_diagonal`, the input ``x`` is not modified.\n\n If the diagonal value has too many entries, it will be truncated\n\n >>> jnp.fill_diagonal(x, jnp.arange(100, 200), inplace=False)\n Array([[100, 0, 0],\n [ 0, 101, 0],\n [ 0, 0, 102]], dtype=int32)\n\n If the diagonal has too few entries, it will be repeated:\n\n >>> x = jnp.zeros((4, 4), dtype=int)\n >>> jnp.fill_diagonal(x, jnp.array([3, 4]), inplace=False)\n Array([[3, 0, 0, 0],\n [0, 4, 0, 0],\n [0, 0, 3, 0],\n [0, 0, 0, 4]], dtype=int32)\n\n For non-square arrays, the diagonal of the leading square slice is filled:\n\n >>> x = jnp.zeros((3, 5), dtype=int)\n >>> jnp.fill_diagonal(x, 1, inplace=False)\n Array([[1, 0, 0, 0, 0],\n [0, 1, 0, 0, 0],\n [0, 0, 1, 0, 0]], dtype=int32)\n\n And for square N-dimensional arrays, the N-dimensional diagonal is filled:\n\n >>> y = jnp.zeros((2, 2, 2))\n >>> jnp.fill_diagonal(y, 1, inplace=False)\n Array([[[1., 0.],\n [0., 0.]],\n \n [[0., 0.],\n [0., 1.]]], dtype=float32)\n """"""\n if inplace:\n raise NotImplementedError(""JAX arrays are immutable, must use inplace=False"")\n if wrap:\n raise NotImplementedError(""wrap=True is not implemented, must use wrap=False"")\n a, val = util.ensure_arraylike(""fill_diagonal"", a, val)\n if a.ndim < 2:\n raise ValueError(""array must be at least 2-d"")\n if a.ndim > 2 and not all(n == a.shape[0] for n in a.shape[1:]):\n raise ValueError(""All dimensions of input must be of equal length"")\n n = min(a.shape)\n idx = diag_indices(n, a.ndim)\n return a.at[idx].set(val if val.ndim == 0 else _tile_to_size(val.ravel(), n))\n\n\n@export\ndef diag_indices(n: int, ndim: int = 2) -> tuple[Array, ...]:\n """"""Return indices for accessing the main diagonal of a multidimensional array.\n\n JAX implementation of :func:`numpy.diag_indices`.\n\n Args:\n n: int. The size of each dimension of the square array.\n ndim: optional, int, default=2. 
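# --- Sketch of the scatter used by `fill_diagonal` above: build the diagonal
# index tuple with `diag_indices` and write into it with `.at[...].set`.
import jax.numpy as jnp

x = jnp.zeros((3, 5), dtype=int)
n = min(x.shape)                           # diagonal of the leading square slice
idx = jnp.diag_indices(n, ndim=x.ndim)
assert (x.at[idx].set(1) == jnp.fill_diagonal(x, 1, inplace=False)).all()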
The number of dimensions of the array.\n\n Returns:\n A tuple of arrays, each of length `n`, containing the indices to access\n the main diagonal.\n\n See also:\n - :func:`jax.numpy.diag_indices_from`\n - :func:`jax.numpy.diagonal`\n\n Examples:\n >>> jnp.diag_indices(3)\n (Array([0, 1, 2], dtype=int32), Array([0, 1, 2], dtype=int32))\n >>> jnp.diag_indices(4, ndim=3)\n (Array([0, 1, 2, 3], dtype=int32),\n Array([0, 1, 2, 3], dtype=int32),\n Array([0, 1, 2, 3], dtype=int32))\n """"""\n n = core.concrete_or_error(operator.index, n, ""'n' argument of jnp.diag_indices()"")\n ndim = core.concrete_or_error(operator.index, ndim, ""'ndim' argument of jnp.diag_indices()"")\n if n < 0:\n raise ValueError(""n argument to diag_indices must be nonnegative, got {}""\n .format(n))\n if ndim < 0:\n raise ValueError(""ndim argument to diag_indices must be nonnegative, got {}""\n .format(ndim))\n return (lax.iota(dtypes.int_, n),) * ndim\n\n\n@export\ndef diag_indices_from(arr: ArrayLike) -> tuple[Array, ...]:\n """"""Return indices for accessing the main diagonal of a given array.\n\n JAX implementation of :func:`numpy.diag_indices_from`.\n\n Args:\n arr: Input array. Must be at least 2-dimensional and have equal length along\n all dimensions.\n\n Returns:\n A tuple of arrays containing the indices to access the main diagonal of\n the input array.\n\n See also:\n - :func:`jax.numpy.diag_indices`\n - :func:`jax.numpy.diagonal`\n\n Examples:\n >>> arr = jnp.array([[1, 2, 3],\n ... [4, 5, 6],\n ... [7, 8, 9]])\n >>> jnp.diag_indices_from(arr)\n (Array([0, 1, 2], dtype=int32), Array([0, 1, 2], dtype=int32))\n >>> arr = jnp.array([[[1, 2], [3, 4]],\n ... [[5, 6], [7, 8]]])\n >>> jnp.diag_indices_from(arr)\n (Array([0, 1], dtype=int32),\n Array([0, 1], dtype=int32),\n Array([0, 1], dtype=int32))\n """"""\n arr = util.ensure_arraylike(""diag_indices_from"", arr)\n nd = np.ndim(arr)\n if not np.ndim(arr) >= 2:\n raise ValueError(""input array must be at least 2-d"")\n\n s = np.shape(arr)\n if len(set(np.shape(arr))) != 1:\n raise ValueError(""All dimensions of input must be of equal length"")\n\n return diag_indices(s[0], ndim=nd)\n\n\n@export\n@partial(jit, static_argnames=('offset', 'axis1', 'axis2'))\ndef diagonal(a: ArrayLike, offset: int = 0, axis1: int = 0,\n axis2: int = 1) -> Array:\n """"""Returns the specified diagonal of an array.\n\n JAX implementation of :func:`numpy.diagonal`.\n\n The JAX version always returns a copy of the input, although if this is used\n within a JIT compilation, the compiler may avoid the copy.\n\n Args:\n a: Input array. Must be at least 2-dimensional.\n offset: optional, default=0. Diagonal offset from the main diagonal.\n Must be a static integer value. Can be positive or negative.\n axis1: optional, default=0. The first axis along which to take the diagonal.\n axis2: optional, default=1. The second axis along which to take the diagonal.\n\n Returns:\n A 1D array for 2D input, and in general a N-1 dimensional array\n for N-dimensional input.\n\n See also:\n - :func:`jax.numpy.diag`\n - :func:`jax.numpy.diagflat`\n\n Examples:\n >>> x = jnp.array([[1, 2, 3],\n ... [4, 5, 6],\n ... 
[7, 8, 9]])\n >>> jnp.diagonal(x)\n Array([1, 5, 9], dtype=int32)\n >>> jnp.diagonal(x, offset=1)\n Array([2, 6], dtype=int32)\n >>> jnp.diagonal(x, offset=-1)\n Array([4, 8], dtype=int32)\n """"""\n a = util.ensure_arraylike(""diagonal"", a)\n\n if np.ndim(a) < 2:\n raise ValueError(""diagonal requires an array of at least two dimensions."")\n offset = core.concrete_or_error(operator.index, offset, ""'offset' argument of jnp.diagonal()"")\n\n def _default_diag(a):\n a_shape = np.shape(a)\n\n a = moveaxis(a, (axis1, axis2), (-2, -1))\n\n diag_size = max(\n 0, min(a_shape[axis1] + min(offset, 0), a_shape[axis2] - max(offset, 0))\n )\n i = arange(diag_size)\n j = arange(abs(offset), abs(offset) + diag_size)\n return a[..., i, j] if offset >= 0 else a[..., j, i]\n\n\n # The mosaic lowering rule for diag is only defined for square arrays.\n # TODO(mvoz): Add support for offsets.\n if np.shape(a)[0] != np.shape(a)[1] or np.ndim(a) != 2 or offset != 0 or _dtype(a) == bool:\n return _default_diag(a)\n else:\n a_shape_eye = eye(np.shape(a)[0], dtype=_dtype(a))\n\n def _mosaic_diag(a):\n def _sum(x, axis):\n return lax.reduce(\n x,\n np.array(0, _dtype(x)),\n lax.add if _dtype(x) != bool else lax.bitwise_or,\n (axis,),\n )\n return _sum(lax.mul(a_shape_eye, a), axis=0)\n return lax.platform_dependent(a, default=_default_diag, mosaic=_mosaic_diag)\n\n\n@export\ndef diag(v: ArrayLike, k: int = 0) -> Array:\n """"""Returns the specified diagonal or constructs a diagonal array.\n\n JAX implementation of :func:`numpy.diag`.\n\n The JAX version always returns a copy of the input, although if this is used\n within a JIT compilation, the compiler may avoid the copy.\n\n Args:\n v: Input array. Can be a 1-D array to create a diagonal matrix or a\n 2-D array to extract a diagonal.\n k: optional, default=0. Diagonal offset. Positive values place the diagonal\n above the main diagonal, negative values place it below the main diagonal.\n\n Returns:\n If `v` is a 2-D array, a 1-D array containing the diagonal elements.\n If `v` is a 1-D array, a 2-D array with the input elements placed along the\n specified diagonal.\n\n See also:\n - :func:`jax.numpy.diagflat`\n - :func:`jax.numpy.diagonal`\n\n Examples:\n Creating a diagonal matrix from a 1-D array:\n\n >>> jnp.diag(jnp.array([1, 2, 3]))\n Array([[1, 0, 0],\n [0, 2, 0],\n [0, 0, 3]], dtype=int32)\n\n Specifying a diagonal offset:\n\n >>> jnp.diag(jnp.array([1, 2, 3]), k=1)\n Array([[0, 1, 0, 0],\n [0, 0, 2, 0],\n [0, 0, 0, 3],\n [0, 0, 0, 0]], dtype=int32)\n\n Extracting a diagonal from a 2-D array:\n\n >>> x = jnp.array([[1, 2, 3],\n ... [4, 5, 6],\n ... [7, 8, 9]])\n >>> jnp.diag(x)\n Array([1, 5, 9], dtype=int32)\n """"""\n v = util.ensure_arraylike(""diag"", v)\n return _diag(v, operator.index(k))\n\n@partial(jit, static_argnames=('k',))\ndef _diag(v: Array, k: int):\n v_shape = np.shape(v)\n if len(v_shape) == 1:\n zero = lambda x: lax.full_like(x, shape=(), fill_value=0)\n n = v_shape[0] + abs(k)\n v = lax.pad(v, zero(v), ((max(0, k), max(0, -k), 0),))\n return where(eye(n, k=k, dtype=bool), v, zeros_like(v))\n elif len(v_shape) == 2:\n return diagonal(v, offset=k)\n else:\n raise ValueError(""diag input must be 1d or 2d"")\n\n\n@export\ndef diagflat(v: ArrayLike, k: int = 0) -> Array:\n """"""Return a 2-D array with the flattened input array laid out on the diagonal.\n\n JAX implementation of :func:`numpy.diagflat`.\n\n This differs from `np.diagflat` for some scalar values of `v`. 
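# --- Sketch of the pad-and-mask construction in `_diag` above for 1-D input:
# pad the vector to length n + |k| and select it along the offset-k diagonal.
import jax.numpy as jnp

v = jnp.array([1, 2, 3])
k = 1
n = v.size + abs(k)
padded = jnp.pad(v, (max(k, 0), max(-k, 0)))      # [0, 1, 2, 3]
manual = jnp.where(jnp.eye(n, k=k, dtype=bool), padded, 0)
assert (manual == jnp.diag(v, k=k)).all()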
JAX always returns\n a two-dimensional array, whereas NumPy may return a scalar depending on the type\n of `v`.\n\n Args:\n v: Input array. Can be N-dimensional but is flattened to 1D.\n k: optional, default=0. Diagonal offset. Positive values place the diagonal\n above the main diagonal, negative values place it below the main diagonal.\n\n Returns:\n A 2D array with the input elements placed along the diagonal with the\n specified offset (k). The remaining entries are filled with zeros.\n\n See also:\n - :func:`jax.numpy.diag`\n - :func:`jax.numpy.diagonal`\n\n Examples:\n >>> jnp.diagflat(jnp.array([1, 2, 3]))\n Array([[1, 0, 0],\n [0, 2, 0],\n [0, 0, 3]], dtype=int32)\n >>> jnp.diagflat(jnp.array([1, 2, 3]), k=1)\n Array([[0, 1, 0, 0],\n [0, 0, 2, 0],\n [0, 0, 0, 3],\n [0, 0, 0, 0]], dtype=int32)\n >>> a = jnp.array([[1, 2],\n ... [3, 4]])\n >>> jnp.diagflat(a)\n Array([[1, 0, 0, 0],\n [0, 2, 0, 0],\n [0, 0, 3, 0],\n [0, 0, 0, 4]], dtype=int32)\n """"""\n util.check_arraylike(""diagflat"", v)\n v_ravel = ravel(v)\n v_length = len(v_ravel)\n adj_length = v_length + abs(k)\n res = zeros(adj_length*adj_length, dtype=v_ravel.dtype)\n i = arange(0, adj_length-abs(k))\n if (k >= 0):\n fi = i+k+i*adj_length\n else:\n fi = i+(i-k)*adj_length\n res = res.at[fi].set(v_ravel)\n res = res.reshape(adj_length, adj_length)\n return res\n\n\n# TODO(jakevdp): add support for N-dimensional inputs as in NumPy v2.2\n@export\ndef trim_zeros(filt: ArrayLike, trim: str ='fb') -> Array:\n """"""Trim leading and/or trailing zeros of the input array.\n\n JAX implementation of :func:`numpy.trim_zeros`.\n\n Args:\n filt: input array. Must have ``filt.ndim == 1``.\n trim: string, optional, default = ``fb``. Specifies from which end the input\n is trimmed.\n\n - ``f`` - trims only the leading zeros.\n - ``b`` - trims only the trailing zeros.\n - ``fb`` - trims both leading and trailing zeros.\n\n Returns:\n An array containing the trimmed input with same dtype as ``filt``.\n\n Examples:\n >>> x = jnp.array([0, 0, 2, 0, 1, 4, 3, 0, 0, 0])\n >>> jnp.trim_zeros(x)\n Array([2, 0, 1, 4, 3], dtype=int32)\n """"""\n # Non-array inputs are deprecated 2024-09-11\n util.check_arraylike(""trim_zeros"", filt, emit_warning=True)\n core.concrete_or_error(None, filt,\n ""Error arose in the `filt` argument of trim_zeros()"")\n filt_arr = asarray(filt)\n del filt\n if filt_arr.ndim != 1:\n # Added on 2024-09-11\n if deprecations.is_accelerated(""jax-numpy-trimzeros-not-1d-array""):\n raise TypeError(f""'filt' must be 1-D array, but received {filt_arr.ndim}-D array."")\n warnings.warn(\n ""Passing arrays with ndim != 1 to jnp.trim_zeros() is deprecated. Currently, it ""\n ""works with Arrays having ndim != 1. 
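# --- Sketch of the flat-index arithmetic in `diagflat` above: for k >= 0,
# element i of the flattened input lands at flat position i * (n + k) + i + k.
import jax.numpy as jnp

v = jnp.array([1, 2, 3])
k = 1
n = v.size + abs(k)                               # 4
i = jnp.arange(v.size)
flat = jnp.zeros(n * n, dtype=v.dtype).at[i * n + i + k].set(v)
assert (flat.reshape(n, n) == jnp.diagflat(v, k=k)).all()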
In the future this will result in an error."",\n DeprecationWarning, stacklevel=2)\n nz = (filt_arr == 0)\n if reductions.all(nz):\n return empty(0, filt_arr.dtype)\n start: Array | int = argmin(nz) if 'f' in trim.lower() else 0\n end: Array | int = argmin(nz[::-1]) if 'b' in trim.lower() else 0\n return filt_arr[start:len(filt_arr) - end]\n\n\ndef trim_zeros_tol(filt, tol, trim='fb'):\n filt = core.concrete_or_error(asarray, filt,\n ""Error arose in the `filt` argument of trim_zeros_tol()"")\n nz = (ufuncs.abs(filt) < tol)\n if reductions.all(nz):\n return empty(0, _dtype(filt))\n start = argmin(nz) if 'f' in trim.lower() else 0\n end = argmin(nz[::-1]) if 'b' in trim.lower() else 0\n return filt[start:len(filt) - end]\n\n\n@export\n@partial(jit, static_argnames=('axis',))\ndef append(\n arr: ArrayLike, values: ArrayLike, axis: int | None = None\n) -> Array:\n """"""Return a new array with values appended to the end of the original array.\n\n JAX implementation of :func:`numpy.append`.\n\n Args:\n arr: original array.\n values: values to be appended to the array. The ``values`` must have\n the same number of dimensions as ``arr``, and all dimensions must\n match except in the specified axis.\n axis: axis along which to append values. If None (default), both ``arr``\n and ``values`` will be flattened before appending.\n\n Returns:\n A new array with values appended to ``arr``.\n\n See also:\n - :func:`jax.numpy.insert`\n - :func:`jax.numpy.delete`\n\n Examples:\n >>> a = jnp.array([1, 2, 3])\n >>> b = jnp.array([4, 5, 6])\n >>> jnp.append(a, b)\n Array([1, 2, 3, 4, 5, 6], dtype=int32)\n\n Appending along a specific axis:\n\n >>> a = jnp.array([[1, 2],\n ... [3, 4]])\n >>> b = jnp.array([[5, 6]])\n >>> jnp.append(a, b, axis=0)\n Array([[1, 2],\n [3, 4],\n [5, 6]], dtype=int32)\n\n Appending along a trailing axis:\n\n >>> a = jnp.array([[1, 2, 3],\n ... [4, 5, 6]])\n >>> b = jnp.array([[7], [8]])\n >>> jnp.append(a, b, axis=1)\n Array([[1, 2, 3, 7],\n [4, 5, 6, 8]], dtype=int32)\n """"""\n if axis is None:\n return concatenate([ravel(arr), ravel(values)], 0)\n else:\n return concatenate([arr, values], axis=axis)\n\n\n@export\ndef delete(\n arr: ArrayLike,\n obj: ArrayLike | slice,\n axis: int | None = None,\n *,\n assume_unique_indices: bool = False,\n) -> Array:\n """"""Delete entry or entries from an array.\n\n JAX implementation of :func:`numpy.delete`.\n\n Args:\n arr: array from which entries will be deleted.\n obj: index, indices, or slice to be deleted.\n axis: axis along which entries will be deleted.\n assume_unique_indices: In case of array-like integer (not boolean) indices,\n assume the indices are unique, and perform the deletion in a way that is\n compatible with JIT and other JAX transformations.\n\n Returns:\n Copy of ``arr`` with specified indices deleted.\n\n Note:\n ``delete()`` usually requires the index specification to be static. 
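# --- Sketch of the argmin trick in `trim_zeros` above: on the boolean "is zero"
# mask, argmin returns the first False, i.e. the first nonzero entry from each end.
import jax.numpy as jnp

x = jnp.array([0, 0, 2, 0, 1, 4, 3, 0, 0, 0])
nz = (x == 0)
start = jnp.argmin(nz)          # 2 leading zeros
end = jnp.argmin(nz[::-1])      # 3 trailing zeros
assert (x[start:x.size - end] == jnp.trim_zeros(x)).all()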
If the\n index is an integer array that is guaranteed to contain unique entries, you\n may specify ``assume_unique_indices=True`` to perform the operation in a\n manner that does not require static indices.\n\n See also:\n - :func:`jax.numpy.insert`: insert entries into an array.\n\n Examples:\n Delete entries from a 1D array:\n\n >>> a = jnp.array([4, 5, 6, 7, 8, 9])\n >>> jnp.delete(a, 2)\n Array([4, 5, 7, 8, 9], dtype=int32)\n >>> jnp.delete(a, slice(1, 4)) # delete a[1:4]\n Array([4, 8, 9], dtype=int32)\n >>> jnp.delete(a, slice(None, None, 2)) # delete a[::2]\n Array([5, 7, 9], dtype=int32)\n\n Delete entries from a 2D array along a specified axis:\n\n >>> a2 = jnp.array([[4, 5, 6],\n ... [7, 8, 9]])\n >>> jnp.delete(a2, 1, axis=1)\n Array([[4, 6],\n [7, 9]], dtype=int32)\n\n Delete multiple entries via a sequence of indices:\n\n >>> indices = jnp.array([0, 1, 3])\n >>> jnp.delete(a, indices)\n Array([6, 8, 9], dtype=int32)\n\n This will fail under :func:`~jax.jit` and other transformations, because\n the output shape cannot be known with the possibility of duplicate indices:\n\n >>> jax.jit(jnp.delete)(a, indices) # doctest: +IGNORE_EXCEPTION_DETAIL\n Traceback (most recent call last):\n ...\n ConcretizationTypeError: Abstract tracer value encountered where concrete value is expected: traced array with shape int32[3].\n\n If you can ensure that the indices are unique, pass ``assume_unique_indices``\n to allow this to be executed under JIT:\n\n >>> jit_delete = jax.jit(jnp.delete, static_argnames=['assume_unique_indices'])\n >>> jit_delete(a, indices, assume_unique_indices=True)\n Array([6, 8, 9], dtype=int32)\n """"""\n a = util.ensure_arraylike(""delete"", arr)\n if axis is None:\n a = a.ravel()\n axis = 0\n axis = _canonicalize_axis(axis, a.ndim)\n\n # Case 1: obj is a static integer.\n try:\n obj = operator.index(obj) # type: ignore[arg-type]\n obj = _canonicalize_axis(obj, a.shape[axis])\n except TypeError:\n pass\n else:\n idx = tuple(slice(None) for i in range(axis))\n return concatenate([a[idx + (slice(0, obj),)], a[idx + (slice(obj + 1, None),)]], axis=axis)\n\n # Case 2: obj is a static slice.\n if isinstance(obj, slice):\n obj = arange(a.shape[axis])[obj]\n assume_unique_indices = True\n\n # Case 3: obj is an array\n # NB: pass both arrays to check for appropriate error message.\n util.check_arraylike(""delete"", a, obj)\n # Can't use ensure_arraylike here because obj may be static.\n if hasattr(obj, ""__jax_array__""):\n obj = obj.__jax_array__()\n\n # Case 3a: unique integer indices; delete in a JIT-compatible way\n if issubdtype(_dtype(obj), np.integer) and assume_unique_indices:\n obj = asarray(obj).ravel()\n obj = clip(where(obj < 0, obj + a.shape[axis], obj), 0, a.shape[axis])\n obj = sort(obj)\n obj -= arange(len(obj)) # type: ignore[arg-type,operator]\n i = arange(a.shape[axis] - obj.size)\n i += (i[None, :] >= obj[:, None]).sum(0)\n return a[(slice(None),) * axis + (i,)]\n\n # Case 3b: non-unique indices: must be static.\n obj_array = core.concrete_or_error(np.asarray, obj, ""'obj' array argument of jnp.delete()"")\n if issubdtype(obj_array.dtype, np.integer):\n # TODO(jakevdp): in theory this could be done dynamically if obj has no duplicates,\n # but this would require the complement of lax.gather.\n mask = np.ones(a.shape[axis], dtype=bool)\n mask[obj_array] = False\n elif obj_array.dtype == bool:\n if obj_array.shape != (a.shape[axis],):\n raise ValueError(""np.delete(arr, obj): for boolean indices, obj must be one-dimensional ""\n ""with length matching 
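# --- Hedged re-derivation of the JIT-compatible unique-index path above: each
# output slot is shifted past one input position for every deleted index at or
# before it (after the `obj -= arange(len(obj))` adjustment).
import jax.numpy as jnp

a = jnp.array([4, 5, 6, 7, 8, 9])
obj = jnp.sort(jnp.array([0, 1, 3]))
adj = obj - jnp.arange(obj.size)                 # [0, 0, 1]
i = jnp.arange(a.size - obj.size)                # output slots [0, 1, 2]
i = i + (i[None, :] >= adj[:, None]).sum(0)      # gather positions [2, 4, 5]
assert (a[i] == jnp.delete(a, obj)).all()        # [6, 8, 9]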
specified axis."")\n mask = ~obj_array\n else:\n raise ValueError(f""np.delete(arr, obj): got obj.dtype={obj_array.dtype}; must be integer or bool."")\n return a[tuple(slice(None) for i in range(axis)) + (mask,)]\n\n\n@export\ndef insert(arr: ArrayLike, obj: ArrayLike | slice, values: ArrayLike,\n axis: int | None = None) -> Array:\n """"""Insert entries into an array at specified indices.\n\n JAX implementation of :func:`numpy.insert`.\n\n Args:\n arr: array object into which values will be inserted.\n obj: slice or array of indices specifying insertion locations.\n values: array of values to be inserted.\n axis: specify the insertion axis in the case of multi-dimensional\n arrays. If unspecified, ``arr`` will be flattened.\n\n Returns:\n A copy of ``arr`` with values inserted at the specified locations.\n\n See also:\n - :func:`jax.numpy.delete`: delete entries from an array.\n\n Examples:\n Inserting a single value:\n\n >>> x = jnp.arange(5)\n >>> jnp.insert(x, 2, 99)\n Array([ 0, 1, 99, 2, 3, 4], dtype=int32)\n\n Inserting multiple identical values using a slice:\n\n >>> jnp.insert(x, slice(None, None, 2), -1)\n Array([-1, 0, 1, -1, 2, 3, -1, 4], dtype=int32)\n\n Inserting multiple values using an index:\n\n >>> indices = jnp.array([4, 2, 5])\n >>> values = jnp.array([10, 11, 12])\n >>> jnp.insert(x, indices, values)\n Array([ 0, 1, 11, 2, 3, 10, 4, 12], dtype=int32)\n\n Inserting columns into a 2D array:\n\n >>> x = jnp.array([[1, 2, 3],\n ... [4, 5, 6]])\n >>> indices = jnp.array([1, 3])\n >>> values = jnp.array([[10, 11],\n ... [12, 13]])\n >>> jnp.insert(x, indices, values, axis=1)\n Array([[ 1, 10, 2, 3, 11],\n [ 4, 12, 5, 6, 13]], dtype=int32)\n """"""\n a, _, values_arr = util.ensure_arraylike(""insert"", arr, 0 if isinstance(obj, slice) else obj, values)\n\n if axis is None:\n a = ravel(a)\n axis = 0\n axis = core.concrete_or_error(None, axis, ""axis argument of jnp.insert()"")\n axis = _canonicalize_axis(axis, a.ndim)\n if isinstance(obj, slice):\n indices = arange(*obj.indices(a.shape[axis]))\n else:\n indices = asarray(obj)\n\n if indices.ndim > 1:\n raise ValueError(""jnp.insert(): obj must be a slice, a one-dimensional ""\n f""array, or a scalar; got {obj}"")\n if not np.issubdtype(indices.dtype, np.integer):\n if indices.size == 0 and not isinstance(obj, Array):\n indices = indices.astype(int)\n else:\n # Note: np.insert allows boolean inputs but the behavior is deprecated.\n raise ValueError(""jnp.insert(): index array must be ""\n f""integer typed; got {obj}"")\n values_arr = array(values_arr, ndmin=a.ndim, dtype=a.dtype, copy=False)\n\n if indices.size == 1:\n index = ravel(indices)[0]\n if indices.ndim == 0:\n values_arr = moveaxis(values_arr, 0, axis)\n indices = full(values_arr.shape[axis], index)\n n_input = a.shape[axis]\n n_insert = broadcast_shapes(indices.shape, (values_arr.shape[axis],))[0]\n out_shape = list(a.shape)\n out_shape[axis] += n_insert\n out = zeros_like(a, shape=tuple(out_shape))\n\n indices = where(indices < 0, indices + n_input, indices)\n indices = clip(indices, 0, n_input)\n\n values_ind = indices.at[argsort(indices)].add(arange(n_insert, dtype=indices.dtype))\n arr_mask = ones(n_input + n_insert, dtype=bool).at[values_ind].set(False)\n arr_ind = where(arr_mask, size=n_input)[0]\n\n out = out.at[(slice(None),) * axis + (values_ind,)].set(values_arr)\n out = out.at[(slice(None),) * axis + (arr_ind,)].set(a)\n\n return out\n\n\n@export\ndef apply_along_axis(\n func1d: Callable, axis: int, arr: ArrayLike, *args, **kwargs\n) -> Array:\n 
""""""Apply a function to 1D array slices along an axis.\n\n JAX implementation of :func:`numpy.apply_along_axis`. While NumPy implements\n this iteratively, JAX implements this via :func:`jax.vmap`, and so ``func1d``\n must be compatible with ``vmap``.\n\n Args:\n func1d: a callable function with signature ``func1d(arr, /, *args, **kwargs)``\n where ``*args`` and ``**kwargs`` are the additional positional and keyword\n arguments passed to :func:`apply_along_axis`.\n axis: integer axis along which to apply the function.\n arr: the array over which to apply the function.\n args, kwargs: additional positional and keyword arguments are passed through\n to ``func1d``.\n\n Returns:\n The result of ``func1d`` applied along the specified axis.\n\n See also:\n - :func:`jax.vmap`: a more direct way to create a vectorized version of a function.\n - :func:`jax.numpy.apply_over_axes`: repeatedly apply a function over multiple axes.\n - :func:`jax.numpy.vectorize`: create a vectorized version of a function.\n\n Examples:\n A simple example in two dimensions, where the function is applied either row-wise\n or column-wise:\n\n >>> x = jnp.array([[1, 2, 3],\n ... [4, 5, 6]])\n >>> def func1d(x):\n ... return jnp.sum(x ** 2)\n >>> jnp.apply_along_axis(func1d, 0, x)\n Array([17, 29, 45], dtype=int32)\n >>> jnp.apply_along_axis(func1d, 1, x)\n Array([14, 77], dtype=int32)\n\n For 2D inputs, this can be equivalently expressed using :func:`jax.vmap`,\n though note that `vmap` specifies the mapped axis rather than the applied axis:\n\n >>> jax.vmap(func1d, in_axes=1)(x) # same as applying along axis 0\n Array([17, 29, 45], dtype=int32)\n >>> jax.vmap(func1d, in_axes=0)(x) # same as applying along axis 1\n Array([14, 77], dtype=int32)\n\n For 3D inputs, :func:`apply_along_axis` is equivalent to mapping over two\n dimensions:\n\n >>> x_3d = jnp.arange(24).reshape(2, 3, 4)\n >>> jnp.apply_along_axis(func1d, 2, x_3d)\n Array([[ 14, 126, 366],\n [ 734, 1230, 1854]], dtype=int32)\n >>> jax.vmap(jax.vmap(func1d))(x_3d)\n Array([[ 14, 126, 366],\n [ 734, 1230, 1854]], dtype=int32)\n\n The applied function may also take arbitrary positional or keyword arguments,\n which should be passed directly as additional arguments to :func:`apply_along_axis`:\n\n >>> def func1d(x, exponent):\n ... 
return jnp.sum(x ** exponent)\n >>> jnp.apply_along_axis(func1d, 0, x, exponent=3)\n Array([ 65, 133, 243], dtype=int32)\n """"""\n util.check_arraylike(""apply_along_axis"", arr)\n num_dims = np.ndim(arr)\n axis = _canonicalize_axis(axis, num_dims)\n func = lambda arr: func1d(arr, *args, **kwargs)\n for i in range(1, num_dims - axis):\n func = api.vmap(func, in_axes=i, out_axes=-1)\n for i in range(axis):\n func = api.vmap(func, in_axes=0, out_axes=0)\n return func(arr)\n\n\n@export\ndef apply_over_axes(func: Callable[[ArrayLike, int], Array], a: ArrayLike,\n axes: Sequence[int]) -> Array:\n """"""Apply a function repeatedly over specified axes.\n\n JAX implementation of :func:`numpy.apply_over_axes`.\n\n Args:\n func: the function to apply, with signature ``func(Array, int) -> Array``, and\n where ``y = func(x, axis)`` must satisfy ``y.ndim in [x.ndim, x.ndim - 1]``.\n a: N-dimensional array over which to apply the function.\n axes: the sequence of axes over which to apply the function.\n\n Returns:\n An N-dimensional array containing the result of the repeated function application.\n\n See also:\n - :func:`jax.numpy.apply_along_axis`: apply a 1D function along a single axis.\n\n Examples:\n This function is designed to have similar semantics to typical associative\n :mod:`jax.numpy` reductions over one or more axes with ``keepdims=True``.\n For example:\n\n >>> x = jnp.array([[1, 2, 3],\n ... [4, 5, 6]])\n\n >>> jnp.apply_over_axes(jnp.sum, x, [0])\n Array([[5, 7, 9]], dtype=int32)\n >>> jnp.sum(x, [0], keepdims=True)\n Array([[5, 7, 9]], dtype=int32)\n\n >>> jnp.apply_over_axes(jnp.min, x, [1])\n Array([[1],\n [4]], dtype=int32)\n >>> jnp.min(x, [1], keepdims=True)\n Array([[1],\n [4]], dtype=int32)\n\n >>> jnp.apply_over_axes(jnp.prod, x, [0, 1])\n Array([[720]], dtype=int32)\n >>> jnp.prod(x, [0, 1], keepdims=True)\n Array([[720]], dtype=int32)\n """"""\n a_arr = util.ensure_arraylike(""apply_over_axes"", a)\n for axis in axes:\n b = func(a_arr, axis)\n if b.ndim == a_arr.ndim:\n a_arr = b\n elif b.ndim == a_arr.ndim - 1:\n a_arr = expand_dims(b, axis)\n else:\n raise ValueError(""function is not returning an array of the correct shape"")\n return a_arr\n\n\n@export\n@partial(jit, static_argnames=('axisa', 'axisb', 'axisc', 'axis'))\ndef cross(a, b, axisa: int = -1, axisb: int = -1, axisc: int = -1,\n axis: int | None = None):\n r""""""Compute the (batched) cross product of two arrays.\n\n JAX implementation of :func:`numpy.cross`.\n\n This computes the 2-dimensional or 3-dimensional cross product,\n\n .. math::\n\n c = a \times b\n\n In 3 dimensions, ``c`` is a length-3 array. In 2 dimensions, ``c`` is\n a scalar.\n\n Args:\n a: N-dimensional array. ``a.shape[axisa]`` indicates the dimension of\n the cross product, and must be 2 or 3.\n b: N-dimensional array. 
Must have ``b.shape[axisb] == a.shape[axisb]``,\n and other dimensions of ``a`` and ``b`` must be broadcast compatible.\n axisa: specify the axis of ``a`` along which to compute the cross product.\n axisb: specify the axis of ``b`` along which to compute the cross product.\n axisc: specify the axis of ``c`` along which the cross product result\n will be stored.\n axis: if specified, this overrides ``axisa``, ``axisb``, and ``axisc``\n with a single value.\n\n Returns:\n The array ``c`` containing the (batched) cross product of ``a`` and ``b``\n along the specified axes.\n\n See also:\n - :func:`jax.numpy.linalg.cross`: an array API compatible function for\n computing cross products over 3-vectors.\n\n Examples:\n A 2-dimensional cross product returns a scalar:\n\n >>> a = jnp.array([1, 2])\n >>> b = jnp.array([3, 4])\n >>> jnp.cross(a, b)\n Array(-2, dtype=int32)\n\n A 3-dimensional cross product returns a length-3 vector:\n\n >>> a = jnp.array([1, 2, 3])\n >>> b = jnp.array([4, 5, 6])\n >>> jnp.cross(a, b)\n Array([-3, 6, -3], dtype=int32)\n\n With multi-dimensional inputs, the cross-product is computed along\n the last axis by default. Here's a batched 3-dimensional cross\n product, operating on the rows of the inputs:\n\n >>> a = jnp.array([[1, 2, 3],\n ... [3, 4, 3]])\n >>> b = jnp.array([[2, 3, 2],\n ... [4, 5, 6]])\n >>> jnp.cross(a, b)\n Array([[-5, 4, -1],\n [ 9, -6, -1]], dtype=int32)\n\n Specifying axis=0 makes this a batched 2-dimensional cross product,\n operating on the columns of the inputs:\n\n >>> jnp.cross(a, b, axis=0)\n Array([-2, -2, 12], dtype=int32)\n\n Equivalently, we can independently specify the axis of the inputs ``a``\n and ``b`` and the output ``c``:\n\n >>> jnp.cross(a, b, axisa=0, axisb=0, axisc=0)\n Array([-2, -2, 12], dtype=int32)\n """"""\n # TODO(jakevdp): NumPy 2.0 deprecates 2D inputs. Follow suit here.\n util.check_arraylike(""cross"", a, b)\n if axis is not None:\n axisa = axis\n axisb = axis\n axisc = axis\n a = moveaxis(a, axisa, -1)\n b = moveaxis(b, axisb, -1)\n\n if a.shape[-1] not in (2, 3) or b.shape[-1] not in (2, 3):\n raise ValueError(""Dimension must be either 2 or 3 for cross product"")\n\n if a.shape[-1] == 2 and b.shape[-1] == 2:\n return a[..., 0] * b[..., 1] - a[..., 1] * b[..., 0]\n\n a0 = a[..., 0]\n a1 = a[..., 1]\n a2 = a[..., 2] if a.shape[-1] == 3 else zeros_like(a0)\n b0 = b[..., 0]\n b1 = b[..., 1]\n b2 = b[..., 2] if b.shape[-1] == 3 else zeros_like(b0)\n c = array([a1 * b2 - a2 * b1, a2 * b0 - a0 * b2, a0 * b1 - a1 * b0])\n return moveaxis(c, 0, axisc)\n\n\n@export\n@jit\ndef kron(a: ArrayLike, b: ArrayLike) -> Array:\n """"""Compute the Kronecker product of two input arrays.\n\n JAX implementation of :func:`numpy.kron`.\n\n The Kronecker product is an operation on two matrices of arbitrary size that\n produces a block matrix. Each element of the first matrix ``a`` is multiplied by\n the entire second matrix ``b``. If ``a`` has shape (m, n) and ``b``\n has shape (p, q), the resulting matrix will have shape (m * p, n * q).\n\n Args:\n a: first input array with any shape.\n b: second input array with any shape.\n\n Returns:\n A new array representing the Kronecker product of the inputs ``a`` and ``b``.\n The shape of the output is the element-wise product of the input shapes.\n\n See also:\n - :func:`jax.numpy.outer`: compute the outer product of two arrays.\n\n Examples:\n >>> a = jnp.array([[1, 2],\n ... [3, 4]])\n >>> b = jnp.array([[5, 6],\n ... 
[7, 8]])\n >>> jnp.kron(a, b)\n Array([[ 5, 6, 10, 12],\n [ 7, 8, 14, 16],\n [15, 18, 20, 24],\n [21, 24, 28, 32]], dtype=int32)\n """"""\n util.check_arraylike(""kron"", a, b)\n a, b = util.promote_dtypes(a, b)\n if np.ndim(a) < np.ndim(b):\n a = expand_dims(a, range(np.ndim(b) - np.ndim(a)))\n elif np.ndim(b) < np.ndim(a):\n b = expand_dims(b, range(np.ndim(a) - np.ndim(b)))\n a_reshaped = expand_dims(a, range(1, 2 * np.ndim(a), 2))\n b_reshaped = expand_dims(b, range(0, 2 * np.ndim(b), 2))\n out_shape = tuple(np.multiply(np.shape(a), np.shape(b)))\n return reshape(lax.mul(a_reshaped, b_reshaped), out_shape)\n\n\n@export\n@partial(jit, static_argnames=('N', 'increasing'))\ndef vander(\n x: ArrayLike, N: int | None = None, increasing: bool = False\n) -> Array:\n """"""Generate a Vandermonde matrix.\n\n JAX implementation of :func:`numpy.vander`.\n\n Args:\n x: input array. Must have ``x.ndim == 1``.\n N: int, optional, default=None. Specifies the number of the columns the\n output matrix. If not specified, ``N = len(x)``.\n increasing: bool, optional, default=False. Specifies the order of the powers\n of the columns. If ``True``, the powers increase from left to right,\n :math:`[x^0, x^1, ..., x^{(N-1)}]`. By default, the powers decrease from left to\n right :math:`[x^{(N-1)}, ..., x^1, x^0]`.\n\n Returns:\n An array of shape ``[len(x), N]`` containing the generated Vandermonde matrix.\n\n Examples:\n >>> x = jnp.array([1, 2, 3, 4])\n >>> jnp.vander(x)\n Array([[ 1, 1, 1, 1],\n [ 8, 4, 2, 1],\n [27, 9, 3, 1],\n [64, 16, 4, 1]], dtype=int32)\n\n If ``N = 2``, generates a Vandermonde matrix with ``2`` columns.\n\n >>> jnp.vander(x, N=2)\n Array([[1, 1],\n [2, 1],\n [3, 1],\n [4, 1]], dtype=int32)\n\n Generates the Vandermonde matrix in increasing order of powers, when\n ``increasing=True``.\n\n >>> jnp.vander(x, increasing=True)\n Array([[ 1, 1, 1, 1],\n [ 1, 2, 4, 8],\n [ 1, 3, 9, 27],\n [ 1, 4, 16, 64]], dtype=int32)\n """"""\n x = util.ensure_arraylike(""vander"", x)\n if x.ndim != 1:\n raise ValueError(""x must be a one-dimensional array"")\n N = x.shape[0] if N is None else core.concrete_or_error(\n operator.index, N, ""'N' argument of jnp.vander()"")\n if N < 0:\n raise ValueError(""N must be nonnegative"")\n\n iota = lax.iota(x.dtype, N)\n if not increasing:\n iota = lax.sub(_lax_const(iota, N - 1), iota)\n\n return ufuncs.power(x[..., None], expand_dims(iota, tuple(range(x.ndim))))\n\n\n### Misc\n\n@export\ndef argwhere(\n a: ArrayLike,\n *,\n size: int | None = None,\n fill_value: ArrayLike | None = None,\n) -> Array:\n """"""Find the indices of nonzero array elements\n\n JAX implementation of :func:`numpy.argwhere`.\n\n ``jnp.argwhere(x)`` is essentially equivalent to ``jnp.column_stack(jnp.nonzero(x))``\n with special handling for zero-dimensional (i.e. scalar) inputs.\n\n Because the size of the output of ``argwhere`` is data-dependent, the function is not\n typically compatible with JIT. The JAX version adds the optional ``size`` argument, which\n specifies the size of the leading dimension of the output - it must be specified statically\n for ``jnp.argwhere`` to be compiled with non-static operands. See :func:`jax.numpy.nonzero`\n for a full discussion of ``size`` and its semantics.\n\n Args:\n a: array for which to find nonzero elements\n size: optional integer specifying statically the number of expected nonzero elements.\n This must be specified in order to use ``argwhere`` within JAX transformations like\n :func:`jax.jit`. 
See :func:`jax.numpy.nonzero` for more information.\n fill_value: optional array specifying the fill value when ``size`` is specified.\n See :func:`jax.numpy.nonzero` for more information.\n\n Returns:\n a two-dimensional array of shape ``[size, x.ndim]``. If ``size`` is not specified as\n an argument, it is equal to the number of nonzero elements in ``x``.\n\n See Also:\n - :func:`jax.numpy.where`\n - :func:`jax.numpy.nonzero`\n\n Examples:\n Two-dimensional array:\n\n >>> x = jnp.array([[1, 0, 2],\n ... [0, 3, 0]])\n >>> jnp.argwhere(x)\n Array([[0, 0],\n [0, 2],\n [1, 1]], dtype=int32)\n\n Equivalent computation using :func:`jax.numpy.column_stack` and :func:`jax.numpy.nonzero`:\n\n >>> jnp.column_stack(jnp.nonzero(x))\n Array([[0, 0],\n [0, 2],\n [1, 1]], dtype=int32)\n\n Special case for zero-dimensional (i.e. scalar) inputs:\n\n >>> jnp.argwhere(1)\n Array([], shape=(1, 0), dtype=int32)\n >>> jnp.argwhere(0)\n Array([], shape=(0, 0), dtype=int32)\n """"""\n a = util.ensure_arraylike(""argwhere"", a)\n result = transpose(vstack(nonzero(atleast_1d(a), size=size, fill_value=fill_value)))\n if np.ndim(a) == 0:\n return result[:0].reshape(result.shape[0], 0)\n return result.reshape(result.shape[0], np.ndim(a))\n\n\n@export\ndef argmax(a: ArrayLike, axis: int | None = None, out: None = None,\n keepdims: bool | None = None) -> Array:\n """"""Return the index of the maximum value of an array.\n\n JAX implementation of :func:`numpy.argmax`.\n\n Args:\n a: input array\n axis: optional integer specifying the axis along which to find the maximum\n value. If ``axis`` is not specified, ``a`` will be flattened.\n out: unused by JAX\n keepdims: if True, then return an array with the same number of dimensions\n as ``a``.\n\n Returns:\n an array containing the index of the maximum value along the specified axis.\n\n See also:\n - :func:`jax.numpy.argmin`: return the index of the minimum value.\n - :func:`jax.numpy.nanargmax`: compute ``argmax`` while ignoring NaN values.\n\n Examples:\n >>> x = jnp.array([1, 3, 5, 4, 2])\n >>> jnp.argmax(x)\n Array(2, dtype=int32)\n\n >>> x = jnp.array([[1, 3, 2],\n ... [5, 4, 1]])\n >>> jnp.argmax(x, axis=1)\n Array([1, 0], dtype=int32)\n\n >>> jnp.argmax(x, axis=1, keepdims=True)\n Array([[1],\n [0]], dtype=int32)\n """"""\n arr = util.ensure_arraylike(""argmax"", a)\n if out is not None:\n raise NotImplementedError(""The 'out' argument to jnp.argmax is not supported."")\n return _argmax(arr, None if axis is None else operator.index(axis),\n keepdims=bool(keepdims))\n\n@partial(jit, static_argnames=('axis', 'keepdims'), inline=True)\ndef _argmax(a: Array, axis: int | None = None, keepdims: bool = False) -> Array:\n if axis is None:\n dims = list(range(np.ndim(a)))\n a = ravel(a)\n axis = 0\n else:\n dims = [axis]\n if a.shape[axis] == 0:\n raise ValueError(""attempt to get argmax of an empty sequence"")\n result = lax.argmax(a, _canonicalize_axis(axis, a.ndim), dtypes.canonicalize_dtype(dtypes.int_))\n return expand_dims(result, dims) if keepdims else result\n\n\n@export\ndef argmin(a: ArrayLike, axis: int | None = None, out: None = None,\n keepdims: bool | None = None) -> Array:\n """"""Return the index of the minimum value of an array.\n\n JAX implementation of :func:`numpy.argmin`.\n\n Args:\n a: input array\n axis: optional integer specifying the axis along which to find the minimum\n value. 
If ``axis`` is not specified, ``a`` will be flattened.\n out: unused by JAX\n keepdims: if True, then return an array with the same number of dimensions\n as ``a``.\n\n Returns:\n an array containing the index of the minimum value along the specified axis.\n\n See also:\n - :func:`jax.numpy.argmax`: return the index of the maximum value.\n - :func:`jax.numpy.nanargmin`: compute ``argmin`` while ignoring NaN values.\n\n Examples:\n >>> x = jnp.array([1, 3, 5, 4, 2])\n >>> jnp.argmin(x)\n Array(0, dtype=int32)\n\n >>> x = jnp.array([[1, 3, 2],\n ... [5, 4, 1]])\n >>> jnp.argmin(x, axis=1)\n Array([0, 2], dtype=int32)\n\n >>> jnp.argmin(x, axis=1, keepdims=True)\n Array([[0],\n [2]], dtype=int32)\n """"""\n arr = util.ensure_arraylike(""argmin"", a)\n if out is not None:\n raise NotImplementedError(""The 'out' argument to jnp.argmin is not supported."")\n return _argmin(arr, None if axis is None else operator.index(axis),\n keepdims=bool(keepdims))\n\n@partial(jit, static_argnames=('axis', 'keepdims'), inline=True)\ndef _argmin(a: Array, axis: int | None = None, keepdims: bool = False) -> Array:\n if axis is None:\n dims = list(range(np.ndim(a)))\n a = ravel(a)\n axis = 0\n else:\n dims = [axis]\n if a.shape[axis] == 0:\n raise ValueError(""attempt to get argmin of an empty sequence"")\n result = lax.argmin(a, _canonicalize_axis(axis, a.ndim), dtypes.canonicalize_dtype(dtypes.int_))\n return expand_dims(result, dims) if keepdims else result\n\n\n@export\ndef nanargmax(\n a: ArrayLike,\n axis: int | None = None,\n out: None = None,\n keepdims: bool | None = None,\n) -> Array:\n """"""Return the index of the maximum value of an array, ignoring NaNs.\n\n JAX implementation of :func:`numpy.nanargmax`.\n\n Args:\n a: input array\n axis: optional integer specifying the axis along which to find the maximum\n value. If ``axis`` is not specified, ``a`` will be flattened.\n out: unused by JAX\n keepdims: if True, then return an array with the same number of dimensions\n as ``a``.\n\n Returns:\n an array containing the index of the maximum value along the specified axis.\n\n Note:\n In the case of an axis with all-NaN values, the returned index will be -1.\n This differs from the behavior of :func:`numpy.nanargmax`, which raises an error.\n\n See also:\n - :func:`jax.numpy.argmax`: return the index of the maximum value.\n - :func:`jax.numpy.nanargmin`: compute ``argmin`` while ignoring NaN values.\n\n Examples:\n >>> x = jnp.array([1, 3, 5, 4, jnp.nan])\n\n Using a standard :func:`~jax.numpy.argmax` leads to potentially unexpected results:\n\n >>> jnp.argmax(x)\n Array(4, dtype=int32)\n\n Using ``nanargmax`` returns the index of the maximum non-NaN value.\n\n >>> jnp.nanargmax(x)\n Array(2, dtype=int32)\n\n >>> x = jnp.array([[1, 3, jnp.nan],\n ... 
[5, 4, jnp.nan]])\n >>> jnp.nanargmax(x, axis=1)\n Array([1, 0], dtype=int32)\n\n >>> jnp.nanargmax(x, axis=1, keepdims=True)\n Array([[1],\n [0]], dtype=int32)\n """"""\n if out is not None:\n raise NotImplementedError(""The 'out' argument to jnp.nanargmax is not supported."")\n a = util.ensure_arraylike(""nanargmax"", a)\n return _nanargmax(a, None if axis is None else operator.index(axis), keepdims=bool(keepdims))\n\n\n@partial(jit, static_argnames=('axis', 'keepdims'))\ndef _nanargmax(a: Array, axis: int | None = None, keepdims: bool = False):\n if not issubdtype(_dtype(a), np.inexact):\n return argmax(a, axis=axis, keepdims=keepdims)\n nan_mask = ufuncs.isnan(a)\n a = where(nan_mask, -np.inf, a)\n res = argmax(a, axis=axis, keepdims=keepdims)\n return where(reductions.all(nan_mask, axis=axis, keepdims=keepdims), -1, res)\n\n\n@export\ndef nanargmin(\n a: ArrayLike,\n axis: int | None = None,\n out: None = None,\n keepdims: bool | None = None,\n) -> Array:\n\n """"""Return the index of the minimum value of an array, ignoring NaNs.\n\n JAX implementation of :func:`numpy.nanargmin`.\n\n Args:\n a: input array\n axis: optional integer specifying the axis along which to find the minimum\n value. If ``axis`` is not specified, ``a`` will be flattened.\n out: unused by JAX\n keepdims: if True, then return an array with the same number of dimensions\n as ``a``.\n\n Returns:\n an array containing the index of the minimum value along the specified axis.\n\n Note:\n In the case of an axis with all-NaN values, the returned index will be -1.\n This differs from the behavior of :func:`numpy.nanargmin`, which raises an error.\n\n See also:\n - :func:`jax.numpy.argmin`: return the index of the minimum value.\n - :func:`jax.numpy.nanargmax`: compute ``argmax`` while ignoring NaN values.\n\n Examples:\n >>> x = jnp.array([jnp.nan, 3, 5, 4, 2])\n >>> jnp.nanargmin(x)\n Array(4, dtype=int32)\n\n >>> x = jnp.array([[1, 3, jnp.nan],\n ... 
[5, 4, jnp.nan]])\n >>> jnp.nanargmin(x, axis=1)\n Array([0, 1], dtype=int32)\n\n >>> jnp.nanargmin(x, axis=1, keepdims=True)\n Array([[0],\n [1]], dtype=int32)\n """"""\n if out is not None:\n raise NotImplementedError(""The 'out' argument to jnp.nanargmin is not supported."")\n a = util.ensure_arraylike(""nanargmin"", a)\n return _nanargmin(a, None if axis is None else operator.index(axis), keepdims=bool(keepdims))\n\n\n@partial(jit, static_argnames=('axis', 'keepdims'))\ndef _nanargmin(a: Array, axis: int | None = None, keepdims : bool = False):\n if not issubdtype(_dtype(a), np.inexact):\n return argmin(a, axis=axis, keepdims=keepdims)\n nan_mask = ufuncs.isnan(a)\n a = where(nan_mask, np.inf, a)\n res = argmin(a, axis=axis, keepdims=keepdims)\n return where(reductions.all(nan_mask, axis=axis, keepdims=keepdims), -1, res)\n\n\n@partial(jit, static_argnums=(2,))\ndef _roll_dynamic(a: Array, shift: Array, axis: Sequence[int]) -> Array:\n b_shape = lax.broadcast_shapes(shift.shape, np.shape(axis))\n if len(b_shape) != 1:\n msg = ""'shift' and 'axis' arguments to roll must be scalars or 1D arrays""\n raise ValueError(msg)\n\n for x, i in zip(broadcast_to(shift, b_shape),\n np.broadcast_to(axis, b_shape)):\n a_shape_i = array(a.shape[i], dtype=np.int32)\n x = ufuncs.remainder(lax.convert_element_type(x, np.int32),\n lax.max(a_shape_i, np.int32(1)))\n a_concat = lax.concatenate((a, a), i)\n a = lax.dynamic_slice_in_dim(a_concat, a_shape_i - x, a.shape[i], axis=i)\n return a\n\n@partial(jit, static_argnums=(1, 2))\ndef _roll_static(a: Array, shift: Sequence[int], axis: Sequence[int]) -> Array:\n for ax, s in zip(*np.broadcast_arrays(axis, shift)):\n if a.shape[ax] == 0:\n continue\n i = (-s) % a.shape[ax]\n a = lax.concatenate([lax.slice_in_dim(a, i, a.shape[ax], axis=ax),\n lax.slice_in_dim(a, 0, i, axis=ax)],\n dimension=ax)\n return a\n\n\n@export\ndef roll(a: ArrayLike, shift: ArrayLike | Sequence[int],\n axis: int | Sequence[int] | None = None) -> Array:\n """"""Roll the elements of an array along a specified axis.\n\n JAX implementation of :func:`numpy.roll`.\n\n Args:\n a: input array.\n shift: the number of positions to shift the specified axis. If an integer,\n all axes are shifted by the same amount. If a tuple, the shift for each\n axis is specified individually.\n axis: the axis or axes to roll. If ``None``, the array is flattened, shifted,\n and then reshaped to its original shape.\n\n Returns:\n A copy of ``a`` with elements rolled along the specified axis or axes.\n\n See also:\n - :func:`jax.numpy.rollaxis`: roll the specified axis to a given position.\n\n Examples:\n >>> a = jnp.array([0, 1, 2, 3, 4, 5])\n >>> jnp.roll(a, 2)\n Array([4, 5, 0, 1, 2, 3], dtype=int32)\n\n Roll elements along a specific axis:\n\n >>> a = jnp.array([[ 0, 1, 2, 3],\n ... [ 4, 5, 6, 7],\n ... 
[ 8, 9, 10, 11]])\n >>> jnp.roll(a, 1, axis=0)\n Array([[ 8, 9, 10, 11],\n [ 0, 1, 2, 3],\n [ 4, 5, 6, 7]], dtype=int32)\n >>> jnp.roll(a, [2, 3], axis=[0, 1])\n Array([[ 5, 6, 7, 4],\n [ 9, 10, 11, 8],\n [ 1, 2, 3, 0]], dtype=int32)\n """"""\n arr = util.ensure_arraylike(""roll"", a)\n if axis is None:\n return roll(arr.ravel(), shift, 0).reshape(arr.shape)\n axis = _ensure_index_tuple(axis)\n axis = tuple(_canonicalize_axis(ax, arr.ndim) for ax in axis)\n try:\n shift = _ensure_index_tuple(shift)\n except TypeError:\n return _roll_dynamic(arr, asarray(shift), axis)\n else:\n return _roll_static(arr, shift, axis)\n\n\n@export\n@partial(jit, static_argnames=('axis', 'start'))\ndef rollaxis(a: ArrayLike, axis: int, start: int = 0) -> Array:\n """"""Roll the specified axis to a given position.\n\n JAX implementation of :func:`numpy.rollaxis`.\n\n This function exists for compatibility with NumPy, but in most cases the newer\n :func:`jax.numpy.moveaxis` should be used instead, because the meaning of its arguments is\n more intuitive.\n\n Args:\n a: input array.\n axis: index of the axis to roll forward.\n start: index toward which the axis will be rolled (default = 0). After\n normalizing negative axes, if ``start <= axis``, the axis is rolled to\n the ``start`` index; if ``start > axis``, the axis is rolled until the\n position before ``start``.\n\n Returns:\n Copy of ``a`` with rolled axis.\n\n Notes:\n Unlike :func:`numpy.rollaxis`, :func:`jax.numpy.rollaxis` will return a copy rather\n than a view of the input array. However, under JIT, the compiler will optimize away\n such copies when possible, so this doesn't have performance impacts in practice.\n\n See also:\n - :func:`jax.numpy.moveaxis`: newer API with clearer semantics than ``rollaxis``;\n this should be preferred to ``rollaxis`` in most cases.\n - :func:`jax.numpy.swapaxes`: swap two axes.\n - :func:`jax.numpy.transpose`: general permutation of axes.\n\n Examples:\n >>> a = jnp.ones((2, 3, 4, 5))\n\n Roll axis 2 to the start of the array:\n\n >>> jnp.rollaxis(a, 2).shape\n (4, 2, 3, 5)\n\n Roll axis 1 to the end of the array:\n\n >>> jnp.rollaxis(a, 1, a.ndim).shape\n (2, 4, 5, 3)\n\n Equivalent of these two with :func:`~jax.numpy.moveaxis`\n\n >>> jnp.moveaxis(a, 2, 0).shape\n (4, 2, 3, 5)\n >>> jnp.moveaxis(a, 1, -1).shape\n (2, 4, 5, 3)\n """"""\n a = util.ensure_arraylike(""rollaxis"", a)\n start = core.concrete_or_error(operator.index, start, ""'start' argument of jnp.rollaxis()"")\n a_ndim = np.ndim(a)\n axis = _canonicalize_axis(axis, a_ndim)\n if not (-a_ndim <= start <= a_ndim):\n raise ValueError(f""{start=} must satisfy {-a_ndim}<=start<={a_ndim}"")\n if start < 0:\n start += a_ndim\n if start > axis:\n start -= 1\n return moveaxis(a, axis, start)\n\n\n@export\n@partial(jit, static_argnames=('axis', 'bitorder'))\ndef packbits(a: ArrayLike, axis: int | None = None, bitorder: str = ""big"") -> Array:\n """"""Pack array of bits into a uint8 array.\n\n JAX implementation of :func:`numpy.packbits`\n\n Args:\n a: N-dimensional array of bits to pack.\n axis: optional axis along which to pack bits. 
If not specified, ``a`` will\n be flattened.\n bitorder: ``""big""`` (default) or ``""little""``: specify whether the bit order\n is big-endian or little-endian.\n\n Returns:\n A uint8 array of packed values.\n\n See also:\n - :func:`jax.numpy.unpackbits`: inverse of ``packbits``.\n\n Examples:\n Packing bits in one dimension:\n\n >>> bits = jnp.array([0, 0, 0, 0, 0, 1, 1, 1])\n >>> jnp.packbits(bits)\n Array([7], dtype=uint8)\n >>> 0b00000111 # equivalent bit-wise representation:\n 7\n\n Optionally specifying little-endian convention:\n\n >>> jnp.packbits(bits, bitorder=""little"")\n Array([224], dtype=uint8)\n >>> 0b11100000 # equivalent bit-wise representation\n 224\n\n If the number of bits is not a multiple of 8, it will be right-padded\n with zeros:\n\n >>> jnp.packbits(jnp.array([1, 0, 1]))\n Array([160], dtype=uint8)\n >>> jnp.packbits(jnp.array([1, 0, 1, 0, 0, 0, 0, 0]))\n Array([160], dtype=uint8)\n\n For a multi-dimensional input, bits may be packed along a specified axis:\n\n >>> a = jnp.array([[1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0],\n ... [0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1]])\n >>> vals = jnp.packbits(a, axis=1)\n >>> vals\n Array([[212, 150],\n [ 69, 207]], dtype=uint8)\n\n The inverse of ``packbits`` is provided by :func:`~jax.numpy.unpackbits`:\n\n >>> jnp.unpackbits(vals, axis=1)\n Array([[1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0],\n [0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1]], dtype=uint8)\n """"""\n arr = util.ensure_arraylike(""packbits"", a)\n if not (issubdtype(arr.dtype, np.integer) or issubdtype(arr.dtype, np.bool_)):\n raise TypeError('Expected an input array of integer or boolean data type')\n if bitorder not in ['little', 'big']:\n raise ValueError(""'order' must be either 'little' or 'big'"")\n arr = lax.ne(arr, _lax_const(arr, 0)).astype('uint8')\n bits = arange(8, dtype='uint8')\n if bitorder == 'big':\n bits = bits[::-1]\n if axis is None:\n arr = ravel(arr)\n axis = 0\n arr = swapaxes(arr, axis, -1)\n\n remainder = arr.shape[-1] % 8\n if remainder:\n arr = lax.pad(arr, np.uint8(0),\n (arr.ndim - 1) * [(0, 0, 0)] + [(0, 8 - remainder, 0)])\n\n arr = arr.reshape(arr.shape[:-1] + (arr.shape[-1] // 8, 8))\n bits = expand_dims(bits, tuple(range(arr.ndim - 1)))\n packed = (arr << bits).sum(-1).astype('uint8')\n return swapaxes(packed, axis, -1)\n\n\n@export\n@partial(jit, static_argnames=('axis', 'count', 'bitorder'))\ndef unpackbits(\n a: ArrayLike,\n axis: int | None = None,\n count: int | None = None,\n bitorder: str = ""big"",\n) -> Array:\n """"""Unpack the bits in a uint8 array.\n\n JAX implementation of :func:`numpy.unpackbits`.\n\n Args:\n a: N-dimensional array of type ``uint8``.\n axis: optional axis along which to unpack. 
If not specified, ``a`` will\n be flattened\n count: specify the number of bits to unpack (if positive) or the number\n of bits to trim from the end (if negative).\n bitorder: ``""big""`` (default) or ``""little""``: specify whether the bit order\n is big-endian or little-endian.\n\n Returns:\n a uint8 array of unpacked bits.\n\n See also:\n - :func:`jax.numpy.packbits`: this inverse of ``unpackbits``.\n\n Examples:\n Unpacking bits from a scalar:\n\n >>> jnp.unpackbits(jnp.uint8(27)) # big-endian by default\n Array([0, 0, 0, 1, 1, 0, 1, 1], dtype=uint8)\n >>> jnp.unpackbits(jnp.uint8(27), bitorder=""little"")\n Array([1, 1, 0, 1, 1, 0, 0, 0], dtype=uint8)\n\n Compare this to the Python binary representation:\n\n >>> 0b00011011\n 27\n\n Unpacking bits along an axis:\n\n >>> vals = jnp.array([[154],\n ... [ 49]], dtype='uint8')\n >>> bits = jnp.unpackbits(vals, axis=1)\n >>> bits\n Array([[1, 0, 0, 1, 1, 0, 1, 0],\n [0, 0, 1, 1, 0, 0, 0, 1]], dtype=uint8)\n\n Using :func:`~jax.numpy.packbits` to invert this:\n\n >>> jnp.packbits(bits, axis=1)\n Array([[154],\n [ 49]], dtype=uint8)\n\n The ``count`` keyword lets ``unpackbits`` serve as an inverse of ``packbits``\n in cases where not all bits are present:\n\n >>> bits = jnp.array([1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1]) # 11 bits\n >>> vals = jnp.packbits(bits)\n >>> vals\n Array([219, 96], dtype=uint8)\n >>> jnp.unpackbits(vals) # 16 zero-padded bits\n Array([1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0], dtype=uint8)\n >>> jnp.unpackbits(vals, count=11) # specify 11 output bits\n Array([1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1], dtype=uint8)\n >>> jnp.unpackbits(vals, count=-5) # specify 5 bits to be trimmed\n Array([1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1], dtype=uint8)\n """"""\n arr = util.ensure_arraylike(""unpackbits"", a)\n if arr.dtype != np.uint8:\n raise TypeError(""Expected an input array of unsigned byte data type"")\n if bitorder not in ['little', 'big']:\n raise ValueError(""'order' must be either 'little' or 'big'"")\n bits = asarray(1) << arange(8, dtype='uint8')\n if bitorder == 'big':\n bits = bits[::-1]\n if axis is None:\n arr = ravel(arr)\n axis = 0\n arr = swapaxes(arr, axis, -1)\n unpacked = ((arr[..., None] & expand_dims(bits, tuple(range(arr.ndim)))) > 0).astype('uint8')\n unpacked = unpacked.reshape(unpacked.shape[:-2] + (-1,))\n if count is not None:\n if count > unpacked.shape[-1]:\n unpacked = pad(unpacked, [(0, 0)] * (unpacked.ndim - 1) + [(0, count - unpacked.shape[-1])])\n else:\n unpacked = unpacked[..., :count]\n return swapaxes(unpacked, axis, -1)\n\n\ndef _gcd_cond_fn(xs: tuple[Array, Array]) -> Array:\n x1, x2 = xs\n return reductions.any(x2 != 0)\n\ndef _gcd_body_fn(xs: tuple[Array, Array]) -> tuple[Array, Array]:\n x1, x2 = xs\n x1, x2 = (where(x2 != 0, x2, x1),\n where(x2 != 0, lax.rem(x1, x2), _lax_const(x2, 0)))\n return (where(x1 < x2, x2, x1), where(x1 < x2, x1, x2))\n\n\n@export\n@jit\ndef gcd(x1: ArrayLike, x2: ArrayLike) -> Array:\n """"""Compute the greatest common divisor of two arrays.\n\n JAX implementation of :func:`numpy.gcd`.\n\n Args:\n x1: First input array. The elements must have integer dtype.\n x2: Second input array. 
The elements must have integer dtype.\n\n Returns:\n An array containing the greatest common divisors of the corresponding\n elements from the absolute values of `x1` and `x2`.\n\n See also:\n - :func:`jax.numpy.lcm`: compute the least common multiple of two arrays.\n\n Examples:\n Scalar inputs:\n\n >>> jnp.gcd(12, 18)\n Array(6, dtype=int32, weak_type=True)\n\n Array inputs:\n\n >>> x1 = jnp.array([12, 18, 24])\n >>> x2 = jnp.array([5, 10, 15])\n >>> jnp.gcd(x1, x2)\n Array([1, 2, 3], dtype=int32)\n\n Broadcasting:\n\n >>> x1 = jnp.array([12])\n >>> x2 = jnp.array([6, 9, 12])\n >>> jnp.gcd(x1, x2)\n Array([ 6, 3, 12], dtype=int32)\n """"""\n x1, x2 = util.ensure_arraylike(""gcd"", x1, x2)\n x1, x2 = util.promote_dtypes(x1, x2)\n if not issubdtype(_dtype(x1), np.integer):\n raise ValueError(""Arguments to jax.numpy.gcd must be integers."")\n x1, x2 = broadcast_arrays(x1, x2)\n gcd, _ = lax.while_loop(_gcd_cond_fn, _gcd_body_fn, (ufuncs.abs(x1), ufuncs.abs(x2)))\n return gcd\n\n\n@export\n@jit\ndef lcm(x1: ArrayLike, x2: ArrayLike) -> Array:\n """"""Compute the least common multiple of two arrays.\n\n JAX implementation of :func:`numpy.lcm`.\n\n Args:\n x1: First input array. The elements must have integer dtype.\n x2: Second input array. The elements must have integer dtype.\n\n Returns:\n An array containing the least common multiple of the corresponding\n elements from the absolute values of `x1` and `x2`.\n\n See also:\n - :func:`jax.numpy.gcd`: compute the greatest common divisor of two arrays.\n\n Examples:\n Scalar inputs:\n\n >>> jnp.lcm(12, 18)\n Array(36, dtype=int32, weak_type=True)\n\n Array inputs:\n\n >>> x1 = jnp.array([12, 18, 24])\n >>> x2 = jnp.array([5, 10, 15])\n >>> jnp.lcm(x1, x2)\n Array([ 60, 90, 120], dtype=int32)\n\n Broadcasting:\n\n >>> x1 = jnp.array([12])\n >>> x2 = jnp.array([6, 9, 12])\n >>> jnp.lcm(x1, x2)\n Array([12, 36, 12], dtype=int32)\n """"""\n x1, x2 = util.ensure_arraylike(""lcm"", x1, x2)\n x1, x2 = util.promote_dtypes(x1, x2)\n x1, x2 = ufuncs.abs(x1), ufuncs.abs(x2)\n if not issubdtype(_dtype(x1), np.integer):\n raise ValueError(""Arguments to jax.numpy.lcm must be integers."")\n d = gcd(x1, x2)\n return where(d == 0, _lax_const(d, 0),\n ufuncs.multiply(x1, ufuncs.floor_divide(x2, d)))\n\n\n@export\ndef extract(condition: ArrayLike, arr: ArrayLike,\n *, size: int | None = None, fill_value: ArrayLike = 0) -> Array:\n """"""Return the elements of an array that satisfy a condition.\n\n JAX implementation of :func:`numpy.extract`.\n\n Args:\n condition: array of conditions. Will be converted to boolean and flattened to 1D.\n arr: array of values to extract. Will be flattened to 1D.\n size: optional static size for output. Must be specified in order for ``extract``\n to be compatible with JAX transformations like :func:`~jax.jit` or :func:`~jax.vmap`.\n fill_value: if ``size`` is specified, fill padded entries with this value (default: 0).\n\n Returns:\n 1D array of extracted entries . If ``size`` is specified, the result will have shape\n ``(size,)`` and be right-padded with ``fill_value``. 
If ``size`` is not specified,\n the output shape will depend on the number of True entries in ``condition``.\n\n Notes:\n This function does not require strict shape agreement between ``condition`` and ``arr``.\n If ``condition.size > arr.size``, then ``condition`` will be truncated, and if\n ``arr.size > condition.size``, then ``arr`` will be truncated.\n\n See also:\n :func:`jax.numpy.compress`: multi-dimensional version of ``extract``.\n\n Examples:\n Extract values from a 1D array:\n\n >>> x = jnp.array([1, 2, 3, 4, 5, 6])\n >>> mask = (x % 2 == 0)\n >>> jnp.extract(mask, x)\n Array([2, 4, 6], dtype=int32)\n\n In the simplest case, this is equivalent to boolean indexing:\n\n >>> x[mask]\n Array([2, 4, 6], dtype=int32)\n\n For use with JAX transformations, you can pass the ``size`` argument to\n specify a static shape for the output, along with an optional ``fill_value``\n that defaults to zero:\n\n >>> jnp.extract(mask, x, size=len(x), fill_value=0)\n Array([2, 4, 6, 0, 0, 0], dtype=int32)\n\n Notice that unlike with boolean indexing, ``extract`` does not require strict\n agreement between the sizes of the array and condition, and will effectively\n truncate both to the minimum size:\n\n >>> short_mask = jnp.array([False, True])\n >>> jnp.extract(short_mask, x)\n Array([2], dtype=int32)\n >>> long_mask = jnp.array([True, False, True, False, False, False, False, False])\n >>> jnp.extract(long_mask, x)\n Array([1, 3], dtype=int32)\n """"""\n util.check_arraylike(""extract"", condition, arr, fill_value)\n return compress(ravel(condition), ravel(arr), size=size, fill_value=fill_value)\n\n\n@export\ndef compress(condition: ArrayLike, a: ArrayLike, axis: int | None = None,\n *, size: int | None = None, fill_value: ArrayLike = 0, out: None = None) -> Array:\n """"""Compress an array along a given axis using a boolean condition.\n\n JAX implementation of :func:`numpy.compress`.\n\n Args:\n condition: 1-dimensional array of conditions. Will be converted to boolean.\n a: N-dimensional array of values.\n axis: axis along which to compress. If None (default) then ``a`` will be\n flattened, and axis will be set to 0.\n size: optional static size for output. Must be specified in order for ``compress``\n to be compatible with JAX transformations like :func:`~jax.jit` or :func:`~jax.vmap`.\n fill_value: if ``size`` is specified, fill padded entries with this value (default: 0).\n out: not implemented by JAX.\n\n Returns:\n An array of dimension ``a.ndim``, compressed along the specified axis.\n\n See also:\n - :func:`jax.numpy.extract`: 1D version of ``compress``.\n - :meth:`jax.Array.compress`: equivalent functionality as an array method.\n\n Notes:\n This function does not require strict shape agreement between ``condition`` and ``a``.\n If ``condition.size > a.shape[axis]``, then ``condition`` will be truncated, and if\n ``a.shape[axis] > condition.size``, then ``a`` will be truncated.\n\n Examples:\n Compressing along the rows of a 2D array:\n\n >>> a = jnp.array([[1, 2, 3, 4],\n ... [5, 6, 7, 8],\n ... [9, 10, 11, 12]])\n >>> condition = jnp.array([True, False, True])\n >>> jnp.compress(condition, a, axis=0)\n Array([[ 1, 2, 3, 4],\n [ 9, 10, 11, 12]], dtype=int32)\n\n For convenience, you can equivalently use the :meth:`~jax.Array.compress`\n method of JAX arrays:\n\n >>> a.compress(condition, axis=0)\n Array([[ 1, 2, 3, 4],\n [ 9, 10, 11, 12]], dtype=int32)\n\n Note that the condition need not match the shape of the specified axis;\n here we compress the columns with the length-3 condition. 
Values beyond\n the size of the condition are ignored:\n\n >>> jnp.compress(condition, a, axis=1)\n Array([[ 1, 3],\n [ 5, 7],\n [ 9, 11]], dtype=int32)\n\n The optional ``size`` argument lets you specify a static output size so\n that the output is statically-shaped, and so this function can be used\n with transformations like :func:`~jax.jit` and :func:`~jax.vmap`:\n\n >>> f = lambda c, a: jnp.extract(c, a, size=len(a), fill_value=0)\n >>> mask = (a % 3 == 0)\n >>> jax.vmap(f)(mask, a)\n Array([[ 3, 0, 0, 0],\n [ 6, 0, 0, 0],\n [ 9, 12, 0, 0]], dtype=int32)\n """"""\n condition_arr, arr, fill_value = util.ensure_arraylike(""compress"", condition, a, fill_value)\n condition_arr = condition_arr.astype(bool)\n if out is not None:\n raise NotImplementedError(""The 'out' argument to jnp.compress is not supported."")\n if condition_arr.ndim != 1:\n raise ValueError(""condition must be a 1D array"")\n if axis is None:\n axis = 0\n arr = ravel(arr)\n else:\n arr = moveaxis(arr, axis, 0)\n condition_arr, extra = condition_arr[:arr.shape[0]], condition_arr[arr.shape[0]:]\n arr = arr[:condition_arr.shape[0]]\n\n if size is None:\n if reductions.any(extra):\n raise ValueError(""condition contains entries that are out of bounds"")\n result = arr[condition_arr]\n elif not 0 <= size <= arr.shape[0]:\n raise ValueError(""size must be positive and not greater than the size of the array axis;""\n f"" got {size=} for a.shape[axis]={arr.shape[0]}"")\n else:\n mask = expand_dims(condition_arr, range(1, arr.ndim))\n arr = where(mask, arr, array(fill_value, dtype=arr.dtype))\n result = arr[argsort(condition_arr, stable=True, descending=True)][:size]\n return moveaxis(result, 0, axis)\n\n\n@export\n@partial(jit, static_argnames=('rowvar', 'bias', 'ddof'))\ndef cov(m: ArrayLike, y: ArrayLike | None = None, rowvar: bool = True,\n bias: bool = False, ddof: int | None = None,\n fweights: ArrayLike | None = None,\n aweights: ArrayLike | None = None) -> Array:\n r""""""Estimate the weighted sample covariance.\n\n JAX implementation of :func:`numpy.cov`.\n\n The covariance :math:`C_{ij}` between variable *i* and variable *j* is defined\n as\n\n .. math::\n\n cov[X_i, X_j] = E[(X_i - E[X_i])(X_j - E[X_j])]\n\n Given an array of *N* observations of the variables :math:`X_i` and :math:`X_j`,\n this can be estimated via the sample covariance:\n\n .. math::\n\n C_{ij} = \frac{1}{N - 1} \sum_{n=1}^N (X_{in} - \overline{X_i})(X_{jn} - \overline{X_j})\n\n Where :math:`\overline{X_i} = \frac{1}{N} \sum_{k=1}^N X_{ik}` is the mean of the\n observations.\n\n Args:\n m: array of shape ``(M, N)`` (if ``rowvar`` is True), or ``(N, M)``\n (if ``rowvar`` is False) representing ``N`` observations of ``M`` variables.\n ``m`` may also be one-dimensional, representing ``N`` observations of a\n single variable.\n y: optional set of additional observations, with the same form as ``m``. If\n specified, then ``y`` is combined with ``m``, i.e. for the default\n ``rowvar = True`` case, ``m`` becomes ``jnp.vstack([m, y])``.\n rowvar: if True (default) then each row of ``m`` represents a variable. If\n False, then each column represents a variable.\n bias: if False (default) then normalize the covariance by ``N - 1``. If True,\n then normalize the covariance by ``N``\n ddof: specify the degrees of freedom. Defaults to ``1`` if ``bias`` is False,\n or to ``0`` if ``bias`` is True.\n fweights: optional array of integer frequency weights of shape ``(N,)``. 
This\n is an absolute weight specifying the number of times each observation is\n included in the computation.\n aweights: optional array of observation weights of shape ``(N,)``. This is\n a relative weight specifying the ""importance"" of each observation. In the\n ``ddof=0`` case, it is equivalent to assigning probabilities to each\n observation.\n\n Returns:\n A covariance matrix of shape ``(M, M)``, or a scalar with shape ``()`` if ``M = 1``.\n\n See also:\n - :func:`jax.numpy.corrcoef`: compute the correlation coefficient, a normalized\n version of the covariance matrix.\n\n Examples:\n Consider these observations of two variables that correlate perfectly.\n The covariance matrix in this case is a 2x2 matrix of ones:\n\n >>> x = jnp.array([[0, 1, 2],\n ... [0, 1, 2]])\n >>> jnp.cov(x)\n Array([[1., 1.],\n [1., 1.]], dtype=float32)\n\n Now consider these observations of two variables that are perfectly\n anti-correlated. The covariance matrix in this case has ``-1`` in the\n off-diagonal:\n\n >>> x = jnp.array([[-1, 0, 1],\n ... [ 1, 0, -1]])\n >>> jnp.cov(x)\n Array([[ 1., -1.],\n [-1., 1.]], dtype=float32)\n\n Equivalently, these sequences can be specified as separate arguments,\n in which case they are stacked before continuing the computation.\n\n >>> x = jnp.array([-1, 0, 1])\n >>> y = jnp.array([1, 0, -1])\n >>> jnp.cov(x, y)\n Array([[ 1., -1.],\n [-1., 1.]], dtype=float32)\n\n In general, the entries of the covariance matrix may be any positive\n or negative real value. For example, here is the covariance of 100\n points drawn from a 3-dimensional standard normal distribution:\n\n >>> key = jax.random.key(0)\n >>> x = jax.random.normal(key, shape=(3, 100))\n >>> with jnp.printoptions(precision=2):\n ... print(jnp.cov(x))\n [[0.9 0.03 0.1 ]\n [0.03 1. 
0.01]\n [0.1 0.01 0.85]]\n """"""\n if y is not None:\n m, y = util.promote_args_inexact(""cov"", m, y)\n if y.ndim > 2:\n raise ValueError(""y has more than 2 dimensions"")\n else:\n m, = util.promote_args_inexact(""cov"", m)\n\n if m.ndim > 2:\n raise ValueError(""m has more than 2 dimensions"") # same as numpy error\n\n X = atleast_2d(m)\n if not rowvar and X.shape[0] != 1:\n X = X.T\n if X.shape[0] == 0:\n return array([]).reshape(0, 0)\n\n if y is not None:\n y_arr = atleast_2d(y)\n if not rowvar and y_arr.shape[0] != 1:\n y_arr = y_arr.T\n X = concatenate((X, y_arr), axis=0)\n if ddof is None:\n ddof = 1 if bias == 0 else 0\n\n w: Array | None = None\n if fweights is not None:\n fweights = util.ensure_arraylike(""cov"", fweights)\n if np.ndim(fweights) > 1:\n raise RuntimeError(""cannot handle multidimensional fweights"")\n if np.shape(fweights)[0] != X.shape[1]:\n raise RuntimeError(""incompatible numbers of samples and fweights"")\n if not issubdtype(_dtype(fweights), np.integer):\n raise TypeError(""fweights must be integer."")\n # Ensure positive fweights; note that numpy raises an error on negative fweights.\n w = abs(fweights)\n if aweights is not None:\n aweights = util.ensure_arraylike(""cov"", aweights)\n if np.ndim(aweights) > 1:\n raise RuntimeError(""cannot handle multidimensional aweights"")\n if np.shape(aweights)[0] != X.shape[1]:\n raise RuntimeError(""incompatible numbers of samples and aweights"")\n # Ensure positive aweights: note that numpy raises an error for negative aweights.\n aweights = abs(aweights)\n w = aweights if w is None else w * aweights\n\n avg, w_sum = reductions.average(X, axis=1, weights=w, returned=True)\n w_sum = w_sum[0]\n\n if w is None:\n f = X.shape[1] - ddof\n elif ddof == 0:\n f = w_sum\n elif aweights is None:\n f = w_sum - ddof\n else:\n f = w_sum - ddof * reductions.sum(w * aweights) / w_sum\n\n X = X - avg[:, None]\n X_T = X.T if w is None else (X * lax.broadcast_to_rank(w, X.ndim)).T\n return ufuncs.true_divide(tensor_contractions.dot(X, X_T.conj()), f).squeeze()\n\n\n@export\n@partial(jit, static_argnames=('rowvar',))\ndef corrcoef(x: ArrayLike, y: ArrayLike | None = None, rowvar: bool = True) -> Array:\n r""""""Compute the Pearson correlation coefficients.\n\n JAX implementation of :func:`numpy.corrcoef`.\n\n This is a normalized version of the sample covariance computed by :func:`jax.numpy.cov`.\n For a sample covariance :math:`C_{ij}`, the correlation coefficients are\n\n .. math::\n\n R_{ij} = \frac{C_{ij}}{\sqrt{C_{ii}C_{jj}}}\n\n they are constructed such that the values satisfy :math:`-1 \le R_{ij} \le 1`.\n\n Args:\n x: array of shape ``(M, N)`` (if ``rowvar`` is True), or ``(N, M)``\n (if ``rowvar`` is False) representing ``N`` observations of ``M`` variables.\n ``x`` may also be one-dimensional, representing ``N`` observations of a\n single variable.\n y: optional set of additional observations, with the same form as ``m``. If\n specified, then ``y`` is combined with ``m``, i.e. for the default\n ``rowvar = True`` case, ``m`` becomes ``jnp.vstack([m, y])``.\n rowvar: if True (default) then each row of ``m`` represents a variable. If\n False, then each column represents a variable.\n\n Returns:\n A covariance matrix of shape ``(M, M)``.\n\n See also:\n - :func:`jax.numpy.cov`: compute the covariance matrix.\n\n Examples:\n Consider these observations of two variables that correlate perfectly.\n The correlation matrix in this case is a 2x2 matrix of ones:\n\n >>> x = jnp.array([[0, 1, 2],\n ... 
[0, 1, 2]])\n >>> jnp.corrcoef(x)\n Array([[1., 1.],\n [1., 1.]], dtype=float32)\n\n Now consider these observations of two variables that are perfectly\n anti-correlated. The correlation matrix in this case has ``-1`` in the\n off-diagonal:\n\n >>> x = jnp.array([[-1, 0, 1],\n ... [ 1, 0, -1]])\n >>> jnp.corrcoef(x)\n Array([[ 1., -1.],\n [-1., 1.]], dtype=float32)\n\n Equivalently, these sequences can be specified as separate arguments,\n in which case they are stacked before continuing the computation.\n\n >>> x = jnp.array([-1, 0, 1])\n >>> y = jnp.array([1, 0, -1])\n >>> jnp.corrcoef(x, y)\n Array([[ 1., -1.],\n [-1., 1.]], dtype=float32)\n\n The entries of the correlation matrix are normalized such that they\n lie within the range -1 to +1, where +1 indicates perfect correlation\n and -1 indicates perfect anti-correlation. For example, here is the\n correlation of 100 points drawn from a 3-dimensional standard normal\n distribution:\n\n >>> key = jax.random.key(0)\n >>> x = jax.random.normal(key, shape=(3, 100))\n >>> with jnp.printoptions(precision=2):\n ... print(jnp.corrcoef(x))\n [[1. 0.03 0.12]\n [0.03 1. 0.01]\n [0.12 0.01 1. ]]\n """"""\n util.check_arraylike(""corrcoef"", x)\n c = cov(x, y, rowvar)\n if len(np.shape(c)) == 0:\n # scalar - this should yield nan for values (nan/nan, inf/inf, 0/0), 1 otherwise\n return ufuncs.divide(c, c)\n d = diag(c)\n stddev = ufuncs.sqrt(ufuncs.real(d)).astype(c.dtype)\n c = c / stddev[:, None] / stddev[None, :]\n\n real_part = clip(ufuncs.real(c), -1, 1)\n if iscomplexobj(c):\n complex_part = clip(ufuncs.imag(c), -1, 1)\n c = lax.complex(real_part, complex_part)\n else:\n c = real_part\n return c\n\n\n@partial(vectorize, excluded={0, 1, 3, 4})\ndef _searchsorted_via_scan(unrolled: bool, sorted_arr: Array, query: Array, side: str, dtype: type) -> Array:\n op = _sort_le_comparator if side == 'left' else _sort_lt_comparator\n unsigned_dtype = np.uint32 if dtype == np.int32 else np.uint64\n def body_fun(state, _):\n low, high = state\n mid = low.astype(unsigned_dtype) + high.astype(unsigned_dtype)\n mid = lax.div(mid, unsigned_dtype(2)).astype(dtype)\n go_left = op(query, sorted_arr[mid])\n return (where(go_left, low, mid), where(go_left, mid, high)), ()\n n_levels = int(np.ceil(np.log2(len(sorted_arr) + 1)))\n init = (array(0, dtype=dtype), array(len(sorted_arr), dtype=dtype))\n carry, _ = lax.scan(body_fun, init, (), length=n_levels,\n unroll=n_levels if unrolled else 1)\n return carry[1]\n\n\ndef _searchsorted_via_sort(sorted_arr: Array, query: Array, side: str, dtype: type) -> Array:\n working_dtype = np.dtype('int32') if sorted_arr.size + query.size < np.iinfo(np.int32).max else np.dtype('int64')\n def _rank(x):\n idx = lax.iota(working_dtype, x.shape[0])\n return zeros_like(idx).at[argsort(x)].set(idx)\n query_flat = query.ravel()\n if side == 'left':\n index = _rank(lax.concatenate([query_flat, sorted_arr], 0))[:query.size]\n else:\n index = _rank(lax.concatenate([sorted_arr, query_flat], 0))[sorted_arr.size:]\n return lax.reshape(lax.sub(index, _rank(query_flat)), np.shape(query)).astype(dtype)\n\n\ndef _searchsorted_via_compare_all(sorted_arr: Array, query: Array, side: str, dtype: type) -> Array:\n op = _sort_lt_comparator if side == 'left' else _sort_le_comparator\n comparisons = api.vmap(op, in_axes=(0, None))(sorted_arr, query)\n return comparisons.sum(dtype=dtype, axis=0)\n\n\n@export\n@partial(jit, static_argnames=('side', 'method'))\ndef searchsorted(a: ArrayLike, v: ArrayLike, side: str = 'left',\n sorter: ArrayLike | None = 
None, *, method: str = 'scan') -> Array:\n """"""Perform a binary search within a sorted array.\n\n JAX implementation of :func:`numpy.searchsorted`.\n\n This will return the indices within a sorted array ``a`` where values in ``v``\n can be inserted to maintain its sort order.\n\n Args:\n a: one-dimensional array, assumed to be in sorted order unless ``sorter`` is specified.\n v: N-dimensional array of query values\n side: ``'left'`` (default) or ``'right'``; specifies whether insertion indices will be\n to the left or the right in case of ties.\n sorter: optional array of indices specifying the sort order of ``a``. If specified,\n then the algorithm assumes that ``a[sorter]`` is in sorted order.\n method: one of ``'scan'`` (default), ``'scan_unrolled'``, ``'sort'`` or ``'compare_all'``.\n See *Note* below.\n\n Returns:\n Array of insertion indices of shape ``v.shape``.\n\n Note:\n The ``method`` argument controls the algorithm used to compute the insertion indices.\n\n - ``'scan'`` (the default) tends to be more performant on CPU, particularly when ``a`` is\n very large.\n - ``'scan_unrolled'`` is more performant on GPU at the expense of additional compile time.\n - ``'sort'`` is often more performant on accelerator backends like GPU and TPU, particularly\n when ``v`` is very large.\n - ``'compare_all'`` tends to be the most performant when ``a`` is very small.\n\n Examples:\n Searching for a single value:\n\n >>> a = jnp.array([1, 2, 2, 3, 4, 5, 5])\n >>> jnp.searchsorted(a, 2)\n Array(1, dtype=int32)\n >>> jnp.searchsorted(a, 2, side='right')\n Array(3, dtype=int32)\n\n Searching for a batch of values:\n\n >>> vals = jnp.array([0, 3, 8, 1.5, 2])\n >>> jnp.searchsorted(a, vals)\n Array([0, 3, 7, 1, 1], dtype=int32)\n\n Optionally, the ``sorter`` argument can be used to find insertion indices into\n an array sorted via :func:`jax.numpy.argsort`:\n\n >>> a = jnp.array([4, 3, 5, 1, 2])\n >>> sorter = jnp.argsort(a)\n >>> jnp.searchsorted(a, vals, sorter=sorter)\n Array([0, 2, 5, 1, 1], dtype=int32)\n\n The result is equivalent to passing the sorted array:\n\n >>> jnp.searchsorted(jnp.sort(a), vals)\n Array([0, 2, 5, 1, 1], dtype=int32)\n """"""\n if sorter is None:\n a, v = util.ensure_arraylike(""searchsorted"", a, v)\n else:\n a, v, sorter = util.ensure_arraylike(""searchsorted"", a, v, sorter)\n if side not in ['left', 'right']:\n raise ValueError(f""{side!r} is an invalid value for keyword 'side'. ""\n ""Expected one of ['left', 'right']."")\n if method not in ['scan', 'scan_unrolled', 'sort', 'compare_all']:\n raise ValueError(\n f""{method!r} is an invalid value for keyword 'method'. 
""\n ""Expected one of ['sort', 'scan', 'scan_unrolled', 'compare_all']."")\n if np.ndim(a) != 1:\n raise ValueError(""a should be 1-dimensional"")\n a, v = util.promote_dtypes(a, v)\n if sorter is not None:\n a = a[sorter]\n dtype = np.dtype('int32') if a.shape[0] <= np.iinfo(np.int32).max else np.dtype('int64')\n if a.shape[0] == 0:\n return zeros_like(v, dtype=dtype)\n impl = {\n 'scan': partial(_searchsorted_via_scan, False),\n 'scan_unrolled': partial(_searchsorted_via_scan, True),\n 'sort': _searchsorted_via_sort,\n 'compare_all': _searchsorted_via_compare_all,\n }[method]\n return impl(a, v, side, dtype) # type: ignore\n\n\n@export\n@partial(jit, static_argnames=('right', 'method'))\ndef digitize(x: ArrayLike, bins: ArrayLike, right: bool = False,\n *, method: str | None = None) -> Array:\n """"""Convert an array to bin indices.\n\n JAX implementation of :func:`numpy.digitize`.\n\n Args:\n x: array of values to digitize.\n bins: 1D array of bin edges. Must be monotonically increasing or decreasing.\n right: if true, the intervals include the right bin edges. If false (default)\n the intervals include the left bin edges.\n method: optional method argument to be passed to :func:`~jax.numpy.searchsorted`.\n See that function for available options.\n\n Returns:\n An integer array of the same shape as ``x`` indicating the bin number that\n the values are in.\n\n See also:\n - :func:`jax.numpy.searchsorted`: find insertion indices for values in a\n sorted array.\n - :func:`jax.numpy.histogram`: compute frequency of array values within\n specified bins.\n\n Examples:\n >>> x = jnp.array([1.0, 2.0, 2.5, 1.5, 3.0, 3.5])\n >>> bins = jnp.array([1, 2, 3])\n >>> jnp.digitize(x, bins)\n Array([1, 2, 2, 1, 3, 3], dtype=int32)\n >>> jnp.digitize(x, bins, right=True)\n Array([0, 1, 2, 1, 2, 3], dtype=int32)\n\n ``digitize`` supports reverse-ordered bins as well:\n\n >>> bins = jnp.array([3, 2, 1])\n >>> jnp.digitize(x, bins)\n Array([2, 1, 1, 2, 0, 0], dtype=int32)\n """"""\n x, bins_arr = util.ensure_arraylike(""digitize"", x, bins)\n right = core.concrete_or_error(bool, right, ""right argument of jnp.digitize()"")\n if bins_arr.ndim != 1:\n raise ValueError(f""digitize: bins must be a 1-dimensional array; got {bins=}"")\n if bins_arr.shape[0] == 0:\n return zeros_like(x, dtype=np.int32)\n side = 'right' if not right else 'left'\n kwds: dict[str, str] = {} if method is None else {'method': method}\n return where(\n bins_arr[-1] >= bins_arr[0],\n searchsorted(bins_arr, x, side=side, **kwds),\n bins_arr.shape[0] - searchsorted(bins_arr[::-1], x, side=side, **kwds)\n )\n\n\n@export\ndef piecewise(x: ArrayLike, condlist: Array | Sequence[ArrayLike],\n funclist: list[ArrayLike | Callable[..., Array]],\n *args, **kw) -> Array:\n """"""Evaluate a function defined piecewise across the domain.\n\n JAX implementation of :func:`numpy.piecewise`, in terms of :func:`jax.lax.switch`.\n\n Note:\n Unlike :func:`numpy.piecewise`, :func:`jax.numpy.piecewise` requires functions\n in ``funclist`` to be traceable by JAX, as it is implemented via\n :func:`jax.lax.switch`.\n\n Args:\n x: array of input values.\n condlist: boolean array or sequence of boolean arrays corresponding to the\n functions in ``funclist``. 
If a sequence of arrays, the length of each\n array must match the length of ``x``\n funclist: list of arrays or functions; must either be the same length as\n ``condlist``, or have length ``len(condlist) + 1``, in which case the\n last entry is the default applied when none of the conditions are True.\n Alternatively, entries of ``funclist`` may be numerical values, in which\n case they indicate a constant function.\n args, kwargs: additional arguments are passed to each function in\n ``funclist``.\n\n Returns:\n An array which is the result of evaluating the functions on ``x`` at\n the specified conditions.\n\n See also:\n - :func:`jax.lax.switch`: choose between *N* functions based on an index.\n - :func:`jax.lax.cond`: choose between two functions based on a boolean condition.\n - :func:`jax.numpy.where`: choose between two results based on a boolean mask.\n - :func:`jax.lax.select`: choose between two results based on a boolean mask.\n - :func:`jax.lax.select_n`: choose between *N* results based on a boolean mask.\n\n Examples:\n Here's an example of a function which is zero for negative values, and linear\n for positive values:\n\n >>> x = jnp.array([-4, -3, -2, -1, 0, 1, 2, 3, 4])\n\n >>> condlist = [x < 0, x >= 0]\n >>> funclist = [lambda x: 0 * x, lambda x: x]\n >>> jnp.piecewise(x, condlist, funclist)\n Array([0, 0, 0, 0, 0, 1, 2, 3, 4], dtype=int32)\n\n ``funclist`` can also contain a simple scalar value for constant functions:\n\n >>> condlist = [x < 0, x >= 0]\n >>> funclist = [0, lambda x: x]\n >>> jnp.piecewise(x, condlist, funclist)\n Array([0, 0, 0, 0, 0, 1, 2, 3, 4], dtype=int32)\n\n You can specify a default value by appending an extra condition to ``funclist``:\n\n >>> condlist = [x < -1, x > 1]\n >>> funclist = [lambda x: 1 + x, lambda x: x - 1, 0]\n >>> jnp.piecewise(x, condlist, funclist)\n Array([-3, -2, -1, 0, 0, 0, 1, 2, 3], dtype=int32)\n\n ``condlist`` may also be a simple array of scalar conditions, in which case\n the associated function applies to the whole range\n\n >>> condlist = jnp.array([False, True, False])\n >>> funclist = [lambda x: x * 0, lambda x: x * 10, lambda x: x * 100]\n >>> jnp.piecewise(x, condlist, funclist)\n Array([-40, -30, -20, -10, 0, 10, 20, 30, 40], dtype=int32)\n """"""\n x_arr = util.ensure_arraylike(""piecewise"", x)\n nc, nf = len(condlist), len(funclist)\n if nf == nc + 1:\n funclist = funclist[-1:] + funclist[:-1]\n elif nf == nc:\n funclist = [0] + list(funclist)\n else:\n raise ValueError(f""with {nc} condition(s), either {nc} or {nc+1} functions are expected; got {nf}"")\n consts = {i: c for i, c in enumerate(funclist) if not callable(c)}\n funcs = {i: f for i, f in enumerate(funclist) if callable(f)}\n return _piecewise(x_arr, asarray(condlist, dtype=bool), consts,\n frozenset(funcs.items()), # dict is not hashable.\n *args, **kw)\n\n@partial(jit, static_argnames=['funcs'])\ndef _piecewise(x: Array, condlist: Array, consts: dict[int, ArrayLike],\n funcs: frozenset[tuple[int, Callable[..., Array]]],\n *args, **kw) -> Array:\n funcdict = dict(funcs)\n funclist = [consts.get(i, funcdict.get(i)) for i in range(len(condlist) + 1)]\n indices = argmax(reductions.cumsum(concatenate([zeros_like(condlist[:1]), condlist], 0), 0), 0)\n dtype = _dtype(x)\n def _call(f):\n return lambda x: f(x, *args, **kw).astype(dtype)\n def _const(v):\n return lambda x: array(v, dtype=dtype)\n funclist = [_call(f) if callable(f) else _const(f) for f in funclist]\n return vectorize(lax.switch, excluded=(1,))(indices, funclist, x)\n\n\ndef 
_tile_to_size(arr: Array, size: int) -> Array:\n assert arr.ndim == 1\n if arr.size < size:\n arr = tile(arr, int(np.ceil(size / arr.size)))\n assert arr.size >= size\n return arr[:size] if arr.size > size else arr\n",python,tab +2984,11736306,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py",152370,0,"",python,selection_command +2985,11736735,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +2986,11736735,"/fast/home/franz.srambical/jafar/utils/nn.py",10413,0,"",python,selection_command +2987,11737035,"/fast/home/franz.srambical/jafar/utils/nn.py",9597,0,"",python,selection_command +2988,11738887,"/fast/home/franz.srambical/jafar/utils/nn.py",10413,0,"",python,selection_command +2989,11739772,"/fast/home/franz.srambical/jafar/utils/nn.py",10413,13," _",python,selection_command +2990,11740021,"/fast/home/franz.srambical/jafar/utils/nn.py",10413,24," _fuse_masks(",python,selection_command +2991,11740050,"/fast/home/franz.srambical/jafar/utils/nn.py",10413,25," _fuse_masks(m",python,selection_command +2992,11740080,"/fast/home/franz.srambical/jafar/utils/nn.py",10413,29," _fuse_masks(mask,",python,selection_command +2993,11740112,"/fast/home/franz.srambical/jafar/utils/nn.py",10413,31," _fuse_masks(mask, a",python,selection_command +2994,11740146,"/fast/home/franz.srambical/jafar/utils/nn.py",10413,45," _fuse_masks(mask, attention_mask)",python,selection_command +2995,11740178,"/fast/home/franz.srambical/jafar/utils/nn.py",10413,47," _fuse_masks(mask, attention_mask) i",python,selection_command +2996,11740533,"/fast/home/franz.srambical/jafar/utils/nn.py",10459,0,"",python,selection_command +2997,11741229,"/fast/home/franz.srambical/jafar/utils/nn.py",10462,0,"",python,selection_command +2998,11741936,"/fast/home/franz.srambical/jafar/utils/nn.py",8987,0,"",python,selection_command +2999,11743521,"/fast/home/franz.srambical/jafar/utils/nn.py",8936,0,"",python,selection_command +3000,11743568,"/fast/home/franz.srambical/jafar/utils/nn.py",8940,0,"",python,selection_command +3001,11743834,"/fast/home/franz.srambical/jafar/utils/nn.py",8944,0,"",python,selection_command +3002,11744020,"/fast/home/franz.srambical/jafar/utils/nn.py",8956,0,"",python,selection_command +3003,11744343,"/fast/home/franz.srambical/jafar/utils/nn.py",8944,0,"",python,selection_command +3004,11746794,"/fast/home/franz.srambical/jafar/utils/nn.py",8956,0,"",python,selection_command +3005,11747040,"/fast/home/franz.srambical/jafar/utils/nn.py",8957,0,"",python,selection_command +3006,11747066,"/fast/home/franz.srambical/jafar/utils/nn.py",8962,0,"",python,selection_command +3007,11747091,"/fast/home/franz.srambical/jafar/utils/nn.py",8964,0,"",python,selection_command +3008,11747126,"/fast/home/franz.srambical/jafar/utils/nn.py",8967,0,"",python,selection_command +3009,11747157,"/fast/home/franz.srambical/jafar/utils/nn.py",8969,0,"",python,selection_command +3010,11747194,"/fast/home/franz.srambical/jafar/utils/nn.py",8974,0,"",python,selection_command +3011,11747690,"/fast/home/franz.srambical/jafar/utils/nn.py",8969,0,"",python,selection_command +3012,11747942,"/fast/home/franz.srambical/jafar/utils/nn.py",8967,0,"",python,selection_command +3013,11747968,"/fast/home/franz.srambical/jafar/utils/nn.py",8964,0,"",python,selection_command +3014,11748001,"/fast/home/franz.srambical/jafar/utils/nn.py",8962,0,"",python,selection_command +3015,11748035,"/fast/home/franz.srambical/jafar/utils/nn.py",8957,0,"",python,selection_command 
+3016,11748068,"/fast/home/franz.srambical/jafar/utils/nn.py",8956,0,"",python,selection_command +3017,11748197,"/fast/home/franz.srambical/jafar/utils/nn.py",8944,0,"",python,selection_command +3018,11748390,"/fast/home/franz.srambical/jafar/utils/nn.py",8940,0,"",python,selection_command +3019,11748831,"/fast/home/franz.srambical/jafar/utils/nn.py",9013,0,"",python,selection_command +3020,11749081,"/fast/home/franz.srambical/jafar/utils/nn.py",9063,0,"",python,selection_command +3021,11749104,"/fast/home/franz.srambical/jafar/utils/nn.py",9114,0,"",python,selection_command +3022,11749134,"/fast/home/franz.srambical/jafar/utils/nn.py",9148,0,"",python,selection_command +3023,11749165,"/fast/home/franz.srambical/jafar/utils/nn.py",9192,0,"",python,selection_command +3024,11749199,"/fast/home/franz.srambical/jafar/utils/nn.py",9217,0,"",python,selection_command +3025,11749234,"/fast/home/franz.srambical/jafar/utils/nn.py",9279,0,"",python,selection_command +3026,11749266,"/fast/home/franz.srambical/jafar/utils/nn.py",9284,0,"",python,selection_command +3027,11749299,"/fast/home/franz.srambical/jafar/utils/nn.py",9311,0,"",python,selection_command +3028,11749331,"/fast/home/franz.srambical/jafar/utils/nn.py",9374,0,"",python,selection_command +3029,11749367,"/fast/home/franz.srambical/jafar/utils/nn.py",9379,0,"",python,selection_command +3030,11749399,"/fast/home/franz.srambical/jafar/utils/nn.py",9400,0,"",python,selection_command +3031,11749432,"/fast/home/franz.srambical/jafar/utils/nn.py",9467,0,"",python,selection_command +3032,11749571,"/fast/home/franz.srambical/jafar/utils/nn.py",9472,0,"",python,selection_command +3033,11750116,"/fast/home/franz.srambical/jafar/utils/nn.py",9476,0,"",python,selection_command +3034,11750293,"/fast/home/franz.srambical/jafar/utils/nn.py",9480,0,"",python,selection_command +3035,11750513,"/fast/home/franz.srambical/jafar/utils/nn.py",9491,0,"",python,selection_command +3036,11750786,"/fast/home/franz.srambical/jafar/utils/nn.py",9573,0,"",python,selection_command +3037,11751113,"/fast/home/franz.srambical/jafar/utils/nn.py",9572,0,"",python,selection_command +3038,11751543,"/fast/home/franz.srambical/jafar/utils/nn.py",9562,0,"",python,selection_command +3039,11760366,"/fast/home/franz.srambical/jafar/utils/nn.py",9609,0,"",python,selection_command +3040,11760601,"/fast/home/franz.srambical/jafar/utils/nn.py",9646,0,"",python,selection_command +3041,11760627,"/fast/home/franz.srambical/jafar/utils/nn.py",9727,0,"",python,selection_command +3042,11760661,"/fast/home/franz.srambical/jafar/utils/nn.py",9741,0,"",python,selection_command +3043,11761240,"/fast/home/franz.srambical/jafar/utils/nn.py",9748,0,"",python,selection_command +3044,11761481,"/fast/home/franz.srambical/jafar/utils/nn.py",9751,0,"",python,selection_command +3045,11761511,"/fast/home/franz.srambical/jafar/utils/nn.py",9752,0,"",python,selection_command +3046,11761986,"/fast/home/franz.srambical/jafar/utils/nn.py",9763,0,"",python,selection_command +3047,11762171,"/fast/home/franz.srambical/jafar/utils/nn.py",9764,0,"",python,selection_command +3048,11763410,"/fast/home/franz.srambical/jafar/utils/nn.py",9727,0,"",python,selection_command +3049,11763657,"/fast/home/franz.srambical/jafar/utils/nn.py",9669,0,"",python,selection_command +3050,11763679,"/fast/home/franz.srambical/jafar/utils/nn.py",9632,0,"",python,selection_command +3051,11763710,"/fast/home/franz.srambical/jafar/utils/nn.py",9585,0,"",python,selection_command 
+3052,11764007,"/fast/home/franz.srambical/jafar/utils/nn.py",9503,0,"",python,selection_command +3053,11764300,"/fast/home/franz.srambical/jafar/utils/nn.py",9476,0,"",python,selection_command +3054,11764460,"/fast/home/franz.srambical/jafar/utils/nn.py",9480,0,"",python,selection_command +3055,11764625,"/fast/home/franz.srambical/jafar/utils/nn.py",9491,0,"",python,selection_command +3056,11764949,"/fast/home/franz.srambical/jafar/utils/nn.py",9480,0,"",python,selection_command +3057,11766775,"/fast/home/franz.srambical/jafar/utils/nn.py",10425,0,"",python,selection_command +3058,11767703,"/fast/home/franz.srambical/jafar/utils/nn.py",10436,0,"",python,selection_command +3059,11767895,"/fast/home/franz.srambical/jafar/utils/nn.py",10437,0,"",python,selection_command +3060,11769252,"/fast/home/franz.srambical/jafar/utils/nn.py",10441,0,"",python,selection_command +3061,11769640,"/fast/home/franz.srambical/jafar/utils/nn.py",10443,0,"",python,selection_command +3062,11789474,"/fast/home/franz.srambical/jafar/utils/nn.py",8537,0,"",python,selection_command +3063,11789813,"/fast/home/franz.srambical/jafar/utils/nn.py",2614,0,"",python,selection_command +3064,11790890,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"# Copyright 2024 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n""""""Attention core modules for Flax.""""""\n\nfrom __future__ import annotations\n\nimport functools\nfrom typing import Any\nfrom collections.abc import Callable\nimport math\n\nimport jax\nimport jax.numpy as jnp\nfrom jax import lax, random\n\nfrom flax import nnx\nfrom flax.nnx import rnglib\nfrom flax.nnx.module import Module, first_from\nfrom flax.nnx.nn import initializers\nfrom flax.nnx.nn import dtypes\nfrom flax.nnx.nn.linear import (\n LinearGeneral,\n default_kernel_init,\n)\nfrom flax.nnx.nn.normalization import LayerNorm\nfrom flax.typing import (\n Dtype,\n PromoteDtypeFn,\n Shape,\n Initializer,\n PrecisionLike,\n DotGeneralT,\n)\n\nArray = jax.Array\n\n\ndef dot_product_attention_weights(\n query: Array,\n key: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention weights given query and key.\n\n Used by :func:`dot_product_attention`, which is what you'll most likely use.\n But if you want access to the attention weights for introspection, then\n you can directly call this function and call einsum yourself.\n\n Args:\n query: queries for calculating attention with shape of `[batch..., q_length,\n num_heads, qk_depth_per_head]`.\n key: keys for calculating attention with shape of `[batch..., kv_length,\n num_heads, qk_depth_per_head]`.\n bias: bias for the attention weights. 
This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is `False`.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs and params)\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n ``nnx.Intermediate`` collection. If ``module`` is None, the attention\n weights will not be sowed.\n promote_dtype: function to promote the dtype of the arrays to the desired\n dtype. The function should accept a tuple of ``(query, key)`` and a ``dtype``\n keyword argument, and return a tuple of arrays with the promoted dtype.\n\n Returns:\n Output of shape `[batch..., num_heads, q_length, kv_length]`.\n """"""\n query, key = promote_dtype((query, key), dtype=dtype) # type: ignore[bad-unpacking]\n dtype = query.dtype\n\n assert query.ndim == key.ndim, 'q, k must have same rank.'\n assert query.shape[:-3] == key.shape[:-3], 'q, k batch dims must match.'\n assert query.shape[-2] == key.shape[-2], 'q, k num_heads must match.'\n assert query.shape[-1] == key.shape[-1], 'q, k depths must match.'\n\n # calculate attention matrix\n depth = query.shape[-1]\n query = query / jnp.sqrt(depth).astype(dtype)\n # attn weight shape is (batch..., num_heads, q_length, kv_length)\n attn_weights = jnp.einsum(\n '...qhd,...khd->...hqk', query, key, precision=precision\n )\n\n # apply attention bias: masking, dropout, proximity bias, etc.\n if bias is not None:\n attn_weights = attn_weights + bias\n # apply attention mask\n if mask is not None:\n big_neg = jnp.finfo(dtype).min\n attn_weights = jnp.where(mask, attn_weights, big_neg)\n\n # normalize the attention weights\n attn_weights = jax.nn.softmax(attn_weights).astype(dtype)\n\n if module:\n module.sow(nnx.Intermediate, 'attention_weights', attn_weights)\n\n # apply attention dropout\n if not deterministic and dropout_rate > 0.0:\n keep_prob = 1.0 - dropout_rate\n if broadcast_dropout:\n # dropout is broadcast across the batch + head dimensions\n dropout_shape = tuple([1] * (key.ndim - 2)) + attn_weights.shape[-2:]\n keep = random.bernoulli(dropout_rng, keep_prob, dropout_shape) # type: ignore\n else:\n keep = random.bernoulli(dropout_rng, keep_prob, attn_weights.shape) # type: ignore\n multiplier = keep.astype(dtype) / jnp.asarray(keep_prob, dtype=dtype)\n attn_weights = attn_weights * multiplier\n\n return attn_weights\n\n\ndef dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n 
https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n Will use the more optimized `jax.nn.dot_product_attention` if dropout is\n not activated and `module=None`.\n\n .. note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n\n Args:\n query: queries for calculating attention with shape of ``[batch..., q_length,\n num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n value: values to be used in attention with shape of ``[batch..., kv_length,\n num_heads, v_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is `False`.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs)\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n ``nnx.Intermediate`` collection. If ``module`` is None, the attention\n weights will not be sowed.\n promote_dtype: function to promote the dtype of the arrays to the desired\n dtype. 
The function should accept a tuple of ``(query, key, value)`` and a\n ``dtype`` keyword argument, and return a tuple of arrays with the promoted\n dtype.\n\n Returns:\n Output of shape `[batch..., q_length, num_heads, v_depth_per_head]`.\n """"""\n query, key, value = promote_dtype((query, key, value), dtype=dtype) # type: ignore[bad-unpacking]\n dtype = query.dtype\n assert key.ndim == query.ndim == value.ndim, 'q, k, v must have same rank.'\n assert (\n query.shape[:-3] == key.shape[:-3] == value.shape[:-3]\n ), 'q, k, v batch dims must match.'\n assert (\n query.shape[-2] == key.shape[-2] == value.shape[-2]\n ), 'q, k, v num_heads must match.'\n assert key.shape[-3] == value.shape[-3], 'k, v lengths must match.'\n\n # Criteria that invoke the more optimized dot product attention\n if dropout_rate == 0.0 and module == None:\n # make sure qkv batch are compressed to one dim\n query_shape = query.shape\n if len(query_shape) > 4:\n def reshape_4d(x):\n return jnp.reshape(x, (math.prod(x.shape[:-3]), *x.shape[-3:]))\n query, key, value, bias, mask = jax.tree.map(\n reshape_4d, (query, key, value, bias, mask))\n if mask is not None:\n mask = mask.astype(jnp.bool)\n out = jax.nn.dot_product_attention(query, key, value, bias, mask)\n if len(query_shape) > 4:\n out = jnp.reshape(out, query_shape)\n return out\n\n # compute attention weights\n attn_weights = dot_product_attention_weights(\n query,\n key,\n bias,\n mask,\n broadcast_dropout,\n dropout_rng,\n dropout_rate,\n deterministic,\n dtype,\n precision,\n module,\n )\n\n # return weighted sum over values for each query position\n return jnp.einsum(\n '...hqk,...khd->...qhd', attn_weights, value, precision=precision\n )\n\n\nclass MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n\n def init_cache(self, input_shape: Shape, dtype: Dtype = jnp.float32):\n """"""Initializes cache for fast autoregressive decoding. When\n ``decode=True``, this method must be called first before performing\n forward inference. When in decode mode, only one token must be passed\n at a time.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax.numpy as jnp\n ...\n >>> batch_size = 5\n >>> embed_dim = 3\n >>> x = jnp.ones((batch_size, 1, embed_dim)) # single token\n ...\n >>> model_nnx = nnx.MultiHeadAttention(\n ... num_heads=2,\n ... in_features=3,\n ... qkv_features=6,\n ... out_features=6,\n ... decode=True,\n ... rngs=nnx.Rngs(42),\n ... 
)\n ...\n >>> # out_nnx = model_nnx(x) <-- throws an error because cache isn't initialized\n ...\n >>> model_nnx.init_cache(x.shape)\n >>> out_nnx = model_nnx(x)\n """"""\n cache_shape = (*input_shape[:-1], self.num_heads, self.head_dim)\n self.cached_key = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cached_value = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cache_index = nnx.Cache(jnp.array(0, dtype=jnp.int32))\n\n\n# mask-making utility functions\n\n\ndef make_attention_mask(\n query_input: Array,\n key_input: Array,\n pairwise_fn: Callable[..., Any] = jnp.multiply,\n extra_batch_dims: int = 0,\n dtype: Dtype = jnp.float32,\n):\n """"""Mask-making helper for attention weights.\n\n In case of 1d inputs (i.e., `[batch..., len_q]`, `[batch..., len_kv]`, the\n attention weights will be `[batch..., heads, len_q, len_kv]` and this\n function will produce `[batch..., 1, len_q, len_kv]`.\n\n Args:\n query_input: a batched, flat input of query_length size\n key_input: a batched, flat input of key_length size\n pairwise_fn: broadcasting elementwise comparison function\n extra_batch_dims: number of extra batch dims to add singleton axes for, none\n by default\n dtype: mask return dtype\n\n Returns:\n A `[batch..., 1, len_q, len_kv]` shaped mask for 1d attention.\n """"""\n mask = pairwise_fn(\n jnp.expand_dims(query_input, axis=-1), jnp.expand_dims(key_input, axis=-2)\n )\n mask = jnp.expand_dims(mask, axis=-3)\n mask = jnp.expand_dims(mask, axis=tuple(range(extra_batch_dims)))\n return mask.astype(dtype)\n\n\ndef make_causal_mask(\n x: Array, extra_batch_dims: int = 0, dtype: Dtype = jnp.float32\n) -> Array:\n """"""Make a causal mask for self-attention.\n\n In case of 1d inputs (i.e., `[batch..., len]`, the self-attention weights\n will be `[batch..., heads, len, len]` and this function will produce a\n causal mask of shape `[batch..., 1, len, len]`.\n\n Args:\n x: input array of shape `[batch..., len]`\n extra_batch_dims: number of batch dims to add singleton axes for, none by\n default\n dtype: mask return dtype\n\n Returns:\n A `[batch..., 1, len, len]` shaped causal mask for 1d attention.\n """"""\n idxs = jnp.broadcast_to(jnp.arange(x.shape[-1], dtype=jnp.int32), x.shape)\n return make_attention_mask(\n idxs,\n idxs,\n jnp.greater_equal,\n extra_batch_dims=extra_batch_dims,\n dtype=dtype,\n )\n\n\ndef combine_masks(\n *masks: Array | None, dtype: Dtype = jnp.float32\n) -> Array | None:\n """"""Combine attention masks.\n\n Args:\n *masks: set of attention mask arguments to combine, some can be None.\n dtype: dtype for the returned mask.\n\n Returns:\n Combined mask, reduced by logical and, returns None if no masks given.\n """"""\n masks_list = [m for m in masks if m is not None]\n if not masks_list:\n return None\n assert all(\n map(lambda x: x.ndim == masks_list[0].ndim, masks_list)\n ), f'masks must have same rank: {tuple(map(lambda x: x.ndim, masks_list))}'\n mask, *other_masks = masks_list\n for other_mask in other_masks:\n mask = jnp.logical_and(mask, other_mask)\n return mask.astype(dtype)\n",python,tab +3065,11790890,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9464,0,"",python,selection_command +3066,11791591,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",12332,0,"",python,selection_command +3067,11791824,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14092,0,"",python,selection_command +3068,11793081,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16150,0,"",python,selection_command 
+3069,11793369,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18670,0,"",python,selection_command +3070,11793732,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20649,0,"",python,selection_command +3071,11794202,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22802,0,"",python,selection_command +3072,11794935,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23246,0,"",python,selection_keyboard +3073,11795279,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23182,0,"",python,selection_command +3074,11795530,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23110,0,"",python,selection_command +3075,11795557,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23105,0,"",python,selection_command +3076,11795583,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23094,0,"",python,selection_command +3077,11795618,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23072,0,"",python,selection_command +3078,11795650,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23027,0,"",python,selection_command +3079,11795691,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23021,0,"",python,selection_command +3080,11795723,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22977,0,"",python,selection_command +3081,11795756,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22945,0,"",python,selection_command +3082,11795789,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22921,0,"",python,selection_command +3083,11795822,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22886,0,"",python,selection_command +3084,11795855,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22838,0,"",python,selection_command +3085,11795889,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22800,0,"",python,selection_command +3086,11795923,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22769,0,"",python,selection_command +3087,11795956,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22752,0,"",python,selection_command +3088,11795989,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22739,0,"",python,selection_command +3089,11796022,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22728,0,"",python,selection_command +3090,11796056,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22715,0,"",python,selection_command +3091,11796104,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22688,0,"",python,selection_command +3092,11796212,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22666,0,"",python,selection_command +3093,11796376,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22688,0,"",python,selection_command +3094,11797052,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22715,0,"",python,selection_command +3095,11797295,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22728,0,"",python,selection_command +3096,11797401,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22739,0,"",python,selection_command +3097,11797645,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22752,0,"",python,selection_command +3098,11797783,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22754,0,"",python,selection_command +3099,11797963,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22758,0,"",python,selection_command 
+3100,11798106,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22759,0,"",python,selection_command +3101,11798482,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22754,0,"",python,selection_command +3102,11799055,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21797,0,"",python,selection_command +3103,11800520,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21767,0,"",python,selection_command +3104,11814882,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21795,0,"",python,selection_command +3105,11815226,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21797,0,"",python,selection_command +3106,11815595,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21767,0,"",python,selection_command +3107,11816165,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21551,0,"",python,selection_command +3108,11816859,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18091,0,"",python,selection_command +3109,11817679,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17942,0,"",python,selection_command +3110,11817995,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17926,0,"",python,selection_command +3111,11818301,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17011,0,"",python,selection_command +3112,11818637,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9175,0,"",python,selection_command +3113,11819640,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17011,0,"",python,selection_command +3114,11819977,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17926,0,"",python,selection_command +3115,11820167,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17942,0,"",python,selection_command +3116,11820335,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18091,0,"",python,selection_command +3117,11820503,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21551,0,"",python,selection_command +3118,11820757,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21767,0,"",python,selection_command +3119,11821098,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21797,0,"",python,selection_command +3120,11821389,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22754,0,"",python,selection_command +3121,11822957,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21797,0,"",python,selection_command +3122,11823537,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21789,13," mask,",python,selection_command +3123,11823791,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21789,39," mask,\n jnp.broadcast_to(",python,selection_command +3124,11823971,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21789,86," mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,",python,selection_command +3125,11824315,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21837,0,"",python,selection_command +3126,11824499,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21811,0,"",python,selection_command +3127,11824841,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21803,25," jnp.broadcast_to(",python,selection_command +3128,11825003,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21803,72," jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,",python,selection_command +3129,11825141,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21803,122," 
jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),",python,selection_command +3130,11826358,"TERMINAL",0,0,"\r[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +3131,11839482,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21884,0,"",python,selection_command +3132,11866906,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21837,0,"",python,selection_command +3133,11867055,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21811,0,"",python,selection_command +3134,11867204,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21797,0,"",python,selection_command +3135,11867336,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21769,0,"",python,selection_command +3136,11867455,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21728,0,"",python,selection_command +3137,11867757,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21769,0,"",python,selection_command +3138,11888268,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +3139,11889946,"/fast/home/franz.srambical/jafar/utils/nn.py",11110,0,"",python,selection_command +3140,11890458,"/fast/home/franz.srambical/jafar/utils/nn.py",10604,0,"",python,selection_command +3141,11890787,"/fast/home/franz.srambical/jafar/utils/nn.py",9727,0,"",python,selection_command +3142,11891676,"/fast/home/franz.srambical/jafar/utils/nn.py",9646,0,"",python,selection_command +3143,11891924,"/fast/home/franz.srambical/jafar/utils/nn.py",9609,0,"",python,selection_command +3144,11891952,"/fast/home/franz.srambical/jafar/utils/nn.py",9562,0,"",python,selection_command +3145,11891984,"/fast/home/franz.srambical/jafar/utils/nn.py",9480,0,"",python,selection_command +3146,11892011,"/fast/home/franz.srambical/jafar/utils/nn.py",9467,0,"",python,selection_command +3147,11892047,"/fast/home/franz.srambical/jafar/utils/nn.py",9408,0,"",python,selection_command +3148,11892077,"/fast/home/franz.srambical/jafar/utils/nn.py",9387,0,"",python,selection_command +3149,11892112,"/fast/home/franz.srambical/jafar/utils/nn.py",9374,0,"",python,selection_command +3150,11892145,"/fast/home/franz.srambical/jafar/utils/nn.py",9319,0,"",python,selection_command +3151,11892180,"/fast/home/franz.srambical/jafar/utils/nn.py",9292,0,"",python,selection_command +3152,11892212,"/fast/home/franz.srambical/jafar/utils/nn.py",9279,0,"",python,selection_command +3153,11892246,"/fast/home/franz.srambical/jafar/utils/nn.py",9225,0,"",python,selection_command +3154,11892287,"/fast/home/franz.srambical/jafar/utils/nn.py",9200,0,"",python,selection_command +3155,11892321,"/fast/home/franz.srambical/jafar/utils/nn.py",9156,0,"",python,selection_command +3156,11892354,"/fast/home/franz.srambical/jafar/utils/nn.py",9122,0,"",python,selection_command +3157,11892541,"/fast/home/franz.srambical/jafar/utils/nn.py",9071,0,"",python,selection_command +3158,11892678,"/fast/home/franz.srambical/jafar/utils/nn.py",9021,0,"",python,selection_command +3159,11892899,"/fast/home/franz.srambical/jafar/utils/nn.py",8948,0,"",python,selection_command +3160,11895063,"/fast/home/franz.srambical/jafar/utils/nn.py",8935,0,"",python,selection_command +3161,11895317,"/fast/home/franz.srambical/jafar/utils/nn.py",8933,0,"",python,selection_command +3162,11895340,"/fast/home/franz.srambical/jafar/utils/nn.py",8899,0,"",python,selection_command +3163,11895367,"/fast/home/franz.srambical/jafar/utils/nn.py",8801,0,"",python,selection_command 
+3164,11895404,"/fast/home/franz.srambical/jafar/utils/nn.py",8716,0,"",python,selection_command +3165,11895438,"/fast/home/franz.srambical/jafar/utils/nn.py",8703,0,"",python,selection_command +3166,11895470,"/fast/home/franz.srambical/jafar/utils/nn.py",8636,0,"",python,selection_command +3167,11895503,"/fast/home/franz.srambical/jafar/utils/nn.py",8540,0,"",python,selection_command +3168,11895539,"/fast/home/franz.srambical/jafar/utils/nn.py",8527,0,"",python,selection_command +3169,11895569,"/fast/home/franz.srambical/jafar/utils/nn.py",8468,0,"",python,selection_command +3170,11895603,"/fast/home/franz.srambical/jafar/utils/nn.py",8454,0,"",python,selection_command +3171,11895636,"/fast/home/franz.srambical/jafar/utils/nn.py",8372,0,"",python,selection_command +3172,11895671,"/fast/home/franz.srambical/jafar/utils/nn.py",8359,0,"",python,selection_command +3173,11895703,"/fast/home/franz.srambical/jafar/utils/nn.py",8358,0,"",python,selection_command +3174,11895738,"/fast/home/franz.srambical/jafar/utils/nn.py",8332,0,"",python,selection_command +3175,11895775,"/fast/home/franz.srambical/jafar/utils/nn.py",8274,0,"",python,selection_command +3176,11895828,"/fast/home/franz.srambical/jafar/utils/nn.py",8261,0,"",python,selection_command +3177,11895863,"/fast/home/franz.srambical/jafar/utils/nn.py",8239,0,"",python,selection_command +3178,11895939,"/fast/home/franz.srambical/jafar/utils/nn.py",8261,0,"",python,selection_command +3179,11896194,"/fast/home/franz.srambical/jafar/utils/nn.py",8274,0,"",python,selection_command +3180,11896224,"/fast/home/franz.srambical/jafar/utils/nn.py",8332,0,"",python,selection_command +3181,11896249,"/fast/home/franz.srambical/jafar/utils/nn.py",8358,0,"",python,selection_command +3182,11896282,"/fast/home/franz.srambical/jafar/utils/nn.py",8359,0,"",python,selection_command +3183,11896318,"/fast/home/franz.srambical/jafar/utils/nn.py",8372,0,"",python,selection_command +3184,11896795,"/fast/home/franz.srambical/jafar/utils/nn.py",8454,0,"",python,selection_command +3185,11896962,"/fast/home/franz.srambical/jafar/utils/nn.py",8372,0,"",python,selection_command +3186,11897111,"/fast/home/franz.srambical/jafar/utils/nn.py",8389,0,"",python,selection_command +3187,11897363,"/fast/home/franz.srambical/jafar/utils/nn.py",8390,0,"",python,selection_command +3188,11897389,"/fast/home/franz.srambical/jafar/utils/nn.py",8409,0,"",python,selection_command +3189,11897417,"/fast/home/franz.srambical/jafar/utils/nn.py",8410,0,"",python,selection_command +3190,11897563,"/fast/home/franz.srambical/jafar/utils/nn.py",8415,0,"",python,selection_command +3191,11897744,"/fast/home/franz.srambical/jafar/utils/nn.py",8416,0,"",python,selection_command +3192,11897912,"/fast/home/franz.srambical/jafar/utils/nn.py",8426,0,"",python,selection_command +3193,11898062,"/fast/home/franz.srambical/jafar/utils/nn.py",8427,0,"",python,selection_command +3194,11898353,"/fast/home/franz.srambical/jafar/utils/nn.py",8432,0,"",python,selection_command +3195,11900046,"/fast/home/franz.srambical/jafar/utils/nn.py",8429,0,"",python,selection_command +3196,11900212,"/fast/home/franz.srambical/jafar/utils/nn.py",8427,0,"",python,selection_command +3197,11900336,"/fast/home/franz.srambical/jafar/utils/nn.py",8418,0,"",python,selection_command +3198,11900996,"/fast/home/franz.srambical/jafar/utils/nn.py",10975,0,"",python,selection_command +3199,11902587,"/fast/home/franz.srambical/jafar/utils/nn.py",8418,0,"",python,selection_command 
+3200,11903507,"/fast/home/franz.srambical/jafar/utils/nn.py",8427,0,"",python,selection_command +3201,11903735,"/fast/home/franz.srambical/jafar/utils/nn.py",8432,0,"",python,selection_command +3202,11906210,"/fast/home/franz.srambical/jafar/utils/nn.py",8429,0,"",python,selection_command +3203,11906352,"/fast/home/franz.srambical/jafar/utils/nn.py",8427,0,"",python,selection_command +3204,11906477,"/fast/home/franz.srambical/jafar/utils/nn.py",8418,0,"",python,selection_command +3205,11907211,"/fast/home/franz.srambical/jafar/utils/nn.py",10975,0,"",python,selection_command +3206,11911680,"/fast/home/franz.srambical/jafar/utils/nn.py",10932,0,"",python,selection_command +3207,11911924,"/fast/home/franz.srambical/jafar/utils/nn.py",10906,0,"",python,selection_command +3208,11911950,"/fast/home/franz.srambical/jafar/utils/nn.py",10880,0,"",python,selection_command +3209,11911977,"/fast/home/franz.srambical/jafar/utils/nn.py",10852,0,"",python,selection_command +3210,11912010,"/fast/home/franz.srambical/jafar/utils/nn.py",10828,0,"",python,selection_command +3211,11912044,"/fast/home/franz.srambical/jafar/utils/nn.py",10800,0,"",python,selection_command +3212,11912077,"/fast/home/franz.srambical/jafar/utils/nn.py",10750,0,"",python,selection_command +3213,11912112,"/fast/home/franz.srambical/jafar/utils/nn.py",10680,0,"",python,selection_command +3214,11912152,"/fast/home/franz.srambical/jafar/utils/nn.py",10667,0,"",python,selection_command +3215,11912186,"/fast/home/franz.srambical/jafar/utils/nn.py",10608,0,"",python,selection_command +3216,11912218,"/fast/home/franz.srambical/jafar/utils/nn.py",10595,0,"",python,selection_command +3217,11912253,"/fast/home/franz.srambical/jafar/utils/nn.py",10521,0,"",python,selection_command +3218,11912281,"/fast/home/franz.srambical/jafar/utils/nn.py",10507,0,"",python,selection_command +3219,11912319,"/fast/home/franz.srambical/jafar/utils/nn.py",10425,0,"",python,selection_command +3220,11912352,"/fast/home/franz.srambical/jafar/utils/nn.py",10405,0,"",python,selection_command +3221,11912508,"/fast/home/franz.srambical/jafar/utils/nn.py",10392,0,"",python,selection_command +3222,11912637,"/fast/home/franz.srambical/jafar/utils/nn.py",10328,0,"",python,selection_command +3223,11912799,"/fast/home/franz.srambical/jafar/utils/nn.py",10252,0,"",python,selection_command +3224,11912916,"/fast/home/franz.srambical/jafar/utils/nn.py",10167,0,"",python,selection_command +3225,11913665,"/fast/home/franz.srambical/jafar/utils/nn.py",10155,84," attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)",python,selection_command +3226,11913812,"/fast/home/franz.srambical/jafar/utils/nn.py",10155,160," attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)",python,selection_command +3227,11913945,"/fast/home/franz.srambical/jafar/utils/nn.py",10155,236," attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)",python,selection_command +3228,11932445,"/fast/home/franz.srambical/jafar/utils/nn.py",10328,0,"",python,selection_command +3229,11949336,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +3230,11951027,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21551,0,"",python,selection_command 
+3231,11951357,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18091,0,"",python,selection_command +3232,11951873,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17942,0,"",python,selection_command +3233,11952199,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17926,0,"",python,selection_command +3234,11952563,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17011,0,"",python,selection_command +3235,11953445,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17926,0,"",python,selection_command +3236,11953551,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17942,0,"",python,selection_command +3237,11953706,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18091,0,"",python,selection_command +3238,11953849,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21551,0,"",python,selection_command +3239,11954597,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21767,0,"",python,selection_command +3240,11955463,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21795,0,"",python,selection_command +3241,11955871,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21797,0,"",python,selection_command +3242,11964863,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21769,0,"",python,selection_command +3243,11965377,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21761,27," mask = combine_masks(",python,selection_command +3244,11965538,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21761,41," mask = combine_masks(\n mask,",python,selection_command +3245,11965790,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21761,67," mask = combine_masks(\n mask,\n jnp.broadcast_to(",python,selection_command +3246,11965814,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21761,114," mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,",python,selection_command +3247,11965933,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21761,164," mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),",python,selection_command +3248,11966070,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21761,175," mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),",python,selection_command +3249,11966219,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21761,183," mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )",python,selection_command +3250,11977133,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21943,0,"",python,selection_command +3251,11977476,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21944,0," Jafar ",python,content +3252,11977482,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21950,0,"",python,selection_command +3253,11978309,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21944,7,"",python,content +3254,11978312,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21943,0,"",python,selection_command +3255,11981778,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +3256,11984377,"/fast/home/franz.srambical/jafar/utils/nn.py",10392,0,"",python,selection_command 
+3257,11984534,"/fast/home/franz.srambical/jafar/utils/nn.py",10405,0,"",python,selection_command +3258,11984683,"/fast/home/franz.srambical/jafar/utils/nn.py",10425,0,"",python,selection_command +3259,11985034,"/fast/home/franz.srambical/jafar/utils/nn.py",10405,0,"",python,selection_command +3260,11985675,"/fast/home/franz.srambical/jafar/utils/nn.py",10393,19," mask_4d = (",python,selection_command +3261,11985806,"/fast/home/franz.srambical/jafar/utils/nn.py",10393,105," mask_4d = (\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask",python,selection_command +3262,11985936,"/fast/home/franz.srambical/jafar/utils/nn.py",10393,115," mask_4d = (\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n )",python,selection_command +3263,12002566,"/fast/home/franz.srambical/jafar/utils/nn.py",10507,0,"",python,selection_command +3264,12064211,"/fast/home/franz.srambical/jafar/utils/nn.py",9597,131," # If mask is 4D (batch, heads, seq_len, seq_len), pad the last two dimensions\n if mask_bool.ndim == 4:\n expanded_mask = jnp.pad(\n mask_bool, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )\n else:\n # For other shapes, assume it's (seq_len, seq_len) and pad accordingly\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )",python,content +3265,12071345,"/fast/home/franz.srambical/jafar/utils/nn.py",9597,555," expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )",python,content +3266,12071432,"/fast/home/franz.srambical/jafar/utils/nn.py",10393,201," if mask is not None:\n # Handle the mask properly for decode mode\n if mask.ndim == 4: # (batch, heads, seq_len, seq_len)\n # For decode mode, mask should already be the right shape\n # We need to pad it to match the padded sequence length\n mask_4d = jnp.pad(\n mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )\n else:\n # For other cases, use the original logic\n mask_4d = _fuse_masks(mask, attention_mask)\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n else:\n mask_4d = attention_mask[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)",python,content +3267,12071432,"/fast/home/franz.srambical/jafar/utils/nn.py",9597,131," # If mask is 4D (batch, heads, seq_len, seq_len), pad the last two dimensions\n if mask_bool.ndim == 4:\n expanded_mask = jnp.pad(\n mask_bool, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )\n else:\n # For other shapes, assume it's (seq_len, seq_len) and pad accordingly\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )",python,content +3268,12075030,"/fast/home/franz.srambical/jafar/utils/nn.py",10817,807," mask_4d = (\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n )\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)",python,content +3269,12075030,"/fast/home/franz.srambical/jafar/utils/nn.py",9597,555," expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )",python,content +3270,12075111,"/fast/home/franz.srambical/jafar/utils/nn.py",10393,201," if mask is not None:\n # Handle the mask properly for decode mode\n if mask.ndim == 4: # (batch, heads, seq_len, seq_len)\n # For decode mode, mask should already be the right shape\n # We need to pad it to match the padded sequence length\n mask_4d = jnp.pad(\n mask, ((0, 0), (0, 
0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )\n else:\n # For other cases, use the original logic\n mask_4d = _fuse_masks(mask, attention_mask)\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n else:\n mask_4d = attention_mask[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)",python,content +3271,12075112,"/fast/home/franz.srambical/jafar/utils/nn.py",9597,131," # If mask is 4D (batch, heads, seq_len, seq_len), pad the last two dimensions\n if mask_bool.ndim == 4:\n expanded_mask = jnp.pad(\n mask_bool, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )\n else:\n # For other shapes, assume it's (seq_len, seq_len) and pad accordingly\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )",python,content +3272,12075112,"/fast/home/franz.srambical/jafar/utils/nn.py",9110,102," # SECOND PASS: qkv.shape",python,content +3273,12132709,"/fast/home/franz.srambical/jafar/utils/nn.py",10747,807," mask_4d = (\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n )\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)",python,content +3274,12132710,"/fast/home/franz.srambical/jafar/utils/nn.py",9527,555," expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )",python,content +3275,12132710,"/fast/home/franz.srambical/jafar/utils/nn.py",9110,32," # SECOND PASS: qkv.shape \n if query.shape == (1, 921, 1, 512):\n breakpoint()",python,content +3276,12132826,"/fast/home/franz.srambical/jafar/utils/nn.py",10393,201," if mask is not None:\n # Handle the mask properly for decode mode\n if mask.ndim == 4: # (batch, heads, seq_len, seq_len)\n # For decode mode, mask should already be the right shape\n # We need to pad it to match the padded sequence length\n mask_4d = jnp.pad(\n mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )\n else:\n # For other cases, use the original logic\n mask_4d = _fuse_masks(mask, attention_mask)\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n else:\n mask_4d = attention_mask[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)",python,content +3277,12132826,"/fast/home/franz.srambical/jafar/utils/nn.py",9468,326,"",python,content +3278,12132826,"/fast/home/franz.srambical/jafar/utils/nn.py",9110,102," # SECOND PASS: qkv.shape",python,content +3279,12141034,"/fast/home/franz.srambical/jafar/utils/nn.py",9997,807," mask_4d = (\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n )\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)",python,content +3280,12141034,"/fast/home/franz.srambical/jafar/utils/nn.py",9398,0," def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )\n return jnp.logical_and(attention_mask, expanded_mask)",python,content +3281,12141034,"/fast/home/franz.srambical/jafar/utils/nn.py",9110,32," # SECOND PASS: qkv.shape \n if query.shape == (1, 921, 1, 512):\n breakpoint()",python,content +3282,12141104,"/fast/home/franz.srambical/jafar/utils/nn.py",10393,201," # Handle mask from nnx MultiHeadAttention\n if mask is not None:\n # Mask from nnx is always 4D: (batch, heads, seq_len, seq_len)\n # Pad it to match the padded sequence length\n mask_4d = jnp.pad(\n mask, ((0, 0), (0, 0), 
(0, pad_size), (0, pad_size)), \n constant_values=False\n )\n else:\n # No mask provided, use the attention mask for padded positions\n mask_4d = attention_mask[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)",python,content +3283,12141104,"/fast/home/franz.srambical/jafar/utils/nn.py",9468,326,"",python,content +3284,12141104,"/fast/home/franz.srambical/jafar/utils/nn.py",9110,102," # SECOND PASS: qkv.shape",python,content +3285,12146549,"/fast/home/franz.srambical/jafar/utils/nn.py",9997,551," mask_4d = (\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n )\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)",python,content +3286,12146550,"/fast/home/franz.srambical/jafar/utils/nn.py",9398,0," def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )\n return jnp.logical_and(attention_mask, expanded_mask)",python,content +3287,12146550,"/fast/home/franz.srambical/jafar/utils/nn.py",9110,32," # SECOND PASS: qkv.shape \n if query.shape == (1, 921, 1, 512):\n breakpoint()",python,content +3288,12146655,"/fast/home/franz.srambical/jafar/utils/nn.py",10393,201," # Handle mask from nnx MultiHeadAttention\n if mask is not None:\n # Mask from nnx is always 4D: (batch, heads, seq_len, seq_len)\n # Pad it to match the padded sequence length\n mask_4d = jnp.pad(\n mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )\n else:\n # No mask provided, use the attention mask for padded positions\n mask_4d = attention_mask[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)",python,content +3289,12146655,"/fast/home/franz.srambical/jafar/utils/nn.py",9468,326,"",python,content +3290,12146655,"/fast/home/franz.srambical/jafar/utils/nn.py",9009,204,"",python,content +3291,12151573,"/fast/home/franz.srambical/jafar/utils/nn.py",8974,0,"",python,selection_mouse +3292,12152751,"/fast/home/franz.srambical/jafar/utils/nn.py",9009,0," # for temporal attention (using kv cache)\n # FIRST PASS: qkv.shape (1, 921, 1, 8, 64)\n # SECOND PASS: qkv.shape \n if query.shape == (1, 921, 1, 512):\n breakpoint()\n",python,content +3293,12153213,"/fast/home/franz.srambical/jafar/utils/nn.py",9047,0,"",python,selection_command +3294,12153463,"/fast/home/franz.srambical/jafar/utils/nn.py",9097,0,"",python,selection_command +3295,12153489,"/fast/home/franz.srambical/jafar/utils/nn.py",9142,0,"",python,selection_command +3296,12153523,"/fast/home/franz.srambical/jafar/utils/nn.py",9182,0,"",python,selection_command +3297,12153554,"/fast/home/franz.srambical/jafar/utils/nn.py",9211,0,"",python,selection_command +3298,12153587,"/fast/home/franz.srambical/jafar/utils/nn.py",9251,0,"",python,selection_command +3299,12153621,"/fast/home/franz.srambical/jafar/utils/nn.py",9279,0,"",python,selection_command +3300,12153655,"/fast/home/franz.srambical/jafar/utils/nn.py",9305,0,"",python,selection_command +3301,12153689,"/fast/home/franz.srambical/jafar/utils/nn.py",9345,0,"",python,selection_command +3302,12153724,"/fast/home/franz.srambical/jafar/utils/nn.py",9374,0,"",python,selection_command +3303,12153920,"/fast/home/franz.srambical/jafar/utils/nn.py",9394,0,"",python,selection_command +3304,12154057,"/fast/home/franz.srambical/jafar/utils/nn.py",9434,0,"",python,selection_command +3305,12154441,"/fast/home/franz.srambical/jafar/utils/nn.py",9467,0,"",python,selection_command 
+3306,12176657,"/fast/home/franz.srambical/jafar/utils/nn.py",9468,0," def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )\n return jnp.logical_and(attention_mask, expanded_mask)",python,content +3307,12177198,"/fast/home/franz.srambical/jafar/utils/nn.py",10080,0,"",python,selection_keyboard +3308,12177612,"/fast/home/franz.srambical/jafar/utils/nn.py",10119,0,"",python,selection_command +3309,12177861,"/fast/home/franz.srambical/jafar/utils/nn.py",10154,0,"",python,selection_command +3310,12177891,"/fast/home/franz.srambical/jafar/utils/nn.py",10163,0,"",python,selection_command +3311,12178014,"/fast/home/franz.srambical/jafar/utils/nn.py",10248,0,"",python,selection_command +3312,12178178,"/fast/home/franz.srambical/jafar/utils/nn.py",10324,0,"",python,selection_command +3313,12178462,"/fast/home/franz.srambical/jafar/utils/nn.py",10392,0,"",python,selection_command +3314,12181278,"/fast/home/franz.srambical/jafar/utils/nn.py",10324,0,"",python,selection_command +3315,12181526,"/fast/home/franz.srambical/jafar/utils/nn.py",10248,0,"",python,selection_command +3316,12181559,"/fast/home/franz.srambical/jafar/utils/nn.py",10163,0,"",python,selection_command +3317,12181578,"/fast/home/franz.srambical/jafar/utils/nn.py",10154,0,"",python,selection_command +3318,12181613,"/fast/home/franz.srambical/jafar/utils/nn.py",10119,0,"",python,selection_command +3319,12181649,"/fast/home/franz.srambical/jafar/utils/nn.py",10080,0,"",python,selection_command +3320,12181685,"/fast/home/franz.srambical/jafar/utils/nn.py",10037,0,"",python,selection_command +3321,12181720,"/fast/home/franz.srambical/jafar/utils/nn.py",10028,0,"",python,selection_command +3322,12181750,"/fast/home/franz.srambical/jafar/utils/nn.py",9983,0,"",python,selection_command +3323,12181784,"/fast/home/franz.srambical/jafar/utils/nn.py",9924,0,"",python,selection_command +3324,12181816,"/fast/home/franz.srambical/jafar/utils/nn.py",9885,0,"",python,selection_command +3325,12181851,"/fast/home/franz.srambical/jafar/utils/nn.py",9876,0,"",python,selection_command +3326,12181886,"/fast/home/franz.srambical/jafar/utils/nn.py",9841,0,"",python,selection_command +3327,12181919,"/fast/home/franz.srambical/jafar/utils/nn.py",9804,0,"",python,selection_command +3328,12181951,"/fast/home/franz.srambical/jafar/utils/nn.py",9795,0,"",python,selection_command +3329,12182962,"/fast/home/franz.srambical/jafar/utils/nn.py",9729,66," return jnp.logical_and(attention_mask, expanded_mask)\n",python,selection_command +3330,12183116,"/fast/home/franz.srambical/jafar/utils/nn.py",9715,80," )\n return jnp.logical_and(attention_mask, expanded_mask)\n",python,selection_command +3331,12183243,"/fast/home/franz.srambical/jafar/utils/nn.py",9634,161," mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )\n return jnp.logical_and(attention_mask, expanded_mask)\n",python,selection_command +3332,12183395,"/fast/home/franz.srambical/jafar/utils/nn.py",9597,198," expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )\n return jnp.logical_and(attention_mask, expanded_mask)\n",python,selection_command +3333,12183533,"/fast/home/franz.srambical/jafar/utils/nn.py",9550,245," mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )\n return jnp.logical_and(attention_mask, 
expanded_mask)\n",python,selection_command +3334,12183669,"/fast/home/franz.srambical/jafar/utils/nn.py",9468,327," def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )\n return jnp.logical_and(attention_mask, expanded_mask)\n",python,selection_command +3335,12183900,"/fast/home/franz.srambical/jafar/utils/nn.py",9468,328,"",python,content +3336,12183915,"/fast/home/franz.srambical/jafar/utils/nn.py",9476,0,"",python,selection_command +3337,12185779,"/fast/home/franz.srambical/jafar/utils/nn.py",9513,0,"",python,selection_command +3338,12186021,"/fast/home/franz.srambical/jafar/utils/nn.py",9548,0,"",python,selection_command +3339,12186050,"/fast/home/franz.srambical/jafar/utils/nn.py",9557,0,"",python,selection_command +3340,12186082,"/fast/home/franz.srambical/jafar/utils/nn.py",9596,0,"",python,selection_command +3341,12186115,"/fast/home/franz.srambical/jafar/utils/nn.py",9655,0,"",python,selection_command +3342,12186150,"/fast/home/franz.srambical/jafar/utils/nn.py",9700,0,"",python,selection_command +3343,12186188,"/fast/home/franz.srambical/jafar/utils/nn.py",9709,0,"",python,selection_command +3344,12186219,"/fast/home/franz.srambical/jafar/utils/nn.py",9752,0,"",python,selection_command +3345,12186253,"/fast/home/franz.srambical/jafar/utils/nn.py",9791,0,"",python,selection_command +3346,12186412,"/fast/home/franz.srambical/jafar/utils/nn.py",9826,0,"",python,selection_command +3347,12186562,"/fast/home/franz.srambical/jafar/utils/nn.py",9835,0,"",python,selection_command +3348,12186749,"/fast/home/franz.srambical/jafar/utils/nn.py",9920,0,"",python,selection_command +3349,12187271,"/fast/home/franz.srambical/jafar/utils/nn.py",9996,0,"",python,selection_command +3350,12219129,"/fast/home/franz.srambical/jafar/utils/nn.py",10064,0,"",python,selection_command +3351,12219843,"/fast/home/franz.srambical/jafar/utils/nn.py",10073,0,"",python,selection_command +3352,12220101,"/fast/home/franz.srambical/jafar/utils/nn.py",10123,0,"",python,selection_command +3353,12220125,"/fast/home/franz.srambical/jafar/utils/nn.py",10152,0,"",python,selection_command +3354,12220158,"/fast/home/franz.srambical/jafar/utils/nn.py",10227,0,"",python,selection_command +3355,12220192,"/fast/home/franz.srambical/jafar/utils/nn.py",10284,0,"",python,selection_command +3356,12220222,"/fast/home/franz.srambical/jafar/utils/nn.py",10315,0,"",python,selection_command +3357,12220256,"/fast/home/franz.srambical/jafar/utils/nn.py",10386,0,"",python,selection_command +3358,12220289,"/fast/home/franz.srambical/jafar/utils/nn.py",10424,0,"",python,selection_command +3359,12220323,"/fast/home/franz.srambical/jafar/utils/nn.py",10438,0,"",python,selection_command +3360,12220356,"/fast/home/franz.srambical/jafar/utils/nn.py",10452,0,"",python,selection_command +3361,12220389,"/fast/home/franz.srambical/jafar/utils/nn.py",10528,0,"",python,selection_command +3362,12220713,"/fast/home/franz.srambical/jafar/utils/nn.py",10452,0,"",python,selection_command +3363,12221042,"/fast/home/franz.srambical/jafar/utils/nn.py",10444,76,"",python,content +3364,12221066,"/fast/home/franz.srambical/jafar/utils/nn.py",10456,0,"",python,selection_command +3365,12222119,"/fast/home/franz.srambical/jafar/utils/nn.py",10442,0,"",python,selection_command +3366,12222362,"/fast/home/franz.srambical/jafar/utils/nn.py",10428,0,"",python,selection_command 
+3367,12222393,"/fast/home/franz.srambical/jafar/utils/nn.py",10390,0,"",python,selection_command +3368,12222426,"/fast/home/franz.srambical/jafar/utils/nn.py",10319,0,"",python,selection_command +3369,12222461,"/fast/home/franz.srambical/jafar/utils/nn.py",10288,0,"",python,selection_command +3370,12222493,"/fast/home/franz.srambical/jafar/utils/nn.py",10231,0,"",python,selection_command +3371,12222524,"/fast/home/franz.srambical/jafar/utils/nn.py",10156,0,"",python,selection_command +3372,12247283,"/fast/home/franz.srambical/jafar/utils/nn.py",10144,75,"",python,content +3373,12247300,"/fast/home/franz.srambical/jafar/utils/nn.py",10156,0,"",python,selection_command +3374,12248022,"/fast/home/franz.srambical/jafar/utils/nn.py",10144,57,"",python,content +3375,12248041,"/fast/home/franz.srambical/jafar/utils/nn.py",10156,0,"",python,selection_command +3376,12249794,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ bash experiments/sample.sh ",,terminal_output +3377,12250128,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +3378,12261700,"/fast/home/franz.srambical/jafar/utils/nn.py",10127,0,"",python,selection_command +3379,12261886,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +3380,12261980,"/fast/home/franz.srambical/jafar/utils/nn.py",10077,0,"",python,selection_command +3381,12262100,"/fast/home/franz.srambical/jafar/utils/nn.py",10080,0,"",python,selection_command +3382,12262663,"/fast/home/franz.srambical/jafar/utils/nn.py",10081,0,"",python,selection_command +3383,12262806,"/fast/home/franz.srambical/jafar/utils/nn.py",10081,0," ",python,content +3384,12262807,"/fast/home/franz.srambical/jafar/utils/nn.py",10082,0,"",python,selection_keyboard +3385,12262986,"/fast/home/franz.srambical/jafar/utils/nn.py",10082,0,"d",python,content +3386,12262986,"/fast/home/franz.srambical/jafar/utils/nn.py",10083,0,"",python,selection_keyboard +3387,12263126,"/fast/home/franz.srambical/jafar/utils/nn.py",10083,0,"e",python,content +3388,12263126,"/fast/home/franz.srambical/jafar/utils/nn.py",10084,0,"",python,selection_keyboard +3389,12263208,"/fast/home/franz.srambical/jafar/utils/nn.py",10084,0,"c",python,content +3390,12263208,"/fast/home/franz.srambical/jafar/utils/nn.py",10085,0,"",python,selection_keyboard +3391,12263380,"/fast/home/franz.srambical/jafar/utils/nn.py",10085,0,"o",python,content +3392,12263380,"/fast/home/franz.srambical/jafar/utils/nn.py",10086,0,"",python,selection_keyboard +3393,12263446,"/fast/home/franz.srambical/jafar/utils/nn.py",10086,0,"d",python,content +3394,12263447,"/fast/home/franz.srambical/jafar/utils/nn.py",10087,0,"",python,selection_keyboard +3395,12263496,"/fast/home/franz.srambical/jafar/utils/nn.py",10087,0,"e",python,content +3396,12263497,"/fast/home/franz.srambical/jafar/utils/nn.py",10088,0,"",python,selection_keyboard +3397,12263933,"/fast/home/franz.srambical/jafar/utils/nn.py",10082,6,"",python,content +3398,12264509,"/fast/home/franz.srambical/jafar/utils/nn.py",10082,0,"k",python,content +3399,12264510,"/fast/home/franz.srambical/jafar/utils/nn.py",10083,0,"",python,selection_keyboard 
+3400,12264582,"/fast/home/franz.srambical/jafar/utils/nn.py",10083,0,"v",python,content +3401,12264582,"/fast/home/franz.srambical/jafar/utils/nn.py",10084,0,"",python,selection_keyboard +3402,12264684,"/fast/home/franz.srambical/jafar/utils/nn.py",10084,0," ",python,content +3403,12264684,"/fast/home/franz.srambical/jafar/utils/nn.py",10085,0,"",python,selection_keyboard +3404,12264815,"/fast/home/franz.srambical/jafar/utils/nn.py",10085,0,"c",python,content +3405,12264815,"/fast/home/franz.srambical/jafar/utils/nn.py",10086,0,"",python,selection_keyboard +3406,12264930,"/fast/home/franz.srambical/jafar/utils/nn.py",10086,0,"a",python,content +3407,12264931,"/fast/home/franz.srambical/jafar/utils/nn.py",10087,0,"",python,selection_keyboard +3408,12264997,"/fast/home/franz.srambical/jafar/utils/nn.py",10087,0,"c",python,content +3409,12264997,"/fast/home/franz.srambical/jafar/utils/nn.py",10088,0,"",python,selection_keyboard +3410,12265068,"/fast/home/franz.srambical/jafar/utils/nn.py",10088,0,"h",python,content +3411,12265068,"/fast/home/franz.srambical/jafar/utils/nn.py",10089,0,"",python,selection_keyboard +3412,12265270,"/fast/home/franz.srambical/jafar/utils/nn.py",10089,0,"i",python,content +3413,12265271,"/fast/home/franz.srambical/jafar/utils/nn.py",10090,0,"",python,selection_keyboard +3414,12265331,"/fast/home/franz.srambical/jafar/utils/nn.py",10090,0,"n",python,content +3415,12265332,"/fast/home/franz.srambical/jafar/utils/nn.py",10091,0,"",python,selection_keyboard +3416,12265394,"/fast/home/franz.srambical/jafar/utils/nn.py",10091,0,"g",python,content +3417,12265395,"/fast/home/franz.srambical/jafar/utils/nn.py",10092,0,"",python,selection_keyboard +3418,12265514,"/fast/home/franz.srambical/jafar/utils/nn.py",10092,0," ",python,content +3419,12265514,"/fast/home/franz.srambical/jafar/utils/nn.py",10093,0,"",python,selection_keyboard +3420,12266011,"/fast/home/franz.srambical/jafar/utils/nn.py",10085,8,"",python,content +3421,12266246,"/fast/home/franz.srambical/jafar/utils/nn.py",10082,3,"",python,content +3422,12266365,"/fast/home/franz.srambical/jafar/utils/nn.py",10081,0,"",python,selection_command +3423,12266955,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +3424,12267531,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21932,0,"",python,selection_command +3425,12267782,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21882,0,"",python,selection_command +3426,12267799,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21835,0,"",python,selection_command +3427,12267834,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21809,0,"",python,selection_command +3428,12267867,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21795,0,"",python,selection_command +3429,12267908,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21767,0,"",python,selection_command +3430,12267941,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21726,0,"",python,selection_command +3431,12268096,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21663,0,"",python,selection_command +3432,12268253,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21597,0,"",python,selection_command +3433,12268456,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21542,0,"",python,selection_command +3434,12268586,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21544,0,"",python,selection_command 
+3435,12268677,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21551,0,"",python,selection_command +3436,12268821,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21556,0,"",python,selection_command +3437,12269268,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21551,0,"",python,selection_command +3438,12269417,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21544,0,"",python,selection_command +3439,12269708,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21544,1,"c",python,selection_command +3440,12269772,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21544,6,"causal",python,selection_command +3441,12269881,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21544,11,"causal mask",python,selection_command +3442,12270096,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21544,15,"causal mask for",python,selection_command +3443,12270239,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21544,22,"causal mask for cached",python,selection_command +3444,12270570,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21544,30,"causal mask for cached decoder",python,selection_command +3445,12270755,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21544,35,"causal mask for cached decoder self",python,selection_command +3446,12270988,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21544,36,"causal mask for cached decoder self-",python,selection_command +3447,12271321,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21544,45,"causal mask for cached decoder self-attention",python,selection_command +3448,12271639,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21588,0,"",python,selection_command +3449,12272160,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +3450,12273085,"/fast/home/franz.srambical/jafar/utils/nn.py",10081,0,"causal mask for cached decoder self-attention",python,content +3451,12273085,"/fast/home/franz.srambical/jafar/utils/nn.py",10126,0,"",python,selection_keyboard +3452,12273635,"/fast/home/franz.srambical/jafar/utils/nn.py",10125,0,"",python,selection_command +3453,12273900,"/fast/home/franz.srambical/jafar/utils/nn.py",10065,0,"",python,selection_command +3454,12274035,"/fast/home/franz.srambical/jafar/utils/nn.py",10073,0,"",python,selection_command +3455,12274270,"/fast/home/franz.srambical/jafar/utils/nn.py",10075,0,"",python,selection_command +3456,12274596,"/fast/home/franz.srambical/jafar/utils/nn.py",10076,0,"",python,selection_command +3457,12274746,"/fast/home/franz.srambical/jafar/utils/nn.py",10077,0,"",python,selection_command +3458,12274994,"/fast/home/franz.srambical/jafar/utils/nn.py",10078,0,"",python,selection_command +3459,12275137,"/fast/home/franz.srambical/jafar/utils/nn.py",10079,0,"",python,selection_command +3460,12275341,"/fast/home/franz.srambical/jafar/utils/nn.py",10080,0,"",python,selection_command +3461,12275493,"/fast/home/franz.srambical/jafar/utils/nn.py",10081,0,"",python,selection_command +3462,12275591,"/fast/home/franz.srambical/jafar/utils/nn.py",10081,0," ",python,content +3463,12275591,"/fast/home/franz.srambical/jafar/utils/nn.py",10082,0,"",python,selection_keyboard +3464,12275986,"/fast/home/franz.srambical/jafar/utils/nn.py",10081,0,"",python,selection_command +3465,12276255,"/fast/home/franz.srambical/jafar/utils/nn.py",10082,0,"",python,selection_command 
+3466,12276494,"/fast/home/franz.srambical/jafar/utils/nn.py",10089,0,"",python,selection_command +3467,12276522,"/fast/home/franz.srambical/jafar/utils/nn.py",10094,0,"",python,selection_command +3468,12276552,"/fast/home/franz.srambical/jafar/utils/nn.py",10098,0,"",python,selection_command +3469,12276854,"/fast/home/franz.srambical/jafar/utils/nn.py",10105,0,"",python,selection_command +3470,12277056,"/fast/home/franz.srambical/jafar/utils/nn.py",10113,0,"",python,selection_command +3471,12277255,"/fast/home/franz.srambical/jafar/utils/nn.py",10117,0,"",python,selection_command +3472,12277460,"/fast/home/franz.srambical/jafar/utils/nn.py",10118,0,"",python,selection_command +3473,12277657,"/fast/home/franz.srambical/jafar/utils/nn.py",10129,0,"",python,selection_command +3474,12277822,"/fast/home/franz.srambical/jafar/utils/nn.py",10134,0,"",python,selection_command +3475,12278193,"/fast/home/franz.srambical/jafar/utils/nn.py",10129,0,"",python,selection_command +3476,12278379,"/fast/home/franz.srambical/jafar/utils/nn.py",10132,0,"",python,selection_command +3477,12278411,"/fast/home/franz.srambical/jafar/utils/nn.py",10132,1,"k",python,selection_command +3478,12278470,"/fast/home/franz.srambical/jafar/utils/nn.py",10132,6,"k from",python,selection_command +3479,12279049,"/fast/home/franz.srambical/jafar/utils/nn.py",10132,3,"k f",python,selection_command +3480,12279211,"/fast/home/franz.srambical/jafar/utils/nn.py",10129,4,"mask",python,selection_command +3481,12279387,"/fast/home/franz.srambical/jafar/utils/nn.py",10129,0,"",python,selection_command +3482,12279562,"/fast/home/franz.srambical/jafar/utils/nn.py",10118,0,"",python,selection_command +3483,12280025,"/fast/home/franz.srambical/jafar/utils/nn.py",10129,0,"",python,selection_command +3484,12280409,"/fast/home/franz.srambical/jafar/utils/nn.py",10128,0,"",python,selection_command +3485,12280477,"/fast/home/franz.srambical/jafar/utils/nn.py",10128,1," ",python,selection_command +3486,12280559,"/fast/home/franz.srambical/jafar/utils/nn.py",10128,5," mask",python,selection_command +3487,12280874,"/fast/home/franz.srambical/jafar/utils/nn.py",10128,10," mask from",python,selection_command +3488,12281977,"/fast/home/franz.srambical/jafar/utils/nn.py",10128,10,"",python,content +3489,12282400,"/fast/home/franz.srambical/jafar/utils/nn.py",10128,0,"()",python,content +3490,12282401,"/fast/home/franz.srambical/jafar/utils/nn.py",10129,0,"",python,selection_keyboard +3491,12282643,"/fast/home/franz.srambical/jafar/utils/nn.py",10129,0,"f",python,content +3492,12282643,"/fast/home/franz.srambical/jafar/utils/nn.py",10130,0,"",python,selection_keyboard +3493,12283145,"/fast/home/franz.srambical/jafar/utils/nn.py",10130,0,"r",python,content +3494,12283145,"/fast/home/franz.srambical/jafar/utils/nn.py",10131,0,"",python,selection_keyboard +3495,12283226,"/fast/home/franz.srambical/jafar/utils/nn.py",10131,0,"o",python,content +3496,12283227,"/fast/home/franz.srambical/jafar/utils/nn.py",10132,0,"",python,selection_keyboard +3497,12283310,"/fast/home/franz.srambical/jafar/utils/nn.py",10132,0,"m",python,content +3498,12283310,"/fast/home/franz.srambical/jafar/utils/nn.py",10133,0,"",python,selection_keyboard +3499,12283483,"/fast/home/franz.srambical/jafar/utils/nn.py",10132,0,"",python,selection_command +3500,12283655,"/fast/home/franz.srambical/jafar/utils/nn.py",10133,0,"",python,selection_command +3501,12283848,"/fast/home/franz.srambical/jafar/utils/nn.py",10133,1,"",python,content 
+3502,12284331,"/fast/home/franz.srambical/jafar/utils/nn.py",10155,0,"",python,selection_command +3503,12284528,"/fast/home/franz.srambical/jafar/utils/nn.py",10154,0,"",python,selection_command +3504,12285119,"/fast/home/franz.srambical/jafar/utils/nn.py",10155,0,"",python,selection_command +3505,12285854,"/fast/home/franz.srambical/jafar/utils/nn.py",10156,0,")",python,content +3506,12285866,"/fast/home/franz.srambical/jafar/utils/nn.py",10156,0,"",python,selection_command +3507,12286420,"/fast/home/franz.srambical/jafar/utils/nn.py",10138,0,"",python,selection_command +3508,12286672,"/fast/home/franz.srambical/jafar/utils/nn.py",10134,0,"",python,selection_command +3509,12286696,"/fast/home/franz.srambical/jafar/utils/nn.py",10129,0,"",python,selection_command +3510,12286727,"/fast/home/franz.srambical/jafar/utils/nn.py",10128,0,"",python,selection_command +3511,12287136,"/fast/home/franz.srambical/jafar/utils/nn.py",10129,0,"",python,selection_command +3512,12287463,"/fast/home/franz.srambical/jafar/utils/nn.py",10134,0,"",python,selection_command +3513,12287801,"/fast/home/franz.srambical/jafar/utils/nn.py",10135,0,"",python,selection_command +3514,12287952,"/fast/home/franz.srambical/jafar/utils/nn.py",10136,0,"",python,selection_command +3515,12288298,"/fast/home/franz.srambical/jafar/utils/nn.py",10137,0,"",python,selection_command +3516,12288529,"/fast/home/franz.srambical/jafar/utils/nn.py",10137,1,".",python,content +3517,12289737,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +3518,12293217,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +3519,12295858,"/fast/home/franz.srambical/jafar/utils/nn.py",10065,0,"",python,selection_command +3520,12314527,"TERMINAL",0,0,"2025-07-27 12:43:54.326913: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3521,12316003,"TERMINAL",0,0,"2025-07-27 12:43:55.805326: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3522,12318696,"/fast/home/franz.srambical/jafar/utils/nn.py",10245,0,"",python,selection_mouse +3523,12319548,"TERMINAL",0,0,"2025-07-27 12:43:59.296743: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3524,12320310,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 231, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 134, in __call__\r\n z = self.temporal_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 323, in attention_fn\r\n mask_4d = jnp.pad(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 4352, in pad\r\n pad_width = _broadcast_to_pairs(pad_width, np.ndim(array), ""pad_width"")\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 3937, in _broadcast_to_pairs\r\n raise ValueError(f""jnp.pad: {name} with {nd=} has unsupported shape {nvals.shape}. ""\r\nValueError: jnp.pad: pad_width with nd=5 has unsupported shape (4, 2). 
Valid shapes are (5, 2), (1, 2), (2,), (1,), or ().\r\n",,terminal_output +3525,12321376,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +3526,12332271,"/fast/home/franz.srambical/jafar/utils/nn.py",10214,0,"",python,selection_command +3527,12332795,"/fast/home/franz.srambical/jafar/utils/nn.py",10214,0,")",python,content +3528,12332809,"/fast/home/franz.srambical/jafar/utils/nn.py",10214,0,"",python,selection_command +3529,12334045,"/fast/home/franz.srambical/jafar/utils/nn.py",10214,1,"",python,content +3530,12335150,"/fast/home/franz.srambical/jafar/utils/nn.py",10186,0,"\n ",python,content +3531,12335412,"/fast/home/franz.srambical/jafar/utils/nn.py",10199,0,":",python,content +3532,12335412,"/fast/home/franz.srambical/jafar/utils/nn.py",10200,0,"",python,selection_keyboard +3533,12335591,"/fast/home/franz.srambical/jafar/utils/nn.py",10200,0,"w",python,content +3534,12335592,"/fast/home/franz.srambical/jafar/utils/nn.py",10201,0,"",python,selection_keyboard +3535,12335908,"/fast/home/franz.srambical/jafar/utils/nn.py",10200,1,"",python,content +3536,12336101,"/fast/home/franz.srambical/jafar/utils/nn.py",10199,1,"",python,content +3537,12336272,"/fast/home/franz.srambical/jafar/utils/nn.py",10199,0,"p",python,content +3538,12336272,"/fast/home/franz.srambical/jafar/utils/nn.py",10200,0,"",python,selection_keyboard +3539,12336353,"/fast/home/franz.srambical/jafar/utils/nn.py",10200,0,"r",python,content +3540,12336353,"/fast/home/franz.srambical/jafar/utils/nn.py",10201,0,"",python,selection_keyboard +3541,12336468,"/fast/home/franz.srambical/jafar/utils/nn.py",10201,0,"i",python,content +3542,12336469,"/fast/home/franz.srambical/jafar/utils/nn.py",10202,0,"",python,selection_keyboard +3543,12336538,"/fast/home/franz.srambical/jafar/utils/nn.py",10202,0,"n",python,content +3544,12336538,"/fast/home/franz.srambical/jafar/utils/nn.py",10203,0,"",python,selection_keyboard +3545,12336563,"/fast/home/franz.srambical/jafar/utils/nn.py",10203,0,"t",python,content +3546,12336563,"/fast/home/franz.srambical/jafar/utils/nn.py",10204,0,"",python,selection_keyboard +3547,12336842,"/fast/home/franz.srambical/jafar/utils/nn.py",10204,0,"()",python,content +3548,12336842,"/fast/home/franz.srambical/jafar/utils/nn.py",10205,0,"",python,selection_keyboard +3549,12337436,"/fast/home/franz.srambical/jafar/utils/nn.py",10205,0,"""""",python,content +3550,12337437,"/fast/home/franz.srambical/jafar/utils/nn.py",10206,0,"",python,selection_keyboard +3551,12338534,"/fast/home/franz.srambical/jafar/utils/nn.py",10206,0,"D",python,content +3552,12338535,"/fast/home/franz.srambical/jafar/utils/nn.py",10207,0,"",python,selection_keyboard +3553,12338659,"/fast/home/franz.srambical/jafar/utils/nn.py",10207,0,"E",python,content +3554,12338660,"/fast/home/franz.srambical/jafar/utils/nn.py",10208,0,"",python,selection_keyboard +3555,12339262,"/fast/home/franz.srambical/jafar/utils/nn.py",10208,0,"B",python,content +3556,12339262,"/fast/home/franz.srambical/jafar/utils/nn.py",10209,0,"",python,selection_keyboard +3557,12339296,"/fast/home/franz.srambical/jafar/utils/nn.py",10209,0,"U",python,content +3558,12339297,"/fast/home/franz.srambical/jafar/utils/nn.py",10210,0,"",python,selection_keyboard +3559,12339399,"/fast/home/franz.srambical/jafar/utils/nn.py",10210,0,"G",python,content +3560,12339399,"/fast/home/franz.srambical/jafar/utils/nn.py",10211,0,"",python,selection_keyboard 
+3561,12339698,"/fast/home/franz.srambical/jafar/utils/nn.py",10211,0,":",python,content +3562,12339698,"/fast/home/franz.srambical/jafar/utils/nn.py",10212,0,"",python,selection_keyboard +3563,12339813,"/fast/home/franz.srambical/jafar/utils/nn.py",10212,0," ",python,content +3564,12339813,"/fast/home/franz.srambical/jafar/utils/nn.py",10213,0,"",python,selection_keyboard +3565,12340336,"/fast/home/franz.srambical/jafar/utils/nn.py",10212,1,"",python,content +3566,12340579,"/fast/home/franz.srambical/jafar/utils/nn.py",10211,1,"",python,content +3567,12340614,"/fast/home/franz.srambical/jafar/utils/nn.py",10210,1,"",python,content +3568,12340645,"/fast/home/franz.srambical/jafar/utils/nn.py",10209,1,"",python,content +3569,12340692,"/fast/home/franz.srambical/jafar/utils/nn.py",10208,1,"",python,content +3570,12340819,"/fast/home/franz.srambical/jafar/utils/nn.py",10207,1,"",python,content +3571,12340974,"/fast/home/franz.srambical/jafar/utils/nn.py",10206,1,"",python,content +3572,12341139,"/fast/home/franz.srambical/jafar/utils/nn.py",10205,2,"",python,content +3573,12341602,"/fast/home/franz.srambical/jafar/utils/nn.py",10205,0,"f",python,content +3574,12341602,"/fast/home/franz.srambical/jafar/utils/nn.py",10206,0,"",python,selection_keyboard +3575,12341949,"/fast/home/franz.srambical/jafar/utils/nn.py",10206,0,"""""",python,content +3576,12341949,"/fast/home/franz.srambical/jafar/utils/nn.py",10207,0,"",python,selection_keyboard +3577,12343078,"/fast/home/franz.srambical/jafar/utils/nn.py",10207,0,"D",python,content +3578,12343078,"/fast/home/franz.srambical/jafar/utils/nn.py",10208,0,"",python,selection_keyboard +3579,12343151,"/fast/home/franz.srambical/jafar/utils/nn.py",10208,0,"E",python,content +3580,12343151,"/fast/home/franz.srambical/jafar/utils/nn.py",10209,0,"",python,selection_keyboard +3581,12343198,"/fast/home/franz.srambical/jafar/utils/nn.py",10209,0,"B",python,content +3582,12343199,"/fast/home/franz.srambical/jafar/utils/nn.py",10210,0,"",python,selection_keyboard +3583,12343237,"/fast/home/franz.srambical/jafar/utils/nn.py",10210,0,"U",python,content +3584,12343237,"/fast/home/franz.srambical/jafar/utils/nn.py",10211,0,"",python,selection_keyboard +3585,12343378,"/fast/home/franz.srambical/jafar/utils/nn.py",10211,0,"G",python,content +3586,12343378,"/fast/home/franz.srambical/jafar/utils/nn.py",10212,0,"",python,selection_keyboard +3587,12343531,"/fast/home/franz.srambical/jafar/utils/nn.py",10212,0,":",python,content +3588,12343531,"/fast/home/franz.srambical/jafar/utils/nn.py",10213,0,"",python,selection_keyboard +3589,12343613,"/fast/home/franz.srambical/jafar/utils/nn.py",10213,0," ",python,content +3590,12343614,"/fast/home/franz.srambical/jafar/utils/nn.py",10214,0,"",python,selection_keyboard +3591,12345628,"/fast/home/franz.srambical/jafar/utils/nn.py",10214,0,"mask.shape {mask.shape}",python,content +3592,12345818,"/fast/home/franz.srambical/jafar/utils/nn.py",10236,0,"",python,selection_command +3593,12347436,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +3594,12347657,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +3595,12359321,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +3596,12372428,"TERMINAL",0,0,"2025-07-27 12:44:52.226584: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3597,12373975,"TERMINAL",0,0,"2025-07-27 12:44:53.774647: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3598,12377648,"TERMINAL",0,0,"2025-07-27 12:44:57.448392: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3599,12378450,"TERMINAL",0,0,"DEBUG: mask.shape (1, 921, 1, 1, 1)\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 231, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 134, in __call__\r\n z = self.temporal_attention(z)\r\n File 
""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 324, in attention_fn\r\n mask_4d = jnp.pad(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 4352, in pad\r\n pad_width = _broadcast_to_pairs(pad_width, np.ndim(array), ""pad_width"")\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 3937, in _broadcast_to_pairs\r\n raise ValueError(f""jnp.pad: {name} with {nd=} has unsupported shape {nvals.shape}. ""\r\nValueError: jnp.pad: pad_width with nd=5 has unsupported shape (4, 2). Valid shapes are (5, 2), (1, 2), (2,), (1,), or ().\r\n",,terminal_output +3600,12379563,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +3601,12417493,"/fast/home/franz.srambical/jafar/utils/nn.py",10203,0,"",python,selection_mouse +3602,12418661,"/fast/home/franz.srambical/jafar/utils/nn.py",10256,0,"",python,selection_command +3603,12418806,"/fast/home/franz.srambical/jafar/utils/nn.py",10270,0,"\n ",python,content +3604,12419702,"/fast/home/franz.srambical/jafar/utils/nn.py",10271,16,"",python,content +3605,12419927,"/fast/home/franz.srambical/jafar/utils/nn.py",10271,1,"",python,content +3606,12419943,"/fast/home/franz.srambical/jafar/utils/nn.py",10256,0,"",python,selection_command +3607,12420395,"/fast/home/franz.srambical/jafar/utils/nn.py",10239,0,"\n ",python,content +3608,12420495,"/fast/home/franz.srambical/jafar/utils/nn.py",10252,0,"#",python,content +3609,12420495,"/fast/home/franz.srambical/jafar/utils/nn.py",10253,0,"",python,selection_keyboard +3610,12420530,"/fast/home/franz.srambical/jafar/utils/nn.py",10253,0," ",python,content +3611,12420530,"/fast/home/franz.srambical/jafar/utils/nn.py",10254,0,"",python,selection_keyboard +3612,12422924,"/fast/home/franz.srambical/jafar/utils/nn.py",10254,0,"m",python,content +3613,12422925,"/fast/home/franz.srambical/jafar/utils/nn.py",10255,0,"",python,selection_keyboard +3614,12423047,"/fast/home/franz.srambical/jafar/utils/nn.py",10255,0,"a",python,content +3615,12423047,"/fast/home/franz.srambical/jafar/utils/nn.py",10256,0,"",python,selection_keyboard +3616,12423106,"/fast/home/franz.srambical/jafar/utils/nn.py",10256,0,"s",python,content +3617,12423106,"/fast/home/franz.srambical/jafar/utils/nn.py",10257,0,"",python,selection_keyboard +3618,12423164,"/fast/home/franz.srambical/jafar/utils/nn.py",10257,0,"k",python,content +3619,12423164,"/fast/home/franz.srambical/jafar/utils/nn.py",10258,0,"",python,selection_keyboard +3620,12423329,"/fast/home/franz.srambical/jafar/utils/nn.py",10258,0,".",python,content +3621,12423329,"/fast/home/franz.srambical/jafar/utils/nn.py",10259,0,"",python,selection_keyboard +3622,12423431,"/fast/home/franz.srambical/jafar/utils/nn.py",10259,0,"s",python,content +3623,12423432,"/fast/home/franz.srambical/jafar/utils/nn.py",10260,0,"",python,selection_keyboard +3624,12423545,"/fast/home/franz.srambical/jafar/utils/nn.py",10260,0,"h",python,content +3625,12423546,"/fast/home/franz.srambical/jafar/utils/nn.py",10261,0,"",python,selection_keyboard +3626,12423612,"/fast/home/franz.srambical/jafar/utils/nn.py",10261,0,"a",python,content 
+3627,12423612,"/fast/home/franz.srambical/jafar/utils/nn.py",10262,0,"",python,selection_keyboard +3628,12423728,"/fast/home/franz.srambical/jafar/utils/nn.py",10262,0,"p",python,content +3629,12423729,"/fast/home/franz.srambical/jafar/utils/nn.py",10263,0,"",python,selection_keyboard +3630,12423794,"/fast/home/franz.srambical/jafar/utils/nn.py",10263,0,"e",python,content +3631,12423795,"/fast/home/franz.srambical/jafar/utils/nn.py",10264,0,"",python,selection_keyboard +3632,12424694,"/fast/home/franz.srambical/jafar/utils/nn.py",10264,0," ",python,content +3633,12424695,"/fast/home/franz.srambical/jafar/utils/nn.py",10265,0,"",python,selection_keyboard +3634,12424977,"/fast/home/franz.srambical/jafar/utils/nn.py",10265,0,"(1, 921, 1, 1, 1)",python,content +3635,12424977,"/fast/home/franz.srambical/jafar/utils/nn.py",10282,0,"",python,selection_keyboard +3636,12425600,"/fast/home/franz.srambical/jafar/utils/nn.py",10281,0,"",python,selection_command +3637,12426816,"/fast/home/franz.srambical/jafar/utils/nn.py",10240,0,"",python,selection_command +3638,12431871,"/fast/home/franz.srambical/jafar/utils/nn.py",10274,0,"",python,selection_mouse +3639,12432512,"/fast/home/franz.srambical/jafar/utils/nn.py",10278,0,"",python,selection_mouse +3640,12433016,"/fast/home/franz.srambical/jafar/utils/nn.py",10280,0,"",python,selection_mouse +3641,12449255,"/fast/home/franz.srambical/jafar/utils/nn.py",10240,42," # mask.shape (1, 921, 1, 1, 1)",python,selection_command +3642,12467545,"/fast/home/franz.srambical/jafar/utils/nn.py",10280,0,"",python,selection_command +3643,12469584,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +3644,12470272,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21643,0,"",python,selection_command +3645,12470533,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21709,0,"",python,selection_command +3646,12470565,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21759,0,"",python,selection_command +3647,12470737,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21787,0,"",python,selection_command +3648,12472770,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20468,0,"",python,selection_command +3649,12472893,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",19456,0,"",python,selection_command +3650,12473034,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18199,0,"",python,selection_command +3651,12473297,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16903,0,"",python,selection_command +3652,12473435,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16000,0,"",python,selection_command +3653,12473807,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14879,0,"",python,selection_command +3654,12474136,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13874,0,"",python,selection_command +3655,12474386,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",12759,0,"",python,selection_command +3656,12474541,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",12002,0,"",python,selection_command +3657,12475016,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",10243,0,"",python,selection_command +3658,12475454,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9300,0,"",python,selection_command +3659,12476107,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9303,0,"",python,selection_keyboard 
+3660,12476409,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9363,0,"",python,selection_command +3661,12476541,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9384,0,"",python,selection_command +3662,12476808,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9454,0,"",python,selection_command +3663,12476826,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9456,0,"",python,selection_command +3664,12476852,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9457,0,"",python,selection_command +3665,12476956,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9460,0,"",python,selection_command +3666,12477324,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,33,"class MultiHeadAttention(Module):",python,selection_command +3667,12477424,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,61,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n",python,selection_command +3668,12477592,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,80,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n",python,selection_command +3669,12477838,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,129,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n",python,selection_command +3670,12478010,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,539,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n",python,selection_command +3671,12478256,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,878,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n",python,selection_command +3672,12478288,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,2871,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... 
)\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n",python,selection_command +3673,12478326,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,3038,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n",python,selection_command +3674,12478343,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,5094,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n",python,selection_command +3675,12478431,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,5296,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n",python,selection_command +3676,12478431,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,5353,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n",python,selection_command +3677,12478451,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,6058,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n",python,selection_command +3678,12478552,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,6711,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... 
decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n",python,selection_command +3679,12478742,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,7264,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n",python,selection_command +3680,12478858,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,7424,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n",python,selection_command +3681,12479142,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,7791,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. 
Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, 
rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n",python,selection_command +3682,12479189,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,7944,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n",python,selection_command +3683,12479481,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,8119,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... 
jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n",python,selection_command +3684,12479649,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,9411,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n",python,selection_command +3685,12479743,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,9537,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. 
`self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n",python,selection_command +3686,12479930,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,9918,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... 
jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. 
'\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n",python,selection_command +3687,12480067,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,10115,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. 
'\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n",python,selection_command +3688,12480280,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,10211,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. 
'\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n",python,selection_command +3689,12480353,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,10523,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. 
'\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n",python,selection_command +3690,12480663,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,10861,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. 
'\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n",python,selection_command +3691,12480849,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,12487,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. 
Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, 
rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. 
'\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] += 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n",python,selection_command +3692,12480994,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,13203,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n",python,selection_command +3693,12481274,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,13647,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. 
Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, 
rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. 
'\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n",python,selection_command +3694,12481441,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,13945,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n\n def init_cache(self, input_shape: Shape, dtype: Dtype = jnp.float32):\n """"""Initializes cache for fast autoregressive decoding. When\n ``decode=True``, this method must be called first before performing\n forward inference. When in decode mode, only one token must be passed\n at a time.\n",python,selection_command +3695,12482424,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,13647,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n",python,selection_command +3696,12482581,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,13945,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n\n def init_cache(self, input_shape: Shape, dtype: Dtype = jnp.float32):\n """"""Initializes cache for fast autoregressive decoding. When\n ``decode=True``, this method must be called first before performing\n forward inference. When in decode mode, only one token must be passed\n at a time.\n",python,selection_command +3697,12482752,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,13966,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n\n def init_cache(self, input_shape: Shape, dtype: Dtype = jnp.float32):\n """"""Initializes cache for fast autoregressive decoding. When\n ``decode=True``, this method must be called first before performing\n forward inference. When in decode mode, only one token must be passed\n at a time.\n\n Example usage::\n",python,selection_command +3698,12482902,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,14839,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n\n def init_cache(self, input_shape: Shape, dtype: Dtype = jnp.float32):\n """"""Initializes cache for fast autoregressive decoding. When\n ``decode=True``, this method must be called first before performing\n forward inference. When in decode mode, only one token must be passed\n at a time.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax.numpy as jnp\n ...\n >>> batch_size = 5\n >>> embed_dim = 3\n >>> x = jnp.ones((batch_size, 1, embed_dim)) # single token\n ...\n >>> model_nnx = nnx.MultiHeadAttention(\n ... num_heads=2,\n ... in_features=3,\n ... qkv_features=6,\n ... out_features=6,\n ... decode=True,\n ... rngs=nnx.Rngs(42),\n ... )\n ...\n >>> # out_nnx = model_nnx(x) <-- throws an error because cache isn't initialized\n ...\n >>> model_nnx.init_cache(x.shape)\n >>> out_nnx = model_nnx(x)\n """"""\n cache_shape = (*input_shape[:-1], self.num_heads, self.head_dim)\n self.cached_key = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cached_value = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cache_index = nnx.Cache(jnp.array(0, dtype=jnp.int32))\n",python,selection_command +3699,12483090,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,14873,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n\n def init_cache(self, input_shape: Shape, dtype: Dtype = jnp.float32):\n """"""Initializes cache for fast autoregressive decoding. When\n ``decode=True``, this method must be called first before performing\n forward inference. When in decode mode, only one token must be passed\n at a time.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax.numpy as jnp\n ...\n >>> batch_size = 5\n >>> embed_dim = 3\n >>> x = jnp.ones((batch_size, 1, embed_dim)) # single token\n ...\n >>> model_nnx = nnx.MultiHeadAttention(\n ... num_heads=2,\n ... in_features=3,\n ... qkv_features=6,\n ... out_features=6,\n ... decode=True,\n ... rngs=nnx.Rngs(42),\n ... )\n ...\n >>> # out_nnx = model_nnx(x) <-- throws an error because cache isn't initialized\n ...\n >>> model_nnx.init_cache(x.shape)\n >>> out_nnx = model_nnx(x)\n """"""\n cache_shape = (*input_shape[:-1], self.num_heads, self.head_dim)\n self.cached_key = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cached_value = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cache_index = nnx.Cache(jnp.array(0, dtype=jnp.int32))\n\n\n# mask-making utility functions\n",python,selection_command +3700,12483750,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,14840,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n\n def init_cache(self, input_shape: Shape, dtype: Dtype = jnp.float32):\n """"""Initializes cache for fast autoregressive decoding. When\n ``decode=True``, this method must be called first before performing\n forward inference. When in decode mode, only one token must be passed\n at a time.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax.numpy as jnp\n ...\n >>> batch_size = 5\n >>> embed_dim = 3\n >>> x = jnp.ones((batch_size, 1, embed_dim)) # single token\n ...\n >>> model_nnx = nnx.MultiHeadAttention(\n ... num_heads=2,\n ... in_features=3,\n ... qkv_features=6,\n ... out_features=6,\n ... decode=True,\n ... rngs=nnx.Rngs(42),\n ... )\n ...\n >>> # out_nnx = model_nnx(x) <-- throws an error because cache isn't initialized\n ...\n >>> model_nnx.init_cache(x.shape)\n >>> out_nnx = model_nnx(x)\n """"""\n cache_shape = (*input_shape[:-1], self.num_heads, self.head_dim)\n self.cached_key = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cached_value = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cache_index = nnx.Cache(jnp.array(0, dtype=jnp.int32))\n\n",python,selection_command +3701,12484602,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9458,14839,"class MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = 
out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. 
If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n\n def init_cache(self, input_shape: Shape, dtype: Dtype = jnp.float32):\n """"""Initializes cache for fast autoregressive decoding. When\n ``decode=True``, this method must be called first before performing\n forward inference. When in decode mode, only one token must be passed\n at a time.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax.numpy as jnp\n ...\n >>> batch_size = 5\n >>> embed_dim = 3\n >>> x = jnp.ones((batch_size, 1, embed_dim)) # single token\n ...\n >>> model_nnx = nnx.MultiHeadAttention(\n ... num_heads=2,\n ... in_features=3,\n ... qkv_features=6,\n ... out_features=6,\n ... decode=True,\n ... rngs=nnx.Rngs(42),\n ... 
)\n ...\n >>> # out_nnx = model_nnx(x) <-- throws an error because cache isn't initialized\n ...\n >>> model_nnx.init_cache(x.shape)\n >>> out_nnx = model_nnx(x)\n """"""\n cache_shape = (*input_shape[:-1], self.num_heads, self.head_dim)\n self.cached_key = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cached_value = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cache_index = nnx.Cache(jnp.array(0, dtype=jnp.int32))\n",python,selection_command +3702,12489153,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",24297,0,"",python,selection_command +3703,12489841,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +3704,12499869,"/fast/home/franz.srambical/jafar/utils/nn.py",10240,196," # mask.shape (1, 921, 1, 1, 1) - (batch, kv_length, heads, q_length, k_length)\n if mask.ndim == 5:\n # For decode mode: pad the kv_length dimension (dim 1) and k_length dimension (dim 4)\n mask_4d = jnp.pad(\n mask, ((0, 0), (0, pad_size), (0, 0), (0, 0), (0, pad_size)), \n constant_values=False\n )\n else:\n # Fallback for other mask shapes\n mask_4d = jnp.pad(\n mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )",python,content +3705,12799035,"/fast/home/franz.srambical/jafar/utils/nn.py",10877,0,"",python,selection_command +3706,12802632,"/fast/home/franz.srambical/jafar/utils/nn.py",1842,0,"",python,selection_command +3707,12803789,"/fast/home/franz.srambical/jafar/utils/nn.py",1813,0,"",python,selection_command +3708,12803992,"/fast/home/franz.srambical/jafar/utils/nn.py",1817,0,"",python,selection_command +3709,12804115,"/fast/home/franz.srambical/jafar/utils/nn.py",1818,0,"",python,selection_command +3710,12804430,"/fast/home/franz.srambical/jafar/utils/nn.py",3878,0,"",python,selection_command +3711,12812945,"/fast/home/franz.srambical/jafar/utils/nn.py",1818,0,"",python,selection_command +3712,12814435,"/fast/home/franz.srambical/jafar/utils/nn.py",1875,0,"",python,selection_command +3713,12814678,"/fast/home/franz.srambical/jafar/utils/nn.py",1913,0,"",python,selection_command +3714,12814704,"/fast/home/franz.srambical/jafar/utils/nn.py",1947,0,"",python,selection_command +3715,12814728,"/fast/home/franz.srambical/jafar/utils/nn.py",1982,0,"",python,selection_command +3716,12814764,"/fast/home/franz.srambical/jafar/utils/nn.py",2021,0,"",python,selection_command +3717,12814795,"/fast/home/franz.srambical/jafar/utils/nn.py",2063,0,"",python,selection_command +3718,12814827,"/fast/home/franz.srambical/jafar/utils/nn.py",2093,0,"",python,selection_command +3719,12814861,"/fast/home/franz.srambical/jafar/utils/nn.py",2146,0,"",python,selection_command +3720,12815220,"/fast/home/franz.srambical/jafar/utils/nn.py",2093,0,"",python,selection_command +3721,12817843,".venv/lib/python3.10/site-packages/flax/linen/attention.py",0,0,"# Copyright 2024 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n""""""Attention core modules for Flax.""""""\nfrom __future__ import annotations\n\nimport functools\nimport inspect\nimport 
warnings\nfrom typing import Any, overload\nfrom collections.abc import Callable\n\nimport jax\nimport jax.numpy as jnp\nfrom jax import lax, random\n\nfrom flax.linen import initializers\nfrom flax.linen.dtypes import promote_dtype\nfrom flax.linen.linear import (\n DenseGeneral,\n default_kernel_init,\n)\nfrom flax.linen.module import Module, compact, merge_param\nfrom flax.linen.normalization import LayerNorm\nfrom flax.typing import (\n Array,\n PRNGKey,\n Dtype,\n Shape as Shape,\n Initializer,\n PrecisionLike,\n DotGeneralT,\n)\n\n\ndef dot_product_attention_weights(\n query: Array,\n key: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: PRNGKey | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n force_fp32_for_softmax: bool = False,\n einsum_dot_general: Callable[..., Array] | None = None,\n einsum: Callable[..., Array] | None = None,\n):\n """"""Computes dot-product attention weights given query and key.\n\n Used by :func:`dot_product_attention`, which is what you'll most likely use.\n But if you want access to the attention weights for introspection, then\n you can directly call this function and call einsum yourself.\n\n Args:\n query: queries for calculating attention with shape of ``[batch...,\n q_length, num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape ``[batch..., num_heads, q_length, kv_length]``. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape ``[batch..., num_heads, q_length, kv_length]``. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is ``False``.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs and params)\n precision: numerical precision of the computation see ``jax.lax.Precision``\n for details.\n module: the Module that will sow the attention weights into the\n 'intermediates' collection. Remember to mark 'intermediates' as mutable\n via ``mutable=['intermediates']`` in order to have that collection\n returned. If ``module`` is None, the attention weights will not be sowed.\n force_fp32_for_softmax: bool, whether to force the softmax to be computed in\n fp32. This is useful for mixed-precision training where higher precision\n is desired for numerical stability.\n einsum_dot_general: the dot_general to use in einsum.\n einsum: If unspecified, default `jnp.einsum` will be used. This argument is\n mutually exclusive with `precision` and `einsum_dot_general`.\n\n Raises:\n ValueError: if both `precision`/`einsum_dot_general` and `einsum` are\n specified.\n\n Returns:\n Output of shape ``[batch..., num_heads, q_length, kv_length]``.\n """"""\n if (precision or einsum_dot_general) and einsum:\n raise ValueError(\n 'precision/einsum_dot_general and einsum are mutually exclusive. 
Please'\n ' specify only one of them.'\n )\n if not einsum:\n einsum = functools.partial(\n jnp.einsum,\n precision=precision,\n _dot_general=einsum_dot_general\n if einsum_dot_general\n else jax.lax.dot_general,\n )\n\n query, key = promote_dtype(query, key, dtype=dtype)\n dtype = query.dtype\n\n assert query.ndim == key.ndim, 'q, k must have same rank.'\n assert query.shape[:-3] == key.shape[:-3], 'q, k batch dims must match.'\n assert query.shape[-2] == key.shape[-2], 'q, k num_heads must match.'\n assert query.shape[-1] == key.shape[-1], 'q, k depths must match.'\n\n # calculate attention matrix\n depth = query.shape[-1]\n query = query / jnp.sqrt(depth).astype(dtype)\n # attn weight shape is (batch..., num_heads, q_length, kv_length)\n attn_weights = einsum('...qhd,...khd->...hqk', query, key)\n\n # apply attention bias: masking, dropout, proximity bias, etc.\n if bias is not None:\n attn_weights = attn_weights + bias\n # apply attention mask\n if mask is not None:\n big_neg = jnp.finfo(dtype).min\n attn_weights = jnp.where(mask, attn_weights, big_neg)\n\n # normalize the attention weights\n if force_fp32_for_softmax and dtype != jnp.float32:\n attn_weights = jax.nn.softmax(attn_weights.astype(jnp.float32))\n else:\n attn_weights = jax.nn.softmax(attn_weights).astype(dtype)\n\n if module:\n module.sow('intermediates', 'attention_weights', attn_weights)\n\n # apply attention dropout\n if not deterministic and dropout_rate > 0.0:\n keep_prob = 1.0 - dropout_rate\n if broadcast_dropout:\n # dropout is broadcast across the batch + head dimensions\n dropout_shape = tuple([1] * (key.ndim - 2)) + attn_weights.shape[-2:]\n keep = random.bernoulli(dropout_rng, keep_prob, dropout_shape) # type: ignore\n else:\n keep = random.bernoulli(dropout_rng, keep_prob, attn_weights.shape) # type: ignore\n multiplier = keep.astype(dtype) / jnp.asarray(keep_prob, dtype=dtype)\n attn_weights = attn_weights * multiplier\n\n return attn_weights\n\n\ndef dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: PRNGKey | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n force_fp32_for_softmax: bool = False,\n einsum_dot_general: Callable[..., Array] | None = None,\n qk_attn_weights_einsum: Callable[..., Array] | None = None,\n attn_weights_value_einsum: Callable[..., Array] | None = None,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n .. note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n\n Args:\n query: queries for calculating attention with shape of ``[batch...,\n q_length, num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n value: values to be used in attention with shape of ``[batch..., kv_length,\n num_heads, v_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape ``[batch..., num_heads, q_length, kv_length]``. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. 
This should be broadcastable to the\n shape ``[batch..., num_heads, q_length, kv_length]``. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is ``False``.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs)\n precision: numerical precision of the computation see ``jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n 'intermediates' collection. Remember to mark 'intermediates' as mutable\n via ``mutable=['intermediates']`` in order to have that collection\n returned. If ``module`` is None, the attention weights will not be sowed.\n force_fp32_for_softmax: bool, whether to force the softmax to be computed in\n fp32. This is useful for mixed-precision training where higher precision\n is desired for numerical stability.\n einsum_dot_general: the dot_general to use in `jnp.einsum`.\n qk_attn_weights_einsum: the einsum for computing the attention weights. When\n unspecified, the default `jnp.einsum` will be used. This argument is\n mutually exclusive with `precision` and `einsum_dot_general`.\n attn_weights_value_einsum: the einsum for computing the product of the\n attention weights and the values. When unspecified, the default\n `jnp.einsum` will be used. This argument is mutually exclusive with\n `precision` and `einsum_dot_general`.\n\n Returns:\n Output of shape ``[batch..., q_length, num_heads, v_depth_per_head]``.\n\n Raises:\n ValueError: if both `precision`/`einsum_dot_general` and\n `qk_attn_weights_einsum`/`attn_weights_value_einsum` are\n specified.\n """"""\n if (qk_attn_weights_einsum and not attn_weights_value_einsum) or (\n not qk_attn_weights_einsum and attn_weights_value_einsum\n ):\n raise ValueError(\n 'qk_attn_weights_einsum and attn_weights_value_einsum must be specified'\n ' together.'\n )\n if (precision or einsum_dot_general) and (\n qk_attn_weights_einsum or attn_weights_value_einsum\n ):\n raise ValueError(\n 'precision/einsum_dot_general and'\n ' qk_attn_weights_einsum/attn_weights_value_einsum are mutually'\n ' exclusive. 
Please specify only one of them.'\n )\n\n query, key, value = promote_dtype(query, key, value, dtype=dtype)\n dtype = query.dtype\n assert key.ndim == query.ndim == value.ndim, 'q, k, v must have same rank.'\n assert (\n query.shape[:-3] == key.shape[:-3] == value.shape[:-3]\n ), 'q, k, v batch dims must match.'\n assert (\n query.shape[-2] == key.shape[-2] == value.shape[-2]\n ), 'q, k, v num_heads must match.'\n assert key.shape[-3] == value.shape[-3], 'k, v lengths must match.'\n\n # compute attention weights\n attn_weights = dot_product_attention_weights(\n query,\n key,\n bias,\n mask,\n broadcast_dropout,\n dropout_rng,\n dropout_rate,\n deterministic,\n dtype,\n precision,\n module,\n force_fp32_for_softmax,\n einsum_dot_general=einsum_dot_general,\n einsum=qk_attn_weights_einsum,\n )\n if not attn_weights_value_einsum:\n attn_weights_value_einsum = functools.partial(\n jnp.einsum,\n precision=precision,\n _dot_general=einsum_dot_general\n if einsum_dot_general\n else jax.lax.dot_general,\n )\n # return weighted sum over values for each query position\n return attn_weights_value_einsum(\n '...hqk,...khd->...qhd',\n attn_weights,\n value,\n )\n\n\nclass MultiHeadDotProductAttention(Module):\n """"""Multi-head dot-product attention.\n\n Example usage::\n\n >>> import flax.linen as nn\n >>> import jax\n\n >>> layer = nn.MultiHeadDotProductAttention(num_heads=8, qkv_features=16)\n >>> key1, key2, key3, key4, key5, key6 = jax.random.split(jax.random.key(0), 6)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = jax.random.uniform(key1, shape), jax.random.uniform(key2, shape), jax.random.uniform(key3, shape)\n >>> variables = layer.init(jax.random.key(0), q)\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer.apply(variables, q, k, v)\n >>> # equivalent to layer.apply(variables, inputs_q=q, inputs_k=k, inputs_v=k)\n >>> out = layer.apply(variables, q, k)\n >>> # equivalent to layer.apply(variables, inputs_q=q, inputs_k=q) and layer.apply(variables, inputs_q=q, inputs_k=q, inputs_v=q)\n >>> out = layer.apply(variables, q)\n\n >>> attention_kwargs = dict(\n ... num_heads=8,\n ... qkv_features=16,\n ... kernel_init=nn.initializers.ones,\n ... bias_init=nn.initializers.zeros,\n ... dropout_rate=0.5,\n ... deterministic=False,\n ... )\n >>> class Module(nn.Module):\n ... attention_kwargs: dict\n ...\n ... @nn.compact\n ... def __call__(self, x, dropout_rng=None):\n ... out1 = nn.MultiHeadDotProductAttention(**self.attention_kwargs)(x, dropout_rng=dropout_rng)\n ... out2 = nn.MultiHeadDotProductAttention(**self.attention_kwargs)(x, dropout_rng=dropout_rng)\n ... return out1, out2\n >>> module = Module(attention_kwargs)\n >>> variables = module.init({'params': key1, 'dropout': key2}, q)\n\n >>> # out1 and out2 are different.\n >>> out1, out2 = module.apply(variables, q, rngs={'dropout': key3})\n >>> # out3 and out4 are different.\n >>> # out1 and out3 are different. out2 and out4 are different.\n >>> out3, out4 = module.apply(variables, q, rngs={'dropout': key4})\n >>> # out1 and out2 are the same.\n >>> out1, out2 = module.apply(variables, q, dropout_rng=key5)\n >>> # out1 and out2 are the same as out3 and out4.\n >>> # providing a `dropout_rng` arg will take precedence over the `rngs` arg in `.apply`\n >>> out3, out4 = module.apply(variables, q, rngs={'dropout': key6}, dropout_rng=key5)\n\n Attributes:\n num_heads: Number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n dtype: The dtype of the computation (default: infer from inputs and params)\n param_dtype: The dtype passed to parameter initializers (default: float32)\n qkv_features: Dimension of the key, query, and value.\n out_features: Dimension of the last projection\n broadcast_dropout: Use a broadcasted dropout along batch dims.\n dropout_rate: Dropout rate.\n deterministic: If False, the attention weight is masked randomly using\n dropout, whereas if True, the attention weights are deterministic.\n precision: Numerical precision of the computation see ``jax.lax.Precision``\n for details.\n kernel_init: Initializer for the kernel of the Dense layers.\n out_kernel_init: Optional Initializer for the kernel of the output Dense layer,\n if None, ``kernel_init`` will be used.\n bias_init: Initializer for the bias of the Dense layers.\n out_bias_init: Optional Initializer for the bias of the output Dense layer,\n if None, ``bias_init`` will be used.\n use_bias: Whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape ``[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: Whether to prepare and use an autoregressive cache.\n normalize_qk: Should QK normalization be applied (arxiv.org/abs/2302.05442).\n qk_attn_weights_einsum_cls: factory function to create the einsum for\n computing the attention weights.\n attn_weights_value_einsum_cls: factory function to create the einsum for\n computing the product of the attention weights and the values.\n """"""\n\n num_heads: int\n dtype: Dtype | None = None\n param_dtype: Dtype = jnp.float32\n qkv_features: int | None = None\n out_features: int | None = None\n broadcast_dropout: bool = True\n dropout_rate: float = 0.0\n deterministic: bool | None = None\n precision: PrecisionLike = None\n kernel_init: Initializer = default_kernel_init\n out_kernel_init: Initializer | None = None\n bias_init: Initializer = initializers.zeros_init()\n out_bias_init: Initializer | None = None\n use_bias: bool = True\n attention_fn: Callable[..., Array] = dot_product_attention\n decode: bool = False\n normalize_qk: bool = False\n force_fp32_for_softmax: bool = False\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None\n out_dot_general: DotGeneralT | None = None\n qkv_dot_general_cls: Any = None\n out_dot_general_cls: Any = None\n qk_attn_weights_einsum_cls: Callable[..., Callable[..., Array]] | None = None\n attn_weights_value_einsum_cls: Callable[..., Callable[..., Array]] | None = (\n None\n )\n\n @overload\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n dropout_rng: PRNGKey | None = None,\n sow_weights: bool = False,\n ):\n ...\n\n @overload\n def __call__(\n self,\n inputs_q: Array,\n *,\n inputs_kv: Array | None = None,\n mask: Array | None = None,\n deterministic: bool | None = None,\n dropout_rng: PRNGKey | None = None,\n sow_weights: bool = False,\n ):\n ...\n\n @compact\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n inputs_kv: Array | None = None,\n mask: Array | None = None,\n deterministic: bool | None = None,\n dropout_rng: PRNGKey | None = None,\n sow_weights: bool = False,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n 
Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape ``[batch_sizes..., length, features]``.\n inputs_k: key of shape ``[batch_sizes..., length, features]``. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape ``[batch_sizes..., length, features]``. If None,\n inputs_v will copy the value of inputs_k.\n inputs_kv: key/values of shape ``[batch_sizes..., length, features]``. If\n None, inputs_kv will copy the value of inputs_q. This arg will be\n deprecated soon. Use inputs_k and inputs_v instead.\n mask: attention mask of shape ``[batch_sizes..., num_heads, query_length,\n key/value_length]``. Attention weights are masked out if their\n corresponding mask value is ``False``.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n dropout_rng: optional rng key to pass to the attention layer's dropout\n mask. Otherwise, self.make_rng('dropout') is used instead.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection. Remember to mark 'intermediates' as\n mutable via ``mutable=['intermediates']`` in order to have that\n collection returned.\n\n Returns:\n output of shape ``[batch_sizes..., length, features]``.\n """"""\n if inputs_kv is not None:\n if inputs_k is not None or inputs_v is not None:\n raise ValueError(\n 'If either `inputs_k` or `inputs_v` is not None, '\n '`inputs_kv` must be None. If `inputs_kv` is not None, both `inputs_k` '\n 'and `inputs_v` must be None. We recommend using `inputs_k` and '\n '`inputs_v` args, since `inputs_kv` will be deprecated soon. See '\n 'https://github.com/google/flax/discussions/3389 for more '\n 'information.'\n )\n inputs_k = inputs_v = inputs_kv\n warnings.warn(\n 'The inputs_kv arg will be deprecated soon. '\n 'Use inputs_k and inputs_v instead. See '\n 'https://github.com/google/flax/discussions/3389 '\n 'for more information.',\n DeprecationWarning,\n )\n else:\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n elif inputs_v.shape[-1] == inputs_v.shape[-2]:\n warnings.warn(\n f'You are passing an array of shape {inputs_v.shape} '\n 'to the `inputs_v` arg, when you may have intended '\n 'to pass it to the `mask` arg. As of Flax version '\n '0.7.4, the function signature of '\n ""MultiHeadDotProductAttention's `__call__` method ""\n 'has changed to `__call__(inputs_q, inputs_k=None, '\n 'inputs_v=None, *, inputs_kv=None, mask=None, '\n 'deterministic=None)`. Use the kwarg `mask` instead. 
'\n 'See https://github.com/google/flax/discussions/3389 '\n 'and read the docstring for more information.',\n DeprecationWarning,\n )\n\n features = self.out_features or inputs_q.shape[-1]\n qkv_features = self.qkv_features or inputs_q.shape[-1]\n assert qkv_features % self.num_heads == 0, (\n f'Memory dimension ({qkv_features}) must be divisible by number of'\n f' heads ({self.num_heads}).'\n )\n head_dim = qkv_features // self.num_heads\n\n dense = functools.partial(\n DenseGeneral,\n axis=-1,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n features=(self.num_heads, head_dim),\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n query, key, value = (\n dense(name='query')(inputs_q),\n dense(name='key')(inputs_k),\n dense(name='value')(inputs_v),\n )\n\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = LayerNorm(\n name='query_ln',\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n )(query) # type: ignore[call-arg]\n key = LayerNorm(\n name='key_ln',\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n )(key) # type: ignore[call-arg]\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n if self.decode:\n # detect if we're initializing by absence of existing cache data.\n is_initialized = self.has_variable('cache', 'cached_key')\n cached_key = self.variable(\n 'cache', 'cached_key', jnp.zeros, key.shape, key.dtype\n )\n cached_value = self.variable(\n 'cache', 'cached_value', jnp.zeros, value.shape, value.dtype\n )\n cache_index = self.variable(\n 'cache', 'cache_index', lambda: jnp.array(0, dtype=jnp.int32)\n )\n if is_initialized:\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = cache_index.value\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices: tuple[int | jax.Array, ...] 
= (zero,) * len(\n batch_dims\n ) + (\n cur_index,\n zero,\n zero,\n )\n key = lax.dynamic_update_slice(cached_key.value, key, indices)\n value = lax.dynamic_update_slice(cached_value.value, value, indices)\n cached_key.value = key\n cached_value.value = value\n cache_index.value = cache_index.value + 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n m_deterministic = merge_param(\n 'deterministic', self.deterministic, deterministic\n )\n if not m_deterministic and dropout_rng is None:\n dropout_rng = self.make_rng('dropout')\n else:\n m_deterministic = True\n\n # `qk_attn_weights_einsum` and `attn_weights_value_einsum` are optional\n # arguments that can be used to override the default `jnp.einsum`. They\n # exist for quantized einsum support in AQT.\n qk_attn_weights_einsum = (\n self.qk_attn_weights_einsum_cls()\n if self.qk_attn_weights_einsum_cls\n else None\n )\n attn_weights_value_einsum = (\n self.attn_weights_value_einsum_cls()\n if self.attn_weights_value_einsum_cls\n else None\n )\n # apply attention\n attn_args = (query, key, value)\n # This kwargs list match the default nn.dot_product_attention.\n # For custom `attention_fn`s, invalid kwargs will be filtered.\n attn_kwargs = dict(\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=m_deterministic,\n dtype=self.dtype,\n precision=self.precision,\n force_fp32_for_softmax=self.force_fp32_for_softmax,\n qk_attn_weights_einsum=qk_attn_weights_einsum,\n attn_weights_value_einsum=attn_weights_value_einsum,\n )\n attn_kwargs = {\n k: v\n for k, v in attn_kwargs.items()\n if k in inspect.signature(self.attention_fn).parameters\n }\n if sow_weights:\n x = self.attention_fn(*attn_args, **attn_kwargs, module=self)\n else:\n x = self.attention_fn(*attn_args, **attn_kwargs)\n # back to the original inputs dimensions\n out = DenseGeneral(\n features=features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n name='out', # type: ignore[call-arg]\n )(x)\n return out\n\n\nclass MultiHeadAttention(MultiHeadDotProductAttention):\n """"""Multi-head dot-product attention.\n Alias for ``MultiHeadDotProductAttention``.\n\n **NOTE**: ``MultiHeadAttention`` is a wrapper of ``MultiHeadDotProductAttention``,\n and so their implementations are identical. However ``MultiHeadAttention`` layers\n will, by default, be named ``MultiHeadAttention_{index}``, whereas ``MultiHeadDotProductAttention``\n will be named ``MultiHeadDotProductAttention_{index}``. 
Therefore, this could affect\n checkpointing, param collection names and RNG threading (since the layer name is\n used when generating new RNG's) within the module.\n\n Example usage::\n\n >>> import flax.linen as nn\n >>> import jax\n\n >>> layer = nn.MultiHeadAttention(num_heads=8, qkv_features=16)\n >>> key1, key2, key3, key4, key5, key6 = jax.random.split(jax.random.key(0), 6)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = jax.random.uniform(key1, shape), jax.random.uniform(key2, shape), jax.random.uniform(key3, shape)\n >>> variables = layer.init(jax.random.key(0), q)\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer.apply(variables, q, k, v)\n >>> # equivalent to layer.apply(variables, inputs_q=q, inputs_k=k, inputs_v=k)\n >>> out = layer.apply(variables, q, k)\n >>> # equivalent to layer.apply(variables, inputs_q=q, inputs_k=q) and layer.apply(variables, inputs_q=q, inputs_k=q, inputs_v=q)\n >>> out = layer.apply(variables, q)\n\n >>> attention_kwargs = dict(\n ... num_heads=8,\n ... qkv_features=16,\n ... kernel_init=nn.initializers.ones,\n ... bias_init=nn.initializers.zeros,\n ... dropout_rate=0.5,\n ... deterministic=False,\n ... )\n >>> class Module(nn.Module):\n ... attention_kwargs: dict\n ...\n ... @nn.compact\n ... def __call__(self, x, dropout_rng=None):\n ... out1 = nn.MultiHeadAttention(**self.attention_kwargs)(x, dropout_rng=dropout_rng)\n ... out2 = nn.MultiHeadAttention(**self.attention_kwargs)(x, dropout_rng=dropout_rng)\n ... return out1, out2\n >>> module = Module(attention_kwargs)\n >>> variables = module.init({'params': key1, 'dropout': key2}, q)\n\n >>> # out1 and out2 are different.\n >>> out1, out2 = module.apply(variables, q, rngs={'dropout': key3})\n >>> # out3 and out4 are different.\n >>> # out1 and out3 are different. out2 and out4 are different.\n >>> out3, out4 = module.apply(variables, q, rngs={'dropout': key4})\n >>> # out1 and out2 are the same.\n >>> out1, out2 = module.apply(variables, q, dropout_rng=key5)\n >>> # out1 and out2 are the same as out3 and out4.\n >>> # providing a `dropout_rng` arg will take precedence over the `rngs` arg in `.apply`\n >>> out3, out4 = module.apply(variables, q, rngs={'dropout': key6}, dropout_rng=key5)\n\n Attributes:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see ``jax.lax.Precision``\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n bias_init: initializer for the bias of the Dense layers.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. 
Accepts query,\n key, value, and returns output of shape ``[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n """"""\n\n\nclass SelfAttention(MultiHeadDotProductAttention):\n """"""Self-attention special case of multi-head dot-product attention.\n This layer is deprecated in favor of ``MultiHeadDotProductAttention``.\n\n Example usage::\n >>> import flax.linen as nn\n >>> import jax, jax.numpy as jnp\n >>> layer = nn.MultiHeadDotProductAttention(num_heads=8, qkv_features=16)\n >>> variables = layer.init(jax.random.key(0), jnp.ones((4, 3, 2, 5)))\n """"""\n\n @compact\n def __call__( # type: ignore\n self,\n inputs_q: Array,\n mask: Array | None = None,\n deterministic: bool | None = None,\n dropout_rng: PRNGKey | None = None,\n sow_weights: bool = False,\n ):\n """"""Applies multi-head dot product self-attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n Args:\n inputs_q: input queries of shape ``[batch_sizes..., length, features]``.\n mask: attention mask of shape ``[batch_sizes..., num_heads, query_length,\n key/value_length]``. Attention weights are masked out if their\n corresponding mask value is ``False``.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n\n Returns:\n output of shape ``[batch_sizes..., length, features]``.\n """"""\n warnings.warn(\n 'SelfAttention will be deprecated soon. Use '\n '`MultiHeadDotProductAttention.__call__(inputs_q)` instead. '\n 'See https://github.com/google/flax/discussions/3389 '\n 'for more information.',\n DeprecationWarning,\n )\n return super().__call__(\n inputs_q,\n mask=mask,\n deterministic=deterministic,\n dropout_rng=dropout_rng,\n sow_weights=sow_weights,\n )\n\n\n# mask-making utility functions\n\n\ndef make_attention_mask(\n query_input: Array,\n key_input: Array,\n pairwise_fn: Callable[..., Any] = jnp.multiply,\n extra_batch_dims: int = 0,\n dtype: Dtype = jnp.float32,\n):\n """"""Mask-making helper for attention weights.\n\n In case of 1d inputs (i.e., ``[batch..., len_q]``, ``[batch..., len_kv]``, the\n attention weights will be ``[batch..., heads, len_q, len_kv]`` and this\n function will produce ``[batch..., 1, len_q, len_kv]``.\n\n Args:\n query_input: a batched, flat input of query_length size\n key_input: a batched, flat input of key_length size\n pairwise_fn: broadcasting elementwise comparison function\n extra_batch_dims: number of extra batch dims to add singleton axes for, none\n by default\n dtype: mask return dtype\n\n Returns:\n A ``[batch..., 1, len_q, len_kv]`` shaped mask for 1d attention.\n """"""\n mask = pairwise_fn(\n jnp.expand_dims(query_input, axis=-1), jnp.expand_dims(key_input, axis=-2)\n )\n mask = jnp.expand_dims(mask, axis=-3)\n mask = jnp.expand_dims(mask, axis=tuple(range(extra_batch_dims)))\n return mask.astype(dtype)\n\n\ndef make_causal_mask(\n x: Array, extra_batch_dims: int = 0, dtype: Dtype = jnp.float32\n) -> Array:\n """"""Make a causal mask for self-attention.\n\n In case of 1d inputs (i.e., ``[batch..., len]``, the self-attention weights\n will be ``[batch..., heads, len, len]`` and this function will produce a\n causal mask of shape ``[batch..., 1, len, len]``.\n\n Args:\n x: input array of shape ``[batch..., len]``\n 
extra_batch_dims: number of batch dims to add singleton axes for, none by\n default\n dtype: mask return dtype\n\n Returns:\n A ``[batch..., 1, len, len]`` shaped causal mask for 1d attention.\n """"""\n idxs = jnp.broadcast_to(jnp.arange(x.shape[-1], dtype=jnp.int32), x.shape)\n return make_attention_mask(\n idxs,\n idxs,\n jnp.greater_equal,\n extra_batch_dims=extra_batch_dims,\n dtype=dtype,\n )\n\n\ndef combine_masks(\n *masks: Array | None, dtype: Dtype = jnp.float32\n) -> Array | None:\n """"""Combine attention masks.\n\n Args:\n *masks: set of attention mask arguments to combine, some can be None.\n dtype: dtype for the returned mask.\n\n Returns:\n Combined mask, reduced by logical and, returns None if no masks given.\n """"""\n masks_list = [m for m in masks if m is not None]\n if not masks_list:\n return None\n assert all(\n map(lambda x: x.ndim == masks_list[0].ndim, masks_list)\n ), f'masks must have same rank: {tuple(map(lambda x: x.ndim, masks_list))}'\n mask, *other_masks = masks_list\n for other_mask in other_masks:\n mask = jnp.logical_and(mask, other_mask)\n return mask.astype(dtype)\n",python,tab +3722,12818662,".venv/lib/python3.10/site-packages/flax/linen/attention.py",27239,0,"",python,selection_command +3723,12820631,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +3724,12820632,"/fast/home/franz.srambical/jafar/utils/nn.py",2093,0,"",python,selection_command +3725,12821305,".venv/lib/python3.10/site-packages/flax/linen/attention.py",0,0,"",python,tab +3726,12821305,".venv/lib/python3.10/site-packages/flax/linen/attention.py",27239,0,"",python,selection_command +3727,12821758,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +3728,12821758,"/fast/home/franz.srambical/jafar/utils/nn.py",2093,0,"",python,selection_command +3729,12822096,".venv/lib/python3.10/site-packages/flax/linen/attention.py",0,0,"",python,tab +3730,12822096,".venv/lib/python3.10/site-packages/flax/linen/attention.py",27239,0,"",python,selection_command +3731,12825083,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +3732,12866281,"/fast/home/franz.srambical/jafar/utils/nn.py",10240,638," # mask.shape (1, 921, 1, 1, 1)\n mask_4d = jnp.pad(\n mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )",python,content +3733,12866362,"/fast/home/franz.srambical/jafar/utils/nn.py",10240,196," print(f""DEBUG: query.shape {query.shape}"")\n print(f""DEBUG: key.shape {key.shape}"")\n print(f""DEBUG: value.shape {value.shape}"")\n # mask.shape (1, 921, 1, 1, 1) - (batch, kv_length, heads, q_length, k_length)\n if mask.ndim == 5:\n # For decode mode: pad the kv_length dimension (dim 1) and k_length dimension (dim 4)\n mask_4d = jnp.pad(\n mask, ((0, 0), (0, pad_size), (0, 0), (0, 0), (0, pad_size)), \n constant_values=False\n )\n else:\n # Fallback for other mask shapes\n mask_4d = jnp.pad(\n mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )",python,content +3734,12953278,"/fast/home/franz.srambical/jafar/utils/nn.py",10240,0,"",python,selection_command +3735,12971112,"/fast/home/franz.srambical/jafar/utils/nn.py",9082,0,"",python,selection_mouse +3736,12971235,"/fast/home/franz.srambical/jafar/utils/nn.py",9081,3,"qkv",python,selection_mouse +3737,12971382,"/fast/home/franz.srambical/jafar/utils/nn.py",9081,9,"qkv.shape",python,selection_mouse +3738,12971412,"/fast/home/franz.srambical/jafar/utils/nn.py",9081,12,"qkv.shape (1",python,selection_mouse 
+3739,12971429,"/fast/home/franz.srambical/jafar/utils/nn.py",9081,17,"qkv.shape (1, 921",python,selection_mouse +3740,12971460,"/fast/home/franz.srambical/jafar/utils/nn.py",9081,20,"qkv.shape (1, 921, 1",python,selection_mouse +3741,12971479,"/fast/home/franz.srambical/jafar/utils/nn.py",9081,22,"qkv.shape (1, 921, 1, ",python,selection_mouse +3742,12971495,"/fast/home/franz.srambical/jafar/utils/nn.py",9081,23,"qkv.shape (1, 921, 1, 8",python,selection_mouse +3743,12971512,"/fast/home/franz.srambical/jafar/utils/nn.py",9081,24,"qkv.shape (1, 921, 1, 8,",python,selection_mouse +3744,12971577,"/fast/home/franz.srambical/jafar/utils/nn.py",9081,25,"qkv.shape (1, 921, 1, 8, ",python,selection_mouse +3745,12971626,"/fast/home/franz.srambical/jafar/utils/nn.py",9081,27,"qkv.shape (1, 921, 1, 8, 64",python,selection_mouse +3746,12972035,"/fast/home/franz.srambical/jafar/utils/nn.py",9081,28,"qkv.shape (1, 921, 1, 8, 64)",python,selection_mouse +3747,12973860,"/fast/home/franz.srambical/jafar/utils/nn.py",9108,0,"",python,selection_command +3748,12998375,"/fast/home/franz.srambical/jafar/utils/nn.py",10240,799," # mask.shape (1, 921, 1, 1, 1)\n mask_4d = jnp.pad(\n mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )",python,content +3749,12998454,"/fast/home/franz.srambical/jafar/utils/nn.py",10240,196," print(f""DEBUG: query.shape {query.shape}"")\n print(f""DEBUG: key.shape {key.shape}"")\n print(f""DEBUG: value.shape {value.shape}"")\n # mask.shape (1, 921, 1, 1, 1) - (batch, kv_length, heads, q_length, k_length)\n if mask.ndim == 5:\n # For decode mode: pad the kv_length dimension (dim 1) and k_length dimension (dim 4)\n mask_4d = jnp.pad(\n mask, ((0, 0), (0, pad_size), (0, 0), (0, 0), (0, pad_size)), \n constant_values=False\n )\n else:\n # Fallback for other mask shapes\n mask_4d = jnp.pad(\n mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )",python,content +3750,12998455,"/fast/home/franz.srambical/jafar/utils/nn.py",4129,0," # Reshape back to original shape\n z = z.reshape(original_shape)\n",python,content +3751,12998455,"/fast/home/franz.srambical/jafar/utils/nn.py",4055,34," # z.shape (1, 921, 512)",python,content +3752,12998455,"/fast/home/franz.srambical/jafar/utils/nn.py",3984,36," # Reshape from (1, 921, 1, 512) to (1, 921, 512) to fix the extra dimension issue\n original_shape = x.shape\n x_reshaped = x.reshape(original_shape[0], original_shape[1], -1)\n z = self.temporal_pos_enc(x_reshaped)",python,content +3753,13003430,"/fast/home/franz.srambical/jafar/utils/nn.py",10521,799," # mask.shape (1, 921, 1, 1, 1)\n mask_4d = jnp.pad(\n mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )",python,content +3754,13003430,"/fast/home/franz.srambical/jafar/utils/nn.py",4331,79,"",python,content +3755,13003430,"/fast/home/franz.srambical/jafar/utils/nn.py",4260,31," # z.shape (1, 921, 1, 512)",python,content +3756,13003430,"/fast/home/franz.srambical/jafar/utils/nn.py",3984,241," z = self.temporal_pos_enc(x)",python,content +3757,13003512,"/fast/home/franz.srambical/jafar/utils/nn.py",10240,196," print(f""DEBUG: query.shape {query.shape}"")\n print(f""DEBUG: key.shape {key.shape}"")\n print(f""DEBUG: value.shape {value.shape}"")\n # mask.shape (1, 921, 1, 1, 1) - (batch, kv_length, heads, q_length, k_length)\n if mask.ndim == 5:\n # For decode mode: pad the kv_length dimension (dim 1) and k_length dimension (dim 4)\n mask_4d = jnp.pad(\n mask, ((0, 0), (0, pad_size), (0, 0), (0, 0), (0, 
pad_size)), \n constant_values=False\n )\n else:\n # Fallback for other mask shapes\n mask_4d = jnp.pad(\n mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )",python,content +3758,13003512,"/fast/home/franz.srambical/jafar/utils/nn.py",9009,204,"",python,content +3759,13003512,"/fast/home/franz.srambical/jafar/utils/nn.py",4129,0," # Reshape back to original shape\n z = z.reshape(original_shape)\n",python,content +3760,13003512,"/fast/home/franz.srambical/jafar/utils/nn.py",4055,34," # z.shape (1, 921, 512)",python,content +3761,13003512,"/fast/home/franz.srambical/jafar/utils/nn.py",3984,36," # Reshape from (1, 921, 1, 512) to (1, 921, 512) to fix the extra dimension issue\n original_shape = x.shape\n x_reshaped = x.reshape(original_shape[0], original_shape[1], -1)\n z = self.temporal_pos_enc(x_reshaped)",python,content +3762,13012586,"/fast/home/franz.srambical/jafar/utils/nn.py",10317,799," # mask.shape (1, 921, 1, 1, 1)\n mask_4d = jnp.pad(\n mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )",python,content +3763,13012586,"/fast/home/franz.srambical/jafar/utils/nn.py",9290,0," # for temporal attention (using kv cache)\n # FIRST PASS: qkv.shape (1, 921, 1, 8, 64)\n # SECOND PASS: qkv.shape \n if query.shape == (1, 921, 1, 512):\n breakpoint()\n",python,content +3764,13012586,"/fast/home/franz.srambical/jafar/utils/nn.py",4331,79,"",python,content +3765,13012586,"/fast/home/franz.srambical/jafar/utils/nn.py",4260,31," # z.shape (1, 921, 1, 512)",python,content +3766,13012586,"/fast/home/franz.srambical/jafar/utils/nn.py",3984,241," z = self.temporal_pos_enc(x)",python,content +3767,13012672,"/fast/home/franz.srambical/jafar/utils/nn.py",10187,95," # Mask from nnx is 4D: (batch, heads, seq_len, seq_len)\n # Pad it to match the padded sequence length",python,content +3768,13012672,"/fast/home/franz.srambical/jafar/utils/nn.py",9009,204,"",python,content +3769,13012672,"/fast/home/franz.srambical/jafar/utils/nn.py",4129,0," # Reshape back to original shape\n z = z.reshape(original_shape)\n",python,content +3770,13012672,"/fast/home/franz.srambical/jafar/utils/nn.py",4055,34," # z.shape (1, 921, 512)",python,content +3771,13012672,"/fast/home/franz.srambical/jafar/utils/nn.py",3984,36," # Reshape from (1, 921, 1, 512) to (1, 921, 512) to fix the extra dimension issue\n original_shape = x.shape\n x_reshaped = x.reshape(original_shape[0], original_shape[1], -1)\n z = self.temporal_pos_enc(x_reshaped)",python,content +3772,13039633,"/fast/home/franz.srambical/jafar/utils/nn.py",10263,0,"",python,selection_mouse +3773,13039635,"/fast/home/franz.srambical/jafar/utils/nn.py",10262,0,"",python,selection_command +3774,13040440,"/fast/home/franz.srambical/jafar/utils/nn.py",10264,124," print(f""DEBUG: mask.shape {mask.shape}"")\n # mask.shape (1, 921, 1, 1, 1)",python,content +3775,13040788,"/fast/home/franz.srambical/jafar/utils/nn.py",10169,0,"",python,selection_command +3776,13041037,"/fast/home/franz.srambical/jafar/utils/nn.py",10141,0,"",python,selection_command +3777,13041077,"/fast/home/franz.srambical/jafar/utils/nn.py",10092,0,"",python,selection_command +3778,13041103,"/fast/home/franz.srambical/jafar/utils/nn.py",10016,0,"",python,selection_command +3779,13041131,"/fast/home/franz.srambical/jafar/utils/nn.py",9931,0,"",python,selection_command +3780,13041166,"/fast/home/franz.srambical/jafar/utils/nn.py",9903,0,"",python,selection_command 
+3781,13041200,"/fast/home/franz.srambical/jafar/utils/nn.py",9887,0,"",python,selection_command +3782,13041232,"/fast/home/franz.srambical/jafar/utils/nn.py",9848,0,"",python,selection_command +3783,13041265,"/fast/home/franz.srambical/jafar/utils/nn.py",9805,0,"",python,selection_command +3784,13041298,"/fast/home/franz.srambical/jafar/utils/nn.py",9777,0,"",python,selection_command +3785,13041332,"/fast/home/franz.srambical/jafar/utils/nn.py",9751,0,"",python,selection_command +3786,13041365,"/fast/home/franz.srambical/jafar/utils/nn.py",9692,0,"",python,selection_command +3787,13041400,"/fast/home/franz.srambical/jafar/utils/nn.py",9653,0,"",python,selection_command +3788,13041434,"/fast/home/franz.srambical/jafar/utils/nn.py",9625,0,"",python,selection_command +3789,13041467,"/fast/home/franz.srambical/jafar/utils/nn.py",9609,0,"",python,selection_command +3790,13041502,"/fast/home/franz.srambical/jafar/utils/nn.py",9572,0,"",python,selection_command +3791,13041532,"/fast/home/franz.srambical/jafar/utils/nn.py",9544,0,"",python,selection_command +3792,13041565,"/fast/home/franz.srambical/jafar/utils/nn.py",9500,0,"",python,selection_command +3793,13041598,"/fast/home/franz.srambical/jafar/utils/nn.py",9471,0,"",python,selection_command +3794,13041631,"/fast/home/franz.srambical/jafar/utils/nn.py",9451,0,"",python,selection_command +3795,13041668,"/fast/home/franz.srambical/jafar/utils/nn.py",9411,0,"",python,selection_command +3796,13041698,"/fast/home/franz.srambical/jafar/utils/nn.py",9382,0,"",python,selection_command +3797,13041732,"/fast/home/franz.srambical/jafar/utils/nn.py",9356,0,"",python,selection_command +3798,13041768,"/fast/home/franz.srambical/jafar/utils/nn.py",9317,0,"",python,selection_command +3799,13041800,"/fast/home/franz.srambical/jafar/utils/nn.py",9244,0,"",python,selection_command +3800,13041833,"/fast/home/franz.srambical/jafar/utils/nn.py",9216,0,"",python,selection_command +3801,13041866,"/fast/home/franz.srambical/jafar/utils/nn.py",9214,0,"",python,selection_command +3802,13041901,"/fast/home/franz.srambical/jafar/utils/nn.py",9195,0,"",python,selection_command +3803,13042378,"/fast/home/franz.srambical/jafar/utils/nn.py",9214,0,"",python,selection_command +3804,13042528,"/fast/home/franz.srambical/jafar/utils/nn.py",9216,0,"",python,selection_command +3805,13042843,"/fast/home/franz.srambical/jafar/utils/nn.py",9290,0," # for temporal attention (using kv cache)\n # FIRST PASS: qkv.shape (1, 921, 1, 8, 64)\n # SECOND PASS: qkv.shape \n if query.shape == (1, 921, 1, 512):\n breakpoint()\n",python,content +3806,13055319,"/fast/home/franz.srambical/jafar/utils/nn.py",9217,0,"",python,selection_command +3807,13055567,"/fast/home/franz.srambical/jafar/utils/nn.py",9290,0,"",python,selection_command +3808,13055580,"/fast/home/franz.srambical/jafar/utils/nn.py",9340,0,"",python,selection_command +3809,13055668,"/fast/home/franz.srambical/jafar/utils/nn.py",9391,0,"",python,selection_command +3810,13055668,"/fast/home/franz.srambical/jafar/utils/nn.py",9425,0,"",python,selection_command +3811,13055700,"/fast/home/franz.srambical/jafar/utils/nn.py",9469,0,"",python,selection_command +3812,13056001,"/fast/home/franz.srambical/jafar/utils/nn.py",9425,0,"",python,selection_command +3813,13056170,"/fast/home/franz.srambical/jafar/utils/nn.py",9433,0,"",python,selection_command +3814,13056429,"/fast/home/franz.srambical/jafar/utils/nn.py",9436,0,"",python,selection_command +3815,13056456,"/fast/home/franz.srambical/jafar/utils/nn.py",9441,0,"",python,selection_command 
+3816,13056481,"/fast/home/franz.srambical/jafar/utils/nn.py",9442,0,"",python,selection_command +3817,13056514,"/fast/home/franz.srambical/jafar/utils/nn.py",9448,0,"",python,selection_command +3818,13056547,"/fast/home/franz.srambical/jafar/utils/nn.py",9451,0,"",python,selection_command +3819,13056717,"/fast/home/franz.srambical/jafar/utils/nn.py",9452,0,"",python,selection_command +3820,13056900,"/fast/home/franz.srambical/jafar/utils/nn.py",9453,0,"",python,selection_command +3821,13057065,"/fast/home/franz.srambical/jafar/utils/nn.py",9455,0,"",python,selection_command +3822,13057352,"/fast/home/franz.srambical/jafar/utils/nn.py",9458,0,"",python,selection_command +3823,13057519,"/fast/home/franz.srambical/jafar/utils/nn.py",9460,0,"",python,selection_command +3824,13057706,"/fast/home/franz.srambical/jafar/utils/nn.py",9461,0,"",python,selection_command +3825,13058890,"/fast/home/franz.srambical/jafar/utils/nn.py",9463,0,"",python,selection_command +3826,13059353,"/fast/home/franz.srambical/jafar/utils/nn.py",9463,3,"",python,content +3827,13061836,"/fast/home/franz.srambical/jafar/utils/nn.py",9463,0,"8, 64",python,content +3828,13062020,"/fast/home/franz.srambical/jafar/utils/nn.py",9467,0,"",python,selection_command +3829,13064519,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +3830,13064586,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +3831,13064855,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +3832,13076298,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +3833,13081084,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 219, in _broadcast_shapes_uncached\r\n return _try_broadcast_shapes(*rank_promoted_shapes, name='broadcast_shapes')\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 136, in _try_broadcast_shapes\r\n raise TypeError(f'{name} got incompatible shapes for broadcasting: '\r\nTypeError: broadcast_shapes got incompatible shapes for broadcasting: (1, 61, 1024), (1, 1024, 512).\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 197, in broadcast_shapes\r\n return _broadcast_shapes_cached(*shapes)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/util.py"", line 299, in wrapper\r\n return cached(config.trace_context() if trace_context_in_key else _ignore(),\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/util.py"", line 293, in cached\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 203, in _broadcast_shapes_cached\r\n return _broadcast_shapes_uncached(*shapes)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 222, in _broadcast_shapes_uncached\r\n raise ValueError(f""Incompatible shapes for broadcasting: shapes={list(shapes)}"") from err\r\nValueError: Incompatible shapes for broadcasting: shapes=[(1, 61, 1024), (1024, 512)]\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 219, in _broadcast_shapes_uncached\r\n return _try_broadcast_shapes(*rank_promoted_shapes, name='broadcast_shapes')\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 136, in _try_broadcast_shapes\r\n raise TypeError(f'{name} got incompatible shapes for broadcasting: '\r\nTypeError: broadcast_shapes got incompatible shapes for broadcasting: (1, 61, 1024), (1, 1024, 512).\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 193, in \r\n action_batch = jasmine.vq_encode(batch, training=False)\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 391, in vq_encode\r\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\r\n File ""/fast/home/franz.srambical/jafar/models/lam.py"", line 133, in vq_encode\r\n z = self.encoder(padded_patches) # (B, T, N, E)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 236, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 134, in __call__\r\n z = self.temporal_pos_enc(x_reshaped)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 27, in __call__\r\n x = x + self.pe[: x.shape[2]]\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 1083, in op\r\n return getattr(self.aval, f""_{name}"")(self, *args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 583, in deferring_binary_op\r\n return binary_op(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufunc_api.py"", line 182, in __call__\r\n return call(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufuncs.py"", line 1234, in add\r\n x, y = promote_args(""add"", x, y)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/util.py"", line 228, in promote_args\r\n return promote_shapes(fun_name, *promote_dtypes(*args))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/util.py"", line 64, in promote_shapes\r\n result_rank = len(lax.broadcast_shapes(*shapes))\r\nValueError: Incompatible shapes for broadcasting: shapes=[(1, 61, 1024), (1024, 512)]\r\n",,terminal_output +3834,13081885,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +3835,13090289,"/fast/home/franz.srambical/jafar/utils/nn.py",4180,0,"",python,selection_command +3836,13092552,"/fast/home/franz.srambical/jafar/utils/nn.py",866,0,"",python,selection_command +3837,13094486,"/fast/home/franz.srambical/jafar/utils/nn.py",4180,0,"",python,selection_command 
+3838,13102852,"/fast/home/franz.srambical/jafar/utils/nn.py",3954,0,"",python,selection_mouse +3839,13102855,"/fast/home/franz.srambical/jafar/utils/nn.py",3953,0,"",python,selection_command +3840,13104037,"/fast/home/franz.srambical/jafar/utils/nn.py",3984,241," z = self.temporal_pos_enc(x)",python,content +3841,13104415,"/fast/home/franz.srambical/jafar/utils/nn.py",4055,31," # z.shape (1, 921, 1, 512)",python,content +3842,13104775,"/fast/home/franz.srambical/jafar/utils/nn.py",4129,79,"",python,content +3843,13107794,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ bash experiments/sample.sh ",,terminal_output +3844,13108046,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +3845,13110327,"/fast/home/franz.srambical/jafar/utils/nn.py",11065,0,"",python,selection_command +3846,13110997,"/fast/home/franz.srambical/jafar/utils/nn.py",10559,0,"",python,selection_command +3847,13111543,"/fast/home/franz.srambical/jafar/utils/nn.py",9559,0,"",python,selection_command +3848,13112883,"/fast/home/franz.srambical/jafar/utils/nn.py",9550,0,"",python,selection_command +3849,13113130,"/fast/home/franz.srambical/jafar/utils/nn.py",9515,0,"",python,selection_command +3850,13113145,"/fast/home/franz.srambical/jafar/utils/nn.py",9478,0,"",python,selection_command +3851,13113177,"/fast/home/franz.srambical/jafar/utils/nn.py",9469,0,"",python,selection_command +3852,13113210,"/fast/home/franz.srambical/jafar/utils/nn.py",9406,0,"",python,selection_command +3853,13113246,"/fast/home/franz.srambical/jafar/utils/nn.py",9385,0,"",python,selection_command +3854,13113279,"/fast/home/franz.srambical/jafar/utils/nn.py",9376,0,"",python,selection_command +3855,13113314,"/fast/home/franz.srambical/jafar/utils/nn.py",9317,0,"",python,selection_command +3856,13113347,"/fast/home/franz.srambical/jafar/utils/nn.py",9290,0,"",python,selection_command +3857,13113381,"/fast/home/franz.srambical/jafar/utils/nn.py",9281,0,"",python,selection_command +3858,13113419,"/fast/home/franz.srambical/jafar/utils/nn.py",9223,0,"",python,selection_command +3859,13113452,"/fast/home/franz.srambical/jafar/utils/nn.py",9198,0,"",python,selection_command +3860,13113646,"/fast/home/franz.srambical/jafar/utils/nn.py",9223,0,"",python,selection_command +3861,13119075,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +3862,13127179,"TERMINAL",0,0,"2025-07-27 12:57:26.984367: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3863,13128666,"TERMINAL",0,0,"2025-07-27 12:57:28.469450: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3864,13132170,"TERMINAL",0,0,"2025-07-27 12:57:31.976869: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3865,13132961,"sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\nimport optax\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nimport orbax.checkpoint as ocp\nfrom PIL import Image, ImageDraw\nimport tyro\nfrom flax import nnx\n\nfrom jasmine import Jasmine\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_co_train: bool = False\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n dynamics_type: str = ""maskgit""\n\n\nargs = tyro.cli(Args)\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Load Dynamics model checkpoint ---\n rngs = nnx.Rngs(rng)\n jasmine = Jasmine(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=args.lam_co_train,\n # Dynamics\n dynamics_type=args.dynamics_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=True,\n rngs=rngs,\n )\n\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n 
checkpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n dummy_tx = optax.adamw(\n learning_rate=optax.linear_schedule(0.0001, 0.0001, 10000),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n dummy_optimizer = nnx.Optimizer(jasmine, dummy_tx)\n\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(dummy_optimizer, restored_optimizer_state)\n\n # --- Define sampling function ---\n def _sampling_fn(model: Jasmine, batch: dict) -> jax.Array:\n """"""Runs Jasmine.sample with pre-defined generation hyper-parameters.""""""\n if args.dynamics_type == ""maskgit"":\n return model.sample_maskgit(\n batch,\n args.seq_len,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n )\n else:\n return model.sample_causal(\n batch,\n args.seq_len,\n args.temperature,\n args.sample_argmax,\n )\n\n # --- Define autoregressive sampling loop ---\n # @nnx.jit\n def _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = _sampling_fn(jasmine, batch)\n return generated_vid\n\n # --- Get video + latent actions ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n # We don't use workers in order to avoid grain shutdown issues (https://github.com/google/grain/issues/398)\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n dataloader = iter(dataloader)\n video_batch = next(dataloader)\n video_batch = video_batch.astype(args.dtype) / 255.0\n # Get latent actions for all videos in the batch\n batch = dict(videos=video_batch)\n action_batch = jasmine.vq_encode(batch, training=False)\n action_batch = jnp.asarray(action_batch).reshape(\n video_batch.shape[0], args.seq_len - 1, 1\n )\n\n # --- Sample + evaluate video ---\n # The autoregressive cache needs to be initialized with the shape of the tokenized inputs, not the raw video.\n # The number of spatial tokens is derived from the image dimensions and patch size.\n # It appears the 90x160 image is padded to 92x160, and a CLS token is added.\n # (92 // args.patch_size) * (160 // args.patch_size) + 1 = 23 * 40 + 1 = 921\n num_patches = ((args.image_height + 3) // 4 * 4 // args.patch_size) * (\n args.image_width // args.patch_size\n ) + 1\n # Shape for spatial attention: (batch, time, patches, num_heads, head_dim)\n spatial_token_shape = (\n args.batch_size,\n 1,\n num_patches,\n args.dyna_dim,\n )\n # Shape for temporal attention: (batch, patches, time, num_heads, head_dim)\n temporal_token_shape = (\n args.batch_size,\n num_patches,\n 1,\n args.dyna_dim,\n )\n if args.dynamics_type == ""causal"":\n transformer_blocks = jasmine.dynamics.transformer.blocks\n for block in transformer_blocks:\n block.spatial_attention.init_cache(spatial_token_shape, dtype=args.dtype)\n block.temporal_attention.init_cache(temporal_token_shape, 
dtype=args.dtype)\n vid = _autoreg_sample(rng, video_batch, action_batch)\n gt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\n recon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\n ssim = jnp.asarray(\n pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :])\n ).mean()\n print(f""SSIM: {ssim}"")\n\n # --- Construct video ---\n true_videos = (video_batch * 255).astype(np.uint8)\n pred_videos = (vid * 255).astype(np.uint8)\n video_comparison = np.zeros((2, *vid.shape), dtype=np.uint8)\n video_comparison[0] = true_videos[:, : args.seq_len]\n video_comparison[1] = pred_videos\n frames = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n # --- Save video ---\n imgs = [Image.fromarray(img) for img in frames]\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(action_batch.shape[0]):\n action = action_batch[row, t, 0]\n y_offset = row * video_batch.shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\n imgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n )\n",python,tab +3866,13133033,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/utils/nn.py(298)attention_fn()\r\n-> implementation = ""cudnn"" if use_flash_attention else None\r\n",,terminal_output +3867,13148841,"sample.py",7016,0,"",python,selection_mouse +3868,13148841,"sample.py",7015,0,"",python,selection_command +3869,13148861,"sample.py",7015,1,")",python,selection_mouse +3870,13148862,"sample.py",7016,0,"",python,selection_command +3871,13149961,"sample.py",7015,0,"",python,selection_command +3872,13152661,"sample.py",7437,0,"",python,selection_mouse +3873,13154684,"sample.py",6918,0,"",python,selection_mouse +3874,13156692,"sample.py",7391,0,"",python,selection_mouse +3875,13157084,"sample.py",7219,0,"",python,selection_mouse +3876,13172674,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +3877,13185686,"TERMINAL",0,0,"c",,terminal_output +3878,13186623,"TERMINAL",0,0," ",,terminal_output +3879,13187077,"TERMINAL",0,0,"n",,terminal_output +3880,13187480,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(300)attention_fn()\r\n-> def _rearrange(x):\r\n",,terminal_output +3881,13188867,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(303)attention_fn()\r\n-> def _pad(x):\r\n",,terminal_output +3882,13189011,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(306)attention_fn()\r\n-> original_shape = query.shape\r\n",,terminal_output +3883,13189280,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(307)attention_fn()\r\n-> original_seq_len = query.shape[-3]\r\n",,terminal_output +3884,13189509,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(310)attention_fn()\r\n-> target_seq_len = ((original_seq_len + 3) // 4) * 4\r\n",,terminal_output +3885,13193052,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(311)attention_fn()\r\n-> pad_size = target_seq_len - original_seq_len\r\n",,terminal_output +3886,13193461,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(313)attention_fn()\r\n-> query_4d = _pad(_rearrange(query))\r\n",,terminal_output +3887,13193972,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(314)attention_fn()\r\n-> key_4d = 
_pad(_rearrange(key))\r\n",,terminal_output +3888,13194297,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(315)attention_fn()\r\n-> value_4d = _pad(_rearrange(value))\r\n",,terminal_output +3889,13194614,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(317)attention_fn()\r\n-> attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\r\n",,terminal_output +3890,13195872,"TERMINAL",0,0,"q",,terminal_output +3891,13195964,"TERMINAL",0,0,"u",,terminal_output +3892,13196099,"TERMINAL",0,0,"e",,terminal_output +3893,13196310,"TERMINAL",0,0,"y",,terminal_output +3894,13196397,"TERMINAL",0,0,"r",,terminal_output +3895,13196799,"TERMINAL",0,0," ",,terminal_output +3896,13196947,"TERMINAL",0,0," ",,terminal_output +3897,13197309,"TERMINAL",0,0,"r",,terminal_output +3898,13197397,"TERMINAL",0,0,"y",,terminal_output +3899,13197610,"TERMINAL",0,0,"_",,terminal_output +3900,13198191,"TERMINAL",0,0,"4",,terminal_output +3901,13198398,"TERMINAL",0,0,"d",,terminal_output +3902,13198499,"TERMINAL",0,0,".",,terminal_output +3903,13198633,"TERMINAL",0,0,"s",,terminal_output +3904,13198745,"TERMINAL",0,0,"h",,terminal_output +3905,13198847,"TERMINAL",0,0,"a",,terminal_output +3906,13198945,"TERMINAL",0,0,"p",,terminal_output +3907,13199048,"TERMINAL",0,0,"e",,terminal_output +3908,13199150,"TERMINAL",0,0,"\r\n(Pdb) (921, 4, 8, 64)\r\n",,terminal_output +3909,13287128,"/fast/home/franz.srambical/jafar/utils/nn.py",9198,0,"",python,selection_command +3910,13287456,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,0,"",python,selection_command +3911,13287610,"/fast/home/franz.srambical/jafar/utils/nn.py",9118,0,"",python,selection_command +3912,13287744,"/fast/home/franz.srambical/jafar/utils/nn.py",9067,0,"",python,selection_command +3913,13287946,"/fast/home/franz.srambical/jafar/utils/nn.py",9059,50," # FIRST PASS: qkv.shape (1, 921, 1, 8, 64)",python,selection_command +3914,13291952,"/fast/home/franz.srambical/jafar/utils/nn.py",9067,0,"",python,selection_command +3915,13292444,"/fast/home/franz.srambical/jafar/utils/nn.py",9069,0,"",python,selection_command +3916,13292701,"/fast/home/franz.srambical/jafar/utils/nn.py",9075,0,"",python,selection_command +3917,13292726,"/fast/home/franz.srambical/jafar/utils/nn.py",9079,0,"",python,selection_command +3918,13292752,"/fast/home/franz.srambical/jafar/utils/nn.py",9081,0,"",python,selection_command +3919,13292786,"/fast/home/franz.srambical/jafar/utils/nn.py",9084,0,"",python,selection_command +3920,13292819,"/fast/home/franz.srambical/jafar/utils/nn.py",9085,0,"",python,selection_command +3921,13292852,"/fast/home/franz.srambical/jafar/utils/nn.py",9091,0,"",python,selection_command +3922,13293422,"/fast/home/franz.srambical/jafar/utils/nn.py",9092,0,"",python,selection_command +3923,13293631,"/fast/home/franz.srambical/jafar/utils/nn.py",9093,0,"",python,selection_command +3924,13293867,"/fast/home/franz.srambical/jafar/utils/nn.py",9095,0,"",python,selection_command +3925,13294263,"/fast/home/franz.srambical/jafar/utils/nn.py",9098,0,"",python,selection_command +3926,13313412,"/fast/home/franz.srambical/jafar/utils/nn.py",9100,0,"",python,selection_command +3927,13316014,"/fast/home/franz.srambical/jafar/utils/nn.py",9098,0,"",python,selection_command +3928,13316303,"/fast/home/franz.srambical/jafar/utils/nn.py",9095,0,"",python,selection_command +3929,13316467,"/fast/home/franz.srambical/jafar/utils/nn.py",9093,0,"",python,selection_command 
+3930,13316601,"/fast/home/franz.srambical/jafar/utils/nn.py",9092,0,"",python,selection_command +3931,13318382,"/fast/home/franz.srambical/jafar/utils/nn.py",9093,0,"",python,selection_command +3932,13318610,"/fast/home/franz.srambical/jafar/utils/nn.py",9095,0,"",python,selection_command +3933,13318834,"/fast/home/franz.srambical/jafar/utils/nn.py",9098,0,"",python,selection_command +3934,13319076,"/fast/home/franz.srambical/jafar/utils/nn.py",9100,0,"",python,selection_command +3935,13378656,".venv/lib/python3.10/site-packages/flax/linen/attention.py",0,0,"",python,tab +3936,13382525,".venv/lib/python3.10/site-packages/flax/linen/attention.py",29898,0,"",python,selection_command +3937,13383608,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +3938,13387910,"/fast/home/franz.srambical/jafar/utils/nn.py",8537,0,"",python,selection_command +3939,13388165,"/fast/home/franz.srambical/jafar/utils/nn.py",2614,0,"",python,selection_command +3940,13389166,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +3941,13389166,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9464,0,"",python,selection_command +3942,13389690,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",12332,0,"",python,selection_command +3943,13389942,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14092,0,"",python,selection_command +3944,13390397,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16150,0,"",python,selection_command +3945,13390703,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18670,0,"",python,selection_command +3946,13390960,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20649,0,"",python,selection_command +3947,13391635,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22802,0,"",python,selection_command +3948,13392248,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",24711,0,"",python,selection_command +3949,13392971,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",24765,0,"",python,selection_command +3950,13393231,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",24768,0,"",python,selection_command +3951,13393244,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",24776,0,"",python,selection_command +3952,13393276,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",24836,0,"",python,selection_command +3953,13393309,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",24892,0,"",python,selection_command +3954,13393344,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",24954,0,"",python,selection_command +3955,13393375,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",25035,0,"",python,selection_command +3956,13393409,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",25052,0,"",python,selection_command +3957,13393443,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",25079,0,"",python,selection_command +3958,13393477,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",25082,0,"",python,selection_command +3959,13393511,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",25093,0,"",python,selection_command +3960,13393546,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",25160,0,"",python,selection_command +3961,13393579,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",25166,0,"",python,selection_command +3962,13393613,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",25188,0,"",python,selection_command 
+3963,13393645,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",25267,0,"",python,selection_command +3964,13393684,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",25271,0,"",python,selection_command +3965,13394133,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",25311,0,"",python,selection_command +3966,13394302,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",25271,0,"",python,selection_command +3967,13394710,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23403,0,"",python,selection_command +3968,13395932,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23388,0,"",python,selection_command +3969,13396181,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23314,0,"",python,selection_command +3970,13396194,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23242,0,"",python,selection_command +3971,13396225,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23178,0,"",python,selection_command +3972,13396258,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23106,0,"",python,selection_command +3973,13396292,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23105,0,"",python,selection_command +3974,13396325,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23090,0,"",python,selection_command +3975,13396359,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23068,0,"",python,selection_command +3976,13396393,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23023,0,"",python,selection_command +3977,13396425,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",23017,0,"",python,selection_command +3978,13396459,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22973,0,"",python,selection_command +3979,13396492,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22941,0,"",python,selection_command +3980,13396524,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22917,0,"",python,selection_command +3981,13396558,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22882,0,"",python,selection_command +3982,13396593,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22834,0,"",python,selection_command +3983,13396625,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22796,0,"",python,selection_command +3984,13396658,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22765,0,"",python,selection_command +3985,13396691,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22748,0,"",python,selection_command +3986,13396725,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22735,0,"",python,selection_command +3987,13396759,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22724,0,"",python,selection_command +3988,13396962,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22711,0,"",python,selection_command +3989,13397129,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22684,0,"",python,selection_command +3990,13397236,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22688,0,"",python,selection_command +3991,13397389,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22690,0,"",python,selection_command +3992,13397699,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22692,0,"",python,selection_command +3993,13397833,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22696,0,"",python,selection_command 
+3994,13398167,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22697,0,"",python,selection_command +3995,13398468,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14274,0,"",python,selection_command +3996,13399147,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14287,0,"",python,selection_command +3997,13399315,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14289,0,"",python,selection_command +3998,13400351,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13131,0,"",python,selection_command +3999,13401451,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13143,0,"",python,selection_command +4000,13401686,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13145,0,"",python,selection_command +4001,13401721,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13153,0,"",python,selection_command +4002,13401753,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13159,0,"",python,selection_command +4003,13401784,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13164,0,"",python,selection_command +4004,13401847,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13166,0,"",python,selection_command +4005,13401949,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13168,0,"",python,selection_command +4006,13402427,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5282,0,"",python,selection_command +4007,13408492,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6295,0,"",python,selection_command +4008,13409325,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8041,0,"",python,selection_command +4009,13411759,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9165,0,"",python,selection_command +4010,13417661,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8041,0,"",python,selection_command +4011,13419204,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9165,0,"",python,selection_command +4012,13420017,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8041,0,"",python,selection_command +4013,13433524,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7940,0,"",python,selection_command +4014,13433768,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7934,0,"",python,selection_command +4015,13433794,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7861,0,"",python,selection_command +4016,13433827,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7850,0,"",python,selection_command +4017,13433857,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7847,0,"",python,selection_command +4018,13433896,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7836,0,"",python,selection_command +4019,13433945,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7755,0,"",python,selection_command +4020,13433969,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7674,0,"",python,selection_command +4021,13433997,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7596,0,"",python,selection_command +4022,13434022,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7563,0,"",python,selection_command +4023,13434060,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7487,0,"",python,selection_command +4024,13434091,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7419,0,"",python,selection_command 
+4025,13434124,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7400,0,"",python,selection_command +4026,13434156,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7322,0,"",python,selection_command +4027,13434190,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7253,0,"",python,selection_command +4028,13434227,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7188,0,"",python,selection_command +4029,13434257,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7157,0,"",python,selection_command +4030,13434289,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7104,0,"",python,selection_command +4031,13434322,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7031,0,"",python,selection_command +4032,13434357,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6988,0,"",python,selection_command +4033,13434390,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6912,0,"",python,selection_command +4034,13434423,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6833,0,"",python,selection_command +4035,13434456,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6755,0,"",python,selection_command +4036,13434490,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6685,0,"",python,selection_command +4037,13434523,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6606,0,"",python,selection_command +4038,13434558,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6528,0,"",python,selection_command +4039,13434591,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6490,0,"",python,selection_command +4040,13434623,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6410,0,"",python,selection_command +4041,13434657,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6371,0,"",python,selection_command +4042,13434690,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6293,0,"",python,selection_command +4043,13434724,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6254,0,"",python,selection_command +4044,13434757,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6172,0,"",python,selection_command +4045,13434790,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6164,0,"",python,selection_command +4046,13434824,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6161,0,"",python,selection_command +4047,13434856,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6094,0,"",python,selection_command +4048,13434891,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6082,0,"",python,selection_command +4049,13434924,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6079,0,"",python,selection_command +4050,13434958,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6046,0,"",python,selection_command +4051,13434991,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5971,0,"",python,selection_command +4052,13435024,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5968,0,"",python,selection_command +4053,13435058,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5901,0,"",python,selection_command +4054,13435089,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5823,0,"",python,selection_command +4055,13435123,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5763,0,"",python,selection_command 
+4056,13435156,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5760,0,"",python,selection_command +4057,13435190,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5697,0,"",python,selection_command +4058,13435224,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5693,0,"",python,selection_command +4059,13435258,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5638,0,"",python,selection_command +4060,13435291,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5606,0,"",python,selection_command +4061,13435324,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5571,0,"",python,selection_command +4062,13435357,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5541,0,"",python,selection_command +4063,13435391,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5510,0,"",python,selection_command +4064,13435424,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5481,0,"",python,selection_command +4065,13435457,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5445,0,"",python,selection_command +4066,13435490,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5411,0,"",python,selection_command +4067,13435523,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5382,0,"",python,selection_command +4068,13435561,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5353,0,"",python,selection_command +4069,13435619,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5337,0,"",python,selection_command +4070,13435639,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5323,0,"",python,selection_command +4071,13435667,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5307,0,"",python,selection_command +4072,13435690,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5280,0,"",python,selection_command +4073,13435725,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5277,0,"",python,selection_command +4074,13435757,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5276,0,"",python,selection_command +4075,13435790,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5256,0,"",python,selection_command +4076,13435824,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5253,0,"",python,selection_command +4077,13435858,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5210,0,"",python,selection_command +4078,13435892,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5136,0,"",python,selection_command +4079,13435962,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5210,0,"",python,selection_command +4080,13436222,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5253,0,"",python,selection_command +4081,13436245,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5256,0,"",python,selection_command +4082,13436270,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5276,0,"",python,selection_command +4083,13436307,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5277,0,"",python,selection_command +4084,13436345,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5280,0,"",python,selection_command +4085,13436378,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5307,0,"",python,selection_command +4086,13436680,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5280,0,"",python,selection_command 
+4087,13436933,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,26,"def dot_product_attention(",python,selection_command +4088,13437135,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,482,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n",python,selection_command +4089,13437656,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,690,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n",python,selection_command +4090,13437798,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,801,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n Will use the more optimized `jax.nn.dot_product_attention` if dropout is\n not activated and `module=None`.\n",python,selection_command +4091,13437970,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,883,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n Will use the more optimized `jax.nn.dot_product_attention` if dropout is\n not activated and `module=None`.\n\n .. 
note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n",python,selection_command +4092,13438348,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,2569,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n Will use the more optimized `jax.nn.dot_product_attention` if dropout is\n not activated and `module=None`.\n\n .. note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n\n Args:\n query: queries for calculating attention with shape of ``[batch..., q_length,\n num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n value: values to be used in attention with shape of ``[batch..., kv_length,\n num_heads, v_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is `False`.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs)\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n ``nnx.Intermediate`` collection. If ``module`` is None, the attention\n weights will not be sowed.\n promote_dtype: function to promote the dtype of the arrays to the desired\n dtype. The function should accept a tuple of ``(query, key, value)`` and a\n ``dtype`` keyword argument, and return a tuple of arrays with the promoted\n dtype.\n",python,selection_command +4093,13438465,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,3143,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. 
It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n Will use the more optimized `jax.nn.dot_product_attention` if dropout is\n not activated and `module=None`.\n\n .. note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n\n Args:\n query: queries for calculating attention with shape of ``[batch..., q_length,\n num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n value: values to be used in attention with shape of ``[batch..., kv_length,\n num_heads, v_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is `False`.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs)\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n ``nnx.Intermediate`` collection. If ``module`` is None, the attention\n weights will not be sowed.\n promote_dtype: function to promote the dtype of the arrays to the desired\n dtype. The function should accept a tuple of ``(query, key, value)`` and a\n ``dtype`` keyword argument, and return a tuple of arrays with the promoted\n dtype.\n\n Returns:\n Output of shape `[batch..., q_length, num_heads, v_depth_per_head]`.\n """"""\n query, key, value = promote_dtype((query, key, value), dtype=dtype) # type: ignore[bad-unpacking]\n dtype = query.dtype\n assert key.ndim == query.ndim == value.ndim, 'q, k, v must have same rank.'\n assert (\n query.shape[:-3] == key.shape[:-3] == value.shape[:-3]\n ), 'q, k, v batch dims must match.'\n assert (\n query.shape[-2] == key.shape[-2] == value.shape[-2]\n ), 'q, k, v num_heads must match.'\n assert key.shape[-3] == value.shape[-3], 'k, v lengths must match.'\n",python,selection_command +4094,13439142,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,3784,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n Will use the more optimized `jax.nn.dot_product_attention` if dropout is\n not activated and `module=None`.\n\n .. 
note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n\n Args:\n query: queries for calculating attention with shape of ``[batch..., q_length,\n num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n value: values to be used in attention with shape of ``[batch..., kv_length,\n num_heads, v_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is `False`.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs)\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n ``nnx.Intermediate`` collection. If ``module`` is None, the attention\n weights will not be sowed.\n promote_dtype: function to promote the dtype of the arrays to the desired\n dtype. The function should accept a tuple of ``(query, key, value)`` and a\n ``dtype`` keyword argument, and return a tuple of arrays with the promoted\n dtype.\n\n Returns:\n Output of shape `[batch..., q_length, num_heads, v_depth_per_head]`.\n """"""\n query, key, value = promote_dtype((query, key, value), dtype=dtype) # type: ignore[bad-unpacking]\n dtype = query.dtype\n assert key.ndim == query.ndim == value.ndim, 'q, k, v must have same rank.'\n assert (\n query.shape[:-3] == key.shape[:-3] == value.shape[:-3]\n ), 'q, k, v batch dims must match.'\n assert (\n query.shape[-2] == key.shape[-2] == value.shape[-2]\n ), 'q, k, v num_heads must match.'\n assert key.shape[-3] == value.shape[-3], 'k, v lengths must match.'\n\n # Criteria that invoke the more optimized dot product attention\n if dropout_rate == 0.0 and module == None:\n # make sure qkv batch are compressed to one dim\n query_shape = query.shape\n if len(query_shape) > 4:\n def reshape_4d(x):\n return jnp.reshape(x, (math.prod(x.shape[:-3]), *x.shape[-3:]))\n query, key, value, bias, mask = jax.tree.map(\n reshape_4d, (query, key, value, bias, mask))\n if mask is not None:\n mask = mask.astype(jnp.bool)\n out = jax.nn.dot_product_attention(query, key, value, bias, mask)\n if len(query_shape) > 4:\n out = jnp.reshape(out, query_shape)\n return out\n",python,selection_command +4095,13439475,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,4022,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. 
It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n Will use the more optimized `jax.nn.dot_product_attention` if dropout is\n not activated and `module=None`.\n\n .. note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n\n Args:\n query: queries for calculating attention with shape of ``[batch..., q_length,\n num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n value: values to be used in attention with shape of ``[batch..., kv_length,\n num_heads, v_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is `False`.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs)\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n ``nnx.Intermediate`` collection. If ``module`` is None, the attention\n weights will not be sowed.\n promote_dtype: function to promote the dtype of the arrays to the desired\n dtype. The function should accept a tuple of ``(query, key, value)`` and a\n ``dtype`` keyword argument, and return a tuple of arrays with the promoted\n dtype.\n\n Returns:\n Output of shape `[batch..., q_length, num_heads, v_depth_per_head]`.\n """"""\n query, key, value = promote_dtype((query, key, value), dtype=dtype) # type: ignore[bad-unpacking]\n dtype = query.dtype\n assert key.ndim == query.ndim == value.ndim, 'q, k, v must have same rank.'\n assert (\n query.shape[:-3] == key.shape[:-3] == value.shape[:-3]\n ), 'q, k, v batch dims must match.'\n assert (\n query.shape[-2] == key.shape[-2] == value.shape[-2]\n ), 'q, k, v num_heads must match.'\n assert key.shape[-3] == value.shape[-3], 'k, v lengths must match.'\n\n # Criteria that invoke the more optimized dot product attention\n if dropout_rate == 0.0 and module == None:\n # make sure qkv batch are compressed to one dim\n query_shape = query.shape\n if len(query_shape) > 4:\n def reshape_4d(x):\n return jnp.reshape(x, (math.prod(x.shape[:-3]), *x.shape[-3:]))\n query, key, value, bias, mask = jax.tree.map(\n reshape_4d, (query, key, value, bias, mask))\n if mask is not None:\n mask = mask.astype(jnp.bool)\n out = jax.nn.dot_product_attention(query, key, value, bias, mask)\n if len(query_shape) > 4:\n out = jnp.reshape(out, query_shape)\n return out\n\n # compute attention weights\n attn_weights = dot_product_attention_weights(\n query,\n key,\n bias,\n mask,\n broadcast_dropout,\n dropout_rng,\n dropout_rate,\n deterministic,\n dtype,\n precision,\n module,\n )\n",python,selection_command +4096,13440502,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,4178,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n 
broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n Will use the more optimized `jax.nn.dot_product_attention` if dropout is\n not activated and `module=None`.\n\n .. note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n\n Args:\n query: queries for calculating attention with shape of ``[batch..., q_length,\n num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n value: values to be used in attention with shape of ``[batch..., kv_length,\n num_heads, v_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is `False`.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs)\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n ``nnx.Intermediate`` collection. If ``module`` is None, the attention\n weights will not be sowed.\n promote_dtype: function to promote the dtype of the arrays to the desired\n dtype. 
The function should accept a tuple of ``(query, key, value)`` and a\n ``dtype`` keyword argument, and return a tuple of arrays with the promoted\n dtype.\n\n Returns:\n Output of shape `[batch..., q_length, num_heads, v_depth_per_head]`.\n """"""\n query, key, value = promote_dtype((query, key, value), dtype=dtype) # type: ignore[bad-unpacking]\n dtype = query.dtype\n assert key.ndim == query.ndim == value.ndim, 'q, k, v must have same rank.'\n assert (\n query.shape[:-3] == key.shape[:-3] == value.shape[:-3]\n ), 'q, k, v batch dims must match.'\n assert (\n query.shape[-2] == key.shape[-2] == value.shape[-2]\n ), 'q, k, v num_heads must match.'\n assert key.shape[-3] == value.shape[-3], 'k, v lengths must match.'\n\n # Criteria that invoke the more optimized dot product attention\n if dropout_rate == 0.0 and module == None:\n # make sure qkv batch are compressed to one dim\n query_shape = query.shape\n if len(query_shape) > 4:\n def reshape_4d(x):\n return jnp.reshape(x, (math.prod(x.shape[:-3]), *x.shape[-3:]))\n query, key, value, bias, mask = jax.tree.map(\n reshape_4d, (query, key, value, bias, mask))\n if mask is not None:\n mask = mask.astype(jnp.bool)\n out = jax.nn.dot_product_attention(query, key, value, bias, mask)\n if len(query_shape) > 4:\n out = jnp.reshape(out, query_shape)\n return out\n\n # compute attention weights\n attn_weights = dot_product_attention_weights(\n query,\n key,\n bias,\n mask,\n broadcast_dropout,\n dropout_rng,\n dropout_rate,\n deterministic,\n dtype,\n precision,\n module,\n )\n\n # return weighted sum over values for each query position\n return jnp.einsum(\n '...hqk,...khd->...qhd', attn_weights, value, precision=precision\n )\n",python,selection_command +4097,13441036,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,4177,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n Will use the more optimized `jax.nn.dot_product_attention` if dropout is\n not activated and `module=None`.\n\n .. note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n\n Args:\n query: queries for calculating attention with shape of ``[batch..., q_length,\n num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n value: values to be used in attention with shape of ``[batch..., kv_length,\n num_heads, v_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks. 
Attention weights are masked out if their\n corresponding mask value is `False`.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs)\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n ``nnx.Intermediate`` collection. If ``module`` is None, the attention\n weights will not be sowed.\n promote_dtype: function to promote the dtype of the arrays to the desired\n dtype. The function should accept a tuple of ``(query, key, value)`` and a\n ``dtype`` keyword argument, and return a tuple of arrays with the promoted\n dtype.\n\n Returns:\n Output of shape `[batch..., q_length, num_heads, v_depth_per_head]`.\n """"""\n query, key, value = promote_dtype((query, key, value), dtype=dtype) # type: ignore[bad-unpacking]\n dtype = query.dtype\n assert key.ndim == query.ndim == value.ndim, 'q, k, v must have same rank.'\n assert (\n query.shape[:-3] == key.shape[:-3] == value.shape[:-3]\n ), 'q, k, v batch dims must match.'\n assert (\n query.shape[-2] == key.shape[-2] == value.shape[-2]\n ), 'q, k, v num_heads must match.'\n assert key.shape[-3] == value.shape[-3], 'k, v lengths must match.'\n\n # Criteria that invoke the more optimized dot product attention\n if dropout_rate == 0.0 and module == None:\n # make sure qkv batch are compressed to one dim\n query_shape = query.shape\n if len(query_shape) > 4:\n def reshape_4d(x):\n return jnp.reshape(x, (math.prod(x.shape[:-3]), *x.shape[-3:]))\n query, key, value, bias, mask = jax.tree.map(\n reshape_4d, (query, key, value, bias, mask))\n if mask is not None:\n mask = mask.astype(jnp.bool)\n out = jax.nn.dot_product_attention(query, key, value, bias, mask)\n if len(query_shape) > 4:\n out = jnp.reshape(out, query_shape)\n return out\n\n # compute attention weights\n attn_weights = dot_product_attention_weights(\n query,\n key,\n bias,\n mask,\n broadcast_dropout,\n dropout_rng,\n dropout_rate,\n deterministic,\n dtype,\n precision,\n module,\n )\n\n # return weighted sum over values for each query position\n return jnp.einsum(\n '...hqk,...khd->...qhd', attn_weights, value, precision=precision\n )",python,selection_command +4098,13456522,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9452,0,"",python,selection_command +4099,13538371,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +4100,13539810,"/fast/home/franz.srambical/jafar/utils/nn.py",11065,0,"",python,selection_command +4101,13540377,"/fast/home/franz.srambical/jafar/utils/nn.py",9837,0,"",python,selection_command +4102,13541423,"/fast/home/franz.srambical/jafar/utils/nn.py",9828,0,"",python,selection_command +4103,13541670,"/fast/home/franz.srambical/jafar/utils/nn.py",9793,0,"",python,selection_command +4104,13541699,"/fast/home/franz.srambical/jafar/utils/nn.py",9754,0,"",python,selection_command +4105,13541723,"/fast/home/franz.srambical/jafar/utils/nn.py",9711,0,"",python,selection_command +4106,13541756,"/fast/home/franz.srambical/jafar/utils/nn.py",9702,0,"",python,selection_command +4107,13541790,"/fast/home/franz.srambical/jafar/utils/nn.py",9657,0,"",python,selection_command +4108,13541825,"/fast/home/franz.srambical/jafar/utils/nn.py",9598,0,"",python,selection_command 
+4109,13541862,"/fast/home/franz.srambical/jafar/utils/nn.py",9559,0,"",python,selection_command +4110,13541898,"/fast/home/franz.srambical/jafar/utils/nn.py",9550,0,"",python,selection_command +4111,13541930,"/fast/home/franz.srambical/jafar/utils/nn.py",9515,0,"",python,selection_command +4112,13541963,"/fast/home/franz.srambical/jafar/utils/nn.py",9478,0,"",python,selection_command +4113,13541995,"/fast/home/franz.srambical/jafar/utils/nn.py",9469,0,"",python,selection_command +4114,13542029,"/fast/home/franz.srambical/jafar/utils/nn.py",9406,0,"",python,selection_command +4115,13542062,"/fast/home/franz.srambical/jafar/utils/nn.py",9385,0,"",python,selection_command +4116,13542095,"/fast/home/franz.srambical/jafar/utils/nn.py",9376,0,"",python,selection_command +4117,13542236,"/fast/home/franz.srambical/jafar/utils/nn.py",9317,0,"",python,selection_command +4118,13542435,"/fast/home/franz.srambical/jafar/utils/nn.py",9290,0,"",python,selection_command +4119,13542621,"/fast/home/franz.srambical/jafar/utils/nn.py",9317,0,"",python,selection_command +4120,13542791,"/fast/home/franz.srambical/jafar/utils/nn.py",9376,0,"",python,selection_command +4121,13542938,"/fast/home/franz.srambical/jafar/utils/nn.py",9317,0,"",python,selection_command +4122,13543131,"/fast/home/franz.srambical/jafar/utils/nn.py",9290,0,"",python,selection_command +4123,13543618,"/fast/home/franz.srambical/jafar/utils/nn.py",9282,0,"",python,selection_command +4124,13551095,".venv/lib/python3.10/site-packages/flax/linen/attention.py",0,0,"",python,tab +4125,13551275,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +4126,13553629,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +4127,13554877,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9382,0,"",python,selection_command +4128,13555131,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9361,0,"",python,selection_command +4129,13555143,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9301,0,"",python,selection_command +4130,13555175,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9300,0,"",python,selection_command +4131,13555208,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9296,0,"",python,selection_command +4132,13555242,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9284,0,"",python,selection_command +4133,13555277,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9269,0,"",python,selection_command +4134,13555310,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9258,0,"",python,selection_command +4135,13555344,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9239,0,"",python,selection_command +4136,13555378,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9221,0,"",python,selection_command +4137,13555412,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9204,0,"",python,selection_command +4138,13555445,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9181,0,"",python,selection_command +4139,13555480,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9171,0,"",python,selection_command +4140,13555512,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9161,0,"",python,selection_command +4141,13555547,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9152,0,"",python,selection_command +4142,13555584,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9141,0,"",python,selection_command 
+4143,13555616,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9093,0,"",python,selection_command +4144,13555651,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9063,0,"",python,selection_command +4145,13555684,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9062,0,"",python,selection_command +4146,13555719,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9047,0,"",python,selection_command +4147,13555752,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9005,0,"",python,selection_command +4148,13555792,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8976,0,"",python,selection_command +4149,13555820,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8906,0,"",python,selection_command +4150,13555853,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8871,0,"",python,selection_command +4151,13555892,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8846,0,"",python,selection_command +4152,13555920,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8793,0,"",python,selection_command +4153,13555959,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8741,0,"",python,selection_command +4154,13556114,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8669,0,"",python,selection_command +4155,13556243,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8644,0,"",python,selection_command +4156,13564994,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +4157,13653816,"/fast/home/franz.srambical/jafar/utils/nn.py",9309,0,"",python,selection_command +4158,13654047,"/fast/home/franz.srambical/jafar/utils/nn.py",9376,0,"",python,selection_command +4159,13654078,"/fast/home/franz.srambical/jafar/utils/nn.py",9377,0,"",python,selection_command +4160,13654110,"/fast/home/franz.srambical/jafar/utils/nn.py",9398,0,"",python,selection_command +4161,13654143,"/fast/home/franz.srambical/jafar/utils/nn.py",9469,0,"",python,selection_command +4162,13654178,"/fast/home/franz.srambical/jafar/utils/nn.py",9470,0,"",python,selection_command +4163,13654212,"/fast/home/franz.srambical/jafar/utils/nn.py",9507,0,"",python,selection_command +4164,13654243,"/fast/home/franz.srambical/jafar/utils/nn.py",9550,0,"",python,selection_command +4165,13654279,"/fast/home/franz.srambical/jafar/utils/nn.py",9551,0,"",python,selection_command +4166,13654315,"/fast/home/franz.srambical/jafar/utils/nn.py",9590,0,"",python,selection_command +4167,13654393,"/fast/home/franz.srambical/jafar/utils/nn.py",9649,0,"",python,selection_command +4168,13654393,"/fast/home/franz.srambical/jafar/utils/nn.py",9702,0,"",python,selection_command +4169,13654416,"/fast/home/franz.srambical/jafar/utils/nn.py",9703,0,"",python,selection_command +4170,13654450,"/fast/home/franz.srambical/jafar/utils/nn.py",9746,0,"",python,selection_command +4171,13654523,"/fast/home/franz.srambical/jafar/utils/nn.py",9785,0,"",python,selection_command +4172,13654523,"/fast/home/franz.srambical/jafar/utils/nn.py",9828,0,"",python,selection_command +4173,13654552,"/fast/home/franz.srambical/jafar/utils/nn.py",9829,0,"",python,selection_command +4174,13654587,"/fast/home/franz.srambical/jafar/utils/nn.py",9914,0,"",python,selection_command +4175,13654653,"/fast/home/franz.srambical/jafar/utils/nn.py",9990,0,"",python,selection_command +4176,13654653,"/fast/home/franz.srambical/jafar/utils/nn.py",10066,0,"",python,selection_command 
+4177,13654686,"/fast/home/franz.srambical/jafar/utils/nn.py",10067,0,"",python,selection_command +4178,13654720,"/fast/home/franz.srambical/jafar/utils/nn.py",10160,0,"",python,selection_command +4179,13654783,"/fast/home/franz.srambical/jafar/utils/nn.py",10189,0,"",python,selection_command +4180,13688262,"/fast/home/franz.srambical/jafar/utils/nn.py",10242,0,"",python,selection_command +4181,13688409,"/fast/home/franz.srambical/jafar/utils/nn.py",10285,0,"",python,selection_command +4182,13688749,"/fast/home/franz.srambical/jafar/utils/nn.py",10285,30," mask_4d = jnp.pad(",python,selection_command +4183,13688857,"/fast/home/franz.srambical/jafar/utils/nn.py",10285,101," mask_4d = jnp.pad(\n mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), ",python,selection_command +4184,13689012,"/fast/home/franz.srambical/jafar/utils/nn.py",10285,139," mask_4d = jnp.pad(\n mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False",python,selection_command +4185,13689147,"/fast/home/franz.srambical/jafar/utils/nn.py",10285,153," mask_4d = jnp.pad(\n mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )",python,selection_command +4186,13695534,"/fast/home/franz.srambical/jafar/utils/nn.py",10425,0,"",python,selection_command +4187,13695915,"/fast/home/franz.srambical/jafar/utils/nn.py",10425,13," )",python,selection_command +4188,13696044,"/fast/home/franz.srambical/jafar/utils/nn.py",10387,51," constant_values=False\n )",python,selection_command +4189,13696202,"/fast/home/franz.srambical/jafar/utils/nn.py",10316,122," mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )",python,selection_command +4190,13696334,"/fast/home/franz.srambical/jafar/utils/nn.py",10285,153," mask_4d = jnp.pad(\n mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )",python,selection_command +4191,13696483,"/fast/home/franz.srambical/jafar/utils/nn.py",10242,196," # mask.shape (1, 921, 1, 1, 1)\n mask_4d = jnp.pad(\n mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )",python,selection_command +4192,13718684,"/fast/home/franz.srambical/jafar/utils/nn.py",10242,0,"",python,selection_command +4193,13722609,"/fast/home/franz.srambical/jafar/utils/nn.py",10254,0,"",python,selection_command +4194,13723437,"/fast/home/franz.srambical/jafar/utils/nn.py",10297,0,"",python,selection_command +4195,13723737,"/fast/home/franz.srambical/jafar/utils/nn.py",10305,0,"",python,selection_command +4196,13723920,"/fast/home/franz.srambical/jafar/utils/nn.py",10307,0,"",python,selection_command +4197,13724332,"/fast/home/franz.srambical/jafar/utils/nn.py",10307,1,"j",python,selection_command +4198,13724425,"/fast/home/franz.srambical/jafar/utils/nn.py",10307,32,"jnp.pad(\n mask, (",python,selection_command +4199,13724584,"/fast/home/franz.srambical/jafar/utils/nn.py",10307,103,"jnp.pad(\n mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constan",python,selection_command +4200,13724925,"/fast/home/franz.srambical/jafar/utils/nn.py",10307,132,"jnp.pad(\n mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )\n",python,selection_command +4201,13725194,"/fast/home/franz.srambical/jafar/utils/nn.py",10307,131,"jnp.pad(\n mask, ((0, 0), (0, 0), (0, pad_size), (0, pad_size)), \n constant_values=False\n )",python,selection_command +4202,13725560,"/fast/home/franz.srambical/jafar/utils/nn.py",10307,131,"",python,content 
+4203,13727762,"/fast/home/franz.srambical/jafar/utils/nn.py",10307,0,"_",python,content +4204,13727762,"/fast/home/franz.srambical/jafar/utils/nn.py",10308,0,"",python,selection_keyboard +4205,13727971,"/fast/home/franz.srambical/jafar/utils/nn.py",10308,0,"p",python,content +4206,13727972,"/fast/home/franz.srambical/jafar/utils/nn.py",10309,0,"",python,selection_keyboard +4207,13728035,"/fast/home/franz.srambical/jafar/utils/nn.py",10309,0,"a",python,content +4208,13728035,"/fast/home/franz.srambical/jafar/utils/nn.py",10310,0,"",python,selection_keyboard +4209,13728190,"/fast/home/franz.srambical/jafar/utils/nn.py",10310,0,"d",python,content +4210,13728190,"/fast/home/franz.srambical/jafar/utils/nn.py",10311,0,"",python,selection_keyboard +4211,13728922,"/fast/home/franz.srambical/jafar/utils/nn.py",10311,0,"()",python,content +4212,13728923,"/fast/home/franz.srambical/jafar/utils/nn.py",10312,0,"",python,selection_keyboard +4213,13729624,"/fast/home/franz.srambical/jafar/utils/nn.py",10312,0,"_",python,content +4214,13729624,"/fast/home/franz.srambical/jafar/utils/nn.py",10313,0,"",python,selection_keyboard +4215,13729951,"/fast/home/franz.srambical/jafar/utils/nn.py",10313,0,"r",python,content +4216,13729951,"/fast/home/franz.srambical/jafar/utils/nn.py",10314,0,"",python,selection_keyboard +4217,13730024,"/fast/home/franz.srambical/jafar/utils/nn.py",10314,0,"e",python,content +4218,13730025,"/fast/home/franz.srambical/jafar/utils/nn.py",10315,0,"",python,selection_keyboard +4219,13730209,"/fast/home/franz.srambical/jafar/utils/nn.py",10315,0,"a",python,content +4220,13730209,"/fast/home/franz.srambical/jafar/utils/nn.py",10316,0,"",python,selection_keyboard +4221,13730542,"/fast/home/franz.srambical/jafar/utils/nn.py",10316,0,"rrange(mask)",python,content +4222,13730744,"/fast/home/franz.srambical/jafar/utils/nn.py",10327,0,"",python,selection_command +4223,13733362,"/fast/home/franz.srambical/jafar/utils/nn.py",10342,0,"",python,selection_command +4224,13733550,"/fast/home/franz.srambical/jafar/utils/nn.py",10329,0,"\n ",python,content +4225,13733568,"/fast/home/franz.srambical/jafar/utils/nn.py",10338,4,"",python,content +4226,13733733,"/fast/home/franz.srambical/jafar/utils/nn.py",10338,0,"#",python,content +4227,13733733,"/fast/home/franz.srambical/jafar/utils/nn.py",10339,0,"",python,selection_keyboard +4228,13733795,"/fast/home/franz.srambical/jafar/utils/nn.py",10339,0," ",python,content +4229,13733795,"/fast/home/franz.srambical/jafar/utils/nn.py",10340,0,"",python,selection_keyboard +4230,13734233,"/fast/home/franz.srambical/jafar/utils/nn.py",10339,0,"",python,selection_command +4231,13734580,"/fast/home/franz.srambical/jafar/utils/nn.py",10330,11,"",python,content +4232,13734596,"/fast/home/franz.srambical/jafar/utils/nn.py",10338,0,"",python,selection_command +4233,13734829,"/fast/home/franz.srambical/jafar/utils/nn.py",10343,0,"\n ",python,content +4234,13735012,"/fast/home/franz.srambical/jafar/utils/nn.py",10356,0,"#",python,content +4235,13735012,"/fast/home/franz.srambical/jafar/utils/nn.py",10357,0,"",python,selection_keyboard +4236,13735027,"/fast/home/franz.srambical/jafar/utils/nn.py",10357,0," ",python,content +4237,13735028,"/fast/home/franz.srambical/jafar/utils/nn.py",10358,0,"",python,selection_keyboard +4238,13735286,"/fast/home/franz.srambical/jafar/utils/nn.py",10358,0,"F",python,content +4239,13735287,"/fast/home/franz.srambical/jafar/utils/nn.py",10359,0,"",python,selection_keyboard 
+4240,13735355,"/fast/home/franz.srambical/jafar/utils/nn.py",10359,0,"I",python,content +4241,13735355,"/fast/home/franz.srambical/jafar/utils/nn.py",10360,0,"",python,selection_keyboard +4242,13735443,"/fast/home/franz.srambical/jafar/utils/nn.py",10360,0,"X",python,content +4243,13735443,"/fast/home/franz.srambical/jafar/utils/nn.py",10361,0,"",python,selection_keyboard +4244,13735580,"/fast/home/franz.srambical/jafar/utils/nn.py",10361,0,"M",python,content +4245,13735580,"/fast/home/franz.srambical/jafar/utils/nn.py",10362,0,"",python,selection_keyboard +4246,13735695,"/fast/home/franz.srambical/jafar/utils/nn.py",10362,0,"E",python,content +4247,13735695,"/fast/home/franz.srambical/jafar/utils/nn.py",10363,0,"",python,selection_keyboard +4248,13735761,"/fast/home/franz.srambical/jafar/utils/nn.py",10363,0,":",python,content +4249,13735761,"/fast/home/franz.srambical/jafar/utils/nn.py",10364,0,"",python,selection_keyboard +4250,13735926,"/fast/home/franz.srambical/jafar/utils/nn.py",10364,0," ",python,content +4251,13735927,"/fast/home/franz.srambical/jafar/utils/nn.py",10365,0,"",python,selection_keyboard +4252,13736757,"/fast/home/franz.srambical/jafar/utils/nn.py",10364,1,"",python,content +4253,13736878,"/fast/home/franz.srambical/jafar/utils/nn.py",10363,1,"",python,content +4254,13736910,"/fast/home/franz.srambical/jafar/utils/nn.py",10363,0," ",python,content +4255,13736910,"/fast/home/franz.srambical/jafar/utils/nn.py",10364,0,"",python,selection_keyboard +4256,13737081,"/fast/home/franz.srambical/jafar/utils/nn.py",10364,0,"()",python,content +4257,13737081,"/fast/home/franz.srambical/jafar/utils/nn.py",10365,0,"",python,selection_keyboard +4258,13737268,"/fast/home/franz.srambical/jafar/utils/nn.py",10365,0,"f",python,content +4259,13737268,"/fast/home/franz.srambical/jafar/utils/nn.py",10366,0,"",python,selection_keyboard +4260,13737495,"/fast/home/franz.srambical/jafar/utils/nn.py",10366,0,"s",python,content +4261,13737495,"/fast/home/franz.srambical/jafar/utils/nn.py",10367,0,"",python,selection_keyboard +4262,13737621,"/fast/home/franz.srambical/jafar/utils/nn.py",10367,0,".",python,content +4263,13737621,"/fast/home/franz.srambical/jafar/utils/nn.py",10368,0,"",python,selection_keyboard +4264,13737865,"/fast/home/franz.srambical/jafar/utils/nn.py",10367,1,"",python,content +4265,13737994,"/fast/home/franz.srambical/jafar/utils/nn.py",10366,1,"",python,content +4266,13738128,"/fast/home/franz.srambical/jafar/utils/nn.py",10366,0,".",python,content +4267,13738129,"/fast/home/franz.srambical/jafar/utils/nn.py",10367,0,"",python,selection_keyboard +4268,13738254,"/fast/home/franz.srambical/jafar/utils/nn.py",10367,0,"s",python,content +4269,13738254,"/fast/home/franz.srambical/jafar/utils/nn.py",10368,0,"",python,selection_keyboard +4270,13738255,"/fast/home/franz.srambical/jafar/utils/nn.py",10368,0,"r",python,content +4271,13738255,"/fast/home/franz.srambical/jafar/utils/nn.py",10369,0,"",python,selection_keyboard +4272,13738412,"/fast/home/franz.srambical/jafar/utils/nn.py",10369,0,"a",python,content +4273,13738412,"/fast/home/franz.srambical/jafar/utils/nn.py",10370,0,"",python,selection_keyboard +4274,13738432,"/fast/home/franz.srambical/jafar/utils/nn.py",10370,0,"m",python,content +4275,13738433,"/fast/home/franz.srambical/jafar/utils/nn.py",10371,0,"",python,selection_keyboard +4276,13738648,"/fast/home/franz.srambical/jafar/utils/nn.py",10371,0,"b",python,content +4277,13738649,"/fast/home/franz.srambical/jafar/utils/nn.py",10372,0,"",python,selection_keyboard 
+4278,13738820,"/fast/home/franz.srambical/jafar/utils/nn.py",10372,0,"c",python,content +4279,13738820,"/fast/home/franz.srambical/jafar/utils/nn.py",10373,0,"",python,selection_keyboard +4280,13739129,"/fast/home/franz.srambical/jafar/utils/nn.py",10372,1,"",python,content +4281,13739364,"/fast/home/franz.srambical/jafar/utils/nn.py",10372,0,"i",python,content +4282,13739364,"/fast/home/franz.srambical/jafar/utils/nn.py",10373,0,"",python,selection_keyboard +4283,13739413,"/fast/home/franz.srambical/jafar/utils/nn.py",10373,0,"c",python,content +4284,13739413,"/fast/home/franz.srambical/jafar/utils/nn.py",10374,0,"",python,selection_keyboard +4285,13739454,"/fast/home/franz.srambical/jafar/utils/nn.py",10374,0,"a",python,content +4286,13739455,"/fast/home/franz.srambical/jafar/utils/nn.py",10375,0,"",python,selection_keyboard +4287,13739562,"/fast/home/franz.srambical/jafar/utils/nn.py",10375,0,"l",python,content +4288,13739563,"/fast/home/franz.srambical/jafar/utils/nn.py",10376,0,"",python,selection_keyboard +4289,13739737,"/fast/home/franz.srambical/jafar/utils/nn.py",10376,1,")",python,content +4290,13739737,"/fast/home/franz.srambical/jafar/utils/nn.py",10377,0,"",python,selection_keyboard +4291,13739870,"/fast/home/franz.srambical/jafar/utils/nn.py",10377,0,":",python,content +4292,13739870,"/fast/home/franz.srambical/jafar/utils/nn.py",10378,0,"",python,selection_keyboard +4293,13739965,"/fast/home/franz.srambical/jafar/utils/nn.py",10378,0," ",python,content +4294,13739965,"/fast/home/franz.srambical/jafar/utils/nn.py",10379,0,"",python,selection_keyboard +4295,13740233,"/fast/home/franz.srambical/jafar/utils/nn.py",10379,0,"I",python,content +4296,13740233,"/fast/home/franz.srambical/jafar/utils/nn.py",10380,0,"",python,selection_keyboard +4297,13740431,"/fast/home/franz.srambical/jafar/utils/nn.py",10380,0,"n",python,content +4298,13740431,"/fast/home/franz.srambical/jafar/utils/nn.py",10381,0,"",python,selection_keyboard +4299,13740514,"/fast/home/franz.srambical/jafar/utils/nn.py",10381,0,"v",python,content +4300,13740514,"/fast/home/franz.srambical/jafar/utils/nn.py",10382,0,"",python,selection_keyboard +4301,13740599,"/fast/home/franz.srambical/jafar/utils/nn.py",10382,0,"e",python,content +4302,13740599,"/fast/home/franz.srambical/jafar/utils/nn.py",10383,0,"",python,selection_keyboard +4303,13740682,"/fast/home/franz.srambical/jafar/utils/nn.py",10383,0,"s",python,content +4304,13740682,"/fast/home/franz.srambical/jafar/utils/nn.py",10384,0,"",python,selection_keyboard +4305,13740849,"/fast/home/franz.srambical/jafar/utils/nn.py",10384,0,"t",python,content +4306,13740849,"/fast/home/franz.srambical/jafar/utils/nn.py",10385,0,"",python,selection_keyboard +4307,13740953,"/fast/home/franz.srambical/jafar/utils/nn.py",10385,0,"i",python,content +4308,13740954,"/fast/home/franz.srambical/jafar/utils/nn.py",10386,0,"",python,selection_keyboard +4309,13741031,"/fast/home/franz.srambical/jafar/utils/nn.py",10386,0,"g",python,content +4310,13741032,"/fast/home/franz.srambical/jafar/utils/nn.py",10387,0,"",python,selection_keyboard +4311,13741097,"/fast/home/franz.srambical/jafar/utils/nn.py",10387,0,"a",python,content +4312,13741097,"/fast/home/franz.srambical/jafar/utils/nn.py",10388,0,"",python,selection_keyboard +4313,13741198,"/fast/home/franz.srambical/jafar/utils/nn.py",10388,0,"t",python,content +4314,13741198,"/fast/home/franz.srambical/jafar/utils/nn.py",10389,0,"",python,selection_keyboard 
+4315,13741282,"/fast/home/franz.srambical/jafar/utils/nn.py",10389,0,"e",python,content +4316,13741282,"/fast/home/franz.srambical/jafar/utils/nn.py",10390,0,"",python,selection_keyboard +4317,13741371,"/fast/home/franz.srambical/jafar/utils/nn.py",10390,0," ",python,content +4318,13741372,"/fast/home/franz.srambical/jafar/utils/nn.py",10391,0,"",python,selection_keyboard +4319,13742662,"/fast/home/franz.srambical/jafar/utils/nn.py",10391,0,"w",python,content +4320,13742663,"/fast/home/franz.srambical/jafar/utils/nn.py",10392,0,"",python,selection_keyboard +4321,13742742,"/fast/home/franz.srambical/jafar/utils/nn.py",10392,0,"h",python,content +4322,13742742,"/fast/home/franz.srambical/jafar/utils/nn.py",10393,0,"",python,selection_keyboard +4323,13742833,"/fast/home/franz.srambical/jafar/utils/nn.py",10393,0,"e",python,content +4324,13742834,"/fast/home/franz.srambical/jafar/utils/nn.py",10394,0,"",python,selection_keyboard +4325,13742880,"/fast/home/franz.srambical/jafar/utils/nn.py",10394,0,"t",python,content +4326,13742880,"/fast/home/franz.srambical/jafar/utils/nn.py",10395,0,"",python,selection_keyboard +4327,13742979,"/fast/home/franz.srambical/jafar/utils/nn.py",10395,0,"e",python,content +4328,13742979,"/fast/home/franz.srambical/jafar/utils/nn.py",10396,0,"",python,selection_keyboard +4329,13743064,"/fast/home/franz.srambical/jafar/utils/nn.py",10396,0,"r",python,content +4330,13743064,"/fast/home/franz.srambical/jafar/utils/nn.py",10397,0,"",python,selection_keyboard +4331,13743540,"/fast/home/franz.srambical/jafar/utils/nn.py",10397,0,"/",python,content +4332,13743541,"/fast/home/franz.srambical/jafar/utils/nn.py",10398,0,"",python,selection_keyboard +4333,13743984,"/fast/home/franz.srambical/jafar/utils/nn.py",10397,1,"",python,content +4334,13744146,"/fast/home/franz.srambical/jafar/utils/nn.py",10396,1,"",python,content +4335,13744266,"/fast/home/franz.srambical/jafar/utils/nn.py",10395,1,"",python,content +4336,13744434,"/fast/home/franz.srambical/jafar/utils/nn.py",10395,0,"h",python,content +4337,13744435,"/fast/home/franz.srambical/jafar/utils/nn.py",10396,0,"",python,selection_keyboard +4338,13744532,"/fast/home/franz.srambical/jafar/utils/nn.py",10396,0,"e",python,content +4339,13744532,"/fast/home/franz.srambical/jafar/utils/nn.py",10397,0,"",python,selection_keyboard +4340,13744572,"/fast/home/franz.srambical/jafar/utils/nn.py",10397,0,"r",python,content +4341,13744572,"/fast/home/franz.srambical/jafar/utils/nn.py",10398,0,"",python,selection_keyboard +4342,13744688,"/fast/home/franz.srambical/jafar/utils/nn.py",10398,0,"/",python,content +4343,13744688,"/fast/home/franz.srambical/jafar/utils/nn.py",10399,0,"",python,selection_keyboard +4344,13745161,"/fast/home/franz.srambical/jafar/utils/nn.py",10399,0,"w",python,content +4345,13745162,"/fast/home/franz.srambical/jafar/utils/nn.py",10400,0,"",python,selection_keyboard +4346,13745268,"/fast/home/franz.srambical/jafar/utils/nn.py",10400,0,"h",python,content +4347,13745268,"/fast/home/franz.srambical/jafar/utils/nn.py",10401,0,"",python,selection_keyboard +4348,13745337,"/fast/home/franz.srambical/jafar/utils/nn.py",10401,0,"y",python,content +4349,13745337,"/fast/home/franz.srambical/jafar/utils/nn.py",10402,0,"",python,selection_keyboard +4350,13745465,"/fast/home/franz.srambical/jafar/utils/nn.py",10402,0," ",python,content +4351,13745465,"/fast/home/franz.srambical/jafar/utils/nn.py",10403,0,"",python,selection_keyboard +4352,13745702,"/fast/home/franz.srambical/jafar/utils/nn.py",10403,0,"t",python,content 
+4353,13745703,"/fast/home/franz.srambical/jafar/utils/nn.py",10404,0,"",python,selection_keyboard +4354,13745769,"/fast/home/franz.srambical/jafar/utils/nn.py",10404,0,"h",python,content +4355,13745770,"/fast/home/franz.srambical/jafar/utils/nn.py",10405,0,"",python,selection_keyboard +4356,13745820,"/fast/home/franz.srambical/jafar/utils/nn.py",10405,0,"i",python,content +4357,13745820,"/fast/home/franz.srambical/jafar/utils/nn.py",10406,0,"",python,selection_keyboard +4358,13745871,"/fast/home/franz.srambical/jafar/utils/nn.py",10406,0,"s",python,content +4359,13745872,"/fast/home/franz.srambical/jafar/utils/nn.py",10407,0,"",python,selection_keyboard +4360,13745952,"/fast/home/franz.srambical/jafar/utils/nn.py",10407,0," ",python,content +4361,13745952,"/fast/home/franz.srambical/jafar/utils/nn.py",10408,0,"",python,selection_keyboard +4362,13746019,"/fast/home/franz.srambical/jafar/utils/nn.py",10408,0,"i",python,content +4363,13746020,"/fast/home/franz.srambical/jafar/utils/nn.py",10409,0,"",python,selection_keyboard +4364,13746070,"/fast/home/franz.srambical/jafar/utils/nn.py",10409,0,"s",python,content +4365,13746070,"/fast/home/franz.srambical/jafar/utils/nn.py",10410,0,"",python,selection_keyboard +4366,13746134,"/fast/home/franz.srambical/jafar/utils/nn.py",10410,0," ",python,content +4367,13746135,"/fast/home/franz.srambical/jafar/utils/nn.py",10411,0,"",python,selection_keyboard +4368,13746219,"/fast/home/franz.srambical/jafar/utils/nn.py",10411,0,"n",python,content +4369,13746220,"/fast/home/franz.srambical/jafar/utils/nn.py",10412,0,"",python,selection_keyboard +4370,13746268,"/fast/home/franz.srambical/jafar/utils/nn.py",10412,0,"e",python,content +4371,13746269,"/fast/home/franz.srambical/jafar/utils/nn.py",10413,0,"",python,selection_keyboard +4372,13746421,"/fast/home/franz.srambical/jafar/utils/nn.py",10413,0,"e",python,content +4373,13746421,"/fast/home/franz.srambical/jafar/utils/nn.py",10414,0,"",python,selection_keyboard +4374,13746610,"/fast/home/franz.srambical/jafar/utils/nn.py",10414,0,"d",python,content +4375,13746610,"/fast/home/franz.srambical/jafar/utils/nn.py",10415,0,"",python,selection_keyboard +4376,13746761,"/fast/home/franz.srambical/jafar/utils/nn.py",10415,0,"d",python,content +4377,13746761,"/fast/home/franz.srambical/jafar/utils/nn.py",10416,0,"",python,selection_keyboard +4378,13747068,"/fast/home/franz.srambical/jafar/utils/nn.py",10416,0,"d",python,content +4379,13747068,"/fast/home/franz.srambical/jafar/utils/nn.py",10417,0,"",python,selection_keyboard +4380,13747351,"/fast/home/franz.srambical/jafar/utils/nn.py",10416,1,"",python,content +4381,13747485,"/fast/home/franz.srambical/jafar/utils/nn.py",10415,1,"",python,content +4382,13747516,"/fast/home/franz.srambical/jafar/utils/nn.py",10415,0,"e",python,content +4383,13747517,"/fast/home/franz.srambical/jafar/utils/nn.py",10416,0,"",python,selection_keyboard +4384,13747671,"/fast/home/franz.srambical/jafar/utils/nn.py",10416,0,"d",python,content +4385,13747672,"/fast/home/franz.srambical/jafar/utils/nn.py",10417,0,"",python,selection_keyboard +4386,13747885,"/fast/home/franz.srambical/jafar/utils/nn.py",10416,0,"",python,selection_command +4387,13750104,"TERMINAL",0,0,"q",,terminal_output +4388,13750202,"TERMINAL",0,0,"u",,terminal_output +4389,13750286,"TERMINAL",0,0,"it",,terminal_output +4390,13750539,"TERMINAL",0,0,"()",,terminal_output +4391,13750781,"TERMINAL",0,0,"\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following 
exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 231, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 134, in __call__\r\n z = self.temporal_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 317, in attention_fn\r\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 317, in attention_fn\r\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 90, in trace_dispatch\r\n return self.dispatch_line(frame)\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 115, in dispatch_line\r\n if self.quitting: raise BdbQuit\r\nbdb.BdbQuit\r\n",,terminal_output +4392,13751549,"TERMINAL",0,0,"(Pdb) ",,terminal_output +4393,13751774,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +4394,13752663,"/fast/home/franz.srambical/jafar/utils/nn.py",10342,0,"",python,selection_command +4395,13752910,"/fast/home/franz.srambical/jafar/utils/nn.py",10328,0,"",python,selection_command +4396,13752941,"/fast/home/franz.srambical/jafar/utils/nn.py",10283,0,"",python,selection_command 
+4397,13752974,"/fast/home/franz.srambical/jafar/utils/nn.py",10240,0,"",python,selection_command +4398,13753005,"/fast/home/franz.srambical/jafar/utils/nn.py",10187,0,"",python,selection_command +4399,13753041,"/fast/home/franz.srambical/jafar/utils/nn.py",10139,0,"",python,selection_command +4400,13753074,"/fast/home/franz.srambical/jafar/utils/nn.py",10066,0,"",python,selection_command +4401,13753108,"/fast/home/franz.srambical/jafar/utils/nn.py",10062,0,"",python,selection_command +4402,13753143,"/fast/home/franz.srambical/jafar/utils/nn.py",9986,0,"",python,selection_command +4403,13753175,"/fast/home/franz.srambical/jafar/utils/nn.py",9901,0,"",python,selection_command +4404,13753209,"/fast/home/franz.srambical/jafar/utils/nn.py",9828,0,"",python,selection_command +4405,13753244,"/fast/home/franz.srambical/jafar/utils/nn.py",9826,0,"",python,selection_command +4406,13753282,"/fast/home/franz.srambical/jafar/utils/nn.py",9783,0,"",python,selection_command +4407,13753316,"/fast/home/franz.srambical/jafar/utils/nn.py",9744,0,"",python,selection_command +4408,13753373,"/fast/home/franz.srambical/jafar/utils/nn.py",9702,0,"",python,selection_command +4409,13753396,"/fast/home/franz.srambical/jafar/utils/nn.py",9700,0,"",python,selection_command +4410,13753423,"/fast/home/franz.srambical/jafar/utils/nn.py",9647,0,"",python,selection_command +4411,13753447,"/fast/home/franz.srambical/jafar/utils/nn.py",9588,0,"",python,selection_command +4412,13753481,"/fast/home/franz.srambical/jafar/utils/nn.py",9550,0,"",python,selection_command +4413,13753517,"/fast/home/franz.srambical/jafar/utils/nn.py",9548,0,"",python,selection_command +4414,13753550,"/fast/home/franz.srambical/jafar/utils/nn.py",9505,0,"",python,selection_command +4415,13753584,"/fast/home/franz.srambical/jafar/utils/nn.py",9469,0,"",python,selection_command +4416,13753624,"/fast/home/franz.srambical/jafar/utils/nn.py",9467,0,"",python,selection_command +4417,13753652,"/fast/home/franz.srambical/jafar/utils/nn.py",9396,0,"",python,selection_command +4418,13753682,"/fast/home/franz.srambical/jafar/utils/nn.py",9376,0,"",python,selection_command +4419,13753719,"/fast/home/franz.srambical/jafar/utils/nn.py",9374,0,"",python,selection_command +4420,13753750,"/fast/home/franz.srambical/jafar/utils/nn.py",9307,0,"",python,selection_command +4421,13753785,"/fast/home/franz.srambical/jafar/utils/nn.py",9281,0,"",python,selection_command +4422,13753927,"/fast/home/franz.srambical/jafar/utils/nn.py",9279,0,"",python,selection_command +4423,13754078,"/fast/home/franz.srambical/jafar/utils/nn.py",9213,0,"",python,selection_command +4424,13754249,"/fast/home/franz.srambical/jafar/utils/nn.py",9188,0,"",python,selection_command +4425,13754527,"/fast/home/franz.srambical/jafar/utils/nn.py",9144,0,"",python,selection_command +4426,13754659,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,0,"",python,selection_command +4427,13754944,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,1,"i",python,selection_command +4428,13755118,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,1,"i",python,selection_command +4429,13755271,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,1,"i",python,selection_command +4430,13755530,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,1,"i",python,selection_command +4431,13755782,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,0,"",python,selection_command +4432,13755909,"/fast/home/franz.srambical/jafar/utils/nn.py",9198,0,"#",python,content 
+4433,13755909,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,0,"#",python,content +4434,13755909,"/fast/home/franz.srambical/jafar/utils/nn.py",9153,0,"",python,selection_keyboard +4435,13755942,"/fast/home/franz.srambical/jafar/utils/nn.py",9200,0," ",python,content +4436,13755942,"/fast/home/franz.srambical/jafar/utils/nn.py",9153,0," ",python,content +4437,13755942,"/fast/home/franz.srambical/jafar/utils/nn.py",9154,0,"",python,selection_keyboard +4438,13756158,"/fast/home/franz.srambical/jafar/utils/nn.py",9153,0,"",python,selection_command +4439,13757456,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ bash experiments/sample.sh ",,terminal_output +4440,13757743,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4441,13769431,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +4442,13777721,"TERMINAL",0,0,"2025-07-27 13:08:17.525138: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4443,13779225,"TERMINAL",0,0,"2025-07-27 13:08:19.027879: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4444,13782796,"TERMINAL",0,0,"2025-07-27 13:08:22.590493: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4445,13783588,"TERMINAL",0,0,"DEBUG: mask.shape (1, 921, 1, 1, 1)\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 231, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 134, in __call__\r\n z = self.temporal_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 333, in attention_fn\r\n output_4d = jax.nn.dot_product_attention(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py"", line 1152, in dot_product_attention\r\n _check_shape_and_dtype(mask, [-1] * 4, jnp.bool_, 'mask')\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py"", line 1144, in _check_shape_and_dtype\r\n raise ValueError(f""{name} dtype should be {dtype}, but got {t.dtype}"")\r\nValueError: mask dtype should be , but got float32\r\n",,terminal_output +4446,13784640,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +4447,13792294,"/fast/home/franz.srambical/jafar/utils/nn.py",10308,0,"",python,selection_mouse +4448,13799831,"/fast/home/franz.srambical/jafar/utils/nn.py",10288,0,"\n ",python,content +4449,13802464,"/fast/home/franz.srambical/jafar/utils/nn.py",10289,12,"",python,content +4450,13803324,"/fast/home/franz.srambical/jafar/utils/nn.py",10289,1,"",python,content +4451,13803324,"/fast/home/franz.srambical/jafar/utils/nn.py",10301,0,"",python,selection_command 
+4452,13806260,"/fast/home/franz.srambical/jafar/utils/nn.py",10258,0,"",python,selection_command +4453,13806491,"/fast/home/franz.srambical/jafar/utils/nn.py",10205,0,"",python,selection_command +4454,13806517,"/fast/home/franz.srambical/jafar/utils/nn.py",10176,0,"",python,selection_command +4455,13806550,"/fast/home/franz.srambical/jafar/utils/nn.py",10083,0,"",python,selection_command +4456,13806583,"/fast/home/franz.srambical/jafar/utils/nn.py",10070,0,"",python,selection_command +4457,13806617,"/fast/home/franz.srambical/jafar/utils/nn.py",10006,0,"",python,selection_command +4458,13806656,"/fast/home/franz.srambical/jafar/utils/nn.py",9930,0,"",python,selection_command +4459,13807324,"/fast/home/franz.srambical/jafar/utils/nn.py",9845,0,"",python,selection_command +4460,13808061,"/fast/home/franz.srambical/jafar/utils/nn.py",9930,0,"",python,selection_command +4461,13808286,"/fast/home/franz.srambical/jafar/utils/nn.py",10006,0,"",python,selection_command +4462,13808318,"/fast/home/franz.srambical/jafar/utils/nn.py",10070,0,"",python,selection_command +4463,13808344,"/fast/home/franz.srambical/jafar/utils/nn.py",10083,0,"",python,selection_command +4464,13808387,"/fast/home/franz.srambical/jafar/utils/nn.py",10176,0,"",python,selection_command +4465,13808429,"/fast/home/franz.srambical/jafar/utils/nn.py",10205,0,"",python,selection_command +4466,13808768,"/fast/home/franz.srambical/jafar/utils/nn.py",10258,0,"",python,selection_command +4467,13808956,"/fast/home/franz.srambical/jafar/utils/nn.py",10301,0,"",python,selection_command +4468,13811834,"/fast/home/franz.srambical/jafar/utils/nn.py",10333,0,"\n ",python,content +4469,13813030,"/fast/home/franz.srambical/jafar/utils/nn.py",10346,0,"m",python,content +4470,13813030,"/fast/home/franz.srambical/jafar/utils/nn.py",10347,0,"",python,selection_keyboard +4471,13813110,"/fast/home/franz.srambical/jafar/utils/nn.py",10347,0,"a",python,content +4472,13813111,"/fast/home/franz.srambical/jafar/utils/nn.py",10348,0,"",python,selection_keyboard +4473,13813158,"/fast/home/franz.srambical/jafar/utils/nn.py",10348,0,"s",python,content +4474,13813159,"/fast/home/franz.srambical/jafar/utils/nn.py",10349,0,"",python,selection_keyboard +4475,13813219,"/fast/home/franz.srambical/jafar/utils/nn.py",10349,0,"k",python,content +4476,13813220,"/fast/home/franz.srambical/jafar/utils/nn.py",10350,0,"",python,selection_keyboard +4477,13814349,"/fast/home/franz.srambical/jafar/utils/nn.py",10350,0,"_",python,content +4478,13814350,"/fast/home/franz.srambical/jafar/utils/nn.py",10351,0,"",python,selection_keyboard +4479,13814596,"/fast/home/franz.srambical/jafar/utils/nn.py",10351,0,"4",python,content +4480,13814596,"/fast/home/franz.srambical/jafar/utils/nn.py",10352,0,"",python,selection_keyboard +4481,13814876,"/fast/home/franz.srambical/jafar/utils/nn.py",10352,0,"d",python,content +4482,13814876,"/fast/home/franz.srambical/jafar/utils/nn.py",10353,0,"",python,selection_keyboard +4483,13815238,"/fast/home/franz.srambical/jafar/utils/nn.py",10353,0," ",python,content +4484,13815238,"/fast/home/franz.srambical/jafar/utils/nn.py",10354,0,"",python,selection_keyboard +4485,13815397,"/fast/home/franz.srambical/jafar/utils/nn.py",10354,0,"=",python,content +4486,13815397,"/fast/home/franz.srambical/jafar/utils/nn.py",10355,0,"",python,selection_keyboard +4487,13815530,"/fast/home/franz.srambical/jafar/utils/nn.py",10355,0," ",python,content +4488,13815530,"/fast/home/franz.srambical/jafar/utils/nn.py",10356,0,"",python,selection_keyboard 
+4489,13815680,"/fast/home/franz.srambical/jafar/utils/nn.py",10356,0,"m",python,content +4490,13815680,"/fast/home/franz.srambical/jafar/utils/nn.py",10357,0,"",python,selection_keyboard +4491,13815760,"/fast/home/franz.srambical/jafar/utils/nn.py",10357,0,"a",python,content +4492,13815760,"/fast/home/franz.srambical/jafar/utils/nn.py",10358,0,"",python,selection_keyboard +4493,13815827,"/fast/home/franz.srambical/jafar/utils/nn.py",10358,0,"s",python,content +4494,13815828,"/fast/home/franz.srambical/jafar/utils/nn.py",10359,0,"",python,selection_keyboard +4495,13815909,"/fast/home/franz.srambical/jafar/utils/nn.py",10359,0,"k",python,content +4496,13815910,"/fast/home/franz.srambical/jafar/utils/nn.py",10360,0,"",python,selection_keyboard +4497,13816509,"/fast/home/franz.srambical/jafar/utils/nn.py",10360,0,"4",python,content +4498,13816510,"/fast/home/franz.srambical/jafar/utils/nn.py",10361,0,"",python,selection_keyboard +4499,13817000,"/fast/home/franz.srambical/jafar/utils/nn.py",10360,1,"",python,content +4500,13817193,"/fast/home/franz.srambical/jafar/utils/nn.py",10360,0,"_",python,content +4501,13817194,"/fast/home/franz.srambical/jafar/utils/nn.py",10361,0,"",python,selection_keyboard +4502,13817343,"/fast/home/franz.srambical/jafar/utils/nn.py",10361,0,"4",python,content +4503,13817343,"/fast/home/franz.srambical/jafar/utils/nn.py",10362,0,"",python,selection_keyboard +4504,13817545,"/fast/home/franz.srambical/jafar/utils/nn.py",10362,0,"d",python,content +4505,13817545,"/fast/home/franz.srambical/jafar/utils/nn.py",10363,0,"",python,selection_keyboard +4506,13817666,"/fast/home/franz.srambical/jafar/utils/nn.py",10363,0,".",python,content +4507,13817666,"/fast/home/franz.srambical/jafar/utils/nn.py",10364,0,"",python,selection_keyboard +4508,13818686,"/fast/home/franz.srambical/jafar/utils/nn.py",10363,1,"",python,content +4509,13819113,"/fast/home/franz.srambical/jafar/utils/nn.py",10362,0,"",python,selection_command +4510,13819331,"/fast/home/franz.srambical/jafar/utils/nn.py",10356,0,"",python,selection_command +4511,13820337,"/fast/home/franz.srambical/jafar/utils/nn.py",10363,0,"",python,selection_command +4512,13820734,"/fast/home/franz.srambical/jafar/utils/nn.py",10363,0,"a",python,content +4513,13820734,"/fast/home/franz.srambical/jafar/utils/nn.py",10364,0,"",python,selection_keyboard +4514,13821021,"/fast/home/franz.srambical/jafar/utils/nn.py",10363,1,"",python,content +4515,13821184,"/fast/home/franz.srambical/jafar/utils/nn.py",10363,0,".",python,content +4516,13821184,"/fast/home/franz.srambical/jafar/utils/nn.py",10364,0,"",python,selection_keyboard +4517,13821229,"/fast/home/franz.srambical/jafar/utils/nn.py",10364,0,"a",python,content +4518,13821229,"/fast/home/franz.srambical/jafar/utils/nn.py",10365,0,"",python,selection_keyboard +4519,13821305,"/fast/home/franz.srambical/jafar/utils/nn.py",10365,0,"s",python,content +4520,13821305,"/fast/home/franz.srambical/jafar/utils/nn.py",10366,0,"",python,selection_keyboard +4521,13822142,"/fast/home/franz.srambical/jafar/utils/nn.py",10366,0,"type(jnp.bool_)",python,content +4522,13822350,"/fast/home/franz.srambical/jafar/utils/nn.py",10380,0,"",python,selection_command +4523,13822830,"/fast/home/franz.srambical/jafar/utils/nn.py",10334,0,"",python,selection_command +4524,13824763,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ bash experiments/sample.sh ",,terminal_output +4525,13825030,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output 
+4526,13831406,"/fast/home/franz.srambical/jafar/utils/nn.py",10289,0,"",python,selection_command +4527,13831542,"/fast/home/franz.srambical/jafar/utils/nn.py",10246,0,"",python,selection_command +4528,13831681,"/fast/home/franz.srambical/jafar/utils/nn.py",10193,0,"",python,selection_command +4529,13832027,"/fast/home/franz.srambical/jafar/utils/nn.py",10193,53,"",python,content +4530,13832037,"/fast/home/franz.srambical/jafar/utils/nn.py",10205,0,"",python,selection_command +4531,13836744,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +4532,13845186,"TERMINAL",0,0,"2025-07-27 13:09:24.987164: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4533,13846734,"TERMINAL",0,0,"2025-07-27 13:09:26.540652: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4534,13850418,"TERMINAL",0,0,"2025-07-27 13:09:30.220177: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4535,13851255,"TERMINAL",0,0,"DEBUG: mask.shape (1, 921, 1, 1, 1)\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 231, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 134, in __call__\r\n z = self.temporal_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 334, in attention_fn\r\n query=query_4d,\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py"", line 1204, in dot_product_attention\r\n out = cudnn_dot_product_attention(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py"", line 1981, in dot_product_attention\r\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py"", line 332, in check_layout\r\n raise ValueError(\r\nValueError: Bias must have same seq length as QKV, got 1 and 1\r\n",,terminal_output +4536,13852343,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +4537,13861824,"/fast/home/franz.srambical/jafar/utils/nn.py",10707,0,"",python,selection_command +4538,13868740,"/fast/home/franz.srambical/jafar/utils/nn.py",10806,0,"",python,selection_mouse +4539,13876784,"/fast/home/franz.srambical/jafar/utils/nn.py",10514,0,"",python,selection_mouse 
+4540,13915175,"/fast/home/franz.srambical/jafar/utils/nn.py",10342,0,"",python,selection_mouse +4541,13915177,"/fast/home/franz.srambical/jafar/utils/nn.py",10341,0,"",python,selection_command +4542,14340766,"/fast/home/franz.srambical/jafar/utils/nn.py",10355,0,"",python,selection_command +4543,14341061,"/fast/home/franz.srambical/jafar/utils/nn.py",10429,0,"",python,selection_command +4544,14341217,"/fast/home/franz.srambical/jafar/utils/nn.py",10514,0,"",python,selection_command +4545,14341353,"/fast/home/franz.srambical/jafar/utils/nn.py",10527,0,"",python,selection_command +4546,14347273,"/fast/home/franz.srambical/jafar/utils/nn.py",9325,0,"",python,selection_command +4547,14348402,"/fast/home/franz.srambical/jafar/utils/nn.py",8793,0,"",python,selection_keyboard +4548,14348642,"/fast/home/franz.srambical/jafar/utils/nn.py",8708,0,"",python,selection_command +4549,14348901,"/fast/home/franz.srambical/jafar/utils/nn.py",8793,0,"",python,selection_command +4550,14349157,"/fast/home/franz.srambical/jafar/utils/nn.py",8891,0,"",python,selection_command +4551,14349234,"/fast/home/franz.srambical/jafar/utils/nn.py",8931,0,"",python,selection_command +4552,14349398,"/fast/home/franz.srambical/jafar/utils/nn.py",8935,0,"",python,selection_command +4553,14349536,"/fast/home/franz.srambical/jafar/utils/nn.py",8940,0,"",python,selection_command +4554,14349671,"/fast/home/franz.srambical/jafar/utils/nn.py",8944,0,"",python,selection_command +4555,14349917,"/fast/home/franz.srambical/jafar/utils/nn.py",8956,0,"",python,selection_command +4556,14350139,"/fast/home/franz.srambical/jafar/utils/nn.py",8944,0,"",python,selection_command +4557,14352542,"/fast/home/franz.srambical/jafar/utils/nn.py",11016,0,"",python,selection_command +4558,14358326,"/fast/home/franz.srambical/jafar/utils/nn.py",10747,0,"",python,selection_command +4559,14358661,"/fast/home/franz.srambical/jafar/utils/nn.py",10079,0,"",python,selection_command +4560,14358863,"/fast/home/franz.srambical/jafar/utils/nn.py",9294,0,"",python,selection_command +4561,14359061,"/fast/home/franz.srambical/jafar/utils/nn.py",8107,0,"",python,selection_command +4562,14361495,"/fast/home/franz.srambical/jafar/utils/nn.py",2614,0,"",python,selection_command +4563,14369102,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +4564,14369102,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9464,0,"",python,selection_command +4565,14371556,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +4566,14371557,"/fast/home/franz.srambical/jafar/utils/nn.py",2614,0,"",python,selection_command +4567,14371858,"/fast/home/franz.srambical/jafar/utils/nn.py",11029,0,"",python,selection_command +4568,14372298,"/fast/home/franz.srambical/jafar/utils/nn.py",11005,0,"",python,selection_command +4569,14372556,"/fast/home/franz.srambical/jafar/utils/nn.py",11004,0,"",python,selection_command +4570,14372626,"/fast/home/franz.srambical/jafar/utils/nn.py",10925,0,"",python,selection_command +4571,14372626,"/fast/home/franz.srambical/jafar/utils/nn.py",10915,0,"",python,selection_command +4572,14372658,"/fast/home/franz.srambical/jafar/utils/nn.py",10882,0,"",python,selection_command +4573,14372687,"/fast/home/franz.srambical/jafar/utils/nn.py",10839,0,"",python,selection_command +4574,14372710,"/fast/home/franz.srambical/jafar/utils/nn.py",10813,0,"",python,selection_command +4575,14372744,"/fast/home/franz.srambical/jafar/utils/nn.py",10787,0,"",python,selection_command 
+4576,14372777,"/fast/home/franz.srambical/jafar/utils/nn.py",10759,0,"",python,selection_command +4577,14372811,"/fast/home/franz.srambical/jafar/utils/nn.py",10735,0,"",python,selection_command +4578,14372842,"/fast/home/franz.srambical/jafar/utils/nn.py",10707,0,"",python,selection_command +4579,14372878,"/fast/home/franz.srambical/jafar/utils/nn.py",10657,0,"",python,selection_command +4580,14372913,"/fast/home/franz.srambical/jafar/utils/nn.py",10587,0,"",python,selection_command +4581,14372945,"/fast/home/franz.srambical/jafar/utils/nn.py",10586,0,"",python,selection_command +4582,14372980,"/fast/home/franz.srambical/jafar/utils/nn.py",10515,0,"",python,selection_command +4583,14388450,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",0,0,"# Copyright 2024 The JAX Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport enum\nimport functools\nimport json\nimport math\nfrom typing import TypedDict\n\nimport jax\nfrom jax import dtypes\nfrom jax._src import core\nfrom jax._src import dispatch\nfrom jax._src.custom_partitioning import custom_partitioning\nfrom jax._src.interpreters import batching\nfrom jax._src.interpreters import mlir\nfrom jax._src.lib import cuda_versions\nfrom jax._src import xla_bridge\nfrom jax._src.lib.mlir import ir\nfrom jax._src.lib.mlir.dialects import hlo\nimport jax.numpy as jnp\nfrom jax.sharding import NamedSharding, PartitionSpec\n\nArray = jnp.ndarray\n\nclass FP8Params(TypedDict):\n amax_dQ: float # Amax of gradient of query\n amax_dK: float # Amax of gradient of key\n amax_dV: float # Amax of gradient of value\n amax_dP: float # Amax of gradient of state\n descale_q: float # Descaling factor of query\n descale_k: float # Descaling factor of key\n descale_v: float # Descaling factor of value\n descale_s: float # Descaling factor of attention score\n scale_s: float # Scale factor for S tensor\n scale_o: float # Scale factor for output\n descale_o: float # Descale factor for output (bwd)\n descale_dO: float # Descale factor for output gradient (bwd)\n descale_dP: float # Descale factor for P gradient tensor (bwd)\n scale_dQ: float # Scale factor for query gradient (bwd)\n scale_dK: float # Scale factor for key gradient (bwd)\n scale_dV: float # Scale factor for value gradient (bwd)\n scale_dP: float # Scale factor for state gradient (bwd)\n\n\nclass AttentionLayout(enum.Enum):\n BTNH = 0\n BNTH = 1\n\n\nclass MaskType(enum.Enum):\n NO_MASK = 0\n PADDING = 1\n CAUSAL = 2\n PADDING_CAUSAL = 3\n ALIBI = 4\n\n\ndef convert_mask_type_to_string(mask_type: MaskType) -> str:\n if mask_type == MaskType.NO_MASK:\n return ""NO_MASK""\n elif mask_type == MaskType.PADDING:\n return ""PADDING""\n elif mask_type == MaskType.CAUSAL:\n return ""CAUSAL""\n elif mask_type == MaskType.PADDING_CAUSAL:\n return ""PADDING_CAUSAL""\n elif mask_type == MaskType.ALIBI:\n return ""ALIBI""\n else:\n raise ValueError(f""Unexpected mask type: {mask_type}"")\n\ndef has_padding(mask_type: MaskType) -> bool:\n 
return mask_type == MaskType.PADDING or mask_type == MaskType.PADDING_CAUSAL\n\ndef should_export_dbias(bias_shape, query_shape, layout) -> bool:\n b_B, b_N, _, _ = bias_shape\n if layout == AttentionLayout.BNTH.value:\n _, q_N, _, _ = query_shape\n else:\n _, _, q_N, _ = query_shape\n return b_B == 1 and b_N == q_N\n\ndef get_large_negative_number(dtype):\n # temp WAR as cuDNN has a bug for subtraction between two large negative value\n if dtype == jnp.bfloat16:\n return jnp.asarray(-2 << 40, dtype=dtype)\n elif dtype == jnp.float16:\n return jnp.asarray(-2 << 14, dtype=dtype)\n else:\n raise ValueError(""Unsupported dtype for inputs."")\n\ndef _normalize_layout(layout: str) -> AttentionLayout:\n layout_upper = layout.upper()\n if layout_upper in [""BSNH"", ""BNSH"", ""BTNH"", ""BNTH""]:\n return AttentionLayout[layout_upper.replace(""S"", ""T"")]\n else:\n raise ValueError(f""Unsupported qkv_layout: {layout}"")\n\ndef element_type_to_backend_config_type_mapping(dtype):\n _element_type_to_backend_config_type_mapping = {\n ir.BF16Type.get(): ""BF16"",\n ir.F16Type.get(): ""F16"",\n }\n return _element_type_to_backend_config_type_mapping[dtype]\n\ndef default_layouts(*shapes):\n return [range(len(shape) - 1, -1, -1) for shape in shapes]\n\ndef get_max_seg_per_batch(q_offsets):\n return q_offsets.shape[1] - 1 if len(q_offsets.shape) == 2 else 1\n\ndef check_is_paged_attention(page_table_k):\n return len(page_table_k.shape) == 4\n\ndef create_dot_product_attention_backend_config_base(\n batch, num_heads, seq_q, seq_kv, dtype, fmha_scale, mask_type, layout, is_bwd\n):\n # Q, K, V: query, key, value in shape of BT(S)NH or BNT(S)H\n # P: BMM1 output in shape of BNTS\n # O: BMM2 output in the same shape with Q\n # BMM1: Q @ K -> P\n # BMM2: P @ V -> O\n # BMM1Grad1: dP @ Q -> dK\n # BMM1Grad2: dP @ K -> dQ\n # BMM2Grad1: P @ dO -> dV\n # BMM2Grad2: dO @ V -> dP\n cudnn_fmha_backend_config = {\n ""algorithm"": {\n ""algo_id"": ""0"",\n ""math_type"": ""TENSOR_OP_MATH"",\n ""tuning_knobs"": {""17"": ""1"", ""24"": ""0""},\n ""is_cudnn_frontend"": True,\n ""workspace_size"": ""0"",\n },\n ""fmha_scale"": fmha_scale,\n ""intermediate_tensor_shape"": {\n ""element_type"": element_type_to_backend_config_type_mapping(dtype),\n ""dimensions"": [str(batch), str(num_heads), str(seq_q), str(seq_kv)],\n ""tuple_shapes"": [],\n ""layout"": {\n ""dim_level_types"": [],\n ""dim_unique"": [],\n ""dim_ordered"": [],\n ""minor_to_major"": [""3"", ""2"", ""1"", ""0""],\n ""tiles"": [],\n ""element_size_in_bits"": ""0"",\n ""memory_space"": ""0"",\n ""index_primitive_type"": ""PRIMITIVE_TYPE_INVALID"",\n ""pointer_primitive_type"": ""PRIMITIVE_TYPE_INVALID"",\n ""dynamic_shape_metadata_prefix_bytes"": ""0"",\n },\n ""is_dynamic_dimension"": [False, False, False, False],\n },\n ""is_flash_attention"": True,\n ""mask_type"": convert_mask_type_to_string(mask_type),\n }\n\n # We define the contracting and batch dims in the format of\n # ((lhs_contracting_dims, rhs_contracting_dims), (lhs_batch_dims,\n # rhs_batch_dims)).\n if layout == AttentionLayout.BNTH.value:\n dims = [\n ((3, 3), ((0, 1), (0, 1))), # BMM1: BNTH,BNSH->BNTS\n ((3, 2), ((0, 1), (0, 1))), # BMM2: BNTS,BNSH->BNTH\n ((2, 2), ((0, 1), (0, 1))), # BMM1_grad_1: BNTS,BNTH->BNSH\n ((3, 2), ((0, 1), (0, 1))), # BMM1_grad_2: BNTS,BNSH->BNTH\n ((2, 2), ((0, 1), (0, 1))), # BMM2_grad_1: BNTS,BNTH->BNSH\n ((3, 3), ((0, 1), (0, 1))), # BMM2_grad_2: BNTH,BNSH->BNTS\n ]\n else:\n dims = [\n ((3, 3), ((0, 2), (0, 2))), # BMM1: BTNH,BSNH->BNTS\n ((3, 1), ((0, 1), (0, 
2))), # BMM2: BNTS,BSNH->BTNH\n ((2, 1), ((0, 1), (0, 2))), # BMM1_grad_1: BNTS,BTNH->BSNH\n ((3, 1), ((0, 1), (0, 2))), # BMM1_grad_2: BNTS,BSNH->BTNH\n ((2, 1), ((0, 1), (0, 2))), # BMM2_grad_1: BNTS,BTNH->BSNH\n ((3, 3), ((0, 2), (0, 2))), # BMM2_grad_2: BTNH,BSNH->BNTS\n ]\n keys = [\n ""bmm1_dot_dimension_numbers"",\n ""bmm2_dot_dimension_numbers"",\n ""bmm1_grad_gemm1_dot_dimension_numbers"",\n ""bmm1_grad_gemm2_dot_dimension_numbers"",\n ""bmm2_grad_gemm1_dot_dimension_numbers"",\n ""bmm2_grad_gemm2_dot_dimension_numbers"",\n ]\n fwd_dot_number = {}\n bwd_dot_number = {}\n for idx, (key, ((lc, rc), (lb, rb))) in enumerate(zip(keys, dims)):\n dims_to_write = fwd_dot_number if idx < 2 else bwd_dot_number\n dims_to_write[key] = {\n ""lhs_contracting_dimensions"": [str(lc)],\n ""rhs_contracting_dimensions"": [str(rc)],\n ""lhs_batch_dimensions"": [str(i) for i in lb],\n ""rhs_batch_dimensions"": [str(i) for i in rb],\n }\n\n if is_bwd:\n cudnn_fmha_backend_config = {**cudnn_fmha_backend_config, **bwd_dot_number}\n else:\n cudnn_fmha_backend_config = {**cudnn_fmha_backend_config, **fwd_dot_number}\n backend_config = {\n ""operation_queue_id"":""0"",\n ""wait_on_operation_queues"":[],\n ""cudnn_fmha_backend_config"": cudnn_fmha_backend_config\n }\n return backend_config\n\ndef create_dot_product_attention_backend_config(\n batch,\n num_heads,\n seq_q,\n seq_kv,\n dtype,\n fmha_scale,\n seed,\n dropout_rate,\n mask_type,\n layout,\n sliding_window_length,\n max_seg_per_batch,\n is_paged_attention,\n is_bwd\n):\n backend_config = create_dot_product_attention_backend_config_base(\n batch, num_heads, seq_q, seq_kv, dtype,\n fmha_scale, mask_type, layout, is_bwd\n )\n if sliding_window_length is None:\n sliding_window_length = 0\n backend_config['cudnn_fmha_backend_config'][""dropout_rate""] = dropout_rate\n backend_config['cudnn_fmha_backend_config'][""seed""] = seed\n backend_config['cudnn_fmha_backend_config'][""sliding_window_length""] = sliding_window_length\n backend_config['cudnn_fmha_backend_config'][""max_seg_per_batch""] = max_seg_per_batch\n backend_config['cudnn_fmha_backend_config'][""is_paged_attention""] = is_paged_attention\n return json.dumps(backend_config)\n\ndef create_dot_product_attention_fp8_backend_config(\n batch, num_heads, seq_q, seq_kv, dtype, fmha_scale, mask_type, layout, is_bwd):\n backend_config = create_dot_product_attention_backend_config_base(\n batch, num_heads, seq_q, seq_kv, dtype, fmha_scale, mask_type, layout, is_bwd)\n return json.dumps(backend_config)\n\n# mapping from (is_bwd, has_dropout, has_bias) to custom call name\n_custom_name_maps = {\n # fMHA forward call targets.\n (False, False, False, False): ""__cudnn$fmhaSoftmax"",\n (False, False, True, False): ""__cudnn$fmhaScaleBiasSoftmax"",\n (False, True, False, False): ""__cudnn$fmhaSoftmaxDropout"",\n (False, True, True, False): ""__cudnn$fmhaScaleBiasSoftmaxDropout"",\n (False, False, False, True): ""__cudnn$fmhaSoftmaxF8"",\n # fMHA backward call targets.\n (True, False, False, False): ""__cudnn$fmhaSoftmaxBackward"",\n (True, False, True, False): ""__cudnn$fmhaScaleBiasSoftmaxBackward"",\n (True, True, False, False): ""__cudnn$fmhaSoftmaxDropoutBackward"",\n (True, True, True, False): ""__cudnn$fmhaScaleBiasSoftmaxDropoutBackward"",\n (True, False, False, True): ""__cudnn$fmhaSoftmaxBackwardF8"",\n}\n\ndef get_custom_call_name(has_bias, has_dropout, is_bwd, is_fp8=False):\n return _custom_name_maps[(is_bwd, has_dropout, has_bias, is_fp8)]\n\nget_fp8_custom_call_name = functools.partial(\n 
get_custom_call_name, has_bias=False, has_dropout=False, is_fp8=True\n)\n\ndef check_layout(query, key, value, bias, q_seqlen, kv_seqlen,\n q_offsets, kv_offsets, page_table_k, page_table_v, layout):\n def check_eq(a, b, c, msg):\n if not (a == b == c):\n raise ValueError(f""{msg} must be same, got {a}, {b}, {b}"")\n\n q_rank, k_rank, v_rank = len(query.shape), len(key.shape), len(value.shape)\n if q_rank != 4:\n raise ValueError(f""Q must have a rank of 4, got {q_rank}"")\n check_eq(q_rank, k_rank, v_rank, ""QKV rank"")\n\n q_dtype, k_dtype, v_dtype = query.dtype, key.dtype, value.dtype\n if q_dtype not in [jnp.bfloat16, jnp.float16, jnp.float8_e4m3fn, jnp.float8_e5m2]:\n raise NotImplementedError(f""Q must be fp16/bf16/fp8_e4m3fn/fp8_e5m2, got {q_dtype}"")\n check_eq(q_dtype, k_dtype, v_dtype, ""QKV dtype"")\n\n if layout == AttentionLayout.BNTH:\n qB, qN, qT, qH = query.shape\n kB, kN, kS, kH = key.shape\n vB, vN, vS, vH = value.shape\n else:\n assert layout == AttentionLayout.BTNH\n qB, qT, qN, qH = query.shape\n kB, kS, kN, kH = key.shape\n vB, vS, vN, vH = value.shape\n\n if page_table_k is not None and page_table_v is not None:\n k_blocks, k_block_size = kB, kS\n v_blocks, v_block_size = vB, vS\n kB, _, k_blocks_per_batch, _ = page_table_k.shape\n vB, _, v_blocks_per_batch, _ = page_table_v.shape\n kS = k_blocks_per_batch * k_block_size\n vS = v_blocks_per_batch * v_block_size\n if kB * k_blocks_per_batch != k_blocks:\n raise ValueError(\n f""Key and page_table_k must have same number of blocks, ""\n f""got {k_blocks} vs {kB * k_blocks_per_batch}"")\n if vB * v_blocks_per_batch != v_blocks:\n raise ValueError(\n f""Value and page_table_v must have same number of blocks, ""\n f""got {v_blocks} vs {vB * v_blocks_per_batch}"")\n\n check_eq(qB, kB, vB, ""QKV batch"")\n check_eq(qH, kH, vH, ""QKV dim_per_head"")\n if kN != vN:\n raise ValueError(f""KV must have same number of heads, got {kN} vs {vN}"")\n if kS != vS:\n raise ValueError(f""KV must have same seq length, got {kS} vs {vS}"")\n\n # check bias\n if bias is not None:\n _, _, bT, bS = bias.shape\n if bT != qT or bS != vS:\n raise ValueError(\n f""Bias must have same seq length as QKV, got {bT} and {bS}"")\n\n # check q_seqlen/kv_seqlen/q_offsets/kv_offsets\n expected_rank = 2 if q_offsets is not None else 1\n def check_seqlen_offsets(tensor, name):\n if tensor is not None:\n dtype = tensor.dtype\n rank = len(tensor.shape)\n if dtype != jnp.int32:\n raise ValueError(f""{name} must have int32 datatype, got {dtype}"")\n if rank != expected_rank:\n raise ValueError(f""{name} must have a rank of {expected_rank}, got {rank}"")\n b = tensor.shape[0]\n if b != qB:\n raise ValueError(f""{name} must have same batch as Q, got {b}"")\n\n check_seqlen_offsets(q_seqlen, ""q_seqlen"")\n check_seqlen_offsets(kv_seqlen, ""kv_seqlen"")\n check_seqlen_offsets(q_offsets, ""q_offsets"")\n check_seqlen_offsets(kv_offsets, ""kv_offsets"")\n\n\ndef check_is_flash_attention(\n query, key, layout: int, cudnn_version, has_bias, is_training, is_packed=False,\n is_paged_attention=False, is_fp8=False):\n # Extract sequence length (T) and head dim (H) based on layout\n if layout == AttentionLayout.BNTH.value:\n _, _, T, H = query.shape\n _, _, S, _ = key.shape\n else:\n _, T, _, H = query.shape\n _, S, _, _ = key.shape\n\n # Flash attention conditions\n if is_fp8:\n # FP8 specific conditions\n if not ((is_training and H == 128 and T % 128 == 0 and S % 128 == 0) or\n (not is_training and H <= 256 and H % 16 == 0)):\n raise NotImplementedError(\n f""Unsupported 
sequence length Q {T}, KV {S} and head dim {H} for FP8.""\n )\n else:\n # bf16/fp16 attention conditions\n # Check the head dim.\n is_on_hopper = is_cuda_compute_capability_equal(""9.0"")\n H_max = 256 if cudnn_version >= 90500 and is_on_hopper else 128\n if not (H <= H_max and H % 8 == 0):\n raise NotImplementedError(\n f""The head dim must be <= {H_max} and a multiple of 8, ""\n f""but got {H}.""\n )\n\n # Check patterns with bias, seqlen should be divisible by 2\n if (is_training and has_bias and (T % 2 != 0 or S % 2 != 0)):\n raise NotImplementedError(\n f""Unsupported sequence length Q {T}, KV {S}.""\n )\n\n if is_packed and (cudnn_version < 90600 or not check_compute_capability(""9.0"")):\n raise NotImplementedError(\n ""Packed layout requires cudnn version >= 9.6 and at least hopper arch."")\n if is_paged_attention and cudnn_version < 90500:\n raise NotImplementedError(""Page attention requires cudnn version >= 9.5."")\n\ndef check_cudnn_version():\n # check if cuDNN is installed\n if cuda_versions is None:\n raise RuntimeError(""cuDNN is not detected."")\n return cuda_versions.cudnn_get_version()\n\ndef check_compute_capability(capability):\n if not 'cuda' in xla_bridge.get_backend().platform_version:\n return False\n d, *_ = jax.local_devices(backend=""gpu"")\n target = tuple(int(x) for x in capability.split("".""))\n current = tuple(int(x) for x in d.compute_capability.split("".""))\n return current >= target\n\ndef is_cuda_compute_capability_equal(capability):\n if not 'cuda' in xla_bridge.get_backend().platform_version:\n return False\n d, *_ = jax.local_devices(backend=""gpu"")\n target = tuple(int(x) for x in capability.split("".""))\n current = tuple(int(x) for x in d.compute_capability.split("".""))\n return current == target\n\ndef _dot_product_attention_fwd(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v,\n scale, seed, dropout_rate, variadic_args, mask_type, layout,\n sliding_window_length, cudnn_version, return_residual):\n # check if flash attention is supported for this attention pattern\n check_is_flash_attention(\n query, key, layout, cudnn_version, bias is not None, False,\n get_max_seg_per_batch(q_offsets) > 1, check_is_paged_attention(page_table_k))\n outputs = _dot_product_attention_fwd_p_wrapper.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale=scale, seed=seed, dropout_rate=dropout_rate,\n variadic_args=variadic_args, mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length, is_training=False or return_residual)\n if return_residual:\n return tuple(outputs)\n else:\n return outputs[0]\n\ndef _dot_product_attention_fwd_rule(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale, seed, dropout_rate, variadic_args,\n mask_type, layout, sliding_window_length, cudnn_version,\n return_residual):\n # check if flash attention is supported for this attention pattern\n check_is_flash_attention(\n query, key, layout, cudnn_version, bias is not None, True,\n get_max_seg_per_batch(q_offsets) > 1)\n outputs = _dot_product_attention_fwd_p_wrapper.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale=scale, seed=seed, dropout_rate=dropout_rate,\n variadic_args=variadic_args, mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length, is_training=True)\n res = (query, key, value, bias, q_seqlen, kv_seqlen, 
q_offsets,\n kv_offsets, page_table_k, page_table_v, outputs[1], outputs[0])\n if return_residual:\n return tuple(outputs), res\n else:\n return outputs[0], res\n\ndef _dot_product_attention_bwd_rule(\n scale, seed, dropout_rate, variadic_args, mask_type, layout,\n sliding_window_length, is_training, return_residual, res, grad_output):\n (query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, fwd_output) = res\n if return_residual:\n grad_output = grad_output[0]\n grads = _dot_product_attention_bwd_p_wrapper.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, fwd_output, grad_output,\n scale=scale, seed=seed, dropout_rate=dropout_rate, variadic_args=variadic_args,\n mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length\n )\n grads = (*grads,) + (None,) * (10 - len(grads))\n return grads\n\ndef _fix_seqlen_offsets(q_seqlen, kv_seqlen, q_offsets, kv_offsets, query, key):\n # fix seqlen and offsets to what cuDNN expects in sequence packing.\n # cuDNN expects seqlen to have shape [S] where S is the total number of segments\n # while the SDPA API accetps seqlen with shape [B, M] where B is the batch and M\n # is the maximum number of segments of one batch. B x M is larger than S and seqlen\n # is filled with -1 for padded regions. Therefore, we need to shift all non negative\n # values to left side to form a correct seqlen. Similar layout is required for\n # offsets tensors.\n # cuDNN expects offsets to have offset for each segment starting from first segment\n # while SDPA API accetps offsets to have offset for each segment starting from\n # current batch, therefore we need to calculate accumulative offset of each segment\n # starting from first segment.\n def _shift_to_left(x, fill_value):\n # shift any non-negative value to left\n # [[1, 3, -1, -1], [2, 3, 4, -1]]\n # -> [[1, 3, 2, 3], [4, -1, -1, -1]]\n x_shape = x.shape\n x = x.flatten()\n size = x.size\n indices = jnp.nonzero(x >= 0, size=size, fill_value=size)[0]\n y = jnp.take(x, indices, fill_value=fill_value)\n return jnp.reshape(y, x_shape)\n\n def _cu_offset(offsets, max_seq):\n # calculate accumulative offset by batch\n # [[1, 3, 5, 7], [4, 5, -1, -1]], max_seq = 8\n # -> [[1, 3, 5, 7], [12, 13, -1, -1]]\n batch = offsets.shape[0]\n offsets = jnp.where(\n offsets >= 0,\n offsets + (jnp.arange(batch, dtype=offsets.dtype) * max_seq)[..., jnp.newaxis],\n offsets,\n )\n return offsets\n\n if get_max_seg_per_batch(q_offsets) > 1:\n B, T, N, H = query.shape\n _, S, _, _ = key.shape\n\n q_seqlen = _shift_to_left(q_seqlen, -1)\n kv_seqlen = _shift_to_left(kv_seqlen, -1)\n\n q_offsets = _cu_offset(q_offsets, T)\n kv_offsets = _cu_offset(kv_offsets, S)\n q_offsets = _shift_to_left(q_offsets, -1)\n kv_offsets = _shift_to_left(kv_offsets, -1)\n\n # mark any invalid entries as maximum offset\n q_offsets = jnp.where(q_offsets < 0, B * T, q_offsets)\n kv_offsets = jnp.where(kv_offsets < 0, B * S, kv_offsets)\n\n # multiply by stride_per_token to get correct offsets\n # do it here because real stride changes after sharding\n q_offsets = q_offsets * N * H\n kv_offsets = kv_offsets * N * H\n\n return q_seqlen, kv_seqlen, q_offsets, kv_offsets\n\ndef _dot_product_attention_fwd_impl(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale, seed, dropout_rate, variadic_args,\n mask_type, layout, sliding_window_length, is_training):\n # args: {Q, K, V, 
mask*, bias*}\n q_seqlen, kv_seqlen, q_offsets, kv_offsets = \\n _fix_seqlen_offsets(q_seqlen, kv_seqlen, q_offsets, kv_offsets, query, key)\n outputs = _dot_product_attention_fwd_p.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale=scale, seed=seed, dropout_rate=dropout_rate,\n variadic_args=variadic_args, mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length, is_training=is_training)\n return outputs\n\ndef _dot_product_attention_bwd_impl(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, fwd_output, grad_output, scale,\n seed, dropout_rate, variadic_args, mask_type, layout, sliding_window_length):\n q_seqlen, kv_seqlen, q_offsets, kv_offsets = \\n _fix_seqlen_offsets(q_seqlen, kv_seqlen, q_offsets, kv_offsets, query, key)\n grads = _dot_product_attention_bwd_p.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, fwd_output, grad_output,\n scale=scale, seed=seed,\n dropout_rate=dropout_rate, variadic_args=variadic_args,\n mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length)\n return grads\n\ndef _dot_product_attention_fwd_abstract(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, *, scale, seed, dropout_rate, variadic_args,\n mask_type, layout, sliding_window_length, is_training):\n query_dtype = dtypes.canonicalize_dtype(query.dtype)\n if layout == AttentionLayout.BNTH.value:\n B, N, T, _ = query.shape\n _, _, S, _ = key.shape\n else:\n B, T, N, _ = query.shape\n _, S, _, _ = key.shape\n output_shape = query.shape\n\n max_seg_per_batch = get_max_seg_per_batch(q_offsets)\n softmax_stat_shape = (B * max_seg_per_batch, N, T)\n\n if is_training:\n return (\n core.ShapedArray(output_shape, query_dtype), # output\n core.ShapedArray(softmax_stat_shape, jnp.float32), # softmax_stat\n )\n else:\n return (\n core.ShapedArray(output_shape, query_dtype), # output\n )\n\ndef _dot_product_attention_bwd_abstract(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, fwd_output, grad_output, *,\n scale, seed, dropout_rate, variadic_args, mask_type, layout, sliding_window_length):\n query_dtype = dtypes.canonicalize_dtype(query.dtype)\n key_dtype = dtypes.canonicalize_dtype(key.dtype)\n value_dtype = dtypes.canonicalize_dtype(value.dtype)\n\n _, has_dbias = variadic_args\n if has_dbias:\n # cuDNN supports bias for this case\n bias_dtype = dtypes.canonicalize_dtype(bias.dtype)\n return (\n core.ShapedArray(\n query.shape, query_dtype\n ), # grad query\n core.ShapedArray(\n key.shape, key_dtype\n ), # grad key\n core.ShapedArray(\n value.shape, value_dtype\n ), # grad value\n core.ShapedArray(\n bias.shape, bias_dtype\n ), # grad bias\n )\n else:\n return (\n core.ShapedArray(\n query.shape, query_dtype\n ), # grad query\n core.ShapedArray(\n key.shape, key_dtype\n ), # grad key\n core.ShapedArray(\n value.shape, value_dtype\n ), # grad value\n )\n\ndef _dot_product_attention_fwd_cuda_lowering(\n ctx, query, key, value, bias, q_seqlen, kv_seqlen, q_offsets,\n kv_offsets, page_table_k, page_table_v, scale, seed, dropout_rate,\n variadic_args, mask_type, layout, sliding_window_length, is_training):\n query_type = ir.RankedTensorType(query.type)\n query_shape = query_type.shape\n key_type = ir.RankedTensorType(key.type)\n key_shape = key_type.shape\n\n if 
layout == AttentionLayout.BNTH.value:\n B, N, T, H = query_shape\n _, _, S, _ = key_shape\n output_layout = (3, 2, 1, 0)\n output_transpose_perm = mlir.dense_int_array((0, 1, 2, 3))\n else:\n B, T, N, H = query_shape\n _, S, _, _ = key_shape\n output_layout = (3, 1, 2, 0)\n output_transpose_perm = mlir.dense_int_array((0, 2, 1, 3))\n\n max_seg_per_batch = get_max_seg_per_batch(ir.RankedTensorType(q_offsets.type))\n is_paged_attention = check_is_paged_attention(ir.RankedTensorType(page_table_k.type))\n\n output_shape = (B, N, T, H)\n softmax_stat_shape = (B * max_seg_per_batch, N, T)\n workspace_shape = (0,)\n workspace_type = ir.IntegerType.get_unsigned(8)\n\n has_bias, _ = variadic_args\n backend_config = create_dot_product_attention_backend_config(\n B, N, T, S, query_type.element_type, scale, seed, dropout_rate,\n mask_type, layout, sliding_window_length, max_seg_per_batch,\n is_paged_attention, is_bwd=False)\n # {Q, K, V, bias*, q_seqlen*, kv_seqlen*, q_offsets*, kv_offsets*}}\n # {output, activation*, workspace}\n has_dropout = dropout_rate > 0\n operands = [query, key, value]\n if has_bias:\n operands.append(bias)\n if has_padding(mask_type) or max_seg_per_batch > 1 or is_paged_attention:\n operands.append(q_seqlen)\n operands.append(kv_seqlen)\n if max_seg_per_batch > 1:\n operands.append(q_offsets)\n operands.append(kv_offsets)\n if is_paged_attention:\n operands.append(page_table_k)\n operands.append(page_table_v)\n\n custom_call_name = get_custom_call_name(has_bias, has_dropout, False)\n\n if is_training:\n result_types = [\n ir.RankedTensorType.get(output_shape, query_type.element_type),\n ir.RankedTensorType.get(softmax_stat_shape, ir.F32Type.get()),\n ir.RankedTensorType.get(workspace_shape, workspace_type),\n ]\n result_layouts = [output_layout] + default_layouts(softmax_stat_shape, workspace_shape)\n else:\n result_types = [\n ir.RankedTensorType.get(output_shape, query_type.element_type),\n ir.RankedTensorType.get(workspace_shape, workspace_type)\n ]\n result_layouts = [output_layout] + default_layouts(workspace_shape)\n # create custom call here\n out = mlir.custom_call(\n custom_call_name,\n result_types=result_types,\n operands=operands,\n backend_config=backend_config,\n operand_layouts=default_layouts(\n *[ir.RankedTensorType(operand.type).shape for operand in operands]),\n result_layouts=result_layouts,\n )\n # drop workspace memory\n # output should be (B, T, N, H) instead of (B, N, T, H)\n if is_training:\n return [hlo.transpose(out.results[0], output_transpose_perm), out.results[1]]\n else:\n return [hlo.transpose(out.results[0], output_transpose_perm)]\n\ndef _dot_product_attention_bwd_cuda_lowering(\n ctx, query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, fwd_output, grad_output,\n scale, seed, dropout_rate, variadic_args, mask_type, layout, sliding_window_length):\n query_type = ir.RankedTensorType(query.type)\n query_shape = query_type.shape\n key_type = ir.RankedTensorType(key.type)\n key_shape = key_type.shape\n value_type = ir.RankedTensorType(value.type)\n\n if layout == AttentionLayout.BNTH.value:\n B, q_N, T, H = query_shape\n _, k_N, S, _ = key_shape\n grad_layout = (3, 2, 1, 0)\n grad_transpose_perm = mlir.dense_int_array((0, 1, 2, 3))\n else:\n B, T, q_N, H = query_shape\n _, S, k_N, _ = key_shape\n grad_layout = (3, 1, 2, 0)\n grad_transpose_perm = mlir.dense_int_array((0, 2, 1, 3))\n\n workspace_shape = (0,)\n workspace_type = ir.IntegerType.get_unsigned(8)\n\n grad_query_shape = (B, q_N, 
T, H)\n grad_key_shape = (B, k_N, S, H)\n grad_value_shape = (B, k_N, S, H)\n\n has_bias, has_dbias = variadic_args\n max_seg_per_batch = get_max_seg_per_batch(ir.RankedTensorType(q_offsets.type))\n backend_config = create_dot_product_attention_backend_config(\n B, q_N, T, S, query_type.element_type, scale, seed, dropout_rate,\n mask_type, layout, sliding_window_length, max_seg_per_batch,\n False, is_bwd=True)\n # {Q, K, V, activation, dO, bias*, O, q_seqlen*, kv_seqlen*,\n # q_offsets*, kv_offsets*}\n # {dQ, dK, dV, dbias*, workspace}\n has_dropout = dropout_rate > 0\n # create operands\n operands = [query, key, value, activation, grad_output]\n if has_bias:\n # flash attention requires bias in the bwd for remat\n operands.append(bias)\n operands.append(fwd_output)\n if has_padding(mask_type) or max_seg_per_batch > 1:\n operands.append(q_seqlen)\n operands.append(kv_seqlen)\n if max_seg_per_batch > 1:\n operands.append(q_offsets)\n operands.append(kv_offsets)\n # get custom call name\n custom_call_name = get_custom_call_name(has_bias, has_dropout, True)\n\n # create output types and layouts\n # grad_query, grad_key, grad_value\n result_types = [\n ir.RankedTensorType.get(grad_query_shape, query_type.element_type),\n ir.RankedTensorType.get(grad_key_shape, key_type.element_type),\n ir.RankedTensorType.get(grad_value_shape, value_type.element_type),\n ]\n result_layouts = [grad_layout, grad_layout, grad_layout]\n bias_type = ir.RankedTensorType(bias.type)\n bias_shape = bias_type.shape\n if has_dbias:\n # cuDNN supports bias for this case\n result_types.append(\n ir.RankedTensorType.get(bias_shape, bias_type.element_type))\n result_layouts = result_layouts + default_layouts(bias_shape)\n # workspace\n result_types.append(ir.RankedTensorType.get(workspace_shape, workspace_type))\n result_layouts = result_layouts + default_layouts(workspace_shape)\n out = mlir.custom_call(\n custom_call_name,\n result_types=result_types,\n operands=operands,\n backend_config=backend_config,\n operand_layouts=default_layouts(\n *[ir.RankedTensorType(operand.type).shape for operand in operands]),\n result_layouts=result_layouts,\n )\n dqkv = (hlo.transpose(out.results[0], grad_transpose_perm),\n hlo.transpose(out.results[1], grad_transpose_perm),\n hlo.transpose(out.results[2], grad_transpose_perm))\n # Only keep dQ, dK, dV and dBias here\n if has_dbias:\n return dqkv + (out.results[3],)\n else:\n return dqkv\n\n# batcher\ndef _check_valid_batch_dims(bdims):\n for dim in bdims:\n if dim not in [0, None]:\n raise NotImplementedError(\n f""Currently only support batch_dim in [0, None], but got {dim=}"")\n\ndef _dot_product_attention_fwd_batcher(\n batched_args, batch_dims, *, scale, seed, dropout_rate, variadic_args,\n mask_type, layout, sliding_window_length, is_training):\n _check_valid_batch_dims(batch_dims)\n query, key, value, bias, q_seqlen, kv_seqlen, \\n q_offsets, kv_offsets, page_table_k, page_table_v = batched_args\n query_bdim = batch_dims[0]\n if is_training:\n out_bdims = query_bdim, query_bdim\n else:\n out_bdims = (query_bdim,)\n\n if layout == AttentionLayout.BNTH.value:\n *Bs, N, T, _ = query.shape\n *_, _, S, _ = key.shape\n else:\n *Bs, T, N, _ = query.shape\n *_, S, _, _ = key.shape\n B = math.prod(Bs)\n has_bias, _ = variadic_args\n original_shape = query.shape\n # reshape to 4D shape\n query = jnp.reshape(query, (B,) + query.shape[-3:])\n key = jnp.reshape(key, (B,) + key.shape[-3:])\n value = jnp.reshape(value, (B,) + key.shape[-3:])\n if has_bias and batch_dims[3] is not None:\n bias = 
jnp.reshape(bias, (B, N, T, S))\n if has_padding(mask_type):\n q_seqlen = jnp.reshape(q_seqlen, (B, ))\n kv_seqlen = jnp.reshape(kv_seqlen, (B, ))\n\n outputs = _dot_product_attention_fwd_p_wrapper.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale=scale, seed=seed, dropout_rate=dropout_rate,\n variadic_args=variadic_args, mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length, is_training=is_training)\n\n # reshape to original shape\n output = outputs[0]\n output = jnp.reshape(output, original_shape)\n if is_training:\n activation = outputs[1]\n activation = jnp.reshape(activation, (*Bs, N, T))\n return (output, activation), out_bdims\n else:\n return (output,), out_bdims\n\ndef _dot_product_attention_bwd_batcher(\n batched_args, batch_dims, *, scale, seed, dropout_rate, variadic_args,\n mask_type, layout, sliding_window_length):\n _check_valid_batch_dims(batch_dims)\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets, \\n page_table_k, page_table_v, activation, fwd_output, grad_output = batched_args\n query_bdim = batch_dims[0]\n out_bdims = query_bdim, query_bdim, query_bdim\n\n if layout == AttentionLayout.BNTH.value:\n *Bs, N, T, _ = query.shape\n *_, _, S, _ = key.shape\n else:\n *Bs, T, N, _ = query.shape\n *_, S, _, _ = key.shape\n B = math.prod(Bs)\n has_bias, has_dbias = variadic_args\n # Reset the has_dbias if the combined batch size is not 1, because cuDNN only\n # supports dbias with a single batch. In this case, an all-zero dbias will be\n # appended instead.\n if B > 1:\n variadic_args = (has_bias, False)\n original_query_shape = query.shape\n original_key_shape = key.shape\n original_value_shape = value.shape\n original_bias_shape = bias.shape if has_bias else None\n # reshape to 4D shape\n query = jnp.reshape(query, (B,) + query.shape[-3:])\n key = jnp.reshape(key, (B,) + key.shape[-3:])\n value = jnp.reshape(value, (B,) + key.shape[-3:])\n if has_bias and batch_dims[3] is not None:\n bias = jnp.reshape(bias, (B, N, T, S))\n if has_padding(mask_type):\n q_seqlen = jnp.reshape(q_seqlen, (B, ))\n kv_seqlen = jnp.reshape(kv_seqlen, (B, ))\n\n activation = jnp.reshape(activation, (B, N, T))\n fwd_output = jnp.reshape(fwd_output, (B,) + query.shape[-3:])\n grad_output = jnp.reshape(grad_output, (B,) + query.shape[-3:])\n\n grads = _dot_product_attention_bwd_p_wrapper.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, fwd_output, grad_output,\n scale=scale, seed=seed, dropout_rate=dropout_rate, variadic_args=variadic_args,\n mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length,\n )\n\n # reshape to original shape\n grads[0] = jnp.reshape(grads[0], original_query_shape)\n grads[1] = jnp.reshape(grads[1], original_key_shape)\n grads[2] = jnp.reshape(grads[2], original_value_shape)\n if has_dbias:\n assert has_bias\n if variadic_args[1]:\n grads[3] = jnp.reshape(grads[3], original_bias_shape)\n else:\n grads.append(jnp.zeros(original_bias_shape, bias.dtype))\n out_bdims += (batch_dims[3],)\n return grads, out_bdims\n\n# custom partitioning\ndef _get_padded_spec(arg_info):\n spec = None if arg_info.sharding is None else arg_info.sharding.spec\n ndim = arg_info.ndim\n if spec is None:\n return (None,) * ndim\n assert len(spec) <= ndim\n return spec + (None,) * (ndim - len(spec))\n\ndef _check_qkv_bias_mask_spec(\n query_spec, key_spec, value_spec, bias_spec, layout):\n # check qkv 
spec\n if not query_spec == key_spec == value_spec:\n raise ValueError(""Query, key and value should have same sharding."")\n if layout == AttentionLayout.BNTH.value:\n *batch_spec, num_head_spec, q_seq_spec, head_spec = query_spec\n else:\n *batch_spec, q_seq_spec, num_head_spec, head_spec = query_spec\n if q_seq_spec is not None:\n raise ValueError(""Sharding on sequence dim is not allowed."")\n if head_spec is not None:\n raise ValueError(""Sharding on head dim is not allowed."")\n # check bias spec\n if bias_spec:\n *bias_batch_spec, bias_num_head_spec, bias_q_seq_spec, bias_kv_seq_spec = bias_spec\n if any(bias_batch_spec) and bias_batch_spec != batch_spec or \\n bias_num_head_spec is not None and bias_num_head_spec != num_head_spec:\n raise ValueError(\n ""Query and bias should have same sharding on batch and num_head dim."")\n if bias_q_seq_spec is not None or bias_kv_seq_spec is not None:\n raise ValueError(""Sharding on bias sequence dim is not allowed."")\n\n\n# fwd custom partition\ndef _infer_fwd_output_sharding(mesh, arg_shapes, variadic_args,is_training, layout):\n # only sharding on batch and num_head dim is allowed\n # (*batch, q_seq, num_head, head)\n query_spec = _get_padded_spec(arg_shapes[0])\n # (*batch, kv_seq, num_head, head)\n key_spec = _get_padded_spec(arg_shapes[1])\n value_spec = _get_padded_spec(arg_shapes[2])\n has_bias, _ = variadic_args\n bias_spec = _get_padded_spec(arg_shapes[3]) if has_bias else None\n\n _check_qkv_bias_mask_spec(\n query_spec, key_spec, value_spec, bias_spec, layout)\n # keep out sharding same as query sharding since they have same shape\n out_sharding = NamedSharding(mesh, PartitionSpec(*query_spec))\n if is_training:\n # activation sharding\n *batch_spec, q_seq_spec, num_head_spec, _ = query_spec\n activation_sharding = NamedSharding(\n mesh, PartitionSpec(*batch_spec, num_head_spec, q_seq_spec, None))\n return [out_sharding, activation_sharding]\n return [out_sharding]\n\n_dot_product_attention_fwd_lower = custom_partitioning(\n _dot_product_attention_fwd_impl, static_argnums=(10, 11, 12, 13, 14, 15, 16, 17))\n\ndef _dot_product_attention_fwd_infer_sharding_from_operands(\n scale, seed, dropout_rate, variadic_args, mask_type, layout, sliding_window_length,\n is_training, mesh, arg_shapes, result_shape):\n return _infer_fwd_output_sharding(mesh, arg_shapes, variadic_args, is_training, layout)\n\ndef _dot_product_attention_fwd_partition(\n scale, seed, dropout_rate, variadic_args, mask_type, layout, sliding_window_length,\n is_training, mesh, arg_shapes, result_shape):\n # args sharding\n arg_shardings = tuple(arg_i.sharding for arg_i in arg_shapes)\n out_shardings = _infer_fwd_output_sharding(\n mesh, arg_shapes, variadic_args, is_training, layout)\n impl = functools.partial(\n _dot_product_attention_fwd_impl,\n scale=scale,\n seed=seed,\n dropout_rate=dropout_rate,\n variadic_args=variadic_args,\n mask_type=mask_type,\n layout=layout,\n sliding_window_length=sliding_window_length,\n is_training=is_training,\n )\n return mesh, impl, out_shardings, arg_shardings\n\n# bwd custom partition\ndef _infer_bwd_output_sharding(mesh, arg_shapes, layout, variadic_args):\n # (*batch, q_seq, num_head, head)\n query_spec = _get_padded_spec(arg_shapes[0])\n # (*batch, kv_seq, num_head, head)\n key_spec = _get_padded_spec(arg_shapes[1])\n value_spec = _get_padded_spec(arg_shapes[2])\n has_bias, has_dbias = variadic_args\n bias_spec = _get_padded_spec(arg_shapes[3]) if has_bias else None\n _check_qkv_bias_mask_spec(\n query_spec, key_spec, value_spec, 
bias_spec, layout)\n # keep grad query sharding same as query sharding\n grad_query_sharding = NamedSharding(mesh, PartitionSpec(*query_spec))\n grad_key_sharding = NamedSharding(mesh, PartitionSpec(*key_spec))\n grad_value_sharding = NamedSharding(mesh, PartitionSpec(*key_spec))\n out_shardings = [grad_query_sharding, grad_key_sharding, grad_value_sharding]\n if has_dbias:\n grad_bias_sharding = NamedSharding(mesh, PartitionSpec(*bias_spec))\n out_shardings = out_shardings + [grad_bias_sharding]\n return out_shardings\n\n_dot_product_attention_bwd_lower = custom_partitioning(\n _dot_product_attention_bwd_impl, static_argnums=(13, 14, 15, 16, 17, 18, 19)\n)\n\ndef _dot_product_attention_bwd_infer_sharding_from_operands(\n scale, seed, dropout_rate, variadic_args, mask_type, layout,\n sliding_window_length, mesh, arg_shapes, result_shape):\n return _infer_bwd_output_sharding(mesh, arg_shapes, layout, variadic_args)\n\ndef _dot_product_attention_bwd_partition(\n scale, seed, dropout_rate, variadic_args, mask_type, layout,\n sliding_window_length, mesh, arg_shapes, result_shape):\n out_shardings = _infer_bwd_output_sharding(mesh, arg_shapes, layout, variadic_args)\n # args sharding\n arg_shardings = tuple(arg_i.sharding for arg_i in arg_shapes)\n def sharded_impl(*args):\n impl = functools.partial(\n _dot_product_attention_bwd_impl,\n scale=scale,\n seed=seed,\n dropout_rate=dropout_rate,\n variadic_args=variadic_args,\n mask_type=mask_type,\n layout=layout,\n sliding_window_length=sliding_window_length,\n )\n grads = impl(*args)\n _, has_dbias = variadic_args\n if has_dbias:\n query_spec = arg_shardings[0].spec\n batch_spec = query_spec[0]\n local_dbias = grads[3]\n global_dbias = jax.lax.psum(local_dbias, batch_spec)\n grads = grads[:3] + [global_dbias]\n return grads\n return mesh, sharded_impl, out_shardings, arg_shardings\n\n# Create dot_product_attention_fwd_p for forward operation.\n_dot_product_attention_fwd_p = core.Primitive(""dot_product_attention_fwd"")\n_dot_product_attention_fwd_p.multiple_results = True\n_dot_product_attention_fwd_p.def_impl(\n functools.partial(dispatch.apply_primitive, _dot_product_attention_fwd_p)\n)\n_dot_product_attention_fwd_p.def_abstract_eval(\n _dot_product_attention_fwd_abstract\n)\n\nmlir.register_lowering(\n _dot_product_attention_fwd_p,\n _dot_product_attention_fwd_cuda_lowering,\n platform=""cuda"",\n)\n\n_dot_product_attention_fwd_p_wrapper = core.Primitive(\n ""dot_product_attention_fwd_wrapper""\n)\n_dot_product_attention_fwd_p_wrapper.multiple_results = True\n_dot_product_attention_fwd_p_wrapper.def_impl(_dot_product_attention_fwd_impl)\n_dot_product_attention_fwd_p_wrapper.def_abstract_eval(\n _dot_product_attention_fwd_abstract\n)\n\n# Create dot_product_attention_bwd_p for backward operation.\n_dot_product_attention_bwd_p = core.Primitive(""dot_product_attention_bwd"")\n_dot_product_attention_bwd_p.multiple_results = True\n_dot_product_attention_bwd_p.def_impl(\n functools.partial(dispatch.apply_primitive, _dot_product_attention_bwd_p)\n)\n_dot_product_attention_bwd_p.def_abstract_eval(\n _dot_product_attention_bwd_abstract\n)\n\nmlir.register_lowering(\n _dot_product_attention_bwd_p,\n _dot_product_attention_bwd_cuda_lowering,\n platform=""cuda"",\n)\n\n_dot_product_attention_bwd_p_wrapper = core.Primitive(\n ""dot_product_attention_bwd_wrapper""\n)\n_dot_product_attention_bwd_p_wrapper.multiple_results = 
True\n_dot_product_attention_bwd_p_wrapper.def_impl(_dot_product_attention_bwd_impl)\n_dot_product_attention_bwd_p_wrapper.def_abstract_eval(\n _dot_product_attention_bwd_abstract\n)\n\nbatching.primitive_batchers[\n _dot_product_attention_fwd_p_wrapper\n] = _dot_product_attention_fwd_batcher\nbatching.primitive_batchers[\n _dot_product_attention_bwd_p_wrapper\n] = _dot_product_attention_bwd_batcher\n\ndef not_implemented_sharding_rule(*args, **kwargs):\n return NotImplementedError(""Sharding rule not implemented."")\n\n_dot_product_attention_fwd_lower.def_partition(\n infer_sharding_from_operands=_dot_product_attention_fwd_infer_sharding_from_operands,\n partition=_dot_product_attention_fwd_partition,\n sharding_rule=not_implemented_sharding_rule)\n\nmlir.register_lowering(_dot_product_attention_fwd_p_wrapper,\n mlir.lower_fun(_dot_product_attention_fwd_lower, multiple_results=True))\n\n_dot_product_attention_bwd_lower.def_partition(\n infer_sharding_from_operands=_dot_product_attention_bwd_infer_sharding_from_operands,\n partition=_dot_product_attention_bwd_partition,\n sharding_rule=not_implemented_sharding_rule)\n\nmlir.register_lowering(_dot_product_attention_bwd_p_wrapper,\n mlir.lower_fun(_dot_product_attention_bwd_lower, multiple_results=True))\n\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_fwd_p\n)\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_fwd_p_wrapper\n)\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_bwd_p\n)\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_bwd_p_wrapper\n)\n\n@functools.partial(jax.custom_vjp, nondiff_argnums=(10, 11, 12, 13, 14, 15, 16, 17, 18))\ndef _dot_product_attention(query: Array,\n key: Array,\n value: Array,\n bias: Array,\n q_seqlen: Array,\n kv_seqlen: Array,\n q_offsets: Array,\n kv_offsets: Array,\n page_table_k: Array,\n page_table_v: Array,\n scale: float,\n seed: int,\n dropout_rate: float,\n variadic_args: tuple[bool, ...],\n mask_type: bool,\n layout: int,\n sliding_window_length: int | None,\n cudnn_version: int,\n return_residual: bool):\n output = _dot_product_attention_fwd(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale=scale, seed=seed, dropout_rate=dropout_rate,\n variadic_args=variadic_args, mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length,\n cudnn_version=cudnn_version, return_residual=return_residual)\n return output\n\n_dot_product_attention.defvjp(\n _dot_product_attention_fwd_rule, _dot_product_attention_bwd_rule\n)\n\nfp8_params_keys = [\n 'amax_dQ', 'amax_dK', 'amax_dV', 'amax_dP', # place holder for bwd output\n 'descale_q', 'descale_k', 'descale_v', 'descale_s',\n 'scale_s', 'scale_o', 'descale_o', 'descale_dO',\n 'descale_dP', 'scale_dQ', 'scale_dK', 'scale_dV',\n 'scale_dP'\n]\n\nfp8_params_keys_fwd = [\n 'descale_q', 'descale_k', 'descale_v', 'descale_s', 'scale_s', 'scale_o'\n]\nfp8_params_keys_bwd = [\n 'descale_q', 'descale_k', 'descale_v', 'descale_o', 'descale_dO', 'descale_s',\n 'descale_dP', 'scale_s', 'scale_dQ', 'scale_dK', 'scale_dV', 'scale_dP',\n]\nparams_from_keys = lambda params, keys: [params[key] for key in keys]\n\ndef check_fp8_params(params):\n # Check if all required keys are present\n missing_keys = set(fp8_params_keys) - set(params)\n if missing_keys:\n raise ValueError(f""The following keys are missing from fp8_params: {', '.join(missing_keys)}"")\n\ncheck_is_flash_attention_fp8 = 
functools.partial(\n check_is_flash_attention,\n has_bias=False,\n is_fp8=True\n)\n\ndef _dot_product_attention_fp8_fwd(\n query, key, value,\n fp8_params_fwd,\n scale, use_causal_mask, layout, cudnn_version):\n check_is_flash_attention_fp8(\n query, key, layout, cudnn_version, is_training=False)\n descale_q, descale_k, descale_v, descale_s, scale_s, scale_o = fp8_params_fwd\n outputs = _dot_product_attention_fp8_fwd_p_wrapper.bind(\n query, key, value,\n descale_q, descale_k, descale_v, descale_s,\n scale_s, scale_o,\n scale=scale, use_causal_mask=use_causal_mask, layout=layout, is_training=False)\n return outputs\n\ndef _dot_product_attention_fp8_fwd_rule(\n query, key, value,\n fp8_params,\n scale, use_causal_mask, layout, cudnn_version):\n check_is_flash_attention_fp8(\n query, key, layout, cudnn_version, is_training=True)\n\n outputs = _dot_product_attention_fp8_fwd_p_wrapper.bind(\n query, key, value, *params_from_keys(fp8_params, fp8_params_keys_fwd),\n scale=scale, use_causal_mask=use_causal_mask, layout=layout, is_training=True)\n res = (query, key, value, outputs[3], outputs[0], params_from_keys(fp8_params, fp8_params_keys_bwd))\n return (outputs[0], outputs[1], outputs[2]), res\n\ndef _dot_product_attention_fp8_bwd_rule(\n scale, use_causal_mask, layout, cudnn_version, res, g):\n (query, key, value, activation, fwd_output, aux_params) = res\n grad_output = g[0]\n grads = _dot_product_attention_fp8_bwd_p_wrapper.bind(\n query,\n key,\n value,\n fwd_output,\n grad_output,\n activation,\n *aux_params,\n scale=scale,\n use_causal_mask=use_causal_mask,\n layout=layout,\n )\n\n fp8_params_grads = dict.fromkeys(fp8_params_keys)\n keys_to_grad_indices = ['amax_dQ', 'amax_dK', 'amax_dV', 'amax_dP']\n # grads structure: (dQ, dK, dV, amax_dq, amax_dk, amax_dv, amax_dp)\n for i, key in enumerate(keys_to_grad_indices, start=3):\n fp8_params_grads[key] = grads[i]\n\n return (grads[0], grads[1], grads[2], fp8_params_grads)\n\ndef _dot_product_attention_fp8_fwd_impl(\n query, key, value,\n descale_q, descale_k, descale_v, descale_s, scale_s, scale_o,\n scale, use_causal_mask, layout, is_training):\n outputs = _dot_product_attention_fp8_fwd_p.bind(\n query,\n key,\n value,\n descale_q,\n descale_k,\n descale_v,\n descale_s,\n scale_s,\n scale_o,\n scale=scale,\n use_causal_mask=use_causal_mask,\n layout=layout,\n is_training=is_training,\n )\n return outputs\n\ndef _dot_product_attention_fp8_bwd_impl(\n query, key, value, fwd_output, grad_output, activation,\n descale_q, descale_k, descale_v, descale_o, descale_dO, descale_s,\n descale_dP, scale_s, scale_dQ, scale_dK, scale_dV, scale_dP,\n scale, use_causal_mask, layout):\n grads = _dot_product_attention_fp8_bwd_p.bind(\n query, key, value, fwd_output, grad_output, activation,\n descale_q, descale_k, descale_v, descale_o, descale_dO, descale_s,\n descale_dP, scale_s, scale_dQ, scale_dK, scale_dV, scale_dP,\n scale=scale, use_causal_mask=use_causal_mask, layout=layout)\n return grads\n\n\ndef _dot_product_attention_fp8_fwd_abstract(\n query, key, value,\n descale_q, descale_k, descale_v, descale_s, scale_s, scale_o,\n scale, use_causal_mask, layout, is_training):\n query_dtype = dtypes.canonicalize_dtype(query.dtype)\n if layout == AttentionLayout.BNTH.value:\n B, N, T, _ = query.shape\n _, _, S, _ = key.shape\n else:\n B, T, N, _ = query.shape\n _, S, _, _ = key.shape\n output_shape = query.shape\n softmax_stat_shape = (B, N, T)\n\n # output, amax_s, amax_o[, softmax_stat]\n if is_training:\n return (\n core.ShapedArray(output_shape, 
query_dtype),\n core.ShapedArray((1,1,1,1), jnp.float32),\n core.ShapedArray((1,1,1,1), jnp.float32),\n core.ShapedArray(softmax_stat_shape, jnp.float32),\n )\n else:\n return (\n core.ShapedArray(output_shape, query_dtype),\n core.ShapedArray((1,1,1,1), jnp.float32),\n core.ShapedArray((1,1,1,1), jnp.float32),\n )\n\ndef _dot_product_attention_fp8_bwd_abstract(\n query, key, value, fwd_output, grad_output, activation,\n descale_q, descale_k, descale_v, descale_o, descale_dO, descale_s,\n descale_dP, scale_s, scale_dQ, scale_dK, scale_dV, scale_dP,\n scale, use_causal_mask, layout):\n query_dtype = dtypes.canonicalize_dtype(query.dtype)\n key_dtype = dtypes.canonicalize_dtype(key.dtype)\n value_dtype = dtypes.canonicalize_dtype(value.dtype)\n\n amax_shape = (1,1,1,1)\n\n return (\n core.ShapedArray(query.shape, query_dtype),\n core.ShapedArray(key.shape, key_dtype),\n core.ShapedArray(value.shape, value_dtype),\n core.ShapedArray(amax_shape, jnp.float32),\n core.ShapedArray(amax_shape, jnp.float32),\n core.ShapedArray(amax_shape, jnp.float32),\n core.ShapedArray(amax_shape, jnp.float32),\n )\n\ndef _dot_product_attention_fp8_fwd_cuda_lowering(\n ctx, query, key, value,\n descale_q, descale_k, descale_v, descale_s, scale_s, scale_o,\n scale, use_causal_mask, layout, is_training):\n query_type = ir.RankedTensorType(query.type)\n query_shape = query_type.shape\n key_type = ir.RankedTensorType(key.type)\n key_shape = key_type.shape\n\n if layout == AttentionLayout.BNTH.value:\n B, N, T, H = query_shape\n _, _, S, _ = key_shape\n output_layout = (3, 2, 1, 0)\n output_transpose_perm = mlir.dense_int_array((0, 1, 2, 3))\n else:\n B, T, N, H = query_shape\n _, S, _, _ = key_shape\n output_layout = (3, 1, 2, 0)\n output_transpose_perm = mlir.dense_int_array((0, 2, 1, 3))\n\n output_shape = (B, N, T, H)\n softmax_stat_shape = (B, N, T)\n workspace_shape = (0,)\n amax_shape = (1,1,1,1)\n workspace_type = ir.IntegerType.get_unsigned(8)\n mask_type = MaskType.CAUSAL if use_causal_mask else MaskType.NO_MASK\n backend_config = create_dot_product_attention_fp8_backend_config(\n B, N, T, S, ir.BF16Type.get(), # query_type.element_type,\n scale, mask_type, layout, is_bwd=False,\n )\n\n operands = [query, key, value, descale_q, descale_k, descale_v, descale_s, scale_s, scale_o]\n custom_call_name = get_fp8_custom_call_name(is_bwd=False)\n\n if is_training:\n result_types = [\n ir.RankedTensorType.get(output_shape, query_type.element_type),\n ir.RankedTensorType.get((1,1,1,1), ir.F32Type.get()),\n ir.RankedTensorType.get((1,1,1,1), ir.F32Type.get()),\n ir.RankedTensorType.get(softmax_stat_shape, ir.F32Type.get()),\n ir.RankedTensorType.get(workspace_shape, workspace_type),\n ]\n result_layouts = [output_layout] + default_layouts(amax_shape, amax_shape, softmax_stat_shape, workspace_shape)\n else:\n result_types = [\n ir.RankedTensorType.get(output_shape, query_type.element_type),\n ir.RankedTensorType.get((1,1,1,1), ir.F32Type.get()),\n ir.RankedTensorType.get((1,1,1,1), ir.F32Type.get()),\n ir.RankedTensorType.get(workspace_shape, workspace_type)\n ]\n result_layouts = [output_layout] + default_layouts(amax_shape, amax_shape, workspace_shape)\n\n operand_shapes = [ir.RankedTensorType(operand.type).shape for operand in operands[:3]]\n operand_shapes += [[1, 1, 1, 1]] * 6\n operand_layouts = default_layouts(*operand_shapes)\n out = mlir.custom_call(\n custom_call_name,\n result_types=result_types,\n operands=operands,\n backend_config=backend_config,\n operand_layouts=operand_layouts,\n 
result_layouts=result_layouts,\n )\n\n if is_training:\n return [hlo.transpose(out.results[0], output_transpose_perm), out.results[1], out.results[2], out.results[3]]\n else:\n return [hlo.transpose(out.results[0], output_transpose_perm), out.results[1], out.results[2]]\n\n\n\ndef _dot_product_attention_fp8_bwd_cuda_lowering(\n ctx, query, key, value, fwd_output, grad_output, activation,\n descale_q, descale_k, descale_v, descale_o, descale_dO, descale_s,\n descale_dP, scale_s, scale_dQ, scale_dK, scale_dV, scale_dP, scale,\n use_causal_mask, layout):\n query_type = ir.RankedTensorType(query.type)\n query_shape = query_type.shape\n key_type = ir.RankedTensorType(key.type)\n key_shape = key_type.shape\n value_type = ir.RankedTensorType(value.type)\n\n if layout == AttentionLayout.BNTH.value:\n B, q_N, T, H = query_shape\n _, k_N, S, _ = key_shape\n grad_layout = (3, 2, 1, 0)\n grad_transpose_perm = mlir.dense_int_array((0, 1, 2, 3))\n else:\n B, T, q_N, H = query_shape\n _, S, k_N, _ = key_shape\n grad_layout = (3, 1, 2, 0)\n grad_transpose_perm = mlir.dense_int_array((0, 2, 1, 3))\n\n workspace_shape = (0,)\n workspace_type = ir.IntegerType.get_unsigned(8)\n amax_shape = (1,1,1,1)\n\n grad_query_shape = (B, q_N, T, H)\n grad_key_shape = (B, k_N, S, H)\n grad_value_shape = (B, k_N, S, H)\n mask_type = MaskType.CAUSAL if use_causal_mask else MaskType.NO_MASK\n\n backend_config = create_dot_product_attention_fp8_backend_config(\n B, q_N, T, S, ir.BF16Type.get(),\n scale, mask_type, layout, is_bwd=True,\n )\n\n operands = [\n query,\n key,\n value,\n fwd_output,\n grad_output,\n activation,\n descale_q,\n descale_k,\n descale_v,\n descale_o,\n descale_dO,\n descale_s,\n descale_dP,\n scale_s,\n scale_dQ,\n scale_dK,\n scale_dV,\n scale_dP,\n ]\n\n custom_call_name = get_fp8_custom_call_name(is_bwd=True)\n\n result_types = [\n ir.RankedTensorType.get(grad_query_shape, query_type.element_type),\n ir.RankedTensorType.get(grad_key_shape, key_type.element_type),\n ir.RankedTensorType.get(grad_value_shape, value_type.element_type),\n ir.RankedTensorType.get(amax_shape, ir.F32Type.get()),\n ir.RankedTensorType.get(amax_shape, ir.F32Type.get()),\n ir.RankedTensorType.get(amax_shape, ir.F32Type.get()),\n ir.RankedTensorType.get(amax_shape, ir.F32Type.get()),\n ]\n result_layouts = [grad_layout, grad_layout, grad_layout] + default_layouts(amax_shape, amax_shape, amax_shape, amax_shape)\n\n result_types.append(ir.RankedTensorType.get(workspace_shape, workspace_type))\n result_layouts = result_layouts + default_layouts(workspace_shape)\n out = mlir.custom_call(\n custom_call_name,\n result_types=result_types,\n operands=operands,\n backend_config=backend_config,\n operand_layouts=default_layouts(\n *[ir.RankedTensorType(operand.type).shape for operand in operands]),\n result_layouts=result_layouts,\n )\n dqkv_amaxs = (hlo.transpose(out.results[0], grad_transpose_perm),\n hlo.transpose(out.results[1], grad_transpose_perm),\n hlo.transpose(out.results[2], grad_transpose_perm),\n out.results[3], out.results[4], out.results[5], out.results[6])\n # Only keep dQ, dK, dV, amax_dQ, amax_dK, amax_dV, amax_dP here\n return dqkv_amaxs\n\ndef _dot_product_attention_fp8_fwd_batcher(\n batched_args, batch_dims, *, scale, use_causal_mask, layout, is_training):\n _check_valid_batch_dims(batch_dims)\n query, key, value,\\n descale_q, descale_k, descale_v, descale_s, scale_s, scale_o, = batched_args\n query_bdim = batch_dims[0]\n if is_training:\n out_bdims = query_bdim, query_bdim\n else:\n out_bdims = (query_bdim,)\n\n 
if layout == AttentionLayout.BNTH.value:\n *Bs, N, T, _ = query.shape\n *_, _, S, _ = key.shape\n else:\n *Bs, T, N, _ = query.shape\n *_, S, _, _ = key.shape\n B = math.prod(Bs)\n\n # reshape to 4D shape\n query = jnp.reshape(query, (B,) + query.shape[-3:])\n key = jnp.reshape(key, (B,) + key.shape[-3:])\n value = jnp.reshape(value, (B,) + key.shape[-3:])\n\n outputs = _dot_product_attention_fp8_fwd_p_wrapper.bind(\n query, key, value, descale_q, descale_k, descale_v, descale_s, scale_s, scale_o,\n scale=scale, use_causal_mask=use_causal_mask, layout=layout, is_training=is_training)\n\n # reshape to original shape\n output, amax_s, amax_o = outputs[0], outputs[1], outputs[2]\n output = jnp.reshape(output, query.shape)\n if is_training:\n activation = outputs[3]\n activation = jnp.reshape(activation, (*Bs, N, T))\n return (output, amax_s, amax_o, activation), out_bdims\n else:\n return (output, amax_s, amax_o), out_bdims\n\ndef _dot_product_attention_fp8_bwd_batcher(\n batched_args, batch_dims, *, scale, use_causal_mask, layout):\n _check_valid_batch_dims(batch_dims)\n query, key, value, fwd_output, grad_output, activation,\\n descale_q, descale_k, descale_v, descale_o, descale_dO, descale_s, descale_dP,\\n scale_s, scale_dQ, scale_dK, scale_dV, scale_dP = batched_args\n query_bdim = batch_dims[0]\n out_bdims = query_bdim, query_bdim, query_bdim\n\n if layout == AttentionLayout.BNTH.value:\n *Bs, N, T, _ = query.shape\n *_, _, S, _ = key.shape\n else:\n *Bs, T, N, _ = query.shape\n *_, S, _, _ = key.shape\n B = math.prod(Bs)\n\n # reshape to 4D shape\n query = jnp.reshape(query, (B,) + query.shape[-3:])\n key = jnp.reshape(key, (B,) + key.shape[-3:])\n value = jnp.reshape(value, (B,) + key.shape[-3:])\n\n activation = jnp.reshape(activation, (B, N, T))\n fwd_output = jnp.reshape(fwd_output, (B,) + query.shape[-3:])\n grad_output = jnp.reshape(grad_output, (B,) + query.shape[-3:])\n\n grads = _dot_product_attention_fp8_bwd_p_wrapper.bind(\n query, key, value, fwd_output, grad_output, activation,\n descale_q, descale_k, descale_v, descale_o, descale_dO, descale_s, descale_dP, scale_s, scale_dQ, scale_dK, scale_dV, scale_dP,\n scale=scale, use_causal_mask=use_causal_mask, layout=layout,\n )\n\n grad_query, grad_key, grad_value = grads[:3]\n # reshape to original shape\n grad_query = jnp.reshape(grad_query, query.shape)\n grad_key = jnp.reshape(grad_key, key.shape)\n grad_value = jnp.reshape(grad_value, value.shape)\n\n return grads, out_bdims\n\ndef _infer_fp8_fwd_output_sharding(mesh, arg_shapes, is_training, layout):\n # Prepare variadic_args for the original function\n has_bias = False # Adjust as needed\n variadic_args = (has_bias, None) # Dummy value, adjust as necessary\n\n # Call the original function with the required parameters\n output_sharding = _infer_fwd_output_sharding(mesh, arg_shapes, variadic_args, is_training, layout)\n amax_sharding = NamedSharding(mesh, PartitionSpec())\n if is_training:\n out_sharding, activation_sharding = output_sharding[0], output_sharding[1]\n return [out_sharding, amax_sharding, amax_sharding, activation_sharding]\n return output_sharding + [amax_sharding, amax_sharding]\n\n_dot_product_attention_fp8_fwd_lower = custom_partitioning(\n _dot_product_attention_fp8_fwd_impl, static_argnums=(9, 10, 11, 12))\n\ndef _dot_product_attention_fp8_fwd_infer_sharding_from_operands(\n scale, use_causal_mask, layout, is_training,\n mesh, arg_shapes, result_shape):\n return _infer_fp8_fwd_output_sharding(mesh, arg_shapes, is_training, layout)\n\ndef 
_dot_product_attention_fp8_fwd_partition(\n scale, use_causal_mask, layout, is_training,\n mesh, arg_shapes, result_shape):\n # args sharding\n arg_shardings = tuple(arg_i.sharding for arg_i in arg_shapes)\n out_shardings = _infer_fp8_fwd_output_sharding(\n mesh, arg_shapes, is_training, layout)\n impl = functools.partial(\n _dot_product_attention_fp8_fwd_impl, scale=scale, use_causal_mask=use_causal_mask,\n layout=layout, is_training=is_training)\n return mesh, impl, out_shardings, arg_shardings\n\ndef _infer_fp8_bwd_output_sharding(mesh, arg_shapes, layout):\n # Prepare variadic_args for the original function\n has_bias = False # Adjust as needed\n has_dbias = False # Adjust as needed\n variadic_args = (has_bias, has_dbias) # Dummy value, adjust as necessary\n\n # Call the original function with the required parameters\n output_shardings = _infer_bwd_output_sharding(mesh, arg_shapes, layout, variadic_args)\n\n # Prepare amax_sharding\n amax_sharding = NamedSharding(mesh, PartitionSpec()) # Use a default spec or adjust as needed\n\n # Append amax_sharding for each output sharding\n out_shardings_with_amax = output_shardings + [amax_sharding] * 4\n\n return out_shardings_with_amax\n\n_dot_product_attention_fp8_bwd_lower = custom_partitioning(\n _dot_product_attention_fp8_bwd_impl, static_argnums=(18,19,20)\n)\n\ndef _dot_product_attention_fp8_bwd_infer_sharding_from_operands(\n scale, use_causal_mask, layout, mesh,\n arg_shapes, result_shape):\n return _infer_fp8_bwd_output_sharding(mesh, arg_shapes, layout)\n\ndef _dot_product_attention_fp8_bwd_partition(\n scale, use_causal_mask, layout, mesh,\n arg_shapes, result_shape):\n out_shardings = _infer_fp8_bwd_output_sharding(mesh, arg_shapes, layout)\n # args sharding\n arg_shardings = tuple(arg_i.sharding for arg_i in arg_shapes)\n impl = functools.partial(\n _dot_product_attention_fp8_bwd_impl, scale=scale,\n use_causal_mask=use_causal_mask, layout=layout\n )\n return mesh, impl, out_shardings, arg_shardings\n\n# Create dot_product_attention_fp8_fwd_p for forward operation.\n_dot_product_attention_fp8_fwd_p = core.Primitive(""dot_product_attention_fp8_fwd"")\n_dot_product_attention_fp8_fwd_p.multiple_results = True\n_dot_product_attention_fp8_fwd_p.def_impl(\n functools.partial(dispatch.apply_primitive, _dot_product_attention_fp8_fwd_p)\n)\n_dot_product_attention_fp8_fwd_p.def_abstract_eval(\n _dot_product_attention_fp8_fwd_abstract\n)\n\nmlir.register_lowering(\n _dot_product_attention_fp8_fwd_p,\n _dot_product_attention_fp8_fwd_cuda_lowering,\n platform=""cuda"",\n)\n\n_dot_product_attention_fp8_fwd_p_wrapper = core.Primitive(\n ""dot_product_attention_fp8_fwd_wrapper""\n)\n_dot_product_attention_fp8_fwd_p_wrapper.multiple_results = True\n_dot_product_attention_fp8_fwd_p_wrapper.def_impl(_dot_product_attention_fp8_fwd_impl)\n_dot_product_attention_fp8_fwd_p_wrapper.def_abstract_eval(\n _dot_product_attention_fp8_fwd_abstract\n)\n\n# Create dot_product_attention_bwd_p for backward operation.\n_dot_product_attention_fp8_bwd_p = core.Primitive(""dot_product_attention_fp8_bwd"")\n_dot_product_attention_fp8_bwd_p.multiple_results = True\n_dot_product_attention_fp8_bwd_p.def_impl(\n functools.partial(dispatch.apply_primitive, _dot_product_attention_fp8_bwd_p)\n)\n_dot_product_attention_fp8_bwd_p.def_abstract_eval(\n _dot_product_attention_fp8_bwd_abstract\n)\n\nmlir.register_lowering(\n _dot_product_attention_fp8_bwd_p,\n _dot_product_attention_fp8_bwd_cuda_lowering,\n platform=""cuda"",\n)\n\n_dot_product_attention_fp8_bwd_p_wrapper = 
core.Primitive(\n ""dot_product_attention_fp8_bwd_wrapper""\n)\n_dot_product_attention_fp8_bwd_p_wrapper.multiple_results = True\n_dot_product_attention_fp8_bwd_p_wrapper.def_impl(_dot_product_attention_fp8_bwd_impl)\n_dot_product_attention_fp8_bwd_p_wrapper.def_abstract_eval(\n _dot_product_attention_fp8_bwd_abstract\n)\n\nbatching.primitive_batchers[\n _dot_product_attention_fp8_fwd_p_wrapper\n] = _dot_product_attention_fp8_fwd_batcher\nbatching.primitive_batchers[\n _dot_product_attention_fp8_bwd_p_wrapper\n] = _dot_product_attention_fp8_bwd_batcher\n\n_dot_product_attention_fp8_fwd_lower.def_partition(\n infer_sharding_from_operands=_dot_product_attention_fp8_fwd_infer_sharding_from_operands,\n partition=_dot_product_attention_fp8_fwd_partition)\n\nmlir.register_lowering(_dot_product_attention_fp8_fwd_p_wrapper,\n mlir.lower_fun(_dot_product_attention_fp8_fwd_lower, multiple_results=True))\n\n_dot_product_attention_fp8_bwd_lower.def_partition(\n infer_sharding_from_operands=_dot_product_attention_fp8_bwd_infer_sharding_from_operands,\n partition=_dot_product_attention_fp8_bwd_partition)\n\nmlir.register_lowering(_dot_product_attention_fp8_bwd_p_wrapper,\n mlir.lower_fun(_dot_product_attention_fp8_bwd_lower, multiple_results=True))\n\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_fp8_fwd_p\n)\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_fp8_fwd_p_wrapper\n)\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_fp8_bwd_p\n)\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_fp8_bwd_p_wrapper\n)\n\n@functools.partial(jax.custom_vjp, nondiff_argnums=(4, 5, 6, 7))\ndef _dot_product_attention_fp8(query: Array,\n key: Array,\n value: Array,\n fp8_params: dict[str, Array],\n scale: float,\n use_causal_mask: bool,\n layout: int,\n cudnn_version: int):\n output, amax_s, amax_o = _dot_product_attention_fp8_fwd(\n query, key, value, params_from_keys(fp8_params, fp8_params_keys_fwd),\n scale, use_causal_mask, layout, cudnn_version\n )\n return output, amax_s, amax_o\n\n_dot_product_attention_fp8.defvjp(_dot_product_attention_fp8_fwd_rule, _dot_product_attention_fp8_bwd_rule)\n\ndef combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)\n\n if mask is not None:\n if mask.dtype == jnp.bool:\n large_negative_number = get_large_negative_number(dtype)\n mask = jnp.where(mask, jnp.asarray(0, dtype), large_negative_number)\n # reshape mask to have 4D shape\n mask = mask.reshape((1,) * (4 - len(mask.shape)) + mask.shape) # type: ignore[union-attr]\n\n # combine bias and mask\n if bias is None:\n bias = mask\n else:\n if mask is not None:\n # should be broadcast to same shape\n bias = bias + mask\n return bias\n\n# User interface\ndef paged_attention(\n query: Array,\n key: Array,\n value: Array,\n q_seqlen: Array,\n kv_seqlen: Array,\n page_table_k: Array,\n page_table_v: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n fp8_params: FP8Params | None = None,\n *,\n scale: float = 1.0,\n mask_type: MaskType = MaskType.NO_MASK,\n seed: int = 42,\n dropout_rate: float = 0.,\n qkv_layout: str = ""BTNH"",\n sliding_window_length: int | None = None,\n use_fp8: bool = False,\n return_residual: bool = False\n):\n """"""Computes paged attention described in https://arxiv.org/pdf/2309.06180.\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query 
(target)\n N = number of attention heads\n H = dimensions of each attention head.\n\n Args:\n query: Queries for attention calculation with a shape of BTNH or BNTH.\n key: Keys for attention calculation with a shape of\n [num_blocks, block_size, N, H] or [num_blocks, N, block_size, H] where\n num_blocks = B * Ceil(S / block_size).\n value: Values to be used in attention with a shape of\n [num_blocks, block_size, N, H] or [num_blocks, N, block_size, H] where\n num_blocks = B * Ceil(S / block_size).\n q_seqlen: Non padded sequence length of query with a shape of B.\n kv_seqlen: Non padded sequence length of key and value with a shape of B.\n page_table_k: page table for key of shape [B, 1, num_blocks_per_batch, 1]\n where num_blocks_per_batch = Ceil(S / block_size).\n page_table_v: page table for value of shape [B, 1, num_blocks_per_batch, 1]\n where num_blocks_per_batch = Ceil(S / block_size).\n bias: Bias to be added to logits with a shape of BNTS.\n mask: Mask used to filter out logits with a shape of BNTS.\n scale: Scale for the query.\n qkv_layout: Layout string, with supported formats being BTNH, BNTH, BSNH,\n BNSH.\n sliding_window_length: Window size to make attention only attend to each\n token's left local window (pos - sliding_window_length, pos] where `pos`\n is the index of each token. E.g., if sliding_window_length == 3 and the\n sequence is [0, 1, 2, 3, c, 4, 5], token `c` can attend to [4, 5, c].\n use_fp8: Whether to use FP8 attention mechanism.\n return_residual: Whether to return the logsumexp tensor of shape BTN\n or BNT to users. See section 3.1.1 in the FlashAttention-2 paper:\n https://arxiv.org/pdf/2307.08691 to find the definition of logsumexp.\n Returns:\n output: the same shape as the query.\n residual: the logsumexp tensor if return_residual=True. 
(non fp8)\n """"""\n cudnn_version = check_cudnn_version()\n layout = _normalize_layout(qkv_layout)\n if use_fp8:\n raise ValueError(""Paged attention doesn't support fp8 for now."")\n if has_padding(mask_type) and (q_seqlen is None or kv_seqlen is None):\n raise ValueError(""Require q_seqlen and kv_seqlen to generate padding mask."")\n if sliding_window_length is not None and sliding_window_length <= 0:\n raise ValueError(\n f""Require sliding_window_length > 0, got {sliding_window_length}."")\n\n bias = combine_bias_and_mask(bias, mask, query.dtype)\n # check if input shape and data type is compatiable\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, None, None,\n page_table_k, page_table_v, layout)\n has_bias = bias is not None\n has_dbias = has_bias and \\n should_export_dbias(bias.shape, query.shape, layout) # type: ignore[union-attr]\n variadic_args = (has_bias, has_dbias)\n\n _not_used = jnp.zeros(0, dtype=query.dtype)\n if bias is None:\n bias = _not_used\n\n output = _dot_product_attention(\n query, key, value, bias, q_seqlen, kv_seqlen, _not_used, _not_used,\n page_table_k, page_table_v, scale, seed, dropout_rate, variadic_args,\n mask_type, layout.value, sliding_window_length, cudnn_version,\n return_residual)\n return output\n\n\ndef dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n q_seqlen: Array | None = None,\n kv_seqlen: Array | None = None,\n q_offsets: Array | None = None,\n kv_offsets: Array | None = None,\n fp8_params: FP8Params | None = None,\n *,\n scale: float = 1.0,\n mask_type: MaskType = MaskType.NO_MASK,\n seed: int = 42,\n dropout_rate: float = 0.,\n qkv_layout: str = ""BTNH"",\n sliding_window_length: int | None = None,\n use_fp8: bool = False,\n return_residual: bool = False\n):\n """"""Computes dot-product attention given query (Q), key (K), and value (V).\n\n This function serves as the core operation for applying attention\n mechanisms as described in the paper [https://arxiv.org/abs/1706.03762].\n Initially, it determines the attention weights by processing Q and K,\n subsequently combining the outcomes using K. Throughout this function, we\n utilize the following uppercase letters to represent specific parameters of\n array:\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head.\n\n The supported layouts for Q, K, V are either BT(S)NH or BNT(S)H, and they must\n adhere to the same layout. The output layout remains consistent with Q,\n defaulting to BT(S)NH.\n\n Args:\n query: Queries for attention calculation with a shape of BTNH or BNTH.\n key: Keys for attention calculation with a shape of BSNH or BNSH.\n value: Values to be used in attention with a shape of BSNH or BNSH.\n bias: Bias to be added to logits with a shape of BNTS.\n mask: Mask used to filter out logits with a shape of BNTS.\n q_seqlen: Non padded sequence length of query with a shape of B.\n If q_offsets is set, q_seqlen should have shape [B,M] where M is the\n maximum number of segments per batch. For batch that has less segments\n than maximum segments, fill the padded entries with -1.\n kv_seqlen: Non padded sequence length of key and value with a shape of B.\n If kv_offsets is set, kv_seqlen should have shape [B,M] where M is the\n maximum number of segments per batch. 
For batch that has less segments\n than maximum segments, fill the padded entries with -1.\n q_offsets: offset of each segment packed in query with a shape of [B,M+1]\n where M is the maximum number of segments per batch. For batch that has\n less segments than maximum segments, fill the padded entries with -1.\n E.g, if 2 batches has 3 and 2 segments respectively, each segment has\n size 1, q_offsets = [[0,1,2,-1], [0,1,-1,-1]]. q_seqlen should be set\n to indicate the size of each segment.\n kv_offsets: offset of each segment packed in key with a shape of [B,M+1]\n where M is the maximum number of segments per batch. For batch that has\n less segments than maximum segments, fill the padded entries with -1.\n E.g, if 2 batches has 3 and 2 segments respectively, each segment has\n size 1, kv_offsets = [[0,1,2,-1], [0,1,-1,-1]]. kv_seqlen should be set\n to indicate the size of each segment.\n scale: Scale for the query.\n dropout_rate: Dropout rate.\n qkv_layout: Layout string, with supported formats being BTNH, BNTH, BSNH,\n BNSH.\n sliding_window_length: Window size to make attention only attend to each\n token's left local window (pos - sliding_window_length, pos] where `pos`\n is the index of each token. E.g., if sliding_window_length == 3 and the\n sequence is [0, 1, 2, 3, c, 4, 5], token `c` can attend to [4, 5, c].\n use_fp8: Whether to use FP8 attention mechanism.\n return_residual: Whether to return the logsumexp tensor of shape BTN\n or BNT to users. See section 3.1.1 in the FlashAttention-2 paper:\n https://arxiv.org/pdf/2307.08691 to find the definition of logsumexp.\n Returns:\n output: the same shape as the query.\n residual: the logsumexp tensor if return_residual=True. (non fp8)\n amax_s: amax of state. (fp8 only)\n amax_o: amax of output. 
(fp8 only)\n """"""\n # TODO(b/380898464): Check the compute capability, e.g., require GPU device,\n # in the kernel implementation (c++) code.\n cudnn_version = check_cudnn_version()\n layout = _normalize_layout(qkv_layout)\n\n if use_fp8:\n if fp8_params is None:\n raise ValueError(""fp8_params should not be None."")\n if mask_type not in (MaskType.NO_MASK, MaskType.CAUSAL):\n raise ValueError(""Only NO_MASK or CAUSAL masks are supported for fp8."")\n if not all(x is None for x in [bias, mask, q_seqlen, kv_seqlen]):\n raise ValueError(\n f""Expected 'None' for bias, mask, q_seqlen, and kv_seqlen, ""\n f""but got: bias={bias}, mask={mask}, q_seqlen={q_seqlen}, kv_seqlen={kv_seqlen}""\n )\n check_fp8_params(fp8_params)\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n None, None, layout)\n output, amax_s, amax_o = _dot_product_attention_fp8(\n query, key, value, fp8_params,\n scale, mask_type == MaskType.CAUSAL, layout.value, cudnn_version\n )\n return output, amax_s, amax_o\n else:\n if has_padding(mask_type) and (q_seqlen is None or kv_seqlen is None):\n raise ValueError(""Require q_seqlen and kv_seqlen to generate padding mask"")\n if sliding_window_length is not None and sliding_window_length <= 0:\n raise ValueError(\n f""Require sliding_window_length > 0, got {sliding_window_length}"")\n if q_offsets is not None and (q_seqlen is None or kv_seqlen is None):\n raise ValueError(""Require q_seqlen and kv_seqlen to use packed layout"")\n\n bias = combine_bias_and_mask(bias, mask, query.dtype)\n # check if input shape and data type is compatiable\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n None, None, layout)\n has_bias = bias is not None\n has_dbias = has_bias and \\n should_export_dbias(bias.shape, query.shape, layout) # type: ignore[union-attr]\n variadic_args = (has_bias, has_dbias)\n\n _not_used = jnp.zeros(0, dtype=query.dtype)\n if bias is None:\n bias = _not_used\n if q_seqlen is None:\n q_seqlen = _not_used\n if kv_seqlen is None:\n kv_seqlen = _not_used\n if q_offsets is None:\n q_offsets = _not_used\n if kv_offsets is None:\n kv_offsets = _not_used\n\n output = _dot_product_attention(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n _not_used, _not_used, scale, seed, dropout_rate, variadic_args,\n mask_type, layout.value, sliding_window_length, cudnn_version,\n return_residual)\n return output\n",python,tab +4584,14388451,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12107,0,"",python,selection_command +4585,14407693,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12106,0,"\n ",python,content +4586,14407853,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12113,0,"b",python,content +4587,14407853,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12114,0,"",python,selection_keyboard +4588,14407965,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12114,0,"r",python,content +4589,14407965,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12115,0,"",python,selection_keyboard 
+4590,14408023,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12115,0,"e",python,content +4591,14408023,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12116,0,"",python,selection_keyboard +4592,14408082,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12116,0,"a",python,content +4593,14408083,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12117,0,"",python,selection_keyboard +4594,14408137,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12117,0,"k",python,content +4595,14408137,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12118,0,"",python,selection_keyboard +4596,14408271,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12118,0,"p",python,content +4597,14408271,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12119,0,"",python,selection_keyboard +4598,14408349,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12119,0,"o",python,content +4599,14408350,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12120,0,"",python,selection_keyboard +4600,14408397,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12120,0,"i",python,content +4601,14408397,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12121,0,"",python,selection_keyboard +4602,14408432,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12121,0,"n",python,content +4603,14408432,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12122,0,"",python,selection_keyboard +4604,14408497,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12122,0,"t",python,content +4605,14408498,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12123,0,"",python,selection_keyboard +4606,14408773,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12123,0,"()",python,content +4607,14408774,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12124,0,"",python,selection_keyboard +4608,14408774,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12124,1,")",python,content +4609,14408774,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12125,0,"",python,selection_keyboard +4610,14408902,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12124,0,"",python,selection_command +4611,14410284,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +4612,14410335,"TERMINAL",0,0,"bash experiments/sample.sh 
",,terminal_output +4613,14410599,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4614,14422247,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +4615,14442502,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",10981,0,"",python,selection_command +4616,14442670,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",9752,0,"",python,selection_command +4617,14444108,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",9823,0,"",python,selection_command +4618,14444350,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",9891,0,"",python,selection_command +4619,14444381,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",9892,0,"",python,selection_command +4620,14444514,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",9938,0,"",python,selection_command +4621,14444644,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",10011,0,"",python,selection_command +4622,14444796,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",10013,0,"",python,selection_command +4623,14445014,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",10014,0,"",python,selection_command +4624,14445198,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",10077,0,"",python,selection_command +4625,14445365,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",10079,0,"",python,selection_command +4626,14445588,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",10016,0,"",python,selection_command +4627,14445815,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",10018,0,"",python,selection_command +4628,14456741,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",76984,12,"check_layout",python,selection_command +4629,14457054,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",76995,0,"",python,selection_command +4630,14465062,"TERMINAL",0,0,"2025-07-27 13:19:44.865084: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4631,14466598,"TERMINAL",0,0,"2025-07-27 13:19:46.400781: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4632,14470241,"TERMINAL",0,0,"2025-07-27 13:19:50.043637: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4633,14471030,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py(333)check_layout()\r\n-> raise ValueError(\r\n",,terminal_output +4634,14521883,"TERMINAL",0,0,"l",,terminal_output +4635,14522072,"TERMINAL",0,0,"\r\n(Pdb) 328 \t # check bias\r\n329 \t if bias is not None:\r\n330 \t _, _, bT, bS = bias.shape\r\n331 \t if bT != qT or bS != vS:\r\n332 \t breakpoint()\r\n333 ->\t raise ValueError(\r\n334 \t f""Bias must have same seq length as QKV, got {bT} and {bS}"")\r\n335 \t\r\n336 \t # check q_seqlen/kv_seqlen/q_offsets/kv_offsets\r\n337 \t expected_rank = 2 if q_offsets is not None else 1\r\n338 \t def check_seqlen_offsets(tensor, name):\r\n",,terminal_output +4636,14523121,"TERMINAL",0,0,"b",,terminal_output +4637,14523524,"TERMINAL",0,0,"T",,terminal_output +4638,14523730,"TERMINAL",0,0,"\r\n(Pdb) 1\r\n",,terminal_output +4639,14525718,"TERMINAL",0,0,"q",,terminal_output +4640,14526050,"TERMINAL",0,0,"T",,terminal_output +4641,14526254,"TERMINAL",0,0,"\r\n(Pdb) 4\r\n",,terminal_output +4642,14528153,"TERMINAL",0,0,"b",,terminal_output +4643,14528319,"TERMINAL",0,0,"S",,terminal_output +4644,14528532,"TERMINAL",0,0,"\r\n(Pdb) 1\r\n",,terminal_output +4645,14529747,"TERMINAL",0,0,"c",,terminal_output +4646,14530171,"TERMINAL",0,0," ",,terminal_output +4647,14530246,"TERMINAL",0,0,"v",,terminal_output +4648,14530399,"TERMINAL",0,0,"S",,terminal_output +4649,14531347,"TERMINAL",0,0,"\r\n(Pdb) 4\r\n",,terminal_output +4650,14551280,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12113,0,"",python,selection_command +4651,14551594,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12107,19,"",python,content +4652,14551626,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12113,0,"",python,selection_command +4653,14553113,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +4654,14556349,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",0,0,"",python,tab +4655,14566140,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +4656,15023058,"/fast/home/franz.srambical/jafar/utils/nn.py",10515,71," if bias is not None:\n # Handle bias tensor - it might need to be broadcasted to match the padded dimensions\n bias_4d = _rearrange(bias)\n # If bias has fewer dimensions than expected, pad it\n if bias_4d.shape[-2] != target_seq_len or bias_4d.shape[-1] != target_seq_len:\n 
bias_4d = _pad(bias_4d)\n else:\n bias_4d = None\n",python,content +4657,15029334,"/fast/home/franz.srambical/jafar/utils/nn.py",10544,333," # Handle bias tensor - ensure it has the correct shape for attention\n bias_4d = _rearrange(bias)\n # The bias should have shape (batch, target_seq, source_seq) for attention\n # If it doesn't match the padded dimensions, we need to handle it properly\n if len(bias_4d.shape) == 4: # (batch, target_seq, source_seq, heads)\n if bias_4d.shape[-3] != target_seq_len or bias_4d.shape[-2] != target_seq_len:\n bias_4d = _pad(bias_4d)\n else:\n # If bias has unexpected shape, skip it for now\n bias_4d = None\n",python,content +4658,15038018,"/fast/home/franz.srambical/jafar/utils/nn.py",10664,477," # For attention bias, we expect shape (batch, target_seq, source_seq, heads)\n # If the bias doesn't match the expected dimensions, we need to handle it\n if bias_4d.shape[-3] != target_seq_len or bias_4d.shape[-2] != target_seq_len:\n # If bias dimensions don't match, we might need to broadcast or skip\n # For now, let's skip the bias if dimensions don't match\n",python,content +4659,15074610,"utils/nn.py",0,0,"",python,tab +4660,15074612,"utils/nn.py",10515,0,"",python,selection_command +4661,15089018,"utils/nn.py",10515,644," bias_4d = _pad(_rearrange(bias)) if bias is not None else None",python,content +4662,15089752,"utils/nn.py",10584,0,"",python,selection_command +4663,15089990,"utils/nn.py",10514,0,"\n",python,content +4664,15090003,"utils/nn.py",10515,0," ",python,content +4665,15090901,"utils/nn.py",10515,8,"",python,content +4666,15091210,"TERMINAL",0,0,"salloc: Job 13997 has exceeded its time limit and its allocation has been revoked.\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\n[2025-07-27T13:30:11.016] error: *** STEP 13997.interactive ON hai003 CANCELLED AT 2025-07-27T13:30:11 DUE TO TIME LIMIT ***\r\n[2025-07-27T13:30:11.016] error: *** STEP 13997.7 ON hai003 CANCELLED AT 2025-07-27T13:30:11 DUE TO TIME LIMIT ***\r\n",,terminal_output +4667,15091820,"TERMINAL",0,0,"2025-07-27 13:30:11.623856: W external/xla/xla/tsl/distributed_runtime/preemption/preemption_notifier.cc:89] SIGTERM caught at 2025-07-27T13:30:11.623822692+02:00\r\n",,terminal_output +4668,15092325,"utils/nn.py",10515,1,"",python,content +4669,15092326,"utils/nn.py",10523,0,"",python,selection_command +4670,15097894,"utils/nn.py",10514,0,"\n",python,content +4671,15097906,"utils/nn.py",10515,0," ",python,content +4672,15098111,"utils/nn.py",10523,0,"i",python,content +4673,15098111,"utils/nn.py",10524,0,"",python,selection_keyboard +4674,15098155,"utils/nn.py",10524,0,"f",python,content +4675,15098156,"utils/nn.py",10525,0,"",python,selection_keyboard +4676,15098240,"utils/nn.py",10525,0," ",python,content +4677,15098240,"utils/nn.py",10526,0,"",python,selection_keyboard +4678,15098424,"utils/nn.py",10526,0,"q",python,content +4679,15098424,"utils/nn.py",10527,0,"",python,selection_keyboard +4680,15098551,"utils/nn.py",10527,0,"u",python,content +4681,15098552,"utils/nn.py",10528,0,"",python,selection_keyboard +4682,15098637,"utils/nn.py",10528,0,"e",python,content +4683,15098638,"utils/nn.py",10529,0,"",python,selection_keyboard +4684,15098700,"utils/nn.py",10529,0,"r",python,content +4685,15098700,"utils/nn.py",10530,0,"",python,selection_keyboard +4686,15098788,"utils/nn.py",10530,0,"y",python,content +4687,15098789,"utils/nn.py",10531,0,"",python,selection_keyboard 
+4688,15100131,"utils/nn.py",10530,0,"",python,selection_command +4689,15100597,"utils/nn.py",10515,16," ",python,content +4690,15100722,"utils/nn.py",10523,0,"b",python,content +4691,15100722,"utils/nn.py",10524,0,"",python,selection_keyboard +4692,15100801,"utils/nn.py",10524,0,"r",python,content +4693,15100801,"utils/nn.py",10525,0,"",python,selection_keyboard +4694,15100868,"utils/nn.py",10525,0,"e",python,content +4695,15100869,"utils/nn.py",10526,0,"",python,selection_keyboard +4696,15100900,"utils/nn.py",10526,0,"a",python,content +4697,15100900,"utils/nn.py",10527,0,"",python,selection_keyboard +4698,15101011,"utils/nn.py",10527,0,"k",python,content +4699,15101011,"utils/nn.py",10528,0,"",python,selection_keyboard +4700,15101137,"utils/nn.py",10528,0,"p",python,content +4701,15101138,"utils/nn.py",10529,0,"",python,selection_keyboard +4702,15101241,"utils/nn.py",10529,0,"o",python,content +4703,15101241,"utils/nn.py",10530,0,"",python,selection_keyboard +4704,15101254,"utils/nn.py",10530,0,"i",python,content +4705,15101254,"utils/nn.py",10531,0,"",python,selection_keyboard +4706,15101339,"utils/nn.py",10531,0,"n",python,content +4707,15101339,"utils/nn.py",10532,0,"",python,selection_keyboard +4708,15101445,"utils/nn.py",10532,0,"t",python,content +4709,15101446,"utils/nn.py",10533,0,"",python,selection_keyboard +4710,15101840,"utils/nn.py",10533,0,"()",python,content +4711,15101841,"utils/nn.py",10534,0,"",python,selection_keyboard +4712,15101857,"utils/nn.py",10534,1,")",python,content +4713,15101857,"utils/nn.py",10535,0,"",python,selection_keyboard +4714,15102054,"utils/nn.py",10534,0,"",python,selection_command +4715,15103246,"TERMINAL",0,0,"l",,terminal_output +4716,15103298,"TERMINAL",0,0,"^P",,terminal_output +4717,15104703,"TERMINAL",0,0,"   ",,terminal_output +4718,15105308,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=13997.7 task 0: running\r\n",,terminal_output +4719,15105887,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=13997.7\r\nsrun: forcing job termination\r\n",,terminal_output +4720,15106064,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=13997.7\r\nsrun: job abort in progress\r\n",,terminal_output +4721,15107562,"TERMINAL",0,0,"^C",,terminal_output +4722,15107725,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=13997.7\r\nsrun: job abort in progress\r\n",,terminal_output +4723,15107873,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=13997.7\r\n",,terminal_output +4724,15108077,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=13997.7\r\n",,terminal_output +4725,15108212,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=13997.7\r\nsrun: job abort in progress\r\n",,terminal_output +4726,15108421,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=13997.7\r\n",,terminal_output +4727,15108610,"TERMINAL",0,0,"^C",,terminal_output +4728,15109936,"TERMINAL",0,0,"bash",,terminal_focus +4729,15121847,"TERMINAL",0,0,"srun: error: hai003: task 0: Killed\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +4730,15123030,"TERMINAL",0,0,"salloc --gpus=1 --ntasks-per-node=1 --cpus-per-task=1 --mem=100G --time=01:00:00",,terminal_command +4731,15123090,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 13998\r\n",,terminal_output +4732,15123195,"TERMINAL",0,0,"salloc: Nodes hai007 are ready for job\r\n",,terminal_output +4733,15123558,"TERMINAL",0,0,"Running inside SLURM, Job ID 13998.\r\n",,terminal_output +4734,15123635,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ 
",,terminal_output +4735,15124168,"TERMINAL",0,0,"[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +4736,15125021,"TERMINAL",0,0,"b",,terminal_output +4737,15125085,"TERMINAL",0,0,"a",,terminal_output +4738,15125372,"TERMINAL",0,0,"",,terminal_output +4739,15125502,"TERMINAL",0,0,"",,terminal_output +4740,15125687,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +4741,15125904,"TERMINAL",0,0,"s': salloc --gpus=1 --ntasks-per-node=1 --cpus-per-task=1 --mem=100G ",,terminal_output +4742,15126048,"TERMINAL",0,0,"\ra': salloc --gpus=1 --ntasks-per-node=1 --cpus-per-task=1 --mem=100Gm': bash experiments/sample.sh \r\n\r[1@p': bash experiments/samp",,terminal_output +4743,15126132,"TERMINAL",0,0,"[1@l': bash experiments/sampl",,terminal_output +4744,15126413,"TERMINAL",0,0,"\r[23@[franz.srambical@hai007.haicore.berlin:~/jafar] $ bash experiments/sampl",,terminal_output +4745,15126560,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4746,15139534,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +4747,15144374,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/utils/nn.py(331)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +4748,15158134,"TERMINAL",0,0,"b",,terminal_output +4749,15158336,"TERMINAL",0,0,"i",,terminal_output +4750,15158534,"TERMINAL",0,0,"as",,terminal_output +4751,15158964,"TERMINAL",0,0,".",,terminal_output +4752,15159151,"TERMINAL",0,0,"s",,terminal_output +4753,15159237,"TERMINAL",0,0,"h",,terminal_output +4754,15159313,"TERMINAL",0,0,"a",,terminal_output +4755,15159397,"TERMINAL",0,0,"p",,terminal_output +4756,15159480,"TERMINAL",0,0,"e",,terminal_output +4757,15159580,"TERMINAL",0,0,"\r\n(Pdb) *** AttributeError: 'NoneType' object has no attribute 'shape'\r\n",,terminal_output +4758,15161338,"TERMINAL",0,0,"b",,terminal_output +4759,15161462,"TERMINAL",0,0,"i",,terminal_output +4760,15161566,"TERMINAL",0,0,"a",,terminal_output +4761,15161633,"TERMINAL",0,0,"s",,terminal_output +4762,15164538,"TERMINAL",0,0,"\r\n",,terminal_output +4763,15169167,"TERMINAL",0,0,"c",,terminal_output +4764,15169709,"TERMINAL",0,0,"\r\n(Pdb) (Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(331)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +4765,15170979,"TERMINAL",0,0,"b",,terminal_output +4766,15171116,"TERMINAL",0,0,"i",,terminal_output +4767,15171232,"TERMINAL",0,0,"as",,terminal_output +4768,15171516,"TERMINAL",0,0,".",,terminal_output +4769,15171876,"TERMINAL",0,0,"s",,terminal_output +4770,15171974,"TERMINAL",0,0,"h",,terminal_output +4771,15172040,"TERMINAL",0,0,"a",,terminal_output +4772,15172177,"TERMINAL",0,0,"p",,terminal_output +4773,15172295,"TERMINAL",0,0,"e",,terminal_output +4774,15172387,"TERMINAL",0,0,"\r\n(Pdb) *** AttributeError: 'NoneType' object has no attribute 'shape'\r\n",,terminal_output +4775,15173514,"TERMINAL",0,0,"c",,terminal_output +4776,15173616,"TERMINAL",0,0,"\r\n",,terminal_output +4777,15174545,"TERMINAL",0,0,"b",,terminal_output +4778,15174678,"TERMINAL",0,0,"i",,terminal_output 
+4779,15174793,"TERMINAL",0,0,"as",,terminal_output +4780,15174982,"TERMINAL",0,0,".",,terminal_output +4781,15175095,"TERMINAL",0,0,"s",,terminal_output +4782,15175197,"TERMINAL",0,0,"h",,terminal_output +4783,15175319,"TERMINAL",0,0,"a",,terminal_output +4784,15175461,"TERMINAL",0,0,"p",,terminal_output +4785,15175564,"TERMINAL",0,0,"e",,terminal_output +4786,15175699,"TERMINAL",0,0,"\r\n",,terminal_output +4787,15176221,"TERMINAL",0,0,"c",,terminal_output +4788,15176682,"TERMINAL",0,0,"\r\n",,terminal_output +4789,15176798,"TERMINAL",0,0,"(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(331)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n(Pdb) *** AttributeError: 'NoneType' object has no attribute 'shape'\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(331)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +4790,15178285,"TERMINAL",0,0,"q",,terminal_output +4791,15178475,"TERMINAL",0,0,"uit",,terminal_output +4792,15178730,"TERMINAL",0,0,"()",,terminal_output +4793,15178931,"TERMINAL",0,0,"\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 193, in \r\n action_batch = jasmine.vq_encode(batch, training=False)\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 391, in vq_encode\r\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\r\n File ""/fast/home/franz.srambical/jafar/models/lam.py"", line 133, in vq_encode\r\n z = self.encoder(padded_patches) # (B, T, N, E)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 231, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 134, in __call__\r\n z = self.temporal_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 331, in attention_fn\r\n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 331, in attention_fn\r\n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 90, in trace_dispatch\r\n return 
self.dispatch_line(frame)\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 115, in dispatch_line\r\n if self.quitting: raise BdbQuit\r\nbdb.BdbQuit\r\n",,terminal_output +4794,15180116,"TERMINAL",0,0,"(Pdb) srun: error: hai007: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +4795,15181179,"utils/nn.py",10514,0,"\n",python,content +4796,15181186,"utils/nn.py",10515,0," ",python,content +4797,15181504,"utils/nn.py",10523,0,"i",python,content +4798,15181504,"utils/nn.py",10524,0,"",python,selection_keyboard +4799,15181554,"utils/nn.py",10524,0,"f",python,content +4800,15181555,"utils/nn.py",10525,0,"",python,selection_keyboard +4801,15181623,"utils/nn.py",10525,0," ",python,content +4802,15181624,"utils/nn.py",10526,0,"",python,selection_keyboard +4803,15182756,"utils/nn.py",10526,0,"q",python,content +4804,15182757,"utils/nn.py",10527,0,"",python,selection_keyboard +4805,15182874,"utils/nn.py",10527,0,"u",python,content +4806,15182875,"utils/nn.py",10528,0,"",python,selection_keyboard +4807,15182906,"utils/nn.py",10528,0,"e",python,content +4808,15182906,"utils/nn.py",10529,0,"",python,selection_keyboard +4809,15182974,"utils/nn.py",10529,0,"r",python,content +4810,15182974,"utils/nn.py",10530,0,"",python,selection_keyboard +4811,15183102,"utils/nn.py",10530,0,"y",python,content +4812,15183103,"utils/nn.py",10531,0,"",python,selection_keyboard +4813,15183346,"utils/nn.py",10531,0,"_",python,content +4814,15183346,"utils/nn.py",10532,0,"",python,selection_keyboard +4815,15183710,"utils/nn.py",10531,1,"",python,content +4816,15183878,"utils/nn.py",10531,0,".",python,content +4817,15183879,"utils/nn.py",10532,0,"",python,selection_keyboard +4818,15183961,"utils/nn.py",10532,0,"s",python,content +4819,15183961,"utils/nn.py",10533,0,"",python,selection_keyboard +4820,15184076,"utils/nn.py",10533,0,"h",python,content +4821,15184077,"utils/nn.py",10534,0,"",python,selection_keyboard +4822,15184161,"utils/nn.py",10534,0,"a",python,content +4823,15184161,"utils/nn.py",10535,0,"",python,selection_keyboard +4824,15184230,"utils/nn.py",10535,0,"p",python,content +4825,15184230,"utils/nn.py",10536,0,"",python,selection_keyboard +4826,15184331,"utils/nn.py",10536,0,"e",python,content +4827,15184331,"utils/nn.py",10537,0,"",python,selection_keyboard +4828,15184915,"utils/nn.py",10538,0," ",python,content +4829,15184916,"utils/nn.py",10537,0," == (1, 921, 1, 8, 64):",python,content +4830,15185204,"utils/nn.py",10559,0,"",python,selection_command +4831,15185602,"utils/nn.py",10584,0,"",python,selection_command +4832,15187499,"TERMINAL",0,0,"[franz.srambical@hai007.haicore.berlin:~/jafar] $ bash experiments/sample.sh ",,terminal_output +4833,15187893,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4834,15199157,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +4835,15207195,"TERMINAL",0,0,"2025-07-27 13:32:07.000059: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4836,15208673,"TERMINAL",0,0,"2025-07-27 13:32:08.474714: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4837,15212162,"TERMINAL",0,0,"2025-07-27 13:32:11.958043: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4838,15212911,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/utils/nn.py(332)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +4839,15225114,"TERMINAL",0,0,"i",,terminal_output +4840,15225185,"TERMINAL",0,0,"a",,terminal_output +4841,15225292,"TERMINAL",0,0,"s",,terminal_output +4842,15225509,"TERMINAL",0,0," ",,terminal_output +4843,15225657,"TERMINAL",0,0," ",,terminal_output +4844,15225787,"TERMINAL",0,0," ",,terminal_output +4845,15226021,"TERMINAL",0,0,"bi",,terminal_output +4846,15226431,"TERMINAL",0,0,"a",,terminal_output +4847,15226490,"TERMINAL",0,0,"s",,terminal_output +4848,15226607,"TERMINAL",0,0,".",,terminal_output +4849,15226805,"TERMINAL",0,0,"s",,terminal_output +4850,15226889,"TERMINAL",0,0,"h",,terminal_output +4851,15227002,"TERMINAL",0,0,"a",,terminal_output +4852,15227083,"TERMINAL",0,0,"p",,terminal_output +4853,15227182,"TERMINAL",0,0,"e",,terminal_output +4854,15227288,"TERMINAL",0,0,"\r\n(Pdb) *** AttributeError: 'NoneType' object has no attribute 'shape'\r\n",,terminal_output +4855,15233272,"utils/nn.py",9554,0,"",python,selection_command +4856,15236618,"utils/nn.py",9511,0,"",python,selection_command +4857,15236854,"utils/nn.py",9474,0,"",python,selection_command +4858,15236881,"utils/nn.py",9473,0,"",python,selection_command +4859,15236907,"utils/nn.py",9402,0,"",python,selection_command +4860,15236940,"utils/nn.py",9381,0,"",python,selection_command +4861,15236975,"utils/nn.py",9380,0,"",python,selection_command +4862,15237008,"utils/nn.py",9313,0,"",python,selection_command +4863,15237047,"utils/nn.py",9286,0,"",python,selection_command +4864,15237075,"utils/nn.py",9285,0,"",python,selection_command +4865,15237109,"utils/nn.py",9219,0,"",python,selection_command +4866,15237147,"utils/nn.py",9192,0,"",python,selection_command +4867,15237389,"utils/nn.py",9144,0,"",python,selection_command +4868,15237508,"utils/nn.py",9152,0,"",python,selection_command +4869,15237701,"utils/nn.py",9154,0,"",python,selection_command +4870,15238067,"utils/nn.py",9157,0,"",python,selection_command +4871,15238456,"utils/nn.py",9499,0,"",python,selection_command +4872,15238922,"utils/nn.py",9538,0,"",python,selection_command +4873,15239425,"utils/nn.py",9742,0,"",python,selection_command 
+4874,15240912,"utils/nn.py",10526,0,"",python,selection_command +4875,15241366,"utils/nn.py",10790,0,"",python,selection_command +4876,15242427,"utils/nn.py",10526,0,"",python,selection_command +4877,15242715,"utils/nn.py",9742,0,"",python,selection_command +4878,15243459,"utils/nn.py",10526,0,"",python,selection_command +4879,15256860,"utils/nn.py",10515,45," if query.shape == (1, 921, 1, 8, 64):",python,selection_command +4880,15257257,"utils/nn.py",10515,70," if query.shape == (1, 921, 1, 8, 64):\n breakpoint()",python,selection_command +4881,15257377,"utils/nn.py",10515,71,"",python,content +4882,15257401,"utils/nn.py",10523,0,"",python,selection_command +4883,15258546,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=13998.1 task 0: running\r\n",,terminal_output +4884,15258670,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=13998.1\r\nsrun: forcing job termination\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\n[2025-07-27T13:32:58.476] error: *** STEP 13998.1 ON hai007 CANCELLED AT 2025-07-27T13:32:58 DUE to SIGNAL Killed ***\r\n",,terminal_output +4885,15258768,"TERMINAL",0,0,"(Pdb) ",,terminal_output +4886,15258844,"TERMINAL",0,0,"^L",,terminal_output +4887,15259031,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ [franz.srambical@hai007.haicore.berlin:~/jafar] $ [franz.srambical@hai007.haicore.berlin:~/jafar] $ bash experiments/sample.sh ",,terminal_output +4888,15259608,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4889,15260573,"utils/nn.py",10586,0,"",python,selection_command +4890,15260703,"utils/nn.py",10595,0,"",python,selection_command +4891,15260859,"utils/nn.py",10665,0,"",python,selection_command +4892,15261004,"utils/nn.py",10715,0,"",python,selection_command +4893,15261106,"utils/nn.py",10665,0,"",python,selection_command +4894,15261286,"utils/nn.py",10675,0,"",python,selection_command +4895,15261470,"utils/nn.py",10677,0,"",python,selection_command +4896,15261620,"utils/nn.py",10680,0,"",python,selection_command +4897,15261760,"utils/nn.py",10681,0,"",python,selection_command +4898,15261913,"utils/nn.py",10683,0,"",python,selection_command +4899,15262081,"utils/nn.py",10684,0,"",python,selection_command +4900,15263132,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"# Copyright 2019 The JAX Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n""""""Shared neural network activations and other functions.""""""\n\nfrom __future__ import annotations\n\nfrom collections.abc import Sequence\nfrom functools import partial\nimport operator\nimport math\nimport numpy as np\nfrom typing import Any, List, Literal\nimport warnings\n\nimport jax\nimport jax.numpy as jnp\nfrom jax import custom_jvp\nfrom jax import lax\nfrom jax._src import config\nfrom jax._src import core\nfrom jax._src import deprecations\nfrom jax._src import dtypes\nfrom jax._src import util\nfrom jax._src.core import AxisName\nfrom jax._src.sharding_impls 
import NamedSharding, PartitionSpec as P\nfrom jax._src.cudnn.fused_attention_stablehlo import (\n dot_product_attention as cudnn_dot_product_attention, MaskType)\nfrom jax._src.cudnn.scaled_matmul_stablehlo import (\n scaled_matmul_wrapper as cudnn_scaled_matmul,\n scaled_dot_general_wrapper as cudnn_scaled_dot_general,\n BlockScaleConfig)\nfrom jax._src.interpreters import batching\nfrom jax._src.interpreters import mlir\nfrom jax._src.numpy import util as numpy_util\nfrom jax._src.typing import Array, ArrayLike, DType, DTypeLike\nfrom jax._src.ops.special import logsumexp as _logsumexp\n\n\n# activations\n@jax.jit\ndef identity(x: ArrayLike) -> Array:\n r""""""Identity activation function.\n\n Returns the argument unmodified.\n\n Args:\n x : input array\n\n Returns:\n The argument `x` unmodified.\n\n Examples:\n >>> jax.nn.identity(jax.numpy.array([-2., -1., -0.5, 0, 0.5, 1., 2.]))\n Array([-2. , -1. , -0.5, 0. , 0.5, 1. , 2. ], dtype=float32)\n\n """"""\n numpy_util.check_arraylike(""identity"", x)\n return jnp.asarray(x)\n\n@custom_jvp\n@jax.jit\ndef relu(x: ArrayLike) -> Array:\n r""""""Rectified linear unit activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{relu}(x) = \max(x, 0)\n\n except under differentiation, we take:\n\n .. math::\n \nabla \mathrm{relu}(0) = 0\n\n For more information see\n `Numerical influence of ReLU’(0) on backpropagation\n `_.\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n Examples:\n >>> jax.nn.relu(jax.numpy.array([-2., -1., -0.5, 0, 0.5, 1., 2.]))\n Array([0. , 0. , 0. , 0. , 0.5, 1. , 2. ], dtype=float32)\n\n See also:\n :func:`relu6`\n\n """"""\n return jnp.maximum(x, 0)\n# For behavior at 0, see https://dl.acm.org/doi/10.5555/3540261.3540297\nrelu.defjvps(lambda g, ans, x: lax.select(x > 0, g, lax.full_like(g, 0)))\n\n@jax.jit\ndef squareplus(x: ArrayLike, b: ArrayLike = 4) -> Array:\n r""""""Squareplus activation function.\n\n Computes the element-wise function\n\n .. math::\n \mathrm{squareplus}(x) = \frac{x + \sqrt{x^2 + b}}{2}\n\n as described in https://arxiv.org/abs/2112.11687.\n\n Args:\n x : input array\n b : smoothness parameter\n """"""\n numpy_util.check_arraylike(""squareplus"", x)\n numpy_util.check_arraylike(""squareplus"", b)\n x = jnp.asarray(x)\n b = jnp.asarray(b)\n y = x + jnp.sqrt(jnp.square(x) + b)\n return y / 2\n\n@jax.jit\ndef softplus(x: ArrayLike) -> Array:\n r""""""Softplus activation function.\n\n Computes the element-wise function\n\n .. math::\n \mathrm{softplus}(x) = \log(1 + e^x)\n\n Args:\n x : input array\n """"""\n return jnp.logaddexp(x, 0)\n\n@jax.jit\ndef sparse_plus(x: ArrayLike) -> Array:\n r""""""Sparse plus function.\n\n Computes the function:\n\n .. math::\n\n \mathrm{sparse\_plus}(x) = \begin{cases}\n 0, & x \leq -1\\\n \frac{1}{4}(x+1)^2, & -1 < x < 1 \\\n x, & 1 \leq x\n \end{cases}\n\n This is the twin function of the softplus activation ensuring a zero output\n for inputs less than -1 and a linear output for inputs greater than 1,\n while remaining smooth, convex, monotonic by an adequate definition between\n -1 and 1.\n\n Args:\n x: input (float)\n """"""\n numpy_util.check_arraylike(""sparse_plus"", x)\n x = jnp.asarray(x)\n return jnp.where(x <= -1.0, 0.0, jnp.where(x >= 1.0, x, (x + 1.0)**2/4))\n\n@jax.jit\ndef soft_sign(x: ArrayLike) -> Array:\n r""""""Soft-sign activation function.\n\n Computes the element-wise function\n\n .. 
math::\n \mathrm{soft\_sign}(x) = \frac{x}{|x| + 1}\n\n Args:\n x : input array\n """"""\n numpy_util.check_arraylike(""soft_sign"", x)\n x_arr = jnp.asarray(x)\n return x_arr / (jnp.abs(x_arr) + 1)\n\n@partial(jax.jit, inline=True)\ndef sigmoid(x: ArrayLike) -> Array:\n r""""""Sigmoid activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{sigmoid}(x) = \frac{1}{1 + e^{-x}}\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`log_sigmoid`\n\n """"""\n return lax.logistic(x)\n\n@jax.jit\ndef sparse_sigmoid(x: ArrayLike) -> Array:\n r""""""Sparse sigmoid activation function.\n\n Computes the function:\n\n .. math::\n\n \mathrm{sparse\_sigmoid}(x) = \begin{cases}\n 0, & x \leq -1\\\n \frac{1}{2}(x+1), & -1 < x < 1 \\\n 1, & 1 \leq x\n \end{cases}\n\n This is the twin function of the ``sigmoid`` activation ensuring a zero output\n for inputs less than -1, a 1 output for inputs greater than 1, and a linear\n output for inputs between -1 and 1. It is the derivative of ``sparse_plus``.\n\n For more information, see `Learning with Fenchel-Young Losses (section 6.2)\n `_.\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`sigmoid`\n """"""\n return 0.5 * jnp.clip(x + 1.0, 0.0, 2.0)\n\n@jax.jit\ndef silu(x: ArrayLike) -> Array:\n r""""""SiLU (aka swish) activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{silu}(x) = x \cdot \mathrm{sigmoid}(x) = \frac{x}{1 + e^{-x}}\n\n :func:`swish` and :func:`silu` are both aliases for the same function.\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`sigmoid`\n """"""\n numpy_util.check_arraylike(""silu"", x)\n x_arr = jnp.asarray(x)\n return x_arr * sigmoid(x_arr)\n\nswish = silu\n\n@jax.jit\ndef mish(x: ArrayLike) -> Array:\n r""""""Mish activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{mish}(x) = x \cdot \mathrm{tanh}(\mathrm{softplus}(x))\n\n For more information, see\n `Mish: A Self Regularized Non-Monotonic Activation Function\n `_.\n\n Args:\n x : input array\n\n Returns:\n An array.\n """"""\n numpy_util.check_arraylike(""mish"", x)\n x_arr = jnp.asarray(x)\n return x_arr * jnp.tanh(softplus(x_arr))\n\n@jax.jit\ndef log_sigmoid(x: ArrayLike) -> Array:\n r""""""Log-sigmoid activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{log\_sigmoid}(x) = \log(\mathrm{sigmoid}(x)) = -\log(1 + e^{-x})\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`sigmoid`\n """"""\n numpy_util.check_arraylike(""log_sigmoid"", x)\n x_arr = jnp.asarray(x)\n return -softplus(-x_arr)\n\n@jax.jit\ndef elu(x: ArrayLike, alpha: ArrayLike = 1.0) -> Array:\n r""""""Exponential linear unit activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{elu}(x) = \begin{cases}\n x, & x > 0\\\n \alpha \left(\exp(x) - 1\right), & x \le 0\n \end{cases}\n\n Args:\n x : input array\n alpha : scalar or array of alpha values (default: 1.0)\n\n Returns:\n An array.\n\n See also:\n :func:`selu`\n """"""\n numpy_util.check_arraylike(""elu"", x)\n x_arr = jnp.asarray(x)\n return jnp.where(x_arr > 0,\n x_arr,\n alpha * jnp.expm1(jnp.where(x_arr > 0, 0., x_arr)))\n\n@jax.jit\ndef leaky_relu(x: ArrayLike, negative_slope: ArrayLike = 1e-2) -> Array:\n r""""""Leaky rectified linear unit activation function.\n\n Computes the element-wise function:\n\n .. 
math::\n \mathrm{leaky\_relu}(x) = \begin{cases}\n x, & x \ge 0\\\n \alpha x, & x < 0\n \end{cases}\n\n where :math:`\alpha` = :code:`negative_slope`.\n\n Args:\n x : input array\n negative_slope : array or scalar specifying the negative slope (default: 0.01)\n\n Returns:\n An array.\n\n See also:\n :func:`relu`\n """"""\n numpy_util.check_arraylike(""leaky_relu"", x)\n x_arr = jnp.asarray(x)\n return jnp.where(x_arr >= 0, x_arr, negative_slope * x_arr)\n\n@jax.jit\ndef hard_tanh(x: ArrayLike) -> Array:\n r""""""Hard :math:`\mathrm{tanh}` activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{hard\_tanh}(x) = \begin{cases}\n -1, & x < -1\\\n x, & -1 \le x \le 1\\\n 1, & 1 < x\n \end{cases}\n\n Args:\n x : input array\n\n Returns:\n An array.\n """"""\n numpy_util.check_arraylike(""hard_tanh"", x)\n x_arr = jnp.asarray(x)\n return jnp.where(x_arr > 1, 1, jnp.where(x_arr < -1, -1, x_arr))\n\n@jax.jit\ndef celu(x: ArrayLike, alpha: ArrayLike = 1.0) -> Array:\n r""""""Continuously-differentiable exponential linear unit activation.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{celu}(x) = \begin{cases}\n x, & x > 0\\\n \alpha \left(\exp(\frac{x}{\alpha}) - 1\right), & x \le 0\n \end{cases}\n\n For more information, see\n `Continuously Differentiable Exponential Linear Units\n `_.\n\n Args:\n x : input array\n alpha : array or scalar (default: 1.0)\n\n Returns:\n An array.\n """"""\n return jnp.maximum(x, 0.0) + alpha * jnp.expm1(jnp.minimum(x, 0.0) / alpha)\n\n@jax.jit\ndef selu(x: ArrayLike) -> Array:\n r""""""Scaled exponential linear unit activation.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{selu}(x) = \lambda \begin{cases}\n x, & x > 0\\\n \alpha e^x - \alpha, & x \le 0\n \end{cases}\n\n where :math:`\lambda = 1.0507009873554804934193349852946` and\n :math:`\alpha = 1.6732632423543772848170429916717`.\n\n For more information, see\n `Self-Normalizing Neural Networks\n `_.\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`elu`\n """"""\n alpha = 1.6732632423543772848170429916717\n scale = 1.0507009873554804934193349852946\n return scale * elu(x, alpha)\n\n# TODO(phawkins): this jit was found to change numerics in a test. Debug this.\n# @partial(jax.jit, static_argnames=(""approximate"",))\ndef gelu(x: ArrayLike, approximate: bool = True) -> Array:\n r""""""Gaussian error linear unit activation function.\n\n If ``approximate=False``, computes the element-wise function:\n\n .. math::\n \mathrm{gelu}(x) = \frac{x}{2} \left(\mathrm{erfc} \left(\n \frac{-x}{\sqrt{2}} \right) \right)\n\n If ``approximate=True``, uses the approximate formulation of GELU:\n\n .. 
math::\n \mathrm{gelu}(x) = \frac{x}{2} \left(1 + \mathrm{tanh} \left(\n \sqrt{\frac{2}{\pi}} \left(x + 0.044715 x^3 \right) \right) \right)\n\n For more information, see `Gaussian Error Linear Units (GELUs)\n `_, section 2.\n\n Args:\n x: input array\n approximate: whether to use the approximate or exact formulation.\n """"""\n [x_arr] = numpy_util.promote_args_inexact(""gelu"", x)\n\n if approximate:\n sqrt_2_over_pi = np.sqrt(2 / np.pi).astype(x_arr.dtype)\n cdf = 0.5 * (1.0 + jnp.tanh(sqrt_2_over_pi * (x_arr + 0.044715 * (x_arr ** 3))))\n return x_arr * cdf\n else:\n sqrt_half = np.sqrt(0.5).astype(x_arr.dtype)\n return jnp.array(\n 0.5 * x_arr * (lax.erfc(-x_arr * sqrt_half)), dtype=x_arr.dtype\n )\n\n@partial(jax.jit, static_argnames=(""axis"",))\ndef glu(x: ArrayLike, axis: int = -1) -> Array:\n r""""""Gated linear unit activation function.\n\n Computes the function:\n\n .. math::\n \mathrm{glu}(x) = x\left[\ldots, 0:\frac{n}{2}, \ldots\right] \cdot\n \mathrm{sigmoid} \left( x\left[\ldots, \frac{n}{2}:n, \ldots\right]\n \right)\n\n where the array is split into two along ``axis``. The size of the ``axis``\n dimension must be divisible by two.\n\n Args:\n x : input array\n axis: the axis along which the split should be computed (default: -1)\n\n Returns:\n An array.\n\n See also:\n :func:`sigmoid`\n """"""\n numpy_util.check_arraylike(""glu"", x)\n x_arr = jnp.asarray(x)\n size = x_arr.shape[axis]\n assert size % 2 == 0, ""axis size must be divisible by 2""\n x1, x2 = jnp.split(x_arr, 2, axis)\n return x1 * sigmoid(x2)\n\n# other functions\n\nlogsumexp = _logsumexp\n\n\n@partial(jax.jit, static_argnames=(""axis"",))\ndef log_softmax(x: ArrayLike,\n axis: int | tuple[int, ...] | None = -1,\n where: ArrayLike | None = None) -> Array:\n r""""""Log-Softmax function.\n\n Computes the logarithm of the :code:`softmax` function, which rescales\n elements to the range :math:`[-\infty, 0)`.\n\n .. math ::\n \mathrm{log\_softmax}(x)_i = \log \left( \frac{\exp(x_i)}{\sum_j \exp(x_j)}\n \right)\n\n Args:\n x : input array\n axis: the axis or axes along which the :code:`log_softmax` should be\n computed. Either an integer or a tuple of integers.\n where: Elements to include in the :code:`log_softmax`.\n\n Returns:\n An array.\n\n Note:\n If any input values are ``+inf``, the result will be all ``NaN``: this reflects the\n fact that ``inf / inf`` is not well-defined in the context of floating-point math.\n\n See also:\n :func:`softmax`\n """"""\n numpy_util.check_arraylike(""log_softmax"", x)\n x_arr = jnp.asarray(x)\n x_max = jnp.max(x_arr, axis, where=where, initial=-jnp.inf, keepdims=True)\n x_safe = x_arr if where is None else jnp.where(where, x_arr, -jnp.inf)\n shifted = x_safe - lax.stop_gradient(x_max)\n shifted_logsumexp = jnp.log(\n jnp.sum(jnp.exp(shifted), axis, where=where, keepdims=True))\n result = shifted - shifted_logsumexp\n if where is not None:\n return jnp.where(where, result, -jnp.inf)\n return result\n\n\n# TODO(phawkins): this jit was found to change numerics in a test. Debug this.\n# @partial(jax.jit, static_argnames=(""axis"",))\ndef softmax(x: ArrayLike,\n axis: int | tuple[int, ...] | None = -1,\n where: ArrayLike | None = None) -> Array:\n r""""""Softmax function.\n\n Computes the function which rescales elements to the range :math:`[0, 1]`\n such that the elements along :code:`axis` sum to :math:`1`.\n\n .. math ::\n \mathrm{softmax}(x) = \frac{\exp(x_i)}{\sum_j \exp(x_j)}\n\n Args:\n x : input array\n axis: the axis or axes along which the softmax should be computed. 
The\n softmax output summed across these dimensions should sum to :math:`1`.\n Either an integer or a tuple of integers.\n where: Elements to include in the :code:`softmax`.\n\n Returns:\n An array.\n\n Note:\n If any input values are ``+inf``, the result will be all ``NaN``: this reflects the\n fact that ``inf / inf`` is not well-defined in the context of floating-point math.\n\n See also:\n :func:`log_softmax`\n """"""\n if config.softmax_custom_jvp.value:\n # mypy is confused by the `functools.partial` application in the definition\n # of `_softmax` and incorrectly concludes that `_softmax` returns\n # `ReturnValue` -- the unsubstituted type parameter of `custom_jvp`.\n return _softmax(x, axis, where)\n else:\n return _softmax_deprecated(x, axis, where)\n\n# TODO(mattjj): replace softmax with _softmax when deprecation flag is removed\n@partial(jax.custom_jvp, nondiff_argnums=(1,))\ndef _softmax(\n x: ArrayLike,\n axis: int | tuple[int, ...] | None = -1,\n where: ArrayLike | None = None,\n initial: ArrayLike | None = -jnp.inf) -> Array:\n x_max = jnp.max(x, axis, where=where, initial=initial, keepdims=True)\n x_safe = x if where is None else jnp.where(where, x, initial)\n unnormalized = jnp.exp(x_safe - x_max)\n result = unnormalized / jnp.sum(unnormalized, axis, where=where, keepdims=True)\n if where is not None:\n result = jnp.where(where, result, 0)\n return result\n\n@_softmax.defjvp\ndef _softmax_jvp(axis, primals, tangents):\n (x, where, initial), (x_dot, _, _) = primals, tangents\n y = _softmax(x, axis, where, initial)\n return y, y * (x_dot - (y * x_dot).sum(axis, where=where, keepdims=True))\n\ndef _softmax_deprecated(\n x: ArrayLike,\n axis: int | tuple[int, ...] | None = -1,\n where: ArrayLike | None = None,\n initial: ArrayLike | None = -jnp.inf) -> Array:\n x_max = jnp.max(x, axis, where=where, initial=initial, keepdims=True)\n x_safe = x if where is None else jnp.where(where, x, initial)\n unnormalized = jnp.exp(x_safe - lax.stop_gradient(x_max))\n result = unnormalized / jnp.sum(unnormalized, axis, where=where, keepdims=True)\n if where is not None:\n result = jnp.where(where, result, 0)\n return result\n\n\n@partial(jax.jit, static_argnames=(""axis"",))\ndef standardize(x: ArrayLike,\n axis: int | tuple[int, ...] | None = -1,\n mean: ArrayLike | None = None,\n variance: ArrayLike | None = None,\n epsilon: ArrayLike = 1e-5,\n where: ArrayLike | None = None) -> Array:\n r""""""Standardizes input to zero mean and unit variance.\n\n The standardization is given by:\n\n .. math::\n\n x_{std} = \frac{x - \langle x\rangle}{\sqrt{\langle(x - \langle x\rangle)^2\rangle + \epsilon}}\n\n where :math:`\langle x\rangle` indicates the mean of :math:`x`, and :math:`\epsilon` is\n a small correction factor introduced to avoid division by zero.\n\n Args:\n x: input array to be standardized.\n axis: integer or tuple of integers representing the axes along which\n to standardize. Defaults to the last axis (``-1``).\n mean: optionally specify the mean used for standardization. If not specified,\n then ``x.mean(axis, where=where)`` will be used.\n variance: optionally specify the variance used for standardization. 
If not\n specified, then ``x.var(axis, where=where)`` will be used.\n epsilon: correction factor added to variance to avoid division by zero; defaults\n to ``1E-5``.\n where: optional boolean mask specifying which elements to use when computing\n the mean and variance.\n\n Returns:\n An array of the same shape as ``x`` containing the standardized input.\n """"""\n numpy_util.check_arraylike(""standardize"", x)\n numpy_util.check_arraylike_or_none(""standardize"", mean, variance, where)\n if mean is None:\n mean = jnp.mean(x, axis, keepdims=True, where=where)\n if variance is None:\n # this definition is traditionally seen as less accurate than jnp.var's\n # mean((x - mean(x))**2) but may be faster and even, given typical\n # activation distributions and low-precision arithmetic, more accurate\n # when used in neural network normalization layers\n variance = jnp.mean(\n jnp.square(x), axis, keepdims=True, where=where) - jnp.square(mean)\n return jnp.subtract(x, jnp.asarray(mean)) * lax.rsqrt(jnp.asarray(variance) + epsilon)\n\n# TODO(slebedev): Change the type of `x` to `ArrayLike`.\n@partial(jax.jit, static_argnames=(""num_classes"", ""dtype"", ""axis""))\ndef _one_hot(x: Array, num_classes: int, *,\n dtype: Any, axis: int | AxisName) -> Array:\n num_classes = core.concrete_dim_or_error(\n num_classes,\n ""The error arose in jax.nn.one_hot argument `num_classes`."")\n dtype = dtypes.canonicalize_dtype(dtype)\n try:\n output_pos_axis = util.canonicalize_axis(axis, x.ndim + 1) # type: ignore[arg-type]\n except TypeError:\n axis_size = lax.axis_size(axis)\n if num_classes != axis_size:\n raise ValueError(f""Expected num_classes to match the size of axis {axis}, ""\n f""but {num_classes} != {axis_size}"") from None\n axis_idx = lax.axis_index(axis)\n return jnp.asarray(_dot_product_attention_xla == axis_idx, dtype=dtype)\n axis = operator.index(axis) # type: ignore[arg-type]\n lhs = lax.expand_dims(x, (axis,))\n rhs_shape = [1] * x.ndim\n rhs_shape.insert(output_pos_axis, num_classes)\n # TODO(yashkatariya): Maybe expose `out_sharding` on `one_hot` too?\n rhs_sharding = NamedSharding(x.aval.sharding.mesh, P(*[None] * len(rhs_shape))) # pytype: disable=attribute-error\n rhs = lax.broadcasted_iota(x.dtype, rhs_shape, output_pos_axis,\n out_sharding=rhs_sharding)\n return (lhs == rhs).astype(dtype)\n\n# TODO(slebedev): Change the type of `x` to `ArrayLike`.\ndef one_hot(x: Any, num_classes: int, *,\n dtype: Any = jnp.float_, axis: int | AxisName = -1) -> Array:\n """"""One-hot encodes the given indices.\n\n Each index in the input ``x`` is encoded as a vector of zeros of length\n ``num_classes`` with the element at ``index`` set to one::\n\n >>> jax.nn.one_hot(jnp.array([0, 1, 2]), 3)\n Array([[1., 0., 0.],\n [0., 1., 0.],\n [0., 0., 1.]], dtype=float32)\n\n Indices outside the range [0, num_classes) will be encoded as zeros::\n\n >>> jax.nn.one_hot(jnp.array([-1, 3]), 3)\n Array([[0., 0., 0.],\n [0., 0., 0.]], dtype=float32)\n\n Args:\n x: A tensor of indices.\n num_classes: Number of classes in the one-hot dimension.\n dtype: optional, a float dtype for the returned values (default :obj:`jnp.float_`).\n axis: the axis or axes along which the function should be\n computed.\n """"""\n num_classes = core.concrete_dim_or_error(\n num_classes,\n ""The error arose in jax.nn.one_hot argument `num_classes`."")\n x_arr = jnp.asarray(x)\n if not jnp.isdtype(x_arr.dtype, ""integral""):\n # Deprecated 2024-12-18\n deprecations.warn(\n 'jax-nn-one-hot-float-input',\n f""jax.nn.one_hot input should be 
integer-typed; got dtype={x_arr.dtype}"",\n stacklevel=1)\n return _one_hot(x_arr, num_classes, dtype=dtype, axis=axis)\n\n\n@jax.custom_jvp\n@jax.jit\ndef relu6(x: ArrayLike) -> Array:\n r""""""Rectified Linear Unit 6 activation function.\n\n Computes the element-wise function\n\n .. math::\n \mathrm{relu6}(x) = \min(\max(x, 0), 6)\n\n except under differentiation, we take:\n\n .. math::\n \nabla \mathrm{relu}(0) = 0\n\n and\n\n .. math::\n \nabla \mathrm{relu}(6) = 0\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`relu`\n """"""\n return jnp.minimum(jnp.maximum(x, 0), 6.)\nrelu6.defjvps(lambda g, ans, x:\n lax.select((x > 0) & (x < 6), g, lax.full_like(g, 0)))\n\n@jax.jit\ndef hard_sigmoid(x: ArrayLike) -> Array:\n r""""""Hard Sigmoid activation function.\n\n Computes the element-wise function\n\n .. math::\n \mathrm{hard\_sigmoid}(x) = \frac{\mathrm{relu6}(x + 3)}{6}\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`relu6`\n """"""\n return relu6(x + 3.) / 6.\n\n@jax.jit\ndef hard_silu(x: ArrayLike) -> Array:\n r""""""Hard SiLU (swish) activation function\n\n Computes the element-wise function\n\n .. math::\n \mathrm{hard\_silu}(x) = x \cdot \mathrm{hard\_sigmoid}(x)\n\n Both :func:`hard_silu` and :func:`hard_swish` are aliases for the same\n function.\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`hard_sigmoid`\n """"""\n numpy_util.check_arraylike(""hard_silu"", x)\n x_arr = jnp.asarray(x)\n return x_arr * hard_sigmoid(x_arr)\n\nhard_swish = hard_silu\n\ndef _get_large_negative(dtype):\n dtype_max = jnp.finfo(dtype).max\n return jnp.asarray(-0.7 * dtype_max, dtype=dtype)\n\ndef _get_causal_mask(T, S):\n mask = jnp.tril(jnp.ones((T, S), dtype=jnp.bool_))\n return mask[None, None, :, :]\n\ndef _get_window_mask(T: int, S: int, local_window_size: tuple[int, int]):\n query_pos = jnp.array(range(T))\n key_pos = jnp.array(range(S))\n left_window, right_window = local_window_size\n left_mask = query_pos[..., None] <= key_pos[..., None, :] + left_window\n right_mask = query_pos[..., None] >= key_pos[..., None, :] - right_window\n return jnp.logical_and(right_mask, left_mask)[None, None, :, :]\n\ndef _get_padding_mask_logits(T, S, q_seqlen, kv_seqlen):\n q_mask = True\n kv_mask = True\n if q_seqlen is not None:\n q_indices = jnp.arange(0, T)[None, :, None]\n q_mask = q_indices < q_seqlen[:, None, None]\n if kv_seqlen is not None:\n kv_indices = jnp.arange(0, S)[None, None, :]\n kv_mask = kv_indices < kv_seqlen[:, None, None]\n mask = jnp.logical_and(q_mask, kv_mask)\n return mask[:, None, :, :]\n\ndef _get_padding_mask_encoded(T, q_seqlen):\n q_indices = jnp.arange(0, T)[None, :]\n mask = q_indices < q_seqlen[:, None]\n return mask[:, :, None, None]\n\ndef _apply_masks(logits, mask, is_causal, q_seqlen, kv_seqlen,\n local_window_size):\n if mask is None and not is_causal and q_seqlen is None and kv_seqlen is None:\n return logits\n\n combined_mask = jnp.ones_like(logits, dtype=jnp.bool_)\n if mask is not None:\n assert mask.dtype == jnp.bool_\n combined_mask = jnp.logical_and(combined_mask, mask)\n\n T, S = logits.shape[2], logits.shape[3]\n\n if is_causal:\n mask = _get_causal_mask(T, S)\n combined_mask = jnp.logical_and(combined_mask, mask)\n\n if local_window_size is not None:\n mask = _get_window_mask(T, S, local_window_size)\n combined_mask = jnp.logical_and(combined_mask, mask)\n\n if q_seqlen is not None or kv_seqlen is not None:\n mask = _get_padding_mask_logits(T, S, q_seqlen, kv_seqlen)\n combined_mask 
= jnp.logical_and(combined_mask, mask)\n\n large_negative_number = _get_large_negative(logits.dtype)\n padded_logits = jnp.where(combined_mask, logits, large_negative_number)\n return padded_logits\n\ndef _dot_product_attention_core(query, key, value, bias, mask, is_causal,\n scale, q_seqlen, kv_seqlen, local_window_size):\n logits_dtype = jnp.promote_types(query.dtype, jnp.float32)\n\n # If the query and logits dtypes are different, then the default precision\n # can use inconsistent types in the backwards pass\n # (see https://github.com/jax-ml/jax/issues/24047).\n if query.dtype == jnp.bfloat16:\n precision = jax.lax.DotAlgorithmPreset.BF16_BF16_F32\n elif query.dtype == jnp.float16:\n precision = jax.lax.DotAlgorithmPreset.F16_F16_F32\n # TODO(sbodenstein): Implement this fix for all dtypes.\n else:\n precision = None\n\n # Explicit precision will fail on platforms that don't support it. For example,\n # some GPUs do not support BF16_BF16_F32, and TPU does not support F16_F16_F32.\n # Use the default precision as a fallback in these cases.\n try:\n logits = jnp.einsum(\n ""BTNH,BSNH->BNTS"",\n query,\n key,\n precision=precision,\n preferred_element_type=logits_dtype,\n )\n except: # pylint: disable=bare-except\n logits = jnp.einsum(\n ""BTNH,BSNH->BNTS"",\n query,\n key,\n precision=None,\n preferred_element_type=logits_dtype,\n )\n\n logits *= jnp.array(scale, dtype=logits.dtype)\n\n if bias is not None:\n logits = (logits + bias).astype(logits.dtype)\n\n padded_logits = _apply_masks(logits, mask, is_causal, q_seqlen, kv_seqlen,\n local_window_size)\n\n # Softmax and it is always carried out in fp32.\n padded_logits = padded_logits.astype(jnp.float32)\n probs = jax.nn.softmax(padded_logits, axis=-1).astype(key.dtype)\n\n encoded = jnp.einsum('BNTS,BSNH->BTNH', probs, value)\n if q_seqlen is not None and kv_seqlen is not None:\n mask = _get_padding_mask_encoded(encoded.shape[1], q_seqlen)\n encoded *= mask.astype(encoded.dtype)\n return encoded\n\ndef _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when 
the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n\nbias_fwd_p = core.Primitive('bias_fwd')\nbias_fwd_p.multiple_results = False\nbias_bwd_p = core.Primitive('bias_bwd')\nbias_bwd_p.multiple_results = False\n\ndef bias_fwd_impl(a, query_head_num):\n return a\ndef bias_bwd_impl(g, a, query_head_num):\n return g\nbias_fwd_p.def_impl(bias_fwd_impl)\nbias_bwd_p.def_impl(bias_bwd_impl)\n\ndef bias_fwd_abstract_eval(a, query_head_num):\n return core.ShapedArray(a.shape, a.dtype)\ndef bias_bwd_abstract_eval(g, a, query_head_num):\n return core.ShapedArray(g.shape, g.dtype)\nbias_fwd_p.def_abstract_eval(bias_fwd_abstract_eval)\nbias_bwd_p.def_abstract_eval(bias_bwd_abstract_eval)\n\ndef bias_fwd_lowering(ctx, a, query_head_num):\n return [a]\ndef bias_bwd_lowering(ctx, g, a, query_head_num):\n return [g]\nmlir.register_lowering(bias_fwd_p, bias_fwd_lowering)\nmlir.register_lowering(bias_bwd_p, bias_bwd_lowering)\n\ndef bias_fwd_batch_rule(batched_args, batch_dims):\n x, query_head_num = batched_args\n a = batch_dims[0]\n output, _ = bias_fwd_rule(x, query_head_num)\n return output, a\ndef bias_bwd_batch_rule(batched_args, batch_dims):\n g, x, query_head_num = batched_args\n b = batch_dims[0]\n *Bs, _, _, _ = x.shape\n B = math.prod(Bs)\n x = jnp.reshape(x, (B,) + x.shape[-3:])\n output, = bias_bwd_rule(query_head_num, x, g)\n return output, b\nbatching.primitive_batchers[bias_fwd_p] = bias_fwd_batch_rule\nbatching.primitive_batchers[bias_bwd_p] = bias_bwd_batch_rule\n\ndef dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n\n .. math::\n\n \mathrm{Attention}(Q, K, V)=\mathrm{softmax}(\frac{QK^T}{\sqrt{d_k}})V\n\n If we define :code:`logits` as the output of :math:`QK^T` and the\n :code:`probs` as the output of :math:`softmax`.\n\n Throughout this function, we utilize the following uppercase letters to\n represent the shape of array::\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head\n K = number of key/value heads\n G = number of groups, which equals to N // K\n\n Args:\n query: query array; shape :code:`(BTNH|TNH)`\n key: key array: shape :code:`(BSKH|SKH)`. When `K` equals `N`, multi-headed\n attention (MHA https://arxiv.org/abs/1706.03762) is performed. 
Otherwise,\n grouped query attention (GQA https://arxiv.org/abs/2305.13245) is\n performed if `N` is a multiple of `K`, and multi-query attention (MQA\n https://arxiv.org/abs/1911.02150) is performed if `K == 1` (a special case\n of GQA).\n value: value array, should have the same shape as the `key` array.\n bias: optional, bias array to be added to logits; The shape must be 4D and\n be broadcastable to :code:`(BNTS|NTS)`.\n mask: optional, mask array used to filter out logits. It is a boolean mask\n where `True` indicates the element should take part in attention. For an\n additive mask, users should pass it to `bias`. The shape must be 4D and be\n broadcastable to :code:`(BNTS|NTS)`.\n scale: scale for the logits. If None, the scale will be set to 1 divided by\n the square root of query's head dimension (i.e. H).\n is_causal: If true, causal attention will be applied. Note, some\n implementations like `xla` will generate a mask tensor and apply it to the\n logits to mask out the non-causal parts of the attention matrix, but other\n implementations like `cudnn` will avoid computing the non-causal regions,\n providing speedups.\n query_seq_lengths: `int32` array of sequence lengths for query; shape\n :code:`(B)`\n key_value_seq_lengths: `int32` array of sequence lengths for key and value;\n shape :code:`(B)`\n local_window_size: Window sizes to make self attention to attend to each\n token's local window. If set, this specifies the (left_window_size,\n right_window_size) for each token. E.g., if local_window_size == (3, 2)\n and the sequence is [0, 1, 2, 3, 4, 5, c, 7, 8, 9], token `c` can attend\n to [3, 4, 5, c, 7, 8]. If a single int is given, it will be interpreted as\n a symmetric window (window_size, window_size).\n implementation: A string to control which implementation backend to use.\n Supported strings are `xla`, `cudnn` (cuDNN flash attention). 
It defaults\n to `None`, which will automatically select the best available backend.\n Note, `cudnn` supports only a subset of shapes/dtypes, and an exception\n will be thrown if its not supported.\n\n Returns:\n An array of the attention output with the same shape as :code:`query`.\n """"""\n output_shape = jnp.asarray(query).shape\n def _ensure_4d(t):\n t = jnp.asarray(t)\n dims_to_add = 4 - t.ndim\n if dims_to_add > 0:\n return jnp.expand_dims(t, axis=tuple(range(dims_to_add)))\n return t\n\n query_arr = _ensure_4d(query)\n key_arr = _ensure_4d(key)\n value_arr = _ensure_4d(value)\n bias = _ensure_4d(bias) if bias is not None else None\n mask = _ensure_4d(mask) if mask is not None else None\n if query_seq_lengths is not None:\n query_seq_lengths = jnp.asarray(query_seq_lengths)\n if key_value_seq_lengths is not None:\n key_value_seq_lengths = jnp.asarray(key_value_seq_lengths)\n if isinstance(local_window_size, int):\n local_window_size = (local_window_size, local_window_size)\n\n def _check_shape_and_dtype(t: Array | None, shape: Sequence[int],\n dtype: DType | None, name: str) -> None:\n if t is None:\n return\n if t.ndim != len(shape):\n raise ValueError(f""{name} ndim should be {len(shape)}, but got {t.ndim}"")\n if dtype is not None and t.dtype != dtype:\n raise ValueError(f""{name} dtype should be {dtype}, but got {t.dtype}"")\n for i in range(t.ndim):\n if shape[i] != -1 and t.shape[i] != shape[i]:\n raise ValueError(f""{name} shape should be {shape}: but got {t.shape}"")\n\n B, S, K, H = key_arr.shape\n _check_shape_and_dtype(value_arr, [B, S, K, H], key_arr.dtype, 'value')\n _check_shape_and_dtype(query_arr, [B, -1, -1, H], key_arr.dtype, 'query')\n _check_shape_and_dtype(mask, [-1] * 4, jnp.bool_, 'mask')\n _check_shape_and_dtype(bias, [-1] * 4, None, 'bias')\n _check_shape_and_dtype(query_seq_lengths, [B], jnp.int32,\n 'query_seq_lengths')\n _check_shape_and_dtype(key_value_seq_lengths, [B], jnp.int32,\n 'key_value_seq_lengths')\n if query_arr.shape[-2] % K != 0:\n raise ValueError(f""The number of query heads must be a multiple of ""\n f""key/value heads, but got {query_arr.shape[-2]} vs {K}"")\n\n scale_val = (1.0 / np.sqrt(H)) if scale is None else scale\n\n match implementation:\n case 'xla':\n out = _dot_product_attention_xla(\n query_arr, key_arr, value_arr, bias, mask, is_causal=is_causal,\n scale=scale_val, q_seqlen=query_seq_lengths,\n kv_seqlen=key_value_seq_lengths,\n local_window_size=local_window_size,\n )\n case 'cudnn':\n if bias is not None:\n bias = check_valid_bias_batch(bias, query_arr.shape[-2])\n bias = jnp.asarray(bias)\n use_padding = (\n query_seq_lengths is not None or key_value_seq_lengths is not None\n )\n if use_padding:\n if query_seq_lengths is None:\n T = query_arr.shape[1]\n query_seq_lengths = jnp.full((B,), T, dtype=jnp.int32)\n if key_value_seq_lengths is None:\n key_value_seq_lengths = jnp.full((B,), S, dtype=jnp.int32)\n\n mask_type = MaskType.NO_MASK\n if use_padding and is_causal:\n mask_type = MaskType.PADDING_CAUSAL\n elif is_causal:\n mask_type = MaskType.CAUSAL\n elif use_padding:\n mask_type = MaskType.PADDING\n # CuDNN supports only the left window with an exclusive boundary when\n # causal mask is enabled.\n sliding_window = None\n if local_window_size is not None:\n l_window, r_window = local_window_size\n if r_window == 0 or mask_type == MaskType.CAUSAL:\n sliding_window = l_window + 1\n else:\n raise ValueError(f""cuDNN doesn't support right window: {r_window} ""\n ""when causal mask is not used."")\n\n out = 
cudnn_dot_product_attention(\n query_arr, key_arr, value_arr, bias, mask, query_seq_lengths,\n key_value_seq_lengths, scale=scale_val, mask_type=mask_type,\n sliding_window_length=sliding_window,\n )\n case None:\n # TODO(kaixih@nvidia) Defaults to XLA for now. Will automatically select\n # best backend.\n out = _dot_product_attention_xla(\n query_arr, key_arr, value_arr, bias, mask, is_causal=is_causal,\n scale=scale_val, q_seqlen=query_seq_lengths,\n kv_seqlen=key_value_seq_lengths,\n local_window_size=local_window_size,\n )\n case _:\n raise ValueError(f""Unsupported implementation option: {implementation}"")\n\n return jnp.reshape(out, output_shape)\n\ndef scaled_matmul(\n lhs: Array,\n rhs: Array,\n lhs_scales: Array,\n rhs_scales: Array,\n preferred_element_type: DTypeLike = jnp.float32,\n) -> Array:\n r""""""Scaled matrix multiplication function.\n\n Performs block-scaled matmul of `a` and `b` using `a_scales` and `b_scales`.\n The last dim is the contracting dim, and block size is inferred.\n\n Mathematically, this operation is equivalent to::\n\n a_block_size = a.shape[-1] // a_scales.shape[-1]\n b_block_size = b.shape[-1] // b_scales.shape[-1]\n a_scaled = a * jnp.repeat(a_scales, a_block_size, axis=-1)\n b_scaled = b * jnp.repeat(b_scales, b_block_size, axis=-1)\n jnp.einsum('BMK,BNK->BMN', a_scaled, b_scaled)\n\n Args:\n lhs (Array): Operand a, shape (B, M, K).\n rhs (Array): Operand b, shape (B, N, K).\n lhs_scales (Array): Shape (B, M, K_a), where `K % K_a == 0`.\n rhs_scales (Array): Shape (B, N, K_b), where `K % K_b == 0`.\n preferred_element_type (DTypeLike, optional): Defaults to `jnp.float32`.\n\n Returns:\n Array of shape (B, M, N).\n\n Notes:\n - We currently do not support user-defined `precision` for customizing the\n compute data type. 
It is fixed to `jnp.float32`.\n - Block size is inferred as `K // K_a` for `a` and `K // K_b` for `b`.\n - To use cuDNN with Nvidia Blackwell GPUs, inputs must match::\n\n # mxfp8\n a, b: jnp.float8_e4m3fn | jnp.float8_e5m2\n a_scales, b_scales: jnp.float8_e8m0fnu\n block_size: 32\n # nvfp4\n a, b: jnp.float4_e2m1fn\n a_scales, b_scales: jnp.float8_e4m3fn\n block_size: 16\n\n Examples:\n\n Basic case:\n\n >>> a = jnp.array([1, 2, 3]).reshape((1, 1, 3))\n >>> b = jnp.array([4, 5, 6]).reshape((1, 1, 3))\n >>> a_scales = jnp.array([0.5]).reshape((1, 1, 1))\n >>> b_scales = jnp.array([0.5]).reshape((1, 1, 1))\n >>> scaled_matmul(a, b, a_scales, b_scales) # doctest: +SKIP\n Array([[[8.]]], dtype=float32)\n\n Using fused cuDNN call on Blackwell GPUs:\n\n >>> dtype = jnp.float8_e4m3fn\n >>> a = jax.random.normal(jax.random.PRNGKey(1), (3, 128, 64), dtype=dtype)\n >>> b = jax.random.normal(jax.random.PRNGKey(2), (3, 128, 64), dtype=dtype)\n >>> a_scales = jnp.ones((3, 128, 4), dtype=jnp.float8_e8m0fnu)\n >>> b_scales = jnp.ones((3, 128, 4), dtype=jnp.float8_e8m0fnu)\n >>> scaled_matmul(a, b, a_scales, b_scales) # doctest: +SKIP\n """"""\n a, b, a_scales, b_scales = lhs, rhs, lhs_scales, rhs_scales\n if not all(x.ndim == 3 for x in (a, b, a_scales, b_scales)):\n raise ValueError(\n ""scaled_matmul requires all inputs to be 3-dimensional arrays""\n )\n\n B_a, M_a, K_a = a.shape\n B_b, N_b, K_b = b.shape\n if K_a != K_b or B_a != B_b:\n raise ValueError(\n ""scaled_matmul requires inputs a and b to have matching batch (B) ""\n f""and contract (K) dimensions, but got shapes {a.shape} and ""\n f""{b.shape}""\n )\n\n B_as, M_as, K_as = a_scales.shape\n B_bs, N_bs, K_bs = b_scales.shape\n if K_as != K_bs or B_as != B_bs:\n raise ValueError(\n ""scaled_matmul requires scales to have matching batch (B) and ""\n f""contract (K) dimensions, but got shapes {a_scales.shape} and ""\n f""{b_scales.shape}""\n )\n\n if M_as != M_a or N_bs != N_b:\n raise ValueError(\n ""scaled_matmul requires scales to match non-contract dimensions of ""\n f""inputs, but got shapes a: {a.shape}, b: {b.shape}, a_scales: ""\n f""{a_scales.shape}, b_scales: {b_scales.shape}""\n )\n\n preferred_element_type = dtypes.canonicalize_dtype(\n np.dtype(preferred_element_type)\n )\n out = cudnn_scaled_matmul(\n a,\n b,\n a_scales,\n b_scales,\n preferred_element_type=preferred_element_type,\n )\n return out\n\ndef get_scaled_dot_general_config(mode: Literal['nvfp4', 'mxfp8'],\n global_scale: Array | None = None):\n r""""""Get quantization configs for scaled_dot_general.\n\n Create quantization configs for the `jax.nn.scaled_dot_general`.\n\n See Also:\n - :func:`jax.nn.scaled_dot_general`: Scaled dot general function.\n """"""\n\n if mode == 'nvfp4':\n one = jnp.ones((1,), dtype=jnp.float32)\n return BlockScaleConfig(\n mode='nvfp4',\n block_size=16,\n data_type=jnp.float4_e2m1fn,\n scale_type=jnp.float8_e4m3fn,\n global_scale=one if global_scale is None else global_scale,\n infer_only=False\n )\n elif mode == 'mxfp8':\n return BlockScaleConfig(\n mode='mxfp8',\n block_size=32,\n data_type=jnp.float8_e4m3fn,\n scale_type=jnp.float8_e8m0fnu,\n global_scale=None,\n infer_only=False\n )\n else:\n raise ValueError(f""Unsupported mode: {mode}"")\n\ndef scaled_dot_general(\n lhs, rhs,\n dimension_numbers,\n preferred_element_type=jnp.float32,\n configs: List[BlockScaleConfig] | None = None,\n implementation: Literal['cudnn'] | None = None,\n ):\n r""""""Scaled dot general operation.\n\n Performs a generalized dot product with block-scaled 
quantization on the\n lhs and rhs inputs. This operation extends `lax.dot_general` to support\n user-defined scaling configurations.\n\n Essentially, the operation follows::\n\n a, a_scales = quantize(lhs, configs[0])\n b, b_scales = quantize(rhs, configs[1])\n c = jax.nn.scaled_matmul(a, b, a_scales, b_scales)\n\n Args:\n lhs (ArrayLike): Input array.\n rhs (ArrayLike): Input array.\n dimension_numbers (DotDimensionNumbers): A tuple of two tuples specifying\n the contraction and batch dimensions:\n `((lhs_contracting_dims, rhs_contracting_dims), (lhs_batch_dims, rhs_batch_dims))`.\n preferred_element_type (DTypeLike, optional): Output data type of the dot\n product. Defaults to `jnp.float32`. Other valid types include\n `jnp.bfloat16` and `jnp.float16`.\n configs (list of BlockScaleConfig, optional): Scaling configurations for\n lhs, rhs, and gradients. Users can obtain valid configurations via\n `jax.nn.get_scaled_dot_general_config`. Currently, `nvfp4` and `mxfp8`\n are supported. If `None`, falls back to `lax.dot_general`.\n implementation: str\n (Deprecated) Backend selector, now ignored. The system chooses the backend\n automatically. Scheduled for removal in future releases.\n\n Returns:\n Array: The resulting tensor, with batch dimensions first, followed by\n non-contracting/non-batch dimensions of lhs, and then those of rhs.\n\n See Also:\n - :func:`jax.nn.scaled_matmul`: Scaled matmul function.\n - :func:`jax.lax.dot_general`: General dot product operator.\n\n Notes:\n - Unlike `nn.scaled_matmul`, which assumes quantized low-precision\n inputs with explicit scaling factors, this operator takes high-precision\n inputs, applies quantization internally, and handles the backward pass.\n\n Examples:\n\n Creating config for mxfp8:\n\n >>> configs = [jax.nn.get_scaled_dot_general_config('mxfp8')] * 3\n\n Creating config for nvfp4:\n\n >>> global_scale = jnp.array([0.5], jnp.float32)\n >>> configs = [jax.nn.get_scaled_dot_general_config('nvfp4', global_scale)] * 3\n\n Using scaled_dot_general with the configs:\n\n >>> import functools\n >>> scaled_dot_general_fn = functools.partial(jax.nn.scaled_dot_general, configs=configs)\n >>> lhs = jax.random.normal(jax.random.PRNGKey(1), (3, 128, 64))\n >>> rhs = jax.random.normal(jax.random.PRNGKey(2), (3, 128, 64))\n >>> out = scaled_dot_general_fn(lhs, rhs, (((2,), (2,)), ((0,), (0,)))) # doctest: +SKIP\n """"""\n if implementation is not None:\n warnings.warn(""Backend selector, now ignored. The system chooses the ""\n ""backend automatically."", DeprecationWarning)\n\n if configs is None:\n return lax.dot_general(lhs, rhs, dimension_numbers,\n preferred_element_type=preferred_element_type)\n\n out = cudnn_scaled_dot_general(\n lhs, rhs, dimension_numbers,\n preferred_element_type=preferred_element_type,\n configs=configs\n )\n\n return out\n",python,tab +4901,15263132,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30613,0,"",python,selection_command +4902,15268613,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30704,0,"",python,selection_command +4903,15269038,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",32291,0,"",python,selection_command +4904,15270891,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +4905,15276272,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",32307,0,"",python,selection_command +4906,15276421,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",32616,0,"",python,selection_command +4907,15276950,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34566,0,"",python,selection_command +4908,15279342,"TERMINAL",0,0,"2025-07-27 13:33:18.758219: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4909,15280465,"TERMINAL",0,0,"2025-07-27 13:33:20.266871: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4910,15284317,"TERMINAL",0,0,"2025-07-27 13:33:23.779701: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4911,15284762,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 231, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File 
""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 134, in __call__\r\n z = self.temporal_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 333, in attention_fn\r\n output_4d = jax.nn.dot_product_attention(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py"", line 1204, in dot_product_attention\r\n out = cudnn_dot_product_attention(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py"", line 1981, in dot_product_attention\r\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py"", line 332, in check_layout\r\n raise ValueError(\r\nValueError: Bias must have same seq length as QKV, got 1 and 1\r\n",,terminal_output +4912,15285746,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34584,0,"",python,selection_command +4913,15285798,"TERMINAL",0,0,"srun: error: hai007: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +4914,15286253,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34593,0,"",python,selection_command +4915,15286618,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",35801,0,"",python,selection_command +4916,15293099,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +4917,15293099,"/fast/home/franz.srambical/jafar/utils/nn.py",4090,0,"",python,selection_command +4918,15293184,"/fast/home/franz.srambical/jafar/utils/nn.py",10515,645," bias_4d = _pad(_rearrange(bias)) if bias is not None else None\n",python,content +4919,15295955,"/fast/home/franz.srambical/jafar/utils/nn.py",10657,0,"",python,selection_command +4920,15298877,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"# Copyright 2019 The JAX Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n""""""Shared neural network activations and other functions.""""""\n\nfrom __future__ import annotations\n\nfrom collections.abc import Sequence\nfrom functools import partial\nimport operator\nimport math\nimport numpy as np\nfrom typing import Any, List, Literal\nimport warnings\n\nimport jax\nimport jax.numpy as jnp\nfrom jax import custom_jvp\nfrom jax import lax\nfrom jax._src import config\nfrom jax._src import core\nfrom jax._src import deprecations\nfrom jax._src import dtypes\nfrom jax._src import util\nfrom jax._src.core import AxisName\nfrom jax._src.sharding_impls import NamedSharding, PartitionSpec as 
P\nfrom jax._src.cudnn.fused_attention_stablehlo import (\n dot_product_attention as cudnn_dot_product_attention, MaskType)\nfrom jax._src.cudnn.scaled_matmul_stablehlo import (\n scaled_matmul_wrapper as cudnn_scaled_matmul,\n scaled_dot_general_wrapper as cudnn_scaled_dot_general,\n BlockScaleConfig)\nfrom jax._src.interpreters import batching\nfrom jax._src.interpreters import mlir\nfrom jax._src.numpy import util as numpy_util\nfrom jax._src.typing import Array, ArrayLike, DType, DTypeLike\nfrom jax._src.ops.special import logsumexp as _logsumexp\n\n\n# activations\n@jax.jit\ndef identity(x: ArrayLike) -> Array:\n r""""""Identity activation function.\n\n Returns the argument unmodified.\n\n Args:\n x : input array\n\n Returns:\n The argument `x` unmodified.\n\n Examples:\n >>> jax.nn.identity(jax.numpy.array([-2., -1., -0.5, 0, 0.5, 1., 2.]))\n Array([-2. , -1. , -0.5, 0. , 0.5, 1. , 2. ], dtype=float32)\n\n """"""\n numpy_util.check_arraylike(""identity"", x)\n return jnp.asarray(x)\n\n@custom_jvp\n@jax.jit\ndef relu(x: ArrayLike) -> Array:\n r""""""Rectified linear unit activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{relu}(x) = \max(x, 0)\n\n except under differentiation, we take:\n\n .. math::\n \nabla \mathrm{relu}(0) = 0\n\n For more information see\n `Numerical influence of ReLU’(0) on backpropagation\n `_.\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n Examples:\n >>> jax.nn.relu(jax.numpy.array([-2., -1., -0.5, 0, 0.5, 1., 2.]))\n Array([0. , 0. , 0. , 0. , 0.5, 1. , 2. ], dtype=float32)\n\n See also:\n :func:`relu6`\n\n """"""\n return jnp.maximum(x, 0)\n# For behavior at 0, see https://dl.acm.org/doi/10.5555/3540261.3540297\nrelu.defjvps(lambda g, ans, x: lax.select(x > 0, g, lax.full_like(g, 0)))\n\n@jax.jit\ndef squareplus(x: ArrayLike, b: ArrayLike = 4) -> Array:\n r""""""Squareplus activation function.\n\n Computes the element-wise function\n\n .. math::\n \mathrm{squareplus}(x) = \frac{x + \sqrt{x^2 + b}}{2}\n\n as described in https://arxiv.org/abs/2112.11687.\n\n Args:\n x : input array\n b : smoothness parameter\n """"""\n numpy_util.check_arraylike(""squareplus"", x)\n numpy_util.check_arraylike(""squareplus"", b)\n x = jnp.asarray(x)\n b = jnp.asarray(b)\n y = x + jnp.sqrt(jnp.square(x) + b)\n return y / 2\n\n@jax.jit\ndef softplus(x: ArrayLike) -> Array:\n r""""""Softplus activation function.\n\n Computes the element-wise function\n\n .. math::\n \mathrm{softplus}(x) = \log(1 + e^x)\n\n Args:\n x : input array\n """"""\n return jnp.logaddexp(x, 0)\n\n@jax.jit\ndef sparse_plus(x: ArrayLike) -> Array:\n r""""""Sparse plus function.\n\n Computes the function:\n\n .. math::\n\n \mathrm{sparse\_plus}(x) = \begin{cases}\n 0, & x \leq -1\\\n \frac{1}{4}(x+1)^2, & -1 < x < 1 \\\n x, & 1 \leq x\n \end{cases}\n\n This is the twin function of the softplus activation ensuring a zero output\n for inputs less than -1 and a linear output for inputs greater than 1,\n while remaining smooth, convex, monotonic by an adequate definition between\n -1 and 1.\n\n Args:\n x: input (float)\n """"""\n numpy_util.check_arraylike(""sparse_plus"", x)\n x = jnp.asarray(x)\n return jnp.where(x <= -1.0, 0.0, jnp.where(x >= 1.0, x, (x + 1.0)**2/4))\n\n@jax.jit\ndef soft_sign(x: ArrayLike) -> Array:\n r""""""Soft-sign activation function.\n\n Computes the element-wise function\n\n .. 
math::\n \mathrm{soft\_sign}(x) = \frac{x}{|x| + 1}\n\n Args:\n x : input array\n """"""\n numpy_util.check_arraylike(""soft_sign"", x)\n x_arr = jnp.asarray(x)\n return x_arr / (jnp.abs(x_arr) + 1)\n\n@partial(jax.jit, inline=True)\ndef sigmoid(x: ArrayLike) -> Array:\n r""""""Sigmoid activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{sigmoid}(x) = \frac{1}{1 + e^{-x}}\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`log_sigmoid`\n\n """"""\n return lax.logistic(x)\n\n@jax.jit\ndef sparse_sigmoid(x: ArrayLike) -> Array:\n r""""""Sparse sigmoid activation function.\n\n Computes the function:\n\n .. math::\n\n \mathrm{sparse\_sigmoid}(x) = \begin{cases}\n 0, & x \leq -1\\\n \frac{1}{2}(x+1), & -1 < x < 1 \\\n 1, & 1 \leq x\n \end{cases}\n\n This is the twin function of the ``sigmoid`` activation ensuring a zero output\n for inputs less than -1, a 1 output for inputs greater than 1, and a linear\n output for inputs between -1 and 1. It is the derivative of ``sparse_plus``.\n\n For more information, see `Learning with Fenchel-Young Losses (section 6.2)\n `_.\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`sigmoid`\n """"""\n return 0.5 * jnp.clip(x + 1.0, 0.0, 2.0)\n\n@jax.jit\ndef silu(x: ArrayLike) -> Array:\n r""""""SiLU (aka swish) activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{silu}(x) = x \cdot \mathrm{sigmoid}(x) = \frac{x}{1 + e^{-x}}\n\n :func:`swish` and :func:`silu` are both aliases for the same function.\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`sigmoid`\n """"""\n numpy_util.check_arraylike(""silu"", x)\n x_arr = jnp.asarray(x)\n return x_arr * sigmoid(x_arr)\n\nswish = silu\n\n@jax.jit\ndef mish(x: ArrayLike) -> Array:\n r""""""Mish activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{mish}(x) = x \cdot \mathrm{tanh}(\mathrm{softplus}(x))\n\n For more information, see\n `Mish: A Self Regularized Non-Monotonic Activation Function\n `_.\n\n Args:\n x : input array\n\n Returns:\n An array.\n """"""\n numpy_util.check_arraylike(""mish"", x)\n x_arr = jnp.asarray(x)\n return x_arr * jnp.tanh(softplus(x_arr))\n\n@jax.jit\ndef log_sigmoid(x: ArrayLike) -> Array:\n r""""""Log-sigmoid activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{log\_sigmoid}(x) = \log(\mathrm{sigmoid}(x)) = -\log(1 + e^{-x})\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`sigmoid`\n """"""\n numpy_util.check_arraylike(""log_sigmoid"", x)\n x_arr = jnp.asarray(x)\n return -softplus(-x_arr)\n\n@jax.jit\ndef elu(x: ArrayLike, alpha: ArrayLike = 1.0) -> Array:\n r""""""Exponential linear unit activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{elu}(x) = \begin{cases}\n x, & x > 0\\\n \alpha \left(\exp(x) - 1\right), & x \le 0\n \end{cases}\n\n Args:\n x : input array\n alpha : scalar or array of alpha values (default: 1.0)\n\n Returns:\n An array.\n\n See also:\n :func:`selu`\n """"""\n numpy_util.check_arraylike(""elu"", x)\n x_arr = jnp.asarray(x)\n return jnp.where(x_arr > 0,\n x_arr,\n alpha * jnp.expm1(jnp.where(x_arr > 0, 0., x_arr)))\n\n@jax.jit\ndef leaky_relu(x: ArrayLike, negative_slope: ArrayLike = 1e-2) -> Array:\n r""""""Leaky rectified linear unit activation function.\n\n Computes the element-wise function:\n\n .. 
math::\n \mathrm{leaky\_relu}(x) = \begin{cases}\n x, & x \ge 0\\\n \alpha x, & x < 0\n \end{cases}\n\n where :math:`\alpha` = :code:`negative_slope`.\n\n Args:\n x : input array\n negative_slope : array or scalar specifying the negative slope (default: 0.01)\n\n Returns:\n An array.\n\n See also:\n :func:`relu`\n """"""\n numpy_util.check_arraylike(""leaky_relu"", x)\n x_arr = jnp.asarray(x)\n return jnp.where(x_arr >= 0, x_arr, negative_slope * x_arr)\n\n@jax.jit\ndef hard_tanh(x: ArrayLike) -> Array:\n r""""""Hard :math:`\mathrm{tanh}` activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{hard\_tanh}(x) = \begin{cases}\n -1, & x < -1\\\n x, & -1 \le x \le 1\\\n 1, & 1 < x\n \end{cases}\n\n Args:\n x : input array\n\n Returns:\n An array.\n """"""\n numpy_util.check_arraylike(""hard_tanh"", x)\n x_arr = jnp.asarray(x)\n return jnp.where(x_arr > 1, 1, jnp.where(x_arr < -1, -1, x_arr))\n\n@jax.jit\ndef celu(x: ArrayLike, alpha: ArrayLike = 1.0) -> Array:\n r""""""Continuously-differentiable exponential linear unit activation.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{celu}(x) = \begin{cases}\n x, & x > 0\\\n \alpha \left(\exp(\frac{x}{\alpha}) - 1\right), & x \le 0\n \end{cases}\n\n For more information, see\n `Continuously Differentiable Exponential Linear Units\n `_.\n\n Args:\n x : input array\n alpha : array or scalar (default: 1.0)\n\n Returns:\n An array.\n """"""\n return jnp.maximum(x, 0.0) + alpha * jnp.expm1(jnp.minimum(x, 0.0) / alpha)\n\n@jax.jit\ndef selu(x: ArrayLike) -> Array:\n r""""""Scaled exponential linear unit activation.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{selu}(x) = \lambda \begin{cases}\n x, & x > 0\\\n \alpha e^x - \alpha, & x \le 0\n \end{cases}\n\n where :math:`\lambda = 1.0507009873554804934193349852946` and\n :math:`\alpha = 1.6732632423543772848170429916717`.\n\n For more information, see\n `Self-Normalizing Neural Networks\n `_.\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`elu`\n """"""\n alpha = 1.6732632423543772848170429916717\n scale = 1.0507009873554804934193349852946\n return scale * elu(x, alpha)\n\n# TODO(phawkins): this jit was found to change numerics in a test. Debug this.\n# @partial(jax.jit, static_argnames=(""approximate"",))\ndef gelu(x: ArrayLike, approximate: bool = True) -> Array:\n r""""""Gaussian error linear unit activation function.\n\n If ``approximate=False``, computes the element-wise function:\n\n .. math::\n \mathrm{gelu}(x) = \frac{x}{2} \left(\mathrm{erfc} \left(\n \frac{-x}{\sqrt{2}} \right) \right)\n\n If ``approximate=True``, uses the approximate formulation of GELU:\n\n .. 
math::\n \mathrm{gelu}(x) = \frac{x}{2} \left(1 + \mathrm{tanh} \left(\n \sqrt{\frac{2}{\pi}} \left(x + 0.044715 x^3 \right) \right) \right)\n\n For more information, see `Gaussian Error Linear Units (GELUs)\n `_, section 2.\n\n Args:\n x: input array\n approximate: whether to use the approximate or exact formulation.\n """"""\n [x_arr] = numpy_util.promote_args_inexact(""gelu"", x)\n\n if approximate:\n sqrt_2_over_pi = np.sqrt(2 / np.pi).astype(x_arr.dtype)\n cdf = 0.5 * (1.0 + jnp.tanh(sqrt_2_over_pi * (x_arr + 0.044715 * (x_arr ** 3))))\n return x_arr * cdf\n else:\n sqrt_half = np.sqrt(0.5).astype(x_arr.dtype)\n return jnp.array(\n 0.5 * x_arr * (lax.erfc(-x_arr * sqrt_half)), dtype=x_arr.dtype\n )\n\n@partial(jax.jit, static_argnames=(""axis"",))\ndef glu(x: ArrayLike, axis: int = -1) -> Array:\n r""""""Gated linear unit activation function.\n\n Computes the function:\n\n .. math::\n \mathrm{glu}(x) = x\left[\ldots, 0:\frac{n}{2}, \ldots\right] \cdot\n \mathrm{sigmoid} \left( x\left[\ldots, \frac{n}{2}:n, \ldots\right]\n \right)\n\n where the array is split into two along ``axis``. The size of the ``axis``\n dimension must be divisible by two.\n\n Args:\n x : input array\n axis: the axis along which the split should be computed (default: -1)\n\n Returns:\n An array.\n\n See also:\n :func:`sigmoid`\n """"""\n numpy_util.check_arraylike(""glu"", x)\n x_arr = jnp.asarray(x)\n size = x_arr.shape[axis]\n assert size % 2 == 0, ""axis size must be divisible by 2""\n x1, x2 = jnp.split(x_arr, 2, axis)\n return x1 * sigmoid(x2)\n\n# other functions\n\nlogsumexp = _logsumexp\n\n\n@partial(jax.jit, static_argnames=(""axis"",))\ndef log_softmax(x: ArrayLike,\n axis: int | tuple[int, ...] | None = -1,\n where: ArrayLike | None = None) -> Array:\n r""""""Log-Softmax function.\n\n Computes the logarithm of the :code:`softmax` function, which rescales\n elements to the range :math:`[-\infty, 0)`.\n\n .. math ::\n \mathrm{log\_softmax}(x)_i = \log \left( \frac{\exp(x_i)}{\sum_j \exp(x_j)}\n \right)\n\n Args:\n x : input array\n axis: the axis or axes along which the :code:`log_softmax` should be\n computed. Either an integer or a tuple of integers.\n where: Elements to include in the :code:`log_softmax`.\n\n Returns:\n An array.\n\n Note:\n If any input values are ``+inf``, the result will be all ``NaN``: this reflects the\n fact that ``inf / inf`` is not well-defined in the context of floating-point math.\n\n See also:\n :func:`softmax`\n """"""\n numpy_util.check_arraylike(""log_softmax"", x)\n x_arr = jnp.asarray(x)\n x_max = jnp.max(x_arr, axis, where=where, initial=-jnp.inf, keepdims=True)\n x_safe = x_arr if where is None else jnp.where(where, x_arr, -jnp.inf)\n shifted = x_safe - lax.stop_gradient(x_max)\n shifted_logsumexp = jnp.log(\n jnp.sum(jnp.exp(shifted), axis, where=where, keepdims=True))\n result = shifted - shifted_logsumexp\n if where is not None:\n return jnp.where(where, result, -jnp.inf)\n return result\n\n\n# TODO(phawkins): this jit was found to change numerics in a test. Debug this.\n# @partial(jax.jit, static_argnames=(""axis"",))\ndef softmax(x: ArrayLike,\n axis: int | tuple[int, ...] | None = -1,\n where: ArrayLike | None = None) -> Array:\n r""""""Softmax function.\n\n Computes the function which rescales elements to the range :math:`[0, 1]`\n such that the elements along :code:`axis` sum to :math:`1`.\n\n .. math ::\n \mathrm{softmax}(x) = \frac{\exp(x_i)}{\sum_j \exp(x_j)}\n\n Args:\n x : input array\n axis: the axis or axes along which the softmax should be computed. 
The\n softmax output summed across these dimensions should sum to :math:`1`.\n Either an integer or a tuple of integers.\n where: Elements to include in the :code:`softmax`.\n\n Returns:\n An array.\n\n Note:\n If any input values are ``+inf``, the result will be all ``NaN``: this reflects the\n fact that ``inf / inf`` is not well-defined in the context of floating-point math.\n\n See also:\n :func:`log_softmax`\n """"""\n if config.softmax_custom_jvp.value:\n # mypy is confused by the `functools.partial` application in the definition\n # of `_softmax` and incorrectly concludes that `_softmax` returns\n # `ReturnValue` -- the unsubstituted type parameter of `custom_jvp`.\n return _softmax(x, axis, where)\n else:\n return _softmax_deprecated(x, axis, where)\n\n# TODO(mattjj): replace softmax with _softmax when deprecation flag is removed\n@partial(jax.custom_jvp, nondiff_argnums=(1,))\ndef _softmax(\n x: ArrayLike,\n axis: int | tuple[int, ...] | None = -1,\n where: ArrayLike | None = None,\n initial: ArrayLike | None = -jnp.inf) -> Array:\n x_max = jnp.max(x, axis, where=where, initial=initial, keepdims=True)\n x_safe = x if where is None else jnp.where(where, x, initial)\n unnormalized = jnp.exp(x_safe - x_max)\n result = unnormalized / jnp.sum(unnormalized, axis, where=where, keepdims=True)\n if where is not None:\n result = jnp.where(where, result, 0)\n return result\n\n@_softmax.defjvp\ndef _softmax_jvp(axis, primals, tangents):\n (x, where, initial), (x_dot, _, _) = primals, tangents\n y = _softmax(x, axis, where, initial)\n return y, y * (x_dot - (y * x_dot).sum(axis, where=where, keepdims=True))\n\ndef _softmax_deprecated(\n x: ArrayLike,\n axis: int | tuple[int, ...] | None = -1,\n where: ArrayLike | None = None,\n initial: ArrayLike | None = -jnp.inf) -> Array:\n x_max = jnp.max(x, axis, where=where, initial=initial, keepdims=True)\n x_safe = x if where is None else jnp.where(where, x, initial)\n unnormalized = jnp.exp(x_safe - lax.stop_gradient(x_max))\n result = unnormalized / jnp.sum(unnormalized, axis, where=where, keepdims=True)\n if where is not None:\n result = jnp.where(where, result, 0)\n return result\n\n\n@partial(jax.jit, static_argnames=(""axis"",))\ndef standardize(x: ArrayLike,\n axis: int | tuple[int, ...] | None = -1,\n mean: ArrayLike | None = None,\n variance: ArrayLike | None = None,\n epsilon: ArrayLike = 1e-5,\n where: ArrayLike | None = None) -> Array:\n r""""""Standardizes input to zero mean and unit variance.\n\n The standardization is given by:\n\n .. math::\n\n x_{std} = \frac{x - \langle x\rangle}{\sqrt{\langle(x - \langle x\rangle)^2\rangle + \epsilon}}\n\n where :math:`\langle x\rangle` indicates the mean of :math:`x`, and :math:`\epsilon` is\n a small correction factor introduced to avoid division by zero.\n\n Args:\n x: input array to be standardized.\n axis: integer or tuple of integers representing the axes along which\n to standardize. Defaults to the last axis (``-1``).\n mean: optionally specify the mean used for standardization. If not specified,\n then ``x.mean(axis, where=where)`` will be used.\n variance: optionally specify the variance used for standardization. 
If not\n specified, then ``x.var(axis, where=where)`` will be used.\n epsilon: correction factor added to variance to avoid division by zero; defaults\n to ``1E-5``.\n where: optional boolean mask specifying which elements to use when computing\n the mean and variance.\n\n Returns:\n An array of the same shape as ``x`` containing the standardized input.\n """"""\n numpy_util.check_arraylike(""standardize"", x)\n numpy_util.check_arraylike_or_none(""standardize"", mean, variance, where)\n if mean is None:\n mean = jnp.mean(x, axis, keepdims=True, where=where)\n if variance is None:\n # this definition is traditionally seen as less accurate than jnp.var's\n # mean((x - mean(x))**2) but may be faster and even, given typical\n # activation distributions and low-precision arithmetic, more accurate\n # when used in neural network normalization layers\n variance = jnp.mean(\n jnp.square(x), axis, keepdims=True, where=where) - jnp.square(mean)\n return jnp.subtract(x, jnp.asarray(mean)) * lax.rsqrt(jnp.asarray(variance) + epsilon)\n\n# TODO(slebedev): Change the type of `x` to `ArrayLike`.\n@partial(jax.jit, static_argnames=(""num_classes"", ""dtype"", ""axis""))\ndef _one_hot(x: Array, num_classes: int, *,\n dtype: Any, axis: int | AxisName) -> Array:\n num_classes = core.concrete_dim_or_error(\n num_classes,\n ""The error arose in jax.nn.one_hot argument `num_classes`."")\n dtype = dtypes.canonicalize_dtype(dtype)\n try:\n output_pos_axis = util.canonicalize_axis(axis, x.ndim + 1) # type: ignore[arg-type]\n except TypeError:\n axis_size = lax.axis_size(axis)\n if num_classes != axis_size:\n raise ValueError(f""Expected num_classes to match the size of axis {axis}, ""\n f""but {num_classes} != {axis_size}"") from None\n axis_idx = lax.axis_index(axis)\n return jnp.asarray(x == axis_idx, dtype=dtype)\n axis = operator.index(axis) # type: ignore[arg-type]\n lhs = lax.expand_dims(x, (axis,))\n rhs_shape = [1] * x.ndim\n rhs_shape.insert(output_pos_axis, num_classes)\n # TODO(yashkatariya): Maybe expose `out_sharding` on `one_hot` too?\n rhs_sharding = NamedSharding(x.aval.sharding.mesh, P(*[None] * len(rhs_shape))) # pytype: disable=attribute-error\n rhs = lax.broadcasted_iota(x.dtype, rhs_shape, output_pos_axis,\n out_sharding=rhs_sharding)\n return (lhs == rhs).astype(dtype)\n\n# TODO(slebedev): Change the type of `x` to `ArrayLike`.\ndef one_hot(x: Any, num_classes: int, *,\n dtype: Any = jnp.float_, axis: int | AxisName = -1) -> Array:\n """"""One-hot encodes the given indices.\n\n Each index in the input ``x`` is encoded as a vector of zeros of length\n ``num_classes`` with the element at ``index`` set to one::\n\n >>> jax.nn.one_hot(jnp.array([0, 1, 2]), 3)\n Array([[1., 0., 0.],\n [0., 1., 0.],\n [0., 0., 1.]], dtype=float32)\n\n Indices outside the range [0, num_classes) will be encoded as zeros::\n\n >>> jax.nn.one_hot(jnp.array([-1, 3]), 3)\n Array([[0., 0., 0.],\n [0., 0., 0.]], dtype=float32)\n\n Args:\n x: A tensor of indices.\n num_classes: Number of classes in the one-hot dimension.\n dtype: optional, a float dtype for the returned values (default :obj:`jnp.float_`).\n axis: the axis or axes along which the function should be\n computed.\n """"""\n num_classes = core.concrete_dim_or_error(\n num_classes,\n ""The error arose in jax.nn.one_hot argument `num_classes`."")\n x_arr = jnp.asarray(x)\n if not jnp.isdtype(x_arr.dtype, ""integral""):\n # Deprecated 2024-12-18\n deprecations.warn(\n 'jax-nn-one-hot-float-input',\n f""jax.nn.one_hot input should be 
integer-typed; got dtype={x_arr.dtype}"",\n stacklevel=1)\n return _one_hot(x_arr, num_classes, dtype=dtype, axis=axis)\n\n\n@jax.custom_jvp\n@jax.jit\ndef relu6(x: ArrayLike) -> Array:\n r""""""Rectified Linear Unit 6 activation function.\n\n Computes the element-wise function\n\n .. math::\n \mathrm{relu6}(x) = \min(\max(x, 0), 6)\n\n except under differentiation, we take:\n\n .. math::\n \nabla \mathrm{relu}(0) = 0\n\n and\n\n .. math::\n \nabla \mathrm{relu}(6) = 0\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`relu`\n """"""\n return jnp.minimum(jnp.maximum(x, 0), 6.)\nrelu6.defjvps(lambda g, ans, x:\n lax.select((x > 0) & (x < 6), g, lax.full_like(g, 0)))\n\n@jax.jit\ndef hard_sigmoid(x: ArrayLike) -> Array:\n r""""""Hard Sigmoid activation function.\n\n Computes the element-wise function\n\n .. math::\n \mathrm{hard\_sigmoid}(x) = \frac{\mathrm{relu6}(x + 3)}{6}\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`relu6`\n """"""\n return relu6(x + 3.) / 6.\n\n@jax.jit\ndef hard_silu(x: ArrayLike) -> Array:\n r""""""Hard SiLU (swish) activation function\n\n Computes the element-wise function\n\n .. math::\n \mathrm{hard\_silu}(x) = x \cdot \mathrm{hard\_sigmoid}(x)\n\n Both :func:`hard_silu` and :func:`hard_swish` are aliases for the same\n function.\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`hard_sigmoid`\n """"""\n numpy_util.check_arraylike(""hard_silu"", x)\n x_arr = jnp.asarray(x)\n return x_arr * hard_sigmoid(x_arr)\n\nhard_swish = hard_silu\n\ndef _get_large_negative(dtype):\n dtype_max = jnp.finfo(dtype).max\n return jnp.asarray(-0.7 * dtype_max, dtype=dtype)\n\ndef _get_causal_mask(T, S):\n mask = jnp.tril(jnp.ones((T, S), dtype=jnp.bool_))\n return mask[None, None, :, :]\n\ndef _get_window_mask(T: int, S: int, local_window_size: tuple[int, int]):\n query_pos = jnp.array(range(T))\n key_pos = jnp.array(range(S))\n left_window, right_window = local_window_size\n left_mask = query_pos[..., None] <= key_pos[..., None, :] + left_window\n right_mask = query_pos[..., None] >= key_pos[..., None, :] - right_window\n return jnp.logical_and(right_mask, left_mask)[None, None, :, :]\n\ndef _get_padding_mask_logits(T, S, q_seqlen, kv_seqlen):\n q_mask = True\n kv_mask = True\n if q_seqlen is not None:\n q_indices = jnp.arange(0, T)[None, :, None]\n q_mask = q_indices < q_seqlen[:, None, None]\n if kv_seqlen is not None:\n kv_indices = jnp.arange(0, S)[None, None, :]\n kv_mask = kv_indices < kv_seqlen[:, None, None]\n mask = jnp.logical_and(q_mask, kv_mask)\n return mask[:, None, :, :]\n\ndef _get_padding_mask_encoded(T, q_seqlen):\n q_indices = jnp.arange(0, T)[None, :]\n mask = q_indices < q_seqlen[:, None]\n return mask[:, :, None, None]\n\ndef _apply_masks(logits, mask, is_causal, q_seqlen, kv_seqlen,\n local_window_size):\n if mask is None and not is_causal and q_seqlen is None and kv_seqlen is None:\n return logits\n\n combined_mask = jnp.ones_like(logits, dtype=jnp.bool_)\n if mask is not None:\n assert mask.dtype == jnp.bool_\n combined_mask = jnp.logical_and(combined_mask, mask)\n\n T, S = logits.shape[2], logits.shape[3]\n\n if is_causal:\n mask = _get_causal_mask(T, S)\n combined_mask = jnp.logical_and(combined_mask, mask)\n\n if local_window_size is not None:\n mask = _get_window_mask(T, S, local_window_size)\n combined_mask = jnp.logical_and(combined_mask, mask)\n\n if q_seqlen is not None or kv_seqlen is not None:\n mask = _get_padding_mask_logits(T, S, q_seqlen, kv_seqlen)\n combined_mask 
= jnp.logical_and(combined_mask, mask)\n\n large_negative_number = _get_large_negative(logits.dtype)\n padded_logits = jnp.where(combined_mask, logits, large_negative_number)\n return padded_logits\n\ndef _dot_product_attention_core(query, key, value, bias, mask, is_causal,\n scale, q_seqlen, kv_seqlen, local_window_size):\n logits_dtype = jnp.promote_types(query.dtype, jnp.float32)\n\n # If the query and logits dtypes are different, then the default precision\n # can use inconsistent types in the backwards pass\n # (see https://github.com/jax-ml/jax/issues/24047).\n if query.dtype == jnp.bfloat16:\n precision = jax.lax.DotAlgorithmPreset.BF16_BF16_F32\n elif query.dtype == jnp.float16:\n precision = jax.lax.DotAlgorithmPreset.F16_F16_F32\n # TODO(sbodenstein): Implement this fix for all dtypes.\n else:\n precision = None\n\n # Explicit precision will fail on platforms that don't support it. For example,\n # some GPUs do not support BF16_BF16_F32, and TPU does not support F16_F16_F32.\n # Use the default precision as a fallback in these cases.\n try:\n logits = jnp.einsum(\n ""BTNH,BSNH->BNTS"",\n query,\n key,\n precision=precision,\n preferred_element_type=logits_dtype,\n )\n except: # pylint: disable=bare-except\n logits = jnp.einsum(\n ""BTNH,BSNH->BNTS"",\n query,\n key,\n precision=None,\n preferred_element_type=logits_dtype,\n )\n\n logits *= jnp.array(scale, dtype=logits.dtype)\n\n if bias is not None:\n logits = (logits + bias).astype(logits.dtype)\n\n padded_logits = _apply_masks(logits, mask, is_causal, q_seqlen, kv_seqlen,\n local_window_size)\n\n # Softmax is always carried out in fp32.\n padded_logits = padded_logits.astype(jnp.float32)\n probs = jax.nn.softmax(padded_logits, axis=-1).astype(key.dtype)\n\n encoded = jnp.einsum('BNTS,BSNH->BTNH', probs, value)\n if q_seqlen is not None and kv_seqlen is not None:\n mask = _get_padding_mask_encoded(encoded.shape[1], q_seqlen)\n encoded *= mask.astype(encoded.dtype)\n return encoded\n\ndef _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when 
the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n\nbias_fwd_p = core.Primitive('bias_fwd')\nbias_fwd_p.multiple_results = False\nbias_bwd_p = core.Primitive('bias_bwd')\nbias_bwd_p.multiple_results = False\n\ndef bias_fwd_impl(a, query_head_num):\n return a\ndef bias_bwd_impl(g, a, query_head_num):\n return g\nbias_fwd_p.def_impl(bias_fwd_impl)\nbias_bwd_p.def_impl(bias_bwd_impl)\n\ndef bias_fwd_abstract_eval(a, query_head_num):\n return core.ShapedArray(a.shape, a.dtype)\ndef bias_bwd_abstract_eval(g, a, query_head_num):\n return core.ShapedArray(g.shape, g.dtype)\nbias_fwd_p.def_abstract_eval(bias_fwd_abstract_eval)\nbias_bwd_p.def_abstract_eval(bias_bwd_abstract_eval)\n\ndef bias_fwd_lowering(ctx, a, query_head_num):\n return [a]\ndef bias_bwd_lowering(ctx, g, a, query_head_num):\n return [g]\nmlir.register_lowering(bias_fwd_p, bias_fwd_lowering)\nmlir.register_lowering(bias_bwd_p, bias_bwd_lowering)\n\ndef bias_fwd_batch_rule(batched_args, batch_dims):\n x, query_head_num = batched_args\n a = batch_dims[0]\n output, _ = bias_fwd_rule(x, query_head_num)\n return output, a\ndef bias_bwd_batch_rule(batched_args, batch_dims):\n g, x, query_head_num = batched_args\n b = batch_dims[0]\n *Bs, _, _, _ = x.shape\n B = math.prod(Bs)\n x = jnp.reshape(x, (B,) + x.shape[-3:])\n output, = bias_bwd_rule(query_head_num, x, g)\n return output, b\nbatching.primitive_batchers[bias_fwd_p] = bias_fwd_batch_rule\nbatching.primitive_batchers[bias_bwd_p] = bias_bwd_batch_rule\n\ndef dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n\n .. math::\n\n \mathrm{Attention}(Q, K, V)=\mathrm{softmax}(\frac{QK^T}{\sqrt{d_k}})V\n\n If we define :code:`logits` as the output of :math:`QK^T` and the\n :code:`probs` as the output of :math:`softmax`.\n\n Throughout this function, we utilize the following uppercase letters to\n represent the shape of array::\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head\n K = number of key/value heads\n G = number of groups, which equals to N // K\n\n Args:\n query: query array; shape :code:`(BTNH|TNH)`\n key: key array: shape :code:`(BSKH|SKH)`. When `K` equals `N`, multi-headed\n attention (MHA https://arxiv.org/abs/1706.03762) is performed. 
Otherwise,\n grouped query attention (GQA https://arxiv.org/abs/2305.13245) is\n performed if `N` is a multiple of `K`, and multi-query attention (MQA\n https://arxiv.org/abs/1911.02150) is performed if `K == 1` (a special case\n of GQA).\n value: value array, should have the same shape as the `key` array.\n bias: optional, bias array to be added to logits; The shape must be 4D and\n be broadcastable to :code:`(BNTS|NTS)`.\n mask: optional, mask array used to filter out logits. It is a boolean mask\n where `True` indicates the element should take part in attention. For an\n additive mask, users should pass it to `bias`. The shape must be 4D and be\n broadcastable to :code:`(BNTS|NTS)`.\n scale: scale for the logits. If None, the scale will be set to 1 divided by\n the square root of query's head dimension (i.e. H).\n is_causal: If true, causal attention will be applied. Note, some\n implementations like `xla` will generate a mask tensor and apply it to the\n logits to mask out the non-causal parts of the attention matrix, but other\n implementations like `cudnn` will avoid computing the non-causal regions,\n providing speedups.\n query_seq_lengths: `int32` array of sequence lengths for query; shape\n :code:`(B)`\n key_value_seq_lengths: `int32` array of sequence lengths for key and value;\n shape :code:`(B)`\n local_window_size: Window sizes to make self attention to attend to each\n token's local window. If set, this specifies the (left_window_size,\n right_window_size) for each token. E.g., if local_window_size == (3, 2)\n and the sequence is [0, 1, 2, 3, 4, 5, c, 7, 8, 9], token `c` can attend\n to [3, 4, 5, c, 7, 8]. If a single int is given, it will be interpreted as\n a symmetric window (window_size, window_size).\n implementation: A string to control which implementation backend to use.\n Supported strings are `xla`, `cudnn` (cuDNN flash attention). 
It defaults\n to `None`, which will automatically select the best available backend.\n Note, `cudnn` supports only a subset of shapes/dtypes, and an exception\n will be thrown if its not supported.\n\n Returns:\n An array of the attention output with the same shape as :code:`query`.\n """"""\n output_shape = jnp.asarray(query).shape\n def _ensure_4d(t):\n t = jnp.asarray(t)\n dims_to_add = 4 - t.ndim\n if dims_to_add > 0:\n return jnp.expand_dims(t, axis=tuple(range(dims_to_add)))\n return t\n\n query_arr = _ensure_4d(query)\n key_arr = _ensure_4d(key)\n value_arr = _ensure_4d(value)\n bias = _ensure_4d(bias) if bias is not None else None\n mask = _ensure_4d(mask) if mask is not None else None\n if query_seq_lengths is not None:\n query_seq_lengths = jnp.asarray(query_seq_lengths)\n if key_value_seq_lengths is not None:\n key_value_seq_lengths = jnp.asarray(key_value_seq_lengths)\n if isinstance(local_window_size, int):\n local_window_size = (local_window_size, local_window_size)\n\n def _check_shape_and_dtype(t: Array | None, shape: Sequence[int],\n dtype: DType | None, name: str) -> None:\n if t is None:\n return\n if t.ndim != len(shape):\n raise ValueError(f""{name} ndim should be {len(shape)}, but got {t.ndim}"")\n if dtype is not None and t.dtype != dtype:\n raise ValueError(f""{name} dtype should be {dtype}, but got {t.dtype}"")\n for i in range(t.ndim):\n if shape[i] != -1 and t.shape[i] != shape[i]:\n raise ValueError(f""{name} shape should be {shape}: but got {t.shape}"")\n\n B, S, K, H = key_arr.shape\n _check_shape_and_dtype(value_arr, [B, S, K, H], key_arr.dtype, 'value')\n _check_shape_and_dtype(query_arr, [B, -1, -1, H], key_arr.dtype, 'query')\n _check_shape_and_dtype(mask, [-1] * 4, jnp.bool_, 'mask')\n _check_shape_and_dtype(bias, [-1] * 4, None, 'bias')\n _check_shape_and_dtype(query_seq_lengths, [B], jnp.int32,\n 'query_seq_lengths')\n _check_shape_and_dtype(key_value_seq_lengths, [B], jnp.int32,\n 'key_value_seq_lengths')\n if query_arr.shape[-2] % K != 0:\n raise ValueError(f""The number of query heads must be a multiple of ""\n f""key/value heads, but got {query_arr.shape[-2]} vs {K}"")\n\n scale_val = (1.0 / np.sqrt(H)) if scale is None else scale\n\n match implementation:\n case 'xla':\n out = _dot_product_attention_xla(\n query_arr, key_arr, value_arr, bias, mask, is_causal=is_causal,\n scale=scale_val, q_seqlen=query_seq_lengths,\n kv_seqlen=key_value_seq_lengths,\n local_window_size=local_window_size,\n )\n case 'cudnn':\n if bias is not None:\n bias = check_valid_bias_batch(bias, query_arr.shape[-2])\n bias = jnp.asarray(bias)\n use_padding = (\n query_seq_lengths is not None or key_value_seq_lengths is not None\n )\n if use_padding:\n if query_seq_lengths is None:\n T = query_arr.shape[1]\n query_seq_lengths = jnp.full((B,), T, dtype=jnp.int32)\n if key_value_seq_lengths is None:\n key_value_seq_lengths = jnp.full((B,), S, dtype=jnp.int32)\n\n mask_type = MaskType.NO_MASK\n if use_padding and is_causal:\n mask_type = MaskType.PADDING_CAUSAL\n elif is_causal:\n mask_type = MaskType.CAUSAL\n elif use_padding:\n mask_type = MaskType.PADDING\n # CuDNN supports only the left window with an exclusive boundary when\n # causal mask is enabled.\n sliding_window = None\n if local_window_size is not None:\n l_window, r_window = local_window_size\n if r_window == 0 or mask_type == MaskType.CAUSAL:\n sliding_window = l_window + 1\n else:\n raise ValueError(f""cuDNN doesn't support right window: {r_window} ""\n ""when causal mask is not used."")\n\n out = 
cudnn_dot_product_attention(\n query_arr, key_arr, value_arr, bias, mask, query_seq_lengths,\n key_value_seq_lengths, scale=scale_val, mask_type=mask_type,\n sliding_window_length=sliding_window,\n )\n case None:\n # TODO(kaixih@nvidia) Defaults to XLA for now. Will automatically select\n # best backend.\n out = _dot_product_attention_xla(\n query_arr, key_arr, value_arr, bias, mask, is_causal=is_causal,\n scale=scale_val, q_seqlen=query_seq_lengths,\n kv_seqlen=key_value_seq_lengths,\n local_window_size=local_window_size,\n )\n case _:\n raise ValueError(f""Unsupported implementation option: {implementation}"")\n\n return jnp.reshape(out, output_shape)\n\ndef scaled_matmul(\n lhs: Array,\n rhs: Array,\n lhs_scales: Array,\n rhs_scales: Array,\n preferred_element_type: DTypeLike = jnp.float32,\n) -> Array:\n r""""""Scaled matrix multiplication function.\n\n Performs block-scaled matmul of `a` and `b` using `a_scales` and `b_scales`.\n The last dim is the contracting dim, and block size is inferred.\n\n Mathematically, this operation is equivalent to::\n\n a_block_size = a.shape[-1] // a_scales.shape[-1]\n b_block_size = b.shape[-1] // b_scales.shape[-1]\n a_scaled = a * jnp.repeat(a_scales, a_block_size, axis=-1)\n b_scaled = b * jnp.repeat(b_scales, b_block_size, axis=-1)\n jnp.einsum('BMK,BNK->BMN', a_scaled, b_scaled)\n\n Args:\n lhs (Array): Operand a, shape (B, M, K).\n rhs (Array): Operand b, shape (B, N, K).\n lhs_scales (Array): Shape (B, M, K_a), where `K % K_a == 0`.\n rhs_scales (Array): Shape (B, N, K_b), where `K % K_b == 0`.\n preferred_element_type (DTypeLike, optional): Defaults to `jnp.float32`.\n\n Returns:\n Array of shape (B, M, N).\n\n Notes:\n - We currently do not support user-defined `precision` for customizing the\n compute data type. 
It is fixed to `jnp.float32`.\n - Block size is inferred as `K // K_a` for `a` and `K // K_b` for `b`.\n - To use cuDNN with Nvidia Blackwell GPUs, inputs must match::\n\n # mxfp8\n a, b: jnp.float8_e4m3fn | jnp.float8_e5m2\n a_scales, b_scales: jnp.float8_e8m0fnu\n block_size: 32\n # nvfp4\n a, b: jnp.float4_e2m1fn\n a_scales, b_scales: jnp.float8_e4m3fn\n block_size: 16\n\n Examples:\n\n Basic case:\n\n >>> a = jnp.array([1, 2, 3]).reshape((1, 1, 3))\n >>> b = jnp.array([4, 5, 6]).reshape((1, 1, 3))\n >>> a_scales = jnp.array([0.5]).reshape((1, 1, 1))\n >>> b_scales = jnp.array([0.5]).reshape((1, 1, 1))\n >>> scaled_matmul(a, b, a_scales, b_scales) # doctest: +SKIP\n Array([[[8.]]], dtype=float32)\n\n Using fused cuDNN call on Blackwell GPUs:\n\n >>> dtype = jnp.float8_e4m3fn\n >>> a = jax.random.normal(jax.random.PRNGKey(1), (3, 128, 64), dtype=dtype)\n >>> b = jax.random.normal(jax.random.PRNGKey(2), (3, 128, 64), dtype=dtype)\n >>> a_scales = jnp.ones((3, 128, 4), dtype=jnp.float8_e8m0fnu)\n >>> b_scales = jnp.ones((3, 128, 4), dtype=jnp.float8_e8m0fnu)\n >>> scaled_matmul(a, b, a_scales, b_scales) # doctest: +SKIP\n """"""\n a, b, a_scales, b_scales = lhs, rhs, lhs_scales, rhs_scales\n if not all(x.ndim == 3 for x in (a, b, a_scales, b_scales)):\n raise ValueError(\n ""scaled_matmul requires all inputs to be 3-dimensional arrays""\n )\n\n B_a, M_a, K_a = a.shape\n B_b, N_b, K_b = b.shape\n if K_a != K_b or B_a != B_b:\n raise ValueError(\n ""scaled_matmul requires inputs a and b to have matching batch (B) ""\n f""and contract (K) dimensions, but got shapes {a.shape} and ""\n f""{b.shape}""\n )\n\n B_as, M_as, K_as = a_scales.shape\n B_bs, N_bs, K_bs = b_scales.shape\n if K_as != K_bs or B_as != B_bs:\n raise ValueError(\n ""scaled_matmul requires scales to have matching batch (B) and ""\n f""contract (K) dimensions, but got shapes {a_scales.shape} and ""\n f""{b_scales.shape}""\n )\n\n if M_as != M_a or N_bs != N_b:\n raise ValueError(\n ""scaled_matmul requires scales to match non-contract dimensions of ""\n f""inputs, but got shapes a: {a.shape}, b: {b.shape}, a_scales: ""\n f""{a_scales.shape}, b_scales: {b_scales.shape}""\n )\n\n preferred_element_type = dtypes.canonicalize_dtype(\n np.dtype(preferred_element_type)\n )\n out = cudnn_scaled_matmul(\n a,\n b,\n a_scales,\n b_scales,\n preferred_element_type=preferred_element_type,\n )\n return out\n\ndef get_scaled_dot_general_config(mode: Literal['nvfp4', 'mxfp8'],\n global_scale: Array | None = None):\n r""""""Get quantization configs for scaled_dot_general.\n\n Create quantization configs for the `jax.nn.scaled_dot_general`.\n\n See Also:\n - :func:`jax.nn.scaled_dot_general`: Scaled dot general function.\n """"""\n\n if mode == 'nvfp4':\n one = jnp.ones((1,), dtype=jnp.float32)\n return BlockScaleConfig(\n mode='nvfp4',\n block_size=16,\n data_type=jnp.float4_e2m1fn,\n scale_type=jnp.float8_e4m3fn,\n global_scale=one if global_scale is None else global_scale,\n infer_only=False\n )\n elif mode == 'mxfp8':\n return BlockScaleConfig(\n mode='mxfp8',\n block_size=32,\n data_type=jnp.float8_e4m3fn,\n scale_type=jnp.float8_e8m0fnu,\n global_scale=None,\n infer_only=False\n )\n else:\n raise ValueError(f""Unsupported mode: {mode}"")\n\ndef scaled_dot_general(\n lhs, rhs,\n dimension_numbers,\n preferred_element_type=jnp.float32,\n configs: List[BlockScaleConfig] | None = None,\n implementation: Literal['cudnn'] | None = None,\n ):\n r""""""Scaled dot general operation.\n\n Performs a generalized dot product with block-scaled 
quantization on the\n lhs and rhs inputs. This operation extends `lax.dot_general` to support\n user-defined scaling configurations.\n\n Essentially, the operation follows::\n\n a, a_scales = quantize(lhs, configs[0])\n b, b_scales = quantize(rhs, configs[1])\n c = jax.nn.scaled_matmul(a, b, a_scales, b_scales)\n\n Args:\n lhs (ArrayLike): Input array.\n rhs (ArrayLike): Input array.\n dimension_numbers (DotDimensionNumbers): A tuple of two tuples specifying\n the contraction and batch dimensions:\n `((lhs_contracting_dims, rhs_contracting_dims), (lhs_batch_dims, rhs_batch_dims))`.\n preferred_element_type (DTypeLike, optional): Output data type of the dot\n product. Defaults to `jnp.float32`. Other valid types include\n `jnp.bfloat16` and `jnp.float16`.\n configs (list of BlockScaleConfig, optional): Scaling configurations for\n lhs, rhs, and gradients. Users can obtain valid configurations via\n `jax.nn.get_scaled_dot_general_config`. Currently, `nvfp4` and `mxfp8`\n are supported. If `None`, falls back to `lax.dot_general`.\n implementation: str\n (Deprecated) Backend selector, now ignored. The system chooses the backend\n automatically. Scheduled for removal in future releases.\n\n Returns:\n Array: The resulting tensor, with batch dimensions first, followed by\n non-contracting/non-batch dimensions of lhs, and then those of rhs.\n\n See Also:\n - :func:`jax.nn.scaled_matmul`: Scaled matmul function.\n - :func:`jax.lax.dot_general`: General dot product operator.\n\n Notes:\n - Unlike `nn.scaled_matmul`, which assumes quantized low-precision\n inputs with explicit scaling factors, this operator takes high-precision\n inputs, applies quantization internally, and handles the backward pass.\n\n Examples:\n\n Creating config for mxfp8:\n\n >>> configs = [jax.nn.get_scaled_dot_general_config('mxfp8')] * 3\n\n Creating config for nvfp4:\n\n >>> global_scale = jnp.array([0.5], jnp.float32)\n >>> configs = [jax.nn.get_scaled_dot_general_config('nvfp4', global_scale)] * 3\n\n Using scaled_dot_general with the configs:\n\n >>> import functools\n >>> scaled_dot_general_fn = functools.partial(jax.nn.scaled_dot_general, configs=configs)\n >>> lhs = jax.random.normal(jax.random.PRNGKey(1), (3, 128, 64))\n >>> rhs = jax.random.normal(jax.random.PRNGKey(2), (3, 128, 64))\n >>> out = scaled_dot_general_fn(lhs, rhs, (((2,), (2,)), ((0,), (0,)))) # doctest: +SKIP\n """"""\n if implementation is not None:\n warnings.warn(""Backend selector, now ignored. 
The system chooses the ""\n ""backend automatically."", DeprecationWarning)\n\n if configs is None:\n return lax.dot_general(lhs, rhs, dimension_numbers,\n preferred_element_type=preferred_element_type)\n\n out = cudnn_scaled_dot_general(\n lhs, rhs, dimension_numbers,\n preferred_element_type=preferred_element_type,\n configs=configs\n )\n\n return out\n",python,tab +4921,15298877,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37837,0,"",python,selection_command +4922,15532470,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37843,0,"",python,selection_command +4923,15532566,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37847,0,"",python,selection_command +4924,15532964,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37849,0,"",python,selection_command +4925,15602613,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37837,0,"",python,selection_command +4926,15604896,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",0,0,"",python,tab +4927,15604896,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77888,0,"",python,selection_command +4928,15607037,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +4929,15607037,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37837,0,"",python,selection_command +4930,15616526,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",0,0,"",python,tab +4931,15616526,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77888,0,"",python,selection_command +4932,15619445,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +4933,15619445,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37837,0,"",python,selection_command +4934,15619940,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37843,0,"",python,selection_command +4935,15620134,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37847,0,"",python,selection_command +4936,15620271,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37849,0,"",python,selection_command +4937,15620836,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",0,0,"# Copyright 2024 The JAX Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport enum\nimport functools\nimport json\nimport math\nfrom typing import TypedDict\n\nimport jax\nfrom jax import dtypes\nfrom jax._src import 
core\nfrom jax._src import dispatch\nfrom jax._src.custom_partitioning import custom_partitioning\nfrom jax._src.interpreters import batching\nfrom jax._src.interpreters import mlir\nfrom jax._src.lib import cuda_versions\nfrom jax._src import xla_bridge\nfrom jax._src.lib.mlir import ir\nfrom jax._src.lib.mlir.dialects import hlo\nimport jax.numpy as jnp\nfrom jax.sharding import NamedSharding, PartitionSpec\n\nArray = jnp.ndarray\n\nclass FP8Params(TypedDict):\n amax_dQ: float # Amax of gradient of query\n amax_dK: float # Amax of gradient of key\n amax_dV: float # Amax of gradient of value\n amax_dP: float # Amax of gradient of state\n descale_q: float # Descaling factor of query\n descale_k: float # Descaling factor of key\n descale_v: float # Descaling factor of value\n descale_s: float # Descaling factor of attention score\n scale_s: float # Scale factor for S tensor\n scale_o: float # Scale factor for output\n descale_o: float # Descale factor for output (bwd)\n descale_dO: float # Descale factor for output gradient (bwd)\n descale_dP: float # Descale factor for P gradient tensor (bwd)\n scale_dQ: float # Scale factor for query gradient (bwd)\n scale_dK: float # Scale factor for key gradient (bwd)\n scale_dV: float # Scale factor for value gradient (bwd)\n scale_dP: float # Scale factor for state gradient (bwd)\n\n\nclass AttentionLayout(enum.Enum):\n BTNH = 0\n BNTH = 1\n\n\nclass MaskType(enum.Enum):\n NO_MASK = 0\n PADDING = 1\n CAUSAL = 2\n PADDING_CAUSAL = 3\n ALIBI = 4\n\n\ndef convert_mask_type_to_string(mask_type: MaskType) -> str:\n if mask_type == MaskType.NO_MASK:\n return ""NO_MASK""\n elif mask_type == MaskType.PADDING:\n return ""PADDING""\n elif mask_type == MaskType.CAUSAL:\n return ""CAUSAL""\n elif mask_type == MaskType.PADDING_CAUSAL:\n return ""PADDING_CAUSAL""\n elif mask_type == MaskType.ALIBI:\n return ""ALIBI""\n else:\n raise ValueError(f""Unexpected mask type: {mask_type}"")\n\ndef has_padding(mask_type: MaskType) -> bool:\n return mask_type == MaskType.PADDING or mask_type == MaskType.PADDING_CAUSAL\n\ndef should_export_dbias(bias_shape, query_shape, layout) -> bool:\n b_B, b_N, _, _ = bias_shape\n if layout == AttentionLayout.BNTH.value:\n _, q_N, _, _ = query_shape\n else:\n _, _, q_N, _ = query_shape\n return b_B == 1 and b_N == q_N\n\ndef get_large_negative_number(dtype):\n # temp WAR as cuDNN has a bug for subtraction between two large negative value\n if dtype == jnp.bfloat16:\n return jnp.asarray(-2 << 40, dtype=dtype)\n elif dtype == jnp.float16:\n return jnp.asarray(-2 << 14, dtype=dtype)\n else:\n raise ValueError(""Unsupported dtype for inputs."")\n\ndef _normalize_layout(layout: str) -> AttentionLayout:\n layout_upper = layout.upper()\n if layout_upper in [""BSNH"", ""BNSH"", ""BTNH"", ""BNTH""]:\n return AttentionLayout[layout_upper.replace(""S"", ""T"")]\n else:\n raise ValueError(f""Unsupported qkv_layout: {layout}"")\n\ndef element_type_to_backend_config_type_mapping(dtype):\n _element_type_to_backend_config_type_mapping = {\n ir.BF16Type.get(): ""BF16"",\n ir.F16Type.get(): ""F16"",\n }\n return _element_type_to_backend_config_type_mapping[dtype]\n\ndef default_layouts(*shapes):\n return [range(len(shape) - 1, -1, -1) for shape in shapes]\n\ndef get_max_seg_per_batch(q_offsets):\n return q_offsets.shape[1] - 1 if len(q_offsets.shape) == 2 else 1\n\ndef check_is_paged_attention(page_table_k):\n return len(page_table_k.shape) == 4\n\ndef create_dot_product_attention_backend_config_base(\n batch, num_heads, seq_q, seq_kv, dtype, fmha_scale, 
mask_type, layout, is_bwd\n):\n # Q, K, V: query, key, value in shape of BT(S)NH or BNT(S)H\n # P: BMM1 output in shape of BNTS\n # O: BMM2 output in the same shape with Q\n # BMM1: Q @ K -> P\n # BMM2: P @ V -> O\n # BMM1Grad1: dP @ Q -> dK\n # BMM1Grad2: dP @ K -> dQ\n # BMM2Grad1: P @ dO -> dV\n # BMM2Grad2: dO @ V -> dP\n cudnn_fmha_backend_config = {\n ""algorithm"": {\n ""algo_id"": ""0"",\n ""math_type"": ""TENSOR_OP_MATH"",\n ""tuning_knobs"": {""17"": ""1"", ""24"": ""0""},\n ""is_cudnn_frontend"": True,\n ""workspace_size"": ""0"",\n },\n ""fmha_scale"": fmha_scale,\n ""intermediate_tensor_shape"": {\n ""element_type"": element_type_to_backend_config_type_mapping(dtype),\n ""dimensions"": [str(batch), str(num_heads), str(seq_q), str(seq_kv)],\n ""tuple_shapes"": [],\n ""layout"": {\n ""dim_level_types"": [],\n ""dim_unique"": [],\n ""dim_ordered"": [],\n ""minor_to_major"": [""3"", ""2"", ""1"", ""0""],\n ""tiles"": [],\n ""element_size_in_bits"": ""0"",\n ""memory_space"": ""0"",\n ""index_primitive_type"": ""PRIMITIVE_TYPE_INVALID"",\n ""pointer_primitive_type"": ""PRIMITIVE_TYPE_INVALID"",\n ""dynamic_shape_metadata_prefix_bytes"": ""0"",\n },\n ""is_dynamic_dimension"": [False, False, False, False],\n },\n ""is_flash_attention"": True,\n ""mask_type"": convert_mask_type_to_string(mask_type),\n }\n\n # We define the contracting and batch dims in the format of\n # ((lhs_contracting_dims, rhs_contracting_dims), (lhs_batch_dims,\n # rhs_batch_dims)).\n if layout == AttentionLayout.BNTH.value:\n dims = [\n ((3, 3), ((0, 1), (0, 1))), # BMM1: BNTH,BNSH->BNTS\n ((3, 2), ((0, 1), (0, 1))), # BMM2: BNTS,BNSH->BNTH\n ((2, 2), ((0, 1), (0, 1))), # BMM1_grad_1: BNTS,BNTH->BNSH\n ((3, 2), ((0, 1), (0, 1))), # BMM1_grad_2: BNTS,BNSH->BNTH\n ((2, 2), ((0, 1), (0, 1))), # BMM2_grad_1: BNTS,BNTH->BNSH\n ((3, 3), ((0, 1), (0, 1))), # BMM2_grad_2: BNTH,BNSH->BNTS\n ]\n else:\n dims = [\n ((3, 3), ((0, 2), (0, 2))), # BMM1: BTNH,BSNH->BNTS\n ((3, 1), ((0, 1), (0, 2))), # BMM2: BNTS,BSNH->BTNH\n ((2, 1), ((0, 1), (0, 2))), # BMM1_grad_1: BNTS,BTNH->BSNH\n ((3, 1), ((0, 1), (0, 2))), # BMM1_grad_2: BNTS,BSNH->BTNH\n ((2, 1), ((0, 1), (0, 2))), # BMM2_grad_1: BNTS,BTNH->BSNH\n ((3, 3), ((0, 2), (0, 2))), # BMM2_grad_2: BTNH,BSNH->BNTS\n ]\n keys = [\n ""bmm1_dot_dimension_numbers"",\n ""bmm2_dot_dimension_numbers"",\n ""bmm1_grad_gemm1_dot_dimension_numbers"",\n ""bmm1_grad_gemm2_dot_dimension_numbers"",\n ""bmm2_grad_gemm1_dot_dimension_numbers"",\n ""bmm2_grad_gemm2_dot_dimension_numbers"",\n ]\n fwd_dot_number = {}\n bwd_dot_number = {}\n for idx, (key, ((lc, rc), (lb, rb))) in enumerate(zip(keys, dims)):\n dims_to_write = fwd_dot_number if idx < 2 else bwd_dot_number\n dims_to_write[key] = {\n ""lhs_contracting_dimensions"": [str(lc)],\n ""rhs_contracting_dimensions"": [str(rc)],\n ""lhs_batch_dimensions"": [str(i) for i in lb],\n ""rhs_batch_dimensions"": [str(i) for i in rb],\n }\n\n if is_bwd:\n cudnn_fmha_backend_config = {**cudnn_fmha_backend_config, **bwd_dot_number}\n else:\n cudnn_fmha_backend_config = {**cudnn_fmha_backend_config, **fwd_dot_number}\n backend_config = {\n ""operation_queue_id"":""0"",\n ""wait_on_operation_queues"":[],\n ""cudnn_fmha_backend_config"": cudnn_fmha_backend_config\n }\n return backend_config\n\ndef create_dot_product_attention_backend_config(\n batch,\n num_heads,\n seq_q,\n seq_kv,\n dtype,\n fmha_scale,\n seed,\n dropout_rate,\n mask_type,\n layout,\n sliding_window_length,\n max_seg_per_batch,\n is_paged_attention,\n is_bwd\n):\n backend_config = 
create_dot_product_attention_backend_config_base(\n batch, num_heads, seq_q, seq_kv, dtype,\n fmha_scale, mask_type, layout, is_bwd\n )\n if sliding_window_length is None:\n sliding_window_length = 0\n backend_config['cudnn_fmha_backend_config'][""dropout_rate""] = dropout_rate\n backend_config['cudnn_fmha_backend_config'][""seed""] = seed\n backend_config['cudnn_fmha_backend_config'][""sliding_window_length""] = sliding_window_length\n backend_config['cudnn_fmha_backend_config'][""max_seg_per_batch""] = max_seg_per_batch\n backend_config['cudnn_fmha_backend_config'][""is_paged_attention""] = is_paged_attention\n return json.dumps(backend_config)\n\ndef create_dot_product_attention_fp8_backend_config(\n batch, num_heads, seq_q, seq_kv, dtype, fmha_scale, mask_type, layout, is_bwd):\n backend_config = create_dot_product_attention_backend_config_base(\n batch, num_heads, seq_q, seq_kv, dtype, fmha_scale, mask_type, layout, is_bwd)\n return json.dumps(backend_config)\n\n# mapping from (is_bwd, has_dropout, has_bias) to custom call name\n_custom_name_maps = {\n # fMHA forward call targets.\n (False, False, False, False): ""__cudnn$fmhaSoftmax"",\n (False, False, True, False): ""__cudnn$fmhaScaleBiasSoftmax"",\n (False, True, False, False): ""__cudnn$fmhaSoftmaxDropout"",\n (False, True, True, False): ""__cudnn$fmhaScaleBiasSoftmaxDropout"",\n (False, False, False, True): ""__cudnn$fmhaSoftmaxF8"",\n # fMHA backward call targets.\n (True, False, False, False): ""__cudnn$fmhaSoftmaxBackward"",\n (True, False, True, False): ""__cudnn$fmhaScaleBiasSoftmaxBackward"",\n (True, True, False, False): ""__cudnn$fmhaSoftmaxDropoutBackward"",\n (True, True, True, False): ""__cudnn$fmhaScaleBiasSoftmaxDropoutBackward"",\n (True, False, False, True): ""__cudnn$fmhaSoftmaxBackwardF8"",\n}\n\ndef get_custom_call_name(has_bias, has_dropout, is_bwd, is_fp8=False):\n return _custom_name_maps[(is_bwd, has_dropout, has_bias, is_fp8)]\n\nget_fp8_custom_call_name = functools.partial(\n get_custom_call_name, has_bias=False, has_dropout=False, is_fp8=True\n)\n\ndef check_layout(query, key, value, bias, q_seqlen, kv_seqlen,\n q_offsets, kv_offsets, page_table_k, page_table_v, layout):\n def check_eq(a, b, c, msg):\n if not (a == b == c):\n raise ValueError(f""{msg} must be same, got {a}, {b}, {b}"")\n\n q_rank, k_rank, v_rank = len(query.shape), len(key.shape), len(value.shape)\n if q_rank != 4:\n raise ValueError(f""Q must have a rank of 4, got {q_rank}"")\n check_eq(q_rank, k_rank, v_rank, ""QKV rank"")\n\n q_dtype, k_dtype, v_dtype = query.dtype, key.dtype, value.dtype\n if q_dtype not in [jnp.bfloat16, jnp.float16, jnp.float8_e4m3fn, jnp.float8_e5m2]:\n raise NotImplementedError(f""Q must be fp16/bf16/fp8_e4m3fn/fp8_e5m2, got {q_dtype}"")\n check_eq(q_dtype, k_dtype, v_dtype, ""QKV dtype"")\n\n if layout == AttentionLayout.BNTH:\n qB, qN, qT, qH = query.shape\n kB, kN, kS, kH = key.shape\n vB, vN, vS, vH = value.shape\n else:\n assert layout == AttentionLayout.BTNH\n qB, qT, qN, qH = query.shape\n kB, kS, kN, kH = key.shape\n vB, vS, vN, vH = value.shape\n\n if page_table_k is not None and page_table_v is not None:\n k_blocks, k_block_size = kB, kS\n v_blocks, v_block_size = vB, vS\n kB, _, k_blocks_per_batch, _ = page_table_k.shape\n vB, _, v_blocks_per_batch, _ = page_table_v.shape\n kS = k_blocks_per_batch * k_block_size\n vS = v_blocks_per_batch * v_block_size\n if kB * k_blocks_per_batch != k_blocks:\n raise ValueError(\n f""Key and page_table_k must have same number of blocks, ""\n f""got {k_blocks} vs {kB * 
k_blocks_per_batch}"")\n if vB * v_blocks_per_batch != v_blocks:\n raise ValueError(\n f""Value and page_table_v must have same number of blocks, ""\n f""got {v_blocks} vs {vB * v_blocks_per_batch}"")\n\n check_eq(qB, kB, vB, ""QKV batch"")\n check_eq(qH, kH, vH, ""QKV dim_per_head"")\n if kN != vN:\n raise ValueError(f""KV must have same number of heads, got {kN} vs {vN}"")\n if kS != vS:\n raise ValueError(f""KV must have same seq length, got {kS} vs {vS}"")\n\n # check bias\n if bias is not None:\n _, _, bT, bS = bias.shape\n if bT != qT or bS != vS:\n raise ValueError(\n f""Bias must have same seq length as QKV, got {bT} and {bS}"")\n\n # check q_seqlen/kv_seqlen/q_offsets/kv_offsets\n expected_rank = 2 if q_offsets is not None else 1\n def check_seqlen_offsets(tensor, name):\n if tensor is not None:\n dtype = tensor.dtype\n rank = len(tensor.shape)\n if dtype != jnp.int32:\n raise ValueError(f""{name} must have int32 datatype, got {dtype}"")\n if rank != expected_rank:\n raise ValueError(f""{name} must have a rank of {expected_rank}, got {rank}"")\n b = tensor.shape[0]\n if b != qB:\n raise ValueError(f""{name} must have same batch as Q, got {b}"")\n\n check_seqlen_offsets(q_seqlen, ""q_seqlen"")\n check_seqlen_offsets(kv_seqlen, ""kv_seqlen"")\n check_seqlen_offsets(q_offsets, ""q_offsets"")\n check_seqlen_offsets(kv_offsets, ""kv_offsets"")\n\n\ndef check_is_flash_attention(\n query, key, layout: int, cudnn_version, has_bias, is_training, is_packed=False,\n is_paged_attention=False, is_fp8=False):\n # Extract sequence length (T) and head dim (H) based on layout\n if layout == AttentionLayout.BNTH.value:\n _, _, T, H = query.shape\n _, _, S, _ = key.shape\n else:\n _, T, _, H = query.shape\n _, S, _, _ = key.shape\n\n # Flash attention conditions\n if is_fp8:\n # FP8 specific conditions\n if not ((is_training and H == 128 and T % 128 == 0 and S % 128 == 0) or\n (not is_training and H <= 256 and H % 16 == 0)):\n raise NotImplementedError(\n f""Unsupported sequence length Q {T}, KV {S} and head dim {H} for FP8.""\n )\n else:\n # bf16/fp16 attention conditions\n # Check the head dim.\n is_on_hopper = is_cuda_compute_capability_equal(""9.0"")\n H_max = 256 if cudnn_version >= 90500 and is_on_hopper else 128\n if not (H <= H_max and H % 8 == 0):\n raise NotImplementedError(\n f""The head dim must be <= {H_max} and a multiple of 8, ""\n f""but got {H}.""\n )\n\n # Check patterns with bias, seqlen should be divisible by 2\n if (is_training and has_bias and (T % 2 != 0 or S % 2 != 0)):\n raise NotImplementedError(\n f""Unsupported sequence length Q {T}, KV {S}.""\n )\n\n if is_packed and (cudnn_version < 90600 or not check_compute_capability(""9.0"")):\n raise NotImplementedError(\n ""Packed layout requires cudnn version >= 9.6 and at least hopper arch."")\n if is_paged_attention and cudnn_version < 90500:\n raise NotImplementedError(""Page attention requires cudnn version >= 9.5."")\n\ndef check_cudnn_version():\n # check if cuDNN is installed\n if cuda_versions is None:\n raise RuntimeError(""cuDNN is not detected."")\n return cuda_versions.cudnn_get_version()\n\ndef check_compute_capability(capability):\n if not 'cuda' in xla_bridge.get_backend().platform_version:\n return False\n d, *_ = jax.local_devices(backend=""gpu"")\n target = tuple(int(x) for x in capability.split("".""))\n current = tuple(int(x) for x in d.compute_capability.split("".""))\n return current >= target\n\ndef is_cuda_compute_capability_equal(capability):\n if not 'cuda' in xla_bridge.get_backend().platform_version:\n 
return False\n d, *_ = jax.local_devices(backend=""gpu"")\n target = tuple(int(x) for x in capability.split("".""))\n current = tuple(int(x) for x in d.compute_capability.split("".""))\n return current == target\n\ndef _dot_product_attention_fwd(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v,\n scale, seed, dropout_rate, variadic_args, mask_type, layout,\n sliding_window_length, cudnn_version, return_residual):\n # check if flash attention is supported for this attention pattern\n check_is_flash_attention(\n query, key, layout, cudnn_version, bias is not None, False,\n get_max_seg_per_batch(q_offsets) > 1, check_is_paged_attention(page_table_k))\n outputs = _dot_product_attention_fwd_p_wrapper.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale=scale, seed=seed, dropout_rate=dropout_rate,\n variadic_args=variadic_args, mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length, is_training=False or return_residual)\n if return_residual:\n return tuple(outputs)\n else:\n return outputs[0]\n\ndef _dot_product_attention_fwd_rule(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale, seed, dropout_rate, variadic_args,\n mask_type, layout, sliding_window_length, cudnn_version,\n return_residual):\n # check if flash attention is supported for this attention pattern\n check_is_flash_attention(\n query, key, layout, cudnn_version, bias is not None, True,\n get_max_seg_per_batch(q_offsets) > 1)\n outputs = _dot_product_attention_fwd_p_wrapper.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale=scale, seed=seed, dropout_rate=dropout_rate,\n variadic_args=variadic_args, mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length, is_training=True)\n res = (query, key, value, bias, q_seqlen, kv_seqlen, q_offsets,\n kv_offsets, page_table_k, page_table_v, outputs[1], outputs[0])\n if return_residual:\n return tuple(outputs), res\n else:\n return outputs[0], res\n\ndef _dot_product_attention_bwd_rule(\n scale, seed, dropout_rate, variadic_args, mask_type, layout,\n sliding_window_length, is_training, return_residual, res, grad_output):\n (query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, fwd_output) = res\n if return_residual:\n grad_output = grad_output[0]\n grads = _dot_product_attention_bwd_p_wrapper.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, fwd_output, grad_output,\n scale=scale, seed=seed, dropout_rate=dropout_rate, variadic_args=variadic_args,\n mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length\n )\n grads = (*grads,) + (None,) * (10 - len(grads))\n return grads\n\ndef _fix_seqlen_offsets(q_seqlen, kv_seqlen, q_offsets, kv_offsets, query, key):\n # fix seqlen and offsets to what cuDNN expects in sequence packing.\n # cuDNN expects seqlen to have shape [S] where S is the total number of segments\n # while the SDPA API accetps seqlen with shape [B, M] where B is the batch and M\n # is the maximum number of segments of one batch. B x M is larger than S and seqlen\n # is filled with -1 for padded regions. Therefore, we need to shift all non negative\n # values to left side to form a correct seqlen. 
Similar layout is required for\n # offsets tensors.\n # cuDNN expects offsets to have offset for each segment starting from first segment\n # while SDPA API accetps offsets to have offset for each segment starting from\n # current batch, therefore we need to calculate accumulative offset of each segment\n # starting from first segment.\n def _shift_to_left(x, fill_value):\n # shift any non-negative value to left\n # [[1, 3, -1, -1], [2, 3, 4, -1]]\n # -> [[1, 3, 2, 3], [4, -1, -1, -1]]\n x_shape = x.shape\n x = x.flatten()\n size = x.size\n indices = jnp.nonzero(x >= 0, size=size, fill_value=size)[0]\n y = jnp.take(x, indices, fill_value=fill_value)\n return jnp.reshape(y, x_shape)\n\n def _cu_offset(offsets, max_seq):\n # calculate accumulative offset by batch\n # [[1, 3, 5, 7], [4, 5, -1, -1]], max_seq = 8\n # -> [[1, 3, 5, 7], [12, 13, -1, -1]]\n batch = offsets.shape[0]\n offsets = jnp.where(\n offsets >= 0,\n offsets + (jnp.arange(batch, dtype=offsets.dtype) * max_seq)[..., jnp.newaxis],\n offsets,\n )\n return offsets\n\n if get_max_seg_per_batch(q_offsets) > 1:\n B, T, N, H = query.shape\n _, S, _, _ = key.shape\n\n q_seqlen = _shift_to_left(q_seqlen, -1)\n kv_seqlen = _shift_to_left(kv_seqlen, -1)\n\n q_offsets = _cu_offset(q_offsets, T)\n kv_offsets = _cu_offset(kv_offsets, S)\n q_offsets = _shift_to_left(q_offsets, -1)\n kv_offsets = _shift_to_left(kv_offsets, -1)\n\n # mark any invalid entries as maximum offset\n q_offsets = jnp.where(q_offsets < 0, B * T, q_offsets)\n kv_offsets = jnp.where(kv_offsets < 0, B * S, kv_offsets)\n\n # multiply by stride_per_token to get correct offsets\n # do it here because real stride changes after sharding\n q_offsets = q_offsets * N * H\n kv_offsets = kv_offsets * N * H\n\n return q_seqlen, kv_seqlen, q_offsets, kv_offsets\n\ndef _dot_product_attention_fwd_impl(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale, seed, dropout_rate, variadic_args,\n mask_type, layout, sliding_window_length, is_training):\n # args: {Q, K, V, mask*, bias*}\n q_seqlen, kv_seqlen, q_offsets, kv_offsets = \\n _fix_seqlen_offsets(q_seqlen, kv_seqlen, q_offsets, kv_offsets, query, key)\n outputs = _dot_product_attention_fwd_p.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale=scale, seed=seed, dropout_rate=dropout_rate,\n variadic_args=variadic_args, mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length, is_training=is_training)\n return outputs\n\ndef _dot_product_attention_bwd_impl(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, fwd_output, grad_output, scale,\n seed, dropout_rate, variadic_args, mask_type, layout, sliding_window_length):\n q_seqlen, kv_seqlen, q_offsets, kv_offsets = \\n _fix_seqlen_offsets(q_seqlen, kv_seqlen, q_offsets, kv_offsets, query, key)\n grads = _dot_product_attention_bwd_p.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, fwd_output, grad_output,\n scale=scale, seed=seed,\n dropout_rate=dropout_rate, variadic_args=variadic_args,\n mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length)\n return grads\n\ndef _dot_product_attention_fwd_abstract(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, *, scale, seed, dropout_rate, variadic_args,\n mask_type, layout, sliding_window_length, 
is_training):\n query_dtype = dtypes.canonicalize_dtype(query.dtype)\n if layout == AttentionLayout.BNTH.value:\n B, N, T, _ = query.shape\n _, _, S, _ = key.shape\n else:\n B, T, N, _ = query.shape\n _, S, _, _ = key.shape\n output_shape = query.shape\n\n max_seg_per_batch = get_max_seg_per_batch(q_offsets)\n softmax_stat_shape = (B * max_seg_per_batch, N, T)\n\n if is_training:\n return (\n core.ShapedArray(output_shape, query_dtype), # output\n core.ShapedArray(softmax_stat_shape, jnp.float32), # softmax_stat\n )\n else:\n return (\n core.ShapedArray(output_shape, query_dtype), # output\n )\n\ndef _dot_product_attention_bwd_abstract(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, fwd_output, grad_output, *,\n scale, seed, dropout_rate, variadic_args, mask_type, layout, sliding_window_length):\n query_dtype = dtypes.canonicalize_dtype(query.dtype)\n key_dtype = dtypes.canonicalize_dtype(key.dtype)\n value_dtype = dtypes.canonicalize_dtype(value.dtype)\n\n _, has_dbias = variadic_args\n if has_dbias:\n # cuDNN supports bias for this case\n bias_dtype = dtypes.canonicalize_dtype(bias.dtype)\n return (\n core.ShapedArray(\n query.shape, query_dtype\n ), # grad query\n core.ShapedArray(\n key.shape, key_dtype\n ), # grad key\n core.ShapedArray(\n value.shape, value_dtype\n ), # grad value\n core.ShapedArray(\n bias.shape, bias_dtype\n ), # grad bias\n )\n else:\n return (\n core.ShapedArray(\n query.shape, query_dtype\n ), # grad query\n core.ShapedArray(\n key.shape, key_dtype\n ), # grad key\n core.ShapedArray(\n value.shape, value_dtype\n ), # grad value\n )\n\ndef _dot_product_attention_fwd_cuda_lowering(\n ctx, query, key, value, bias, q_seqlen, kv_seqlen, q_offsets,\n kv_offsets, page_table_k, page_table_v, scale, seed, dropout_rate,\n variadic_args, mask_type, layout, sliding_window_length, is_training):\n query_type = ir.RankedTensorType(query.type)\n query_shape = query_type.shape\n key_type = ir.RankedTensorType(key.type)\n key_shape = key_type.shape\n\n if layout == AttentionLayout.BNTH.value:\n B, N, T, H = query_shape\n _, _, S, _ = key_shape\n output_layout = (3, 2, 1, 0)\n output_transpose_perm = mlir.dense_int_array((0, 1, 2, 3))\n else:\n B, T, N, H = query_shape\n _, S, _, _ = key_shape\n output_layout = (3, 1, 2, 0)\n output_transpose_perm = mlir.dense_int_array((0, 2, 1, 3))\n\n max_seg_per_batch = get_max_seg_per_batch(ir.RankedTensorType(q_offsets.type))\n is_paged_attention = check_is_paged_attention(ir.RankedTensorType(page_table_k.type))\n\n output_shape = (B, N, T, H)\n softmax_stat_shape = (B * max_seg_per_batch, N, T)\n workspace_shape = (0,)\n workspace_type = ir.IntegerType.get_unsigned(8)\n\n has_bias, _ = variadic_args\n backend_config = create_dot_product_attention_backend_config(\n B, N, T, S, query_type.element_type, scale, seed, dropout_rate,\n mask_type, layout, sliding_window_length, max_seg_per_batch,\n is_paged_attention, is_bwd=False)\n # {Q, K, V, bias*, q_seqlen*, kv_seqlen*, q_offsets*, kv_offsets*}}\n # {output, activation*, workspace}\n has_dropout = dropout_rate > 0\n operands = [query, key, value]\n if has_bias:\n operands.append(bias)\n if has_padding(mask_type) or max_seg_per_batch > 1 or is_paged_attention:\n operands.append(q_seqlen)\n operands.append(kv_seqlen)\n if max_seg_per_batch > 1:\n operands.append(q_offsets)\n operands.append(kv_offsets)\n if is_paged_attention:\n operands.append(page_table_k)\n operands.append(page_table_v)\n\n custom_call_name = 
get_custom_call_name(has_bias, has_dropout, False)\n\n if is_training:\n result_types = [\n ir.RankedTensorType.get(output_shape, query_type.element_type),\n ir.RankedTensorType.get(softmax_stat_shape, ir.F32Type.get()),\n ir.RankedTensorType.get(workspace_shape, workspace_type),\n ]\n result_layouts = [output_layout] + default_layouts(softmax_stat_shape, workspace_shape)\n else:\n result_types = [\n ir.RankedTensorType.get(output_shape, query_type.element_type),\n ir.RankedTensorType.get(workspace_shape, workspace_type)\n ]\n result_layouts = [output_layout] + default_layouts(workspace_shape)\n # create custom call here\n out = mlir.custom_call(\n custom_call_name,\n result_types=result_types,\n operands=operands,\n backend_config=backend_config,\n operand_layouts=default_layouts(\n *[ir.RankedTensorType(operand.type).shape for operand in operands]),\n result_layouts=result_layouts,\n )\n # drop workspace memory\n # output should be (B, T, N, H) instead of (B, N, T, H)\n if is_training:\n return [hlo.transpose(out.results[0], output_transpose_perm), out.results[1]]\n else:\n return [hlo.transpose(out.results[0], output_transpose_perm)]\n\ndef _dot_product_attention_bwd_cuda_lowering(\n ctx, query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, fwd_output, grad_output,\n scale, seed, dropout_rate, variadic_args, mask_type, layout, sliding_window_length):\n query_type = ir.RankedTensorType(query.type)\n query_shape = query_type.shape\n key_type = ir.RankedTensorType(key.type)\n key_shape = key_type.shape\n value_type = ir.RankedTensorType(value.type)\n\n if layout == AttentionLayout.BNTH.value:\n B, q_N, T, H = query_shape\n _, k_N, S, _ = key_shape\n grad_layout = (3, 2, 1, 0)\n grad_transpose_perm = mlir.dense_int_array((0, 1, 2, 3))\n else:\n B, T, q_N, H = query_shape\n _, S, k_N, _ = key_shape\n grad_layout = (3, 1, 2, 0)\n grad_transpose_perm = mlir.dense_int_array((0, 2, 1, 3))\n\n workspace_shape = (0,)\n workspace_type = ir.IntegerType.get_unsigned(8)\n\n grad_query_shape = (B, q_N, T, H)\n grad_key_shape = (B, k_N, S, H)\n grad_value_shape = (B, k_N, S, H)\n\n has_bias, has_dbias = variadic_args\n max_seg_per_batch = get_max_seg_per_batch(ir.RankedTensorType(q_offsets.type))\n backend_config = create_dot_product_attention_backend_config(\n B, q_N, T, S, query_type.element_type, scale, seed, dropout_rate,\n mask_type, layout, sliding_window_length, max_seg_per_batch,\n False, is_bwd=True)\n # {Q, K, V, activation, dO, bias*, O, q_seqlen*, kv_seqlen*,\n # q_offsets*, kv_offsets*}\n # {dQ, dK, dV, dbias*, workspace}\n has_dropout = dropout_rate > 0\n # create operands\n operands = [query, key, value, activation, grad_output]\n if has_bias:\n # flash attention requires bias in the bwd for remat\n operands.append(bias)\n operands.append(fwd_output)\n if has_padding(mask_type) or max_seg_per_batch > 1:\n operands.append(q_seqlen)\n operands.append(kv_seqlen)\n if max_seg_per_batch > 1:\n operands.append(q_offsets)\n operands.append(kv_offsets)\n # get custom call name\n custom_call_name = get_custom_call_name(has_bias, has_dropout, True)\n\n # create output types and layouts\n # grad_query, grad_key, grad_value\n result_types = [\n ir.RankedTensorType.get(grad_query_shape, query_type.element_type),\n ir.RankedTensorType.get(grad_key_shape, key_type.element_type),\n ir.RankedTensorType.get(grad_value_shape, value_type.element_type),\n ]\n result_layouts = [grad_layout, grad_layout, grad_layout]\n bias_type = 
ir.RankedTensorType(bias.type)\n bias_shape = bias_type.shape\n if has_dbias:\n # cuDNN supports bias for this case\n result_types.append(\n ir.RankedTensorType.get(bias_shape, bias_type.element_type))\n result_layouts = result_layouts + default_layouts(bias_shape)\n # workspace\n result_types.append(ir.RankedTensorType.get(workspace_shape, workspace_type))\n result_layouts = result_layouts + default_layouts(workspace_shape)\n out = mlir.custom_call(\n custom_call_name,\n result_types=result_types,\n operands=operands,\n backend_config=backend_config,\n operand_layouts=default_layouts(\n *[ir.RankedTensorType(operand.type).shape for operand in operands]),\n result_layouts=result_layouts,\n )\n dqkv = (hlo.transpose(out.results[0], grad_transpose_perm),\n hlo.transpose(out.results[1], grad_transpose_perm),\n hlo.transpose(out.results[2], grad_transpose_perm))\n # Only keep dQ, dK, dV and dBias here\n if has_dbias:\n return dqkv + (out.results[3],)\n else:\n return dqkv\n\n# batcher\ndef _check_valid_batch_dims(bdims):\n for dim in bdims:\n if dim not in [0, None]:\n raise NotImplementedError(\n f""Currently only support batch_dim in [0, None], but got {dim=}"")\n\ndef _dot_product_attention_fwd_batcher(\n batched_args, batch_dims, *, scale, seed, dropout_rate, variadic_args,\n mask_type, layout, sliding_window_length, is_training):\n _check_valid_batch_dims(batch_dims)\n query, key, value, bias, q_seqlen, kv_seqlen, \\n q_offsets, kv_offsets, page_table_k, page_table_v = batched_args\n query_bdim = batch_dims[0]\n if is_training:\n out_bdims = query_bdim, query_bdim\n else:\n out_bdims = (query_bdim,)\n\n if layout == AttentionLayout.BNTH.value:\n *Bs, N, T, _ = query.shape\n *_, _, S, _ = key.shape\n else:\n *Bs, T, N, _ = query.shape\n *_, S, _, _ = key.shape\n B = math.prod(Bs)\n has_bias, _ = variadic_args\n original_shape = query.shape\n # reshape to 4D shape\n query = jnp.reshape(query, (B,) + query.shape[-3:])\n key = jnp.reshape(key, (B,) + key.shape[-3:])\n value = jnp.reshape(value, (B,) + key.shape[-3:])\n if has_bias and batch_dims[3] is not None:\n bias = jnp.reshape(bias, (B, N, T, S))\n if has_padding(mask_type):\n q_seqlen = jnp.reshape(q_seqlen, (B, ))\n kv_seqlen = jnp.reshape(kv_seqlen, (B, ))\n\n outputs = _dot_product_attention_fwd_p_wrapper.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale=scale, seed=seed, dropout_rate=dropout_rate,\n variadic_args=variadic_args, mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length, is_training=is_training)\n\n # reshape to original shape\n output = outputs[0]\n output = jnp.reshape(output, original_shape)\n if is_training:\n activation = outputs[1]\n activation = jnp.reshape(activation, (*Bs, N, T))\n return (output, activation), out_bdims\n else:\n return (output,), out_bdims\n\ndef _dot_product_attention_bwd_batcher(\n batched_args, batch_dims, *, scale, seed, dropout_rate, variadic_args,\n mask_type, layout, sliding_window_length):\n _check_valid_batch_dims(batch_dims)\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets, \\n page_table_k, page_table_v, activation, fwd_output, grad_output = batched_args\n query_bdim = batch_dims[0]\n out_bdims = query_bdim, query_bdim, query_bdim\n\n if layout == AttentionLayout.BNTH.value:\n *Bs, N, T, _ = query.shape\n *_, _, S, _ = key.shape\n else:\n *Bs, T, N, _ = query.shape\n *_, S, _, _ = key.shape\n B = math.prod(Bs)\n has_bias, has_dbias = variadic_args\n # Reset the has_dbias if 
the combined batch size is not 1, because cuDNN only\n # supports dbias with a single batch. In this case, an all-zero dbias will be\n # appended instead.\n if B > 1:\n variadic_args = (has_bias, False)\n original_query_shape = query.shape\n original_key_shape = key.shape\n original_value_shape = value.shape\n original_bias_shape = bias.shape if has_bias else None\n # reshape to 4D shape\n query = jnp.reshape(query, (B,) + query.shape[-3:])\n key = jnp.reshape(key, (B,) + key.shape[-3:])\n value = jnp.reshape(value, (B,) + key.shape[-3:])\n if has_bias and batch_dims[3] is not None:\n bias = jnp.reshape(bias, (B, N, T, S))\n if has_padding(mask_type):\n q_seqlen = jnp.reshape(q_seqlen, (B, ))\n kv_seqlen = jnp.reshape(kv_seqlen, (B, ))\n\n activation = jnp.reshape(activation, (B, N, T))\n fwd_output = jnp.reshape(fwd_output, (B,) + query.shape[-3:])\n grad_output = jnp.reshape(grad_output, (B,) + query.shape[-3:])\n\n grads = _dot_product_attention_bwd_p_wrapper.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, fwd_output, grad_output,\n scale=scale, seed=seed, dropout_rate=dropout_rate, variadic_args=variadic_args,\n mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length,\n )\n\n # reshape to original shape\n grads[0] = jnp.reshape(grads[0], original_query_shape)\n grads[1] = jnp.reshape(grads[1], original_key_shape)\n grads[2] = jnp.reshape(grads[2], original_value_shape)\n if has_dbias:\n assert has_bias\n if variadic_args[1]:\n grads[3] = jnp.reshape(grads[3], original_bias_shape)\n else:\n grads.append(jnp.zeros(original_bias_shape, bias.dtype))\n out_bdims += (batch_dims[3],)\n return grads, out_bdims\n\n# custom partitioning\ndef _get_padded_spec(arg_info):\n spec = None if arg_info.sharding is None else arg_info.sharding.spec\n ndim = arg_info.ndim\n if spec is None:\n return (None,) * ndim\n assert len(spec) <= ndim\n return spec + (None,) * (ndim - len(spec))\n\ndef _check_qkv_bias_mask_spec(\n query_spec, key_spec, value_spec, bias_spec, layout):\n # check qkv spec\n if not query_spec == key_spec == value_spec:\n raise ValueError(""Query, key and value should have same sharding."")\n if layout == AttentionLayout.BNTH.value:\n *batch_spec, num_head_spec, q_seq_spec, head_spec = query_spec\n else:\n *batch_spec, q_seq_spec, num_head_spec, head_spec = query_spec\n if q_seq_spec is not None:\n raise ValueError(""Sharding on sequence dim is not allowed."")\n if head_spec is not None:\n raise ValueError(""Sharding on head dim is not allowed."")\n # check bias spec\n if bias_spec:\n *bias_batch_spec, bias_num_head_spec, bias_q_seq_spec, bias_kv_seq_spec = bias_spec\n if any(bias_batch_spec) and bias_batch_spec != batch_spec or \\n bias_num_head_spec is not None and bias_num_head_spec != num_head_spec:\n raise ValueError(\n ""Query and bias should have same sharding on batch and num_head dim."")\n if bias_q_seq_spec is not None or bias_kv_seq_spec is not None:\n raise ValueError(""Sharding on bias sequence dim is not allowed."")\n\n\n# fwd custom partition\ndef _infer_fwd_output_sharding(mesh, arg_shapes, variadic_args,is_training, layout):\n # only sharding on batch and num_head dim is allowed\n # (*batch, q_seq, num_head, head)\n query_spec = _get_padded_spec(arg_shapes[0])\n # (*batch, kv_seq, num_head, head)\n key_spec = _get_padded_spec(arg_shapes[1])\n value_spec = _get_padded_spec(arg_shapes[2])\n has_bias, _ = variadic_args\n bias_spec = _get_padded_spec(arg_shapes[3]) if has_bias 
else None\n\n _check_qkv_bias_mask_spec(\n query_spec, key_spec, value_spec, bias_spec, layout)\n # keep out sharding same as query sharding since they have same shape\n out_sharding = NamedSharding(mesh, PartitionSpec(*query_spec))\n if is_training:\n # activation sharding\n *batch_spec, q_seq_spec, num_head_spec, _ = query_spec\n activation_sharding = NamedSharding(\n mesh, PartitionSpec(*batch_spec, num_head_spec, q_seq_spec, None))\n return [out_sharding, activation_sharding]\n return [out_sharding]\n\n_dot_product_attention_fwd_lower = custom_partitioning(\n _dot_product_attention_fwd_impl, static_argnums=(10, 11, 12, 13, 14, 15, 16, 17))\n\ndef _dot_product_attention_fwd_infer_sharding_from_operands(\n scale, seed, dropout_rate, variadic_args, mask_type, layout, sliding_window_length,\n is_training, mesh, arg_shapes, result_shape):\n return _infer_fwd_output_sharding(mesh, arg_shapes, variadic_args, is_training, layout)\n\ndef _dot_product_attention_fwd_partition(\n scale, seed, dropout_rate, variadic_args, mask_type, layout, sliding_window_length,\n is_training, mesh, arg_shapes, result_shape):\n # args sharding\n arg_shardings = tuple(arg_i.sharding for arg_i in arg_shapes)\n out_shardings = _infer_fwd_output_sharding(\n mesh, arg_shapes, variadic_args, is_training, layout)\n impl = functools.partial(\n _dot_product_attention_fwd_impl,\n scale=scale,\n seed=seed,\n dropout_rate=dropout_rate,\n variadic_args=variadic_args,\n mask_type=mask_type,\n layout=layout,\n sliding_window_length=sliding_window_length,\n is_training=is_training,\n )\n return mesh, impl, out_shardings, arg_shardings\n\n# bwd custom partition\ndef _infer_bwd_output_sharding(mesh, arg_shapes, layout, variadic_args):\n # (*batch, q_seq, num_head, head)\n query_spec = _get_padded_spec(arg_shapes[0])\n # (*batch, kv_seq, num_head, head)\n key_spec = _get_padded_spec(arg_shapes[1])\n value_spec = _get_padded_spec(arg_shapes[2])\n has_bias, has_dbias = variadic_args\n bias_spec = _get_padded_spec(arg_shapes[3]) if has_bias else None\n _check_qkv_bias_mask_spec(\n query_spec, key_spec, value_spec, bias_spec, layout)\n # keep grad query sharding same as query sharding\n grad_query_sharding = NamedSharding(mesh, PartitionSpec(*query_spec))\n grad_key_sharding = NamedSharding(mesh, PartitionSpec(*key_spec))\n grad_value_sharding = NamedSharding(mesh, PartitionSpec(*key_spec))\n out_shardings = [grad_query_sharding, grad_key_sharding, grad_value_sharding]\n if has_dbias:\n grad_bias_sharding = NamedSharding(mesh, PartitionSpec(*bias_spec))\n out_shardings = out_shardings + [grad_bias_sharding]\n return out_shardings\n\n_dot_product_attention_bwd_lower = custom_partitioning(\n _dot_product_attention_bwd_impl, static_argnums=(13, 14, 15, 16, 17, 18, 19)\n)\n\ndef _dot_product_attention_bwd_infer_sharding_from_operands(\n scale, seed, dropout_rate, variadic_args, mask_type, layout,\n sliding_window_length, mesh, arg_shapes, result_shape):\n return _infer_bwd_output_sharding(mesh, arg_shapes, layout, variadic_args)\n\ndef _dot_product_attention_bwd_partition(\n scale, seed, dropout_rate, variadic_args, mask_type, layout,\n sliding_window_length, mesh, arg_shapes, result_shape):\n out_shardings = _infer_bwd_output_sharding(mesh, arg_shapes, layout, variadic_args)\n # args sharding\n arg_shardings = tuple(arg_i.sharding for arg_i in arg_shapes)\n def sharded_impl(*args):\n impl = functools.partial(\n _dot_product_attention_bwd_impl,\n scale=scale,\n seed=seed,\n dropout_rate=dropout_rate,\n variadic_args=variadic_args,\n 
mask_type=mask_type,\n layout=layout,\n sliding_window_length=sliding_window_length,\n )\n grads = impl(*args)\n _, has_dbias = variadic_args\n if has_dbias:\n query_spec = arg_shardings[0].spec\n batch_spec = query_spec[0]\n local_dbias = grads[3]\n global_dbias = jax.lax.psum(local_dbias, batch_spec)\n grads = grads[:3] + [global_dbias]\n return grads\n return mesh, sharded_impl, out_shardings, arg_shardings\n\n# Create dot_product_attention_fwd_p for forward operation.\n_dot_product_attention_fwd_p = core.Primitive(""dot_product_attention_fwd"")\n_dot_product_attention_fwd_p.multiple_results = True\n_dot_product_attention_fwd_p.def_impl(\n functools.partial(dispatch.apply_primitive, _dot_product_attention_fwd_p)\n)\n_dot_product_attention_fwd_p.def_abstract_eval(\n _dot_product_attention_fwd_abstract\n)\n\nmlir.register_lowering(\n _dot_product_attention_fwd_p,\n _dot_product_attention_fwd_cuda_lowering,\n platform=""cuda"",\n)\n\n_dot_product_attention_fwd_p_wrapper = core.Primitive(\n ""dot_product_attention_fwd_wrapper""\n)\n_dot_product_attention_fwd_p_wrapper.multiple_results = True\n_dot_product_attention_fwd_p_wrapper.def_impl(_dot_product_attention_fwd_impl)\n_dot_product_attention_fwd_p_wrapper.def_abstract_eval(\n _dot_product_attention_fwd_abstract\n)\n\n# Create dot_product_attention_bwd_p for backward operation.\n_dot_product_attention_bwd_p = core.Primitive(""dot_product_attention_bwd"")\n_dot_product_attention_bwd_p.multiple_results = True\n_dot_product_attention_bwd_p.def_impl(\n functools.partial(dispatch.apply_primitive, _dot_product_attention_bwd_p)\n)\n_dot_product_attention_bwd_p.def_abstract_eval(\n _dot_product_attention_bwd_abstract\n)\n\nmlir.register_lowering(\n _dot_product_attention_bwd_p,\n _dot_product_attention_bwd_cuda_lowering,\n platform=""cuda"",\n)\n\n_dot_product_attention_bwd_p_wrapper = core.Primitive(\n ""dot_product_attention_bwd_wrapper""\n)\n_dot_product_attention_bwd_p_wrapper.multiple_results = True\n_dot_product_attention_bwd_p_wrapper.def_impl(_dot_product_attention_bwd_impl)\n_dot_product_attention_bwd_p_wrapper.def_abstract_eval(\n _dot_product_attention_bwd_abstract\n)\n\nbatching.primitive_batchers[\n _dot_product_attention_fwd_p_wrapper\n] = _dot_product_attention_fwd_batcher\nbatching.primitive_batchers[\n _dot_product_attention_bwd_p_wrapper\n] = _dot_product_attention_bwd_batcher\n\ndef not_implemented_sharding_rule(*args, **kwargs):\n return NotImplementedError(""Sharding rule not implemented."")\n\n_dot_product_attention_fwd_lower.def_partition(\n infer_sharding_from_operands=_dot_product_attention_fwd_infer_sharding_from_operands,\n partition=_dot_product_attention_fwd_partition,\n sharding_rule=not_implemented_sharding_rule)\n\nmlir.register_lowering(_dot_product_attention_fwd_p_wrapper,\n mlir.lower_fun(_dot_product_attention_fwd_lower, multiple_results=True))\n\n_dot_product_attention_bwd_lower.def_partition(\n infer_sharding_from_operands=_dot_product_attention_bwd_infer_sharding_from_operands,\n partition=_dot_product_attention_bwd_partition,\n sharding_rule=not_implemented_sharding_rule)\n\nmlir.register_lowering(_dot_product_attention_bwd_p_wrapper,\n mlir.lower_fun(_dot_product_attention_bwd_lower, multiple_results=True))\n\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_fwd_p\n)\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_fwd_p_wrapper\n)\ndispatch.prim_requires_devices_during_lowering.add(\n 
_dot_product_attention_bwd_p\n)\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_bwd_p_wrapper\n)\n\n@functools.partial(jax.custom_vjp, nondiff_argnums=(10, 11, 12, 13, 14, 15, 16, 17, 18))\ndef _dot_product_attention(query: Array,\n key: Array,\n value: Array,\n bias: Array,\n q_seqlen: Array,\n kv_seqlen: Array,\n q_offsets: Array,\n kv_offsets: Array,\n page_table_k: Array,\n page_table_v: Array,\n scale: float,\n seed: int,\n dropout_rate: float,\n variadic_args: tuple[bool, ...],\n mask_type: bool,\n layout: int,\n sliding_window_length: int | None,\n cudnn_version: int,\n return_residual: bool):\n output = _dot_product_attention_fwd(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale=scale, seed=seed, dropout_rate=dropout_rate,\n variadic_args=variadic_args, mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length,\n cudnn_version=cudnn_version, return_residual=return_residual)\n return output\n\n_dot_product_attention.defvjp(\n _dot_product_attention_fwd_rule, _dot_product_attention_bwd_rule\n)\n\nfp8_params_keys = [\n 'amax_dQ', 'amax_dK', 'amax_dV', 'amax_dP', # place holder for bwd output\n 'descale_q', 'descale_k', 'descale_v', 'descale_s',\n 'scale_s', 'scale_o', 'descale_o', 'descale_dO',\n 'descale_dP', 'scale_dQ', 'scale_dK', 'scale_dV',\n 'scale_dP'\n]\n\nfp8_params_keys_fwd = [\n 'descale_q', 'descale_k', 'descale_v', 'descale_s', 'scale_s', 'scale_o'\n]\nfp8_params_keys_bwd = [\n 'descale_q', 'descale_k', 'descale_v', 'descale_o', 'descale_dO', 'descale_s',\n 'descale_dP', 'scale_s', 'scale_dQ', 'scale_dK', 'scale_dV', 'scale_dP',\n]\nparams_from_keys = lambda params, keys: [params[key] for key in keys]\n\ndef check_fp8_params(params):\n # Check if all required keys are present\n missing_keys = set(fp8_params_keys) - set(params)\n if missing_keys:\n raise ValueError(f""The following keys are missing from fp8_params: {', '.join(missing_keys)}"")\n\ncheck_is_flash_attention_fp8 = functools.partial(\n check_is_flash_attention,\n has_bias=False,\n is_fp8=True\n)\n\ndef _dot_product_attention_fp8_fwd(\n query, key, value,\n fp8_params_fwd,\n scale, use_causal_mask, layout, cudnn_version):\n check_is_flash_attention_fp8(\n query, key, layout, cudnn_version, is_training=False)\n descale_q, descale_k, descale_v, descale_s, scale_s, scale_o = fp8_params_fwd\n outputs = _dot_product_attention_fp8_fwd_p_wrapper.bind(\n query, key, value,\n descale_q, descale_k, descale_v, descale_s,\n scale_s, scale_o,\n scale=scale, use_causal_mask=use_causal_mask, layout=layout, is_training=False)\n return outputs\n\ndef _dot_product_attention_fp8_fwd_rule(\n query, key, value,\n fp8_params,\n scale, use_causal_mask, layout, cudnn_version):\n check_is_flash_attention_fp8(\n query, key, layout, cudnn_version, is_training=True)\n\n outputs = _dot_product_attention_fp8_fwd_p_wrapper.bind(\n query, key, value, *params_from_keys(fp8_params, fp8_params_keys_fwd),\n scale=scale, use_causal_mask=use_causal_mask, layout=layout, is_training=True)\n res = (query, key, value, outputs[3], outputs[0], params_from_keys(fp8_params, fp8_params_keys_bwd))\n return (outputs[0], outputs[1], outputs[2]), res\n\ndef _dot_product_attention_fp8_bwd_rule(\n scale, use_causal_mask, layout, cudnn_version, res, g):\n (query, key, value, activation, fwd_output, aux_params) = res\n grad_output = g[0]\n grads = _dot_product_attention_fp8_bwd_p_wrapper.bind(\n query,\n key,\n value,\n fwd_output,\n grad_output,\n 
activation,\n *aux_params,\n scale=scale,\n use_causal_mask=use_causal_mask,\n layout=layout,\n )\n\n fp8_params_grads = dict.fromkeys(fp8_params_keys)\n keys_to_grad_indices = ['amax_dQ', 'amax_dK', 'amax_dV', 'amax_dP']\n # grads structure: (dQ, dK, dV, amax_dq, amax_dk, amax_dv, amax_dp)\n for i, key in enumerate(keys_to_grad_indices, start=3):\n fp8_params_grads[key] = grads[i]\n\n return (grads[0], grads[1], grads[2], fp8_params_grads)\n\ndef _dot_product_attention_fp8_fwd_impl(\n query, key, value,\n descale_q, descale_k, descale_v, descale_s, scale_s, scale_o,\n scale, use_causal_mask, layout, is_training):\n outputs = _dot_product_attention_fp8_fwd_p.bind(\n query,\n key,\n value,\n descale_q,\n descale_k,\n descale_v,\n descale_s,\n scale_s,\n scale_o,\n scale=scale,\n use_causal_mask=use_causal_mask,\n layout=layout,\n is_training=is_training,\n )\n return outputs\n\ndef _dot_product_attention_fp8_bwd_impl(\n query, key, value, fwd_output, grad_output, activation,\n descale_q, descale_k, descale_v, descale_o, descale_dO, descale_s,\n descale_dP, scale_s, scale_dQ, scale_dK, scale_dV, scale_dP,\n scale, use_causal_mask, layout):\n grads = _dot_product_attention_fp8_bwd_p.bind(\n query, key, value, fwd_output, grad_output, activation,\n descale_q, descale_k, descale_v, descale_o, descale_dO, descale_s,\n descale_dP, scale_s, scale_dQ, scale_dK, scale_dV, scale_dP,\n scale=scale, use_causal_mask=use_causal_mask, layout=layout)\n return grads\n\n\ndef _dot_product_attention_fp8_fwd_abstract(\n query, key, value,\n descale_q, descale_k, descale_v, descale_s, scale_s, scale_o,\n scale, use_causal_mask, layout, is_training):\n query_dtype = dtypes.canonicalize_dtype(query.dtype)\n if layout == AttentionLayout.BNTH.value:\n B, N, T, _ = query.shape\n _, _, S, _ = key.shape\n else:\n B, T, N, _ = query.shape\n _, S, _, _ = key.shape\n output_shape = query.shape\n softmax_stat_shape = (B, N, T)\n\n # output, amax_s, amax_o[, softmax_stat]\n if is_training:\n return (\n core.ShapedArray(output_shape, query_dtype),\n core.ShapedArray((1,1,1,1), jnp.float32),\n core.ShapedArray((1,1,1,1), jnp.float32),\n core.ShapedArray(softmax_stat_shape, jnp.float32),\n )\n else:\n return (\n core.ShapedArray(output_shape, query_dtype),\n core.ShapedArray((1,1,1,1), jnp.float32),\n core.ShapedArray((1,1,1,1), jnp.float32),\n )\n\ndef _dot_product_attention_fp8_bwd_abstract(\n query, key, value, fwd_output, grad_output, activation,\n descale_q, descale_k, descale_v, descale_o, descale_dO, descale_s,\n descale_dP, scale_s, scale_dQ, scale_dK, scale_dV, scale_dP,\n scale, use_causal_mask, layout):\n query_dtype = dtypes.canonicalize_dtype(query.dtype)\n key_dtype = dtypes.canonicalize_dtype(key.dtype)\n value_dtype = dtypes.canonicalize_dtype(value.dtype)\n\n amax_shape = (1,1,1,1)\n\n return (\n core.ShapedArray(query.shape, query_dtype),\n core.ShapedArray(key.shape, key_dtype),\n core.ShapedArray(value.shape, value_dtype),\n core.ShapedArray(amax_shape, jnp.float32),\n core.ShapedArray(amax_shape, jnp.float32),\n core.ShapedArray(amax_shape, jnp.float32),\n core.ShapedArray(amax_shape, jnp.float32),\n )\n\ndef _dot_product_attention_fp8_fwd_cuda_lowering(\n ctx, query, key, value,\n descale_q, descale_k, descale_v, descale_s, scale_s, scale_o,\n scale, use_causal_mask, layout, is_training):\n query_type = ir.RankedTensorType(query.type)\n query_shape = query_type.shape\n key_type = ir.RankedTensorType(key.type)\n key_shape = key_type.shape\n\n if layout == AttentionLayout.BNTH.value:\n B, N, T, H = 
query_shape\n _, _, S, _ = key_shape\n output_layout = (3, 2, 1, 0)\n output_transpose_perm = mlir.dense_int_array((0, 1, 2, 3))\n else:\n B, T, N, H = query_shape\n _, S, _, _ = key_shape\n output_layout = (3, 1, 2, 0)\n output_transpose_perm = mlir.dense_int_array((0, 2, 1, 3))\n\n output_shape = (B, N, T, H)\n softmax_stat_shape = (B, N, T)\n workspace_shape = (0,)\n amax_shape = (1,1,1,1)\n workspace_type = ir.IntegerType.get_unsigned(8)\n mask_type = MaskType.CAUSAL if use_causal_mask else MaskType.NO_MASK\n backend_config = create_dot_product_attention_fp8_backend_config(\n B, N, T, S, ir.BF16Type.get(), # query_type.element_type,\n scale, mask_type, layout, is_bwd=False,\n )\n\n operands = [query, key, value, descale_q, descale_k, descale_v, descale_s, scale_s, scale_o]\n custom_call_name = get_fp8_custom_call_name(is_bwd=False)\n\n if is_training:\n result_types = [\n ir.RankedTensorType.get(output_shape, query_type.element_type),\n ir.RankedTensorType.get((1,1,1,1), ir.F32Type.get()),\n ir.RankedTensorType.get((1,1,1,1), ir.F32Type.get()),\n ir.RankedTensorType.get(softmax_stat_shape, ir.F32Type.get()),\n ir.RankedTensorType.get(workspace_shape, workspace_type),\n ]\n result_layouts = [output_layout] + default_layouts(amax_shape, amax_shape, softmax_stat_shape, workspace_shape)\n else:\n result_types = [\n ir.RankedTensorType.get(output_shape, query_type.element_type),\n ir.RankedTensorType.get((1,1,1,1), ir.F32Type.get()),\n ir.RankedTensorType.get((1,1,1,1), ir.F32Type.get()),\n ir.RankedTensorType.get(workspace_shape, workspace_type)\n ]\n result_layouts = [output_layout] + default_layouts(amax_shape, amax_shape, workspace_shape)\n\n operand_shapes = [ir.RankedTensorType(operand.type).shape for operand in operands[:3]]\n operand_shapes += [[1, 1, 1, 1]] * 6\n operand_layouts = default_layouts(*operand_shapes)\n out = mlir.custom_call(\n custom_call_name,\n result_types=result_types,\n operands=operands,\n backend_config=backend_config,\n operand_layouts=operand_layouts,\n result_layouts=result_layouts,\n )\n\n if is_training:\n return [hlo.transpose(out.results[0], output_transpose_perm), out.results[1], out.results[2], out.results[3]]\n else:\n return [hlo.transpose(out.results[0], output_transpose_perm), out.results[1], out.results[2]]\n\n\n\ndef _dot_product_attention_fp8_bwd_cuda_lowering(\n ctx, query, key, value, fwd_output, grad_output, activation,\n descale_q, descale_k, descale_v, descale_o, descale_dO, descale_s,\n descale_dP, scale_s, scale_dQ, scale_dK, scale_dV, scale_dP, scale,\n use_causal_mask, layout):\n query_type = ir.RankedTensorType(query.type)\n query_shape = query_type.shape\n key_type = ir.RankedTensorType(key.type)\n key_shape = key_type.shape\n value_type = ir.RankedTensorType(value.type)\n\n if layout == AttentionLayout.BNTH.value:\n B, q_N, T, H = query_shape\n _, k_N, S, _ = key_shape\n grad_layout = (3, 2, 1, 0)\n grad_transpose_perm = mlir.dense_int_array((0, 1, 2, 3))\n else:\n B, T, q_N, H = query_shape\n _, S, k_N, _ = key_shape\n grad_layout = (3, 1, 2, 0)\n grad_transpose_perm = mlir.dense_int_array((0, 2, 1, 3))\n\n workspace_shape = (0,)\n workspace_type = ir.IntegerType.get_unsigned(8)\n amax_shape = (1,1,1,1)\n\n grad_query_shape = (B, q_N, T, H)\n grad_key_shape = (B, k_N, S, H)\n grad_value_shape = (B, k_N, S, H)\n mask_type = MaskType.CAUSAL if use_causal_mask else MaskType.NO_MASK\n\n backend_config = create_dot_product_attention_fp8_backend_config(\n B, q_N, T, S, ir.BF16Type.get(),\n scale, mask_type, layout, is_bwd=True,\n )\n\n 
operands = [\n query,\n key,\n value,\n fwd_output,\n grad_output,\n activation,\n descale_q,\n descale_k,\n descale_v,\n descale_o,\n descale_dO,\n descale_s,\n descale_dP,\n scale_s,\n scale_dQ,\n scale_dK,\n scale_dV,\n scale_dP,\n ]\n\n custom_call_name = get_fp8_custom_call_name(is_bwd=True)\n\n result_types = [\n ir.RankedTensorType.get(grad_query_shape, query_type.element_type),\n ir.RankedTensorType.get(grad_key_shape, key_type.element_type),\n ir.RankedTensorType.get(grad_value_shape, value_type.element_type),\n ir.RankedTensorType.get(amax_shape, ir.F32Type.get()),\n ir.RankedTensorType.get(amax_shape, ir.F32Type.get()),\n ir.RankedTensorType.get(amax_shape, ir.F32Type.get()),\n ir.RankedTensorType.get(amax_shape, ir.F32Type.get()),\n ]\n result_layouts = [grad_layout, grad_layout, grad_layout] + default_layouts(amax_shape, amax_shape, amax_shape, amax_shape)\n\n result_types.append(ir.RankedTensorType.get(workspace_shape, workspace_type))\n result_layouts = result_layouts + default_layouts(workspace_shape)\n out = mlir.custom_call(\n custom_call_name,\n result_types=result_types,\n operands=operands,\n backend_config=backend_config,\n operand_layouts=default_layouts(\n *[ir.RankedTensorType(operand.type).shape for operand in operands]),\n result_layouts=result_layouts,\n )\n dqkv_amaxs = (hlo.transpose(out.results[0], grad_transpose_perm),\n hlo.transpose(out.results[1], grad_transpose_perm),\n hlo.transpose(out.results[2], grad_transpose_perm),\n out.results[3], out.results[4], out.results[5], out.results[6])\n # Only keep dQ, dK, dV, amax_dQ, amax_dK, amax_dV, amax_dP here\n return dqkv_amaxs\n\ndef _dot_product_attention_fp8_fwd_batcher(\n batched_args, batch_dims, *, scale, use_causal_mask, layout, is_training):\n _check_valid_batch_dims(batch_dims)\n query, key, value,\\n descale_q, descale_k, descale_v, descale_s, scale_s, scale_o, = batched_args\n query_bdim = batch_dims[0]\n if is_training:\n out_bdims = query_bdim, query_bdim\n else:\n out_bdims = (query_bdim,)\n\n if layout == AttentionLayout.BNTH.value:\n *Bs, N, T, _ = query.shape\n *_, _, S, _ = key.shape\n else:\n *Bs, T, N, _ = query.shape\n *_, S, _, _ = key.shape\n B = math.prod(Bs)\n\n # reshape to 4D shape\n query = jnp.reshape(query, (B,) + query.shape[-3:])\n key = jnp.reshape(key, (B,) + key.shape[-3:])\n value = jnp.reshape(value, (B,) + key.shape[-3:])\n\n outputs = _dot_product_attention_fp8_fwd_p_wrapper.bind(\n query, key, value, descale_q, descale_k, descale_v, descale_s, scale_s, scale_o,\n scale=scale, use_causal_mask=use_causal_mask, layout=layout, is_training=is_training)\n\n # reshape to original shape\n output, amax_s, amax_o = outputs[0], outputs[1], outputs[2]\n output = jnp.reshape(output, query.shape)\n if is_training:\n activation = outputs[3]\n activation = jnp.reshape(activation, (*Bs, N, T))\n return (output, amax_s, amax_o, activation), out_bdims\n else:\n return (output, amax_s, amax_o), out_bdims\n\ndef _dot_product_attention_fp8_bwd_batcher(\n batched_args, batch_dims, *, scale, use_causal_mask, layout):\n _check_valid_batch_dims(batch_dims)\n query, key, value, fwd_output, grad_output, activation,\\n descale_q, descale_k, descale_v, descale_o, descale_dO, descale_s, descale_dP,\\n scale_s, scale_dQ, scale_dK, scale_dV, scale_dP = batched_args\n query_bdim = batch_dims[0]\n out_bdims = query_bdim, query_bdim, query_bdim\n\n if layout == AttentionLayout.BNTH.value:\n *Bs, N, T, _ = query.shape\n *_, _, S, _ = key.shape\n else:\n *Bs, T, N, _ = query.shape\n *_, S, _, _ = key.shape\n B 
= math.prod(Bs)\n\n # reshape to 4D shape\n query = jnp.reshape(query, (B,) + query.shape[-3:])\n key = jnp.reshape(key, (B,) + key.shape[-3:])\n value = jnp.reshape(value, (B,) + key.shape[-3:])\n\n activation = jnp.reshape(activation, (B, N, T))\n fwd_output = jnp.reshape(fwd_output, (B,) + query.shape[-3:])\n grad_output = jnp.reshape(grad_output, (B,) + query.shape[-3:])\n\n grads = _dot_product_attention_fp8_bwd_p_wrapper.bind(\n query, key, value, fwd_output, grad_output, activation,\n descale_q, descale_k, descale_v, descale_o, descale_dO, descale_s, descale_dP, scale_s, scale_dQ, scale_dK, scale_dV, scale_dP,\n scale=scale, use_causal_mask=use_causal_mask, layout=layout,\n )\n\n grad_query, grad_key, grad_value = grads[:3]\n # reshape to original shape\n grad_query = jnp.reshape(grad_query, query.shape)\n grad_key = jnp.reshape(grad_key, key.shape)\n grad_value = jnp.reshape(grad_value, value.shape)\n\n return grads, out_bdims\n\ndef _infer_fp8_fwd_output_sharding(mesh, arg_shapes, is_training, layout):\n # Prepare variadic_args for the original function\n has_bias = False # Adjust as needed\n variadic_args = (has_bias, None) # Dummy value, adjust as necessary\n\n # Call the original function with the required parameters\n output_sharding = _infer_fwd_output_sharding(mesh, arg_shapes, variadic_args, is_training, layout)\n amax_sharding = NamedSharding(mesh, PartitionSpec())\n if is_training:\n out_sharding, activation_sharding = output_sharding[0], output_sharding[1]\n return [out_sharding, amax_sharding, amax_sharding, activation_sharding]\n return output_sharding + [amax_sharding, amax_sharding]\n\n_dot_product_attention_fp8_fwd_lower = custom_partitioning(\n _dot_product_attention_fp8_fwd_impl, static_argnums=(9, 10, 11, 12))\n\ndef _dot_product_attention_fp8_fwd_infer_sharding_from_operands(\n scale, use_causal_mask, layout, is_training,\n mesh, arg_shapes, result_shape):\n return _infer_fp8_fwd_output_sharding(mesh, arg_shapes, is_training, layout)\n\ndef _dot_product_attention_fp8_fwd_partition(\n scale, use_causal_mask, layout, is_training,\n mesh, arg_shapes, result_shape):\n # args sharding\n arg_shardings = tuple(arg_i.sharding for arg_i in arg_shapes)\n out_shardings = _infer_fp8_fwd_output_sharding(\n mesh, arg_shapes, is_training, layout)\n impl = functools.partial(\n _dot_product_attention_fp8_fwd_impl, scale=scale, use_causal_mask=use_causal_mask,\n layout=layout, is_training=is_training)\n return mesh, impl, out_shardings, arg_shardings\n\ndef _infer_fp8_bwd_output_sharding(mesh, arg_shapes, layout):\n # Prepare variadic_args for the original function\n has_bias = False # Adjust as needed\n has_dbias = False # Adjust as needed\n variadic_args = (has_bias, has_dbias) # Dummy value, adjust as necessary\n\n # Call the original function with the required parameters\n output_shardings = _infer_bwd_output_sharding(mesh, arg_shapes, layout, variadic_args)\n\n # Prepare amax_sharding\n amax_sharding = NamedSharding(mesh, PartitionSpec()) # Use a default spec or adjust as needed\n\n # Append amax_sharding for each output sharding\n out_shardings_with_amax = output_shardings + [amax_sharding] * 4\n\n return out_shardings_with_amax\n\n_dot_product_attention_fp8_bwd_lower = custom_partitioning(\n _dot_product_attention_fp8_bwd_impl, static_argnums=(18,19,20)\n)\n\ndef _dot_product_attention_fp8_bwd_infer_sharding_from_operands(\n scale, use_causal_mask, layout, mesh,\n arg_shapes, result_shape):\n return _infer_fp8_bwd_output_sharding(mesh, arg_shapes, layout)\n\ndef 
_dot_product_attention_fp8_bwd_partition(\n scale, use_causal_mask, layout, mesh,\n arg_shapes, result_shape):\n out_shardings = _infer_fp8_bwd_output_sharding(mesh, arg_shapes, layout)\n # args sharding\n arg_shardings = tuple(arg_i.sharding for arg_i in arg_shapes)\n impl = functools.partial(\n _dot_product_attention_fp8_bwd_impl, scale=scale,\n use_causal_mask=use_causal_mask, layout=layout\n )\n return mesh, impl, out_shardings, arg_shardings\n\n# Create dot_product_attention_fp8_fwd_p for forward operation.\n_dot_product_attention_fp8_fwd_p = core.Primitive(""dot_product_attention_fp8_fwd"")\n_dot_product_attention_fp8_fwd_p.multiple_results = True\n_dot_product_attention_fp8_fwd_p.def_impl(\n functools.partial(dispatch.apply_primitive, _dot_product_attention_fp8_fwd_p)\n)\n_dot_product_attention_fp8_fwd_p.def_abstract_eval(\n _dot_product_attention_fp8_fwd_abstract\n)\n\nmlir.register_lowering(\n _dot_product_attention_fp8_fwd_p,\n _dot_product_attention_fp8_fwd_cuda_lowering,\n platform=""cuda"",\n)\n\n_dot_product_attention_fp8_fwd_p_wrapper = core.Primitive(\n ""dot_product_attention_fp8_fwd_wrapper""\n)\n_dot_product_attention_fp8_fwd_p_wrapper.multiple_results = True\n_dot_product_attention_fp8_fwd_p_wrapper.def_impl(_dot_product_attention_fp8_fwd_impl)\n_dot_product_attention_fp8_fwd_p_wrapper.def_abstract_eval(\n _dot_product_attention_fp8_fwd_abstract\n)\n\n# Create dot_product_attention_bwd_p for backward operation.\n_dot_product_attention_fp8_bwd_p = core.Primitive(""dot_product_attention_fp8_bwd"")\n_dot_product_attention_fp8_bwd_p.multiple_results = True\n_dot_product_attention_fp8_bwd_p.def_impl(\n functools.partial(dispatch.apply_primitive, _dot_product_attention_fp8_bwd_p)\n)\n_dot_product_attention_fp8_bwd_p.def_abstract_eval(\n _dot_product_attention_fp8_bwd_abstract\n)\n\nmlir.register_lowering(\n _dot_product_attention_fp8_bwd_p,\n _dot_product_attention_fp8_bwd_cuda_lowering,\n platform=""cuda"",\n)\n\n_dot_product_attention_fp8_bwd_p_wrapper = core.Primitive(\n ""dot_product_attention_fp8_bwd_wrapper""\n)\n_dot_product_attention_fp8_bwd_p_wrapper.multiple_results = True\n_dot_product_attention_fp8_bwd_p_wrapper.def_impl(_dot_product_attention_fp8_bwd_impl)\n_dot_product_attention_fp8_bwd_p_wrapper.def_abstract_eval(\n _dot_product_attention_fp8_bwd_abstract\n)\n\nbatching.primitive_batchers[\n _dot_product_attention_fp8_fwd_p_wrapper\n] = _dot_product_attention_fp8_fwd_batcher\nbatching.primitive_batchers[\n _dot_product_attention_fp8_bwd_p_wrapper\n] = _dot_product_attention_fp8_bwd_batcher\n\n_dot_product_attention_fp8_fwd_lower.def_partition(\n infer_sharding_from_operands=_dot_product_attention_fp8_fwd_infer_sharding_from_operands,\n partition=_dot_product_attention_fp8_fwd_partition)\n\nmlir.register_lowering(_dot_product_attention_fp8_fwd_p_wrapper,\n mlir.lower_fun(_dot_product_attention_fp8_fwd_lower, multiple_results=True))\n\n_dot_product_attention_fp8_bwd_lower.def_partition(\n infer_sharding_from_operands=_dot_product_attention_fp8_bwd_infer_sharding_from_operands,\n partition=_dot_product_attention_fp8_bwd_partition)\n\nmlir.register_lowering(_dot_product_attention_fp8_bwd_p_wrapper,\n mlir.lower_fun(_dot_product_attention_fp8_bwd_lower, multiple_results=True))\n\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_fp8_fwd_p\n)\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_fp8_fwd_p_wrapper\n)\ndispatch.prim_requires_devices_during_lowering.add(\n 
_dot_product_attention_fp8_bwd_p\n)\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_fp8_bwd_p_wrapper\n)\n\n@functools.partial(jax.custom_vjp, nondiff_argnums=(4, 5, 6, 7))\ndef _dot_product_attention_fp8(query: Array,\n key: Array,\n value: Array,\n fp8_params: dict[str, Array],\n scale: float,\n use_causal_mask: bool,\n layout: int,\n cudnn_version: int):\n output, amax_s, amax_o = _dot_product_attention_fp8_fwd(\n query, key, value, params_from_keys(fp8_params, fp8_params_keys_fwd),\n scale, use_causal_mask, layout, cudnn_version\n )\n return output, amax_s, amax_o\n\n_dot_product_attention_fp8.defvjp(_dot_product_attention_fp8_fwd_rule, _dot_product_attention_fp8_bwd_rule)\n\ndef combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)\n\n if mask is not None:\n if mask.dtype == jnp.bool:\n large_negative_number = get_large_negative_number(dtype)\n mask = jnp.where(mask, jnp.asarray(0, dtype), large_negative_number)\n # reshape mask to have 4D shape\n mask = mask.reshape((1,) * (4 - len(mask.shape)) + mask.shape) # type: ignore[union-attr]\n\n # combine bias and mask\n if bias is None:\n bias = mask\n else:\n if mask is not None:\n # should be broadcast to same shape\n bias = bias + mask\n return bias\n\n# User interface\ndef paged_attention(\n query: Array,\n key: Array,\n value: Array,\n q_seqlen: Array,\n kv_seqlen: Array,\n page_table_k: Array,\n page_table_v: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n fp8_params: FP8Params | None = None,\n *,\n scale: float = 1.0,\n mask_type: MaskType = MaskType.NO_MASK,\n seed: int = 42,\n dropout_rate: float = 0.,\n qkv_layout: str = ""BTNH"",\n sliding_window_length: int | None = None,\n use_fp8: bool = False,\n return_residual: bool = False\n):\n """"""Computes paged attention described in https://arxiv.org/pdf/2309.06180.\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head.\n\n Args:\n query: Queries for attention calculation with a shape of BTNH or BNTH.\n key: Keys for attention calculation with a shape of\n [num_blocks, block_size, N, H] or [num_blocks, N, block_size, H] where\n num_blocks = B * Ceil(S / block_size).\n value: Values to be used in attention with a shape of\n [num_blocks, block_size, N, H] or [num_blocks, N, block_size, H] where\n num_blocks = B * Ceil(S / block_size).\n q_seqlen: Non padded sequence length of query with a shape of B.\n kv_seqlen: Non padded sequence length of key and value with a shape of B.\n page_table_k: page table for key of shape [B, 1, num_blocks_per_batch, 1]\n where num_blocks_per_batch = Ceil(S / block_size).\n page_table_v: page table for value of shape [B, 1, num_blocks_per_batch, 1]\n where num_blocks_per_batch = Ceil(S / block_size).\n bias: Bias to be added to logits with a shape of BNTS.\n mask: Mask used to filter out logits with a shape of BNTS.\n scale: Scale for the query.\n qkv_layout: Layout string, with supported formats being BTNH, BNTH, BSNH,\n BNSH.\n sliding_window_length: Window size to make attention only attend to each\n token's left local window (pos - sliding_window_length, pos] where `pos`\n is the index of each token. 
E.g., if sliding_window_length == 3 and the\n sequence is [0, 1, 2, 3, c, 4, 5], token `c` can attend to [4, 5, c].\n use_fp8: Whether to use FP8 attention mechanism.\n return_residual: Whether to return the logsumexp tensor of shape BTN\n or BNT to users. See section 3.1.1 in the FlashAttention-2 paper:\n https://arxiv.org/pdf/2307.08691 to find the definition of logsumexp.\n Returns:\n output: the same shape as the query.\n residual: the logsumexp tensor if return_residual=True. (non fp8)\n """"""\n cudnn_version = check_cudnn_version()\n layout = _normalize_layout(qkv_layout)\n if use_fp8:\n raise ValueError(""Paged attention doesn't support fp8 for now."")\n if has_padding(mask_type) and (q_seqlen is None or kv_seqlen is None):\n raise ValueError(""Require q_seqlen and kv_seqlen to generate padding mask."")\n if sliding_window_length is not None and sliding_window_length <= 0:\n raise ValueError(\n f""Require sliding_window_length > 0, got {sliding_window_length}."")\n\n bias = combine_bias_and_mask(bias, mask, query.dtype)\n # check if input shape and data type is compatiable\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, None, None,\n page_table_k, page_table_v, layout)\n has_bias = bias is not None\n has_dbias = has_bias and \\n should_export_dbias(bias.shape, query.shape, layout) # type: ignore[union-attr]\n variadic_args = (has_bias, has_dbias)\n\n _not_used = jnp.zeros(0, dtype=query.dtype)\n if bias is None:\n bias = _not_used\n\n output = _dot_product_attention(\n query, key, value, bias, q_seqlen, kv_seqlen, _not_used, _not_used,\n page_table_k, page_table_v, scale, seed, dropout_rate, variadic_args,\n mask_type, layout.value, sliding_window_length, cudnn_version,\n return_residual)\n return output\n\n\ndef dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n q_seqlen: Array | None = None,\n kv_seqlen: Array | None = None,\n q_offsets: Array | None = None,\n kv_offsets: Array | None = None,\n fp8_params: FP8Params | None = None,\n *,\n scale: float = 1.0,\n mask_type: MaskType = MaskType.NO_MASK,\n seed: int = 42,\n dropout_rate: float = 0.,\n qkv_layout: str = ""BTNH"",\n sliding_window_length: int | None = None,\n use_fp8: bool = False,\n return_residual: bool = False\n):\n """"""Computes dot-product attention given query (Q), key (K), and value (V).\n\n This function serves as the core operation for applying attention\n mechanisms as described in the paper [https://arxiv.org/abs/1706.03762].\n Initially, it determines the attention weights by processing Q and K,\n subsequently combining the outcomes using K. Throughout this function, we\n utilize the following uppercase letters to represent specific parameters of\n array:\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head.\n\n The supported layouts for Q, K, V are either BT(S)NH or BNT(S)H, and they must\n adhere to the same layout. 
The output layout remains consistent with Q,\n defaulting to BT(S)NH.\n\n Args:\n query: Queries for attention calculation with a shape of BTNH or BNTH.\n key: Keys for attention calculation with a shape of BSNH or BNSH.\n value: Values to be used in attention with a shape of BSNH or BNSH.\n bias: Bias to be added to logits with a shape of BNTS.\n mask: Mask used to filter out logits with a shape of BNTS.\n q_seqlen: Non padded sequence length of query with a shape of B.\n If q_offsets is set, q_seqlen should have shape [B,M] where M is the\n maximum number of segments per batch. For batch that has less segments\n than maximum segments, fill the padded entries with -1.\n kv_seqlen: Non padded sequence length of key and value with a shape of B.\n If kv_offsets is set, kv_seqlen should have shape [B,M] where M is the\n maximum number of segments per batch. For batch that has less segments\n than maximum segments, fill the padded entries with -1.\n q_offsets: offset of each segment packed in query with a shape of [B,M+1]\n where M is the maximum number of segments per batch. For batch that has\n less segments than maximum segments, fill the padded entries with -1.\n E.g, if 2 batches has 3 and 2 segments respectively, each segment has\n size 1, q_offsets = [[0,1,2,-1], [0,1,-1,-1]]. q_seqlen should be set\n to indicate the size of each segment.\n kv_offsets: offset of each segment packed in key with a shape of [B,M+1]\n where M is the maximum number of segments per batch. For batch that has\n less segments than maximum segments, fill the padded entries with -1.\n E.g, if 2 batches has 3 and 2 segments respectively, each segment has\n size 1, kv_offsets = [[0,1,2,-1], [0,1,-1,-1]]. kv_seqlen should be set\n to indicate the size of each segment.\n scale: Scale for the query.\n dropout_rate: Dropout rate.\n qkv_layout: Layout string, with supported formats being BTNH, BNTH, BSNH,\n BNSH.\n sliding_window_length: Window size to make attention only attend to each\n token's left local window (pos - sliding_window_length, pos] where `pos`\n is the index of each token. E.g., if sliding_window_length == 3 and the\n sequence is [0, 1, 2, 3, c, 4, 5], token `c` can attend to [4, 5, c].\n use_fp8: Whether to use FP8 attention mechanism.\n return_residual: Whether to return the logsumexp tensor of shape BTN\n or BNT to users. See section 3.1.1 in the FlashAttention-2 paper:\n https://arxiv.org/pdf/2307.08691 to find the definition of logsumexp.\n Returns:\n output: the same shape as the query.\n residual: the logsumexp tensor if return_residual=True. (non fp8)\n amax_s: amax of state. (fp8 only)\n amax_o: amax of output. 
(fp8 only)\n """"""\n # TODO(b/380898464): Check the compute capability, e.g., require GPU device,\n # in the kernel implementation (c++) code.\n cudnn_version = check_cudnn_version()\n layout = _normalize_layout(qkv_layout)\n\n if use_fp8:\n if fp8_params is None:\n raise ValueError(""fp8_params should not be None."")\n if mask_type not in (MaskType.NO_MASK, MaskType.CAUSAL):\n raise ValueError(""Only NO_MASK or CAUSAL masks are supported for fp8."")\n if not all(x is None for x in [bias, mask, q_seqlen, kv_seqlen]):\n raise ValueError(\n f""Expected 'None' for bias, mask, q_seqlen, and kv_seqlen, ""\n f""but got: bias={bias}, mask={mask}, q_seqlen={q_seqlen}, kv_seqlen={kv_seqlen}""\n )\n check_fp8_params(fp8_params)\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n None, None, layout)\n output, amax_s, amax_o = _dot_product_attention_fp8(\n query, key, value, fp8_params,\n scale, mask_type == MaskType.CAUSAL, layout.value, cudnn_version\n )\n return output, amax_s, amax_o\n else:\n if has_padding(mask_type) and (q_seqlen is None or kv_seqlen is None):\n raise ValueError(""Require q_seqlen and kv_seqlen to generate padding mask"")\n if sliding_window_length is not None and sliding_window_length <= 0:\n raise ValueError(\n f""Require sliding_window_length > 0, got {sliding_window_length}"")\n if q_offsets is not None and (q_seqlen is None or kv_seqlen is None):\n raise ValueError(""Require q_seqlen and kv_seqlen to use packed layout"")\n\n bias = combine_bias_and_mask(bias, mask, query.dtype)\n # check if input shape and data type is compatiable\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n None, None, layout)\n has_bias = bias is not None\n has_dbias = has_bias and \\n should_export_dbias(bias.shape, query.shape, layout) # type: ignore[union-attr]\n variadic_args = (has_bias, has_dbias)\n\n _not_used = jnp.zeros(0, dtype=query.dtype)\n if bias is None:\n bias = _not_used\n if q_seqlen is None:\n q_seqlen = _not_used\n if kv_seqlen is None:\n kv_seqlen = _not_used\n if q_offsets is None:\n q_offsets = _not_used\n if kv_offsets is None:\n kv_offsets = _not_used\n\n output = _dot_product_attention(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n _not_used, _not_used, scale, seed, dropout_rate, variadic_args,\n mask_type, layout.value, sliding_window_length, cudnn_version,\n return_residual)\n return output\n",python,tab +4938,15620837,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72096,0,"",python,selection_command +4939,15624133,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72123,0,"",python,selection_command +4940,15624330,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72141,0,"",python,selection_command +4941,15624330,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72157,0,"",python,selection_command +4942,15624349,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72175,0,"",python,selection_command +4943,15624384,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72206,0,"",python,selection_command +4944,15624418,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72237,0,"",python,selection_command +4945,15624657,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72206,0,"",python,selection_command 
+4946,15624852,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72175,0,"",python,selection_command +4947,15625180,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",73725,0,"",python,selection_command +4948,15626952,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",73731,0,"",python,selection_command +4949,15627312,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",76699,0,"",python,selection_command +4950,15629028,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",76790,0,"",python,selection_command +4951,15629463,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",76850,0,"",python,selection_command +4952,15629898,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",76856,0,"",python,selection_command +4953,15630441,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",76997,0,"",python,selection_command +4954,15639188,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",76856,0,"",python,selection_command +4955,15639564,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",76997,0,"",python,selection_command +4956,15639707,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",76790,0,"",python,selection_command +4957,15639861,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",76699,0,"",python,selection_command +4958,15640014,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",73731,0,"",python,selection_command +4959,15640220,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72175,0,"",python,selection_command +4960,15640716,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +4961,15640716,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37849,0,"",python,selection_command +4962,15641747,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37890,0,"",python,selection_command +4963,15641956,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37897,0,"",python,selection_command +4964,15642086,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37899,0,"",python,selection_command +4965,15642154,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37971,0,"",python,selection_command +4966,15642290,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37981,0,"",python,selection_command +4967,15643004,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37909,0,"",python,selection_command +4968,15643136,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37868,0,"",python,selection_command +4969,15643533,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",0,0,"",python,tab +4970,15643533,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72096,0,"",python,selection_command +4971,15643901,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72123,0,"",python,selection_command 
+4972,15644153,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72141,0,"",python,selection_command +4973,15644455,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72157,0,"",python,selection_command +4974,15644588,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72175,0,"",python,selection_command +4975,15644722,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72206,0,"",python,selection_command +4976,15645101,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72175,0,"",python,selection_command +4977,15645281,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",73725,0,"",python,selection_command +4978,15649141,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",73731,0,"",python,selection_command +4979,15650069,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",73725,0,"",python,selection_command +4980,15650266,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +4981,15650266,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37868,0,"",python,selection_command +4982,15650465,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",0,0,"",python,tab +4983,15650465,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",76856,0,"",python,selection_command +4984,15651018,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +4985,15651018,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37868,0,"",python,selection_command +4986,15658749,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37876,0,"",python,selection_command +4987,15659576,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37917,0,"",python,selection_command +4988,15659785,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37919,0,"",python,selection_command +4989,15677475,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",0,0,"",python,tab +4990,15678190,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",0,0,"",python,tab +4991,15679187,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +4992,15681434,"/fast/home/franz.srambical/jafar/utils/nn.py",9706,0,"",python,selection_command +4993,15682193,"/fast/home/franz.srambical/jafar/utils/nn.py",9653,0,"",python,selection_command +4994,15682453,"/fast/home/franz.srambical/jafar/utils/nn.py",9594,0,"",python,selection_command +4995,15682467,"/fast/home/franz.srambical/jafar/utils/nn.py",9555,0,"",python,selection_command +4996,15682499,"/fast/home/franz.srambical/jafar/utils/nn.py",9554,0,"",python,selection_command +4997,15682530,"/fast/home/franz.srambical/jafar/utils/nn.py",9511,0,"",python,selection_command +4998,15682566,"/fast/home/franz.srambical/jafar/utils/nn.py",9474,0,"",python,selection_command +4999,15682600,"/fast/home/franz.srambical/jafar/utils/nn.py",9473,0,"",python,selection_command +5000,15682631,"/fast/home/franz.srambical/jafar/utils/nn.py",9402,0,"",python,selection_command 
+5001,15682664,"/fast/home/franz.srambical/jafar/utils/nn.py",9381,0,"",python,selection_command +5002,15682698,"/fast/home/franz.srambical/jafar/utils/nn.py",9380,0,"",python,selection_command +5003,15682730,"/fast/home/franz.srambical/jafar/utils/nn.py",9313,0,"",python,selection_command +5004,15682763,"/fast/home/franz.srambical/jafar/utils/nn.py",9286,0,"",python,selection_command +5005,15682855,"/fast/home/franz.srambical/jafar/utils/nn.py",9285,0,"",python,selection_command +5006,15682855,"/fast/home/franz.srambical/jafar/utils/nn.py",9219,0,"",python,selection_command +5007,15682863,"/fast/home/franz.srambical/jafar/utils/nn.py",9192,0,"",python,selection_command +5008,15682897,"/fast/home/franz.srambical/jafar/utils/nn.py",9144,0,"",python,selection_command +5009,15683377,"/fast/home/franz.srambical/jafar/utils/nn.py",9192,0,"",python,selection_command +5010,15683472,"/fast/home/franz.srambical/jafar/utils/nn.py",9219,0,"",python,selection_command +5011,15683645,"/fast/home/franz.srambical/jafar/utils/nn.py",9192,0,"",python,selection_command +5012,15683912,"/fast/home/franz.srambical/jafar/utils/nn.py",9144,0,"",python,selection_command +5013,15684361,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,0,"",python,selection_command +5014,15684824,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,1,"",python,content +5015,15684991,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,1,"",python,content +5016,15685138,"/fast/home/franz.srambical/jafar/utils/nn.py",9198,0,"",python,selection_command +5017,15685252,"/fast/home/franz.srambical/jafar/utils/nn.py",9198,1,"",python,content +5018,15685379,"/fast/home/franz.srambical/jafar/utils/nn.py",9198,1,"",python,content +5019,15686551,"TERMINAL",0,0,"[franz.srambical@hai007.haicore.berlin:~/jafar] $ bash experiments/sample.sh ",,terminal_output +5020,15686778,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +5021,15698243,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +5022,15706264,"TERMINAL",0,0,"2025-07-27 13:40:26.063277: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5023,15707740,"TERMINAL",0,0,"2025-07-27 13:40:27.541224: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5024,15711266,"TERMINAL",0,0,"2025-07-27 13:40:31.015725: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5025,15711958,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/utils/nn.py(298)attention_fn()\r\n-> implementation = ""cudnn"" if use_flash_attention else None\r\n",,terminal_output +5026,15759635,"TERMINAL",0,0,"b",,terminal_output +5027,15759776,"TERMINAL",0,0,"i",,terminal_output +5028,15759861,"TERMINAL",0,0,"as",,terminal_output +5029,15760043,"TERMINAL",0,0,".",,terminal_output +5030,15760136,"TERMINAL",0,0,"s",,terminal_output +5031,15760291,"TERMINAL",0,0,"ha",,terminal_output +5032,15760386,"TERMINAL",0,0,"p",,terminal_output +5033,15760481,"TERMINAL",0,0,"e",,terminal_output +5034,15760589,"TERMINAL",0,0,"\r\n(Pdb) *** AttributeError: 'NoneType' object has no attribute 'shape'\r\n",,terminal_output +5035,15787304,"TERMINAL",0,0,"q",,terminal_output +5036,15787418,"TERMINAL",0,0,"u",,terminal_output +5037,15787519,"TERMINAL",0,0,"e",,terminal_output +5038,15787622,"TERMINAL",0,0,"r",,terminal_output +5039,15787715,"TERMINAL",0,0,"y",,terminal_output +5040,15788251,"TERMINAL",0,0,".",,terminal_output +5041,15788330,"TERMINAL",0,0,"s",,terminal_output +5042,15788490,"TERMINAL",0,0,"ha",,terminal_output +5043,15788703,"TERMINAL",0,0,"ep",,terminal_output +5044,15789174,"TERMINAL",0,0," ",,terminal_output +5045,15789306,"TERMINAL",0,0," ",,terminal_output +5046,15789507,"TERMINAL",0,0,"p",,terminal_output +5047,15789621,"TERMINAL",0,0,"e",,terminal_output +5048,15789720,"TERMINAL",0,0,"\r\n(Pdb) (1, 921, 1, 8, 64)\r\n",,terminal_output +5049,15790833,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,0,"",python,selection_command +5050,15791073,"/fast/home/franz.srambical/jafar/utils/nn.py",9118,0,"",python,selection_command +5051,15791106,"/fast/home/franz.srambical/jafar/utils/nn.py",9067,0,"",python,selection_command +5052,15791131,"/fast/home/franz.srambical/jafar/utils/nn.py",9017,0,"",python,selection_command +5053,15791240,"/fast/home/franz.srambical/jafar/utils/nn.py",8944,0,"",python,selection_command +5054,15801457,"/fast/home/franz.srambical/jafar/utils/nn.py",8935,0,"",python,selection_command +5055,15801709,"/fast/home/franz.srambical/jafar/utils/nn.py",8933,0,"",python,selection_command +5056,15801733,"/fast/home/franz.srambical/jafar/utils/nn.py",8895,0,"",python,selection_command +5057,15801768,"/fast/home/franz.srambical/jafar/utils/nn.py",8797,0,"",python,selection_command +5058,15801797,"/fast/home/franz.srambical/jafar/utils/nn.py",8712,0,"",python,selection_command +5059,15801831,"/fast/home/franz.srambical/jafar/utils/nn.py",8703,0,"",python,selection_command +5060,15801862,"/fast/home/franz.srambical/jafar/utils/nn.py",8632,0,"",python,selection_command +5061,15801982,"/fast/home/franz.srambical/jafar/utils/nn.py",8536,0,"",python,selection_command +5062,15802140,"/fast/home/franz.srambical/jafar/utils/nn.py",8527,0,"",python,selection_command +5063,15802387,"/fast/home/franz.srambical/jafar/utils/nn.py",8464,0,"",python,selection_command +5064,15802453,"/fast/home/franz.srambical/jafar/utils/nn.py",8454,0,"",python,selection_command +5065,15802740,"/fast/home/franz.srambical/jafar/utils/nn.py",8368,0,"",python,selection_command +5066,15805218,"/fast/home/franz.srambical/jafar/utils/nn.py",2877,26,"_create_flash_attention_fn",python,selection_command +5067,15805466,"/fast/home/franz.srambical/jafar/utils/nn.py",2902,0,"",python,selection_command 
+5068,15806683,"/fast/home/franz.srambical/jafar/utils/nn.py",2830,0,"",python,selection_command +5069,15806942,"/fast/home/franz.srambical/jafar/utils/nn.py",2788,0,"",python,selection_command +5070,15807383,"/fast/home/franz.srambical/jafar/utils/nn.py",2749,0,"",python,selection_command +5071,15807509,"/fast/home/franz.srambical/jafar/utils/nn.py",2714,0,"",python,selection_command +5072,15807711,"/fast/home/franz.srambical/jafar/utils/nn.py",2749,0,"",python,selection_command +5073,15807792,"/fast/home/franz.srambical/jafar/utils/nn.py",2788,0,"",python,selection_command +5074,15808277,"/fast/home/franz.srambical/jafar/utils/nn.py",2830,0,"",python,selection_command +5075,15808342,"/fast/home/franz.srambical/jafar/utils/nn.py",2788,0,"",python,selection_command +5076,15808608,"/fast/home/franz.srambical/jafar/utils/nn.py",2749,0,"",python,selection_command +5077,15808622,"/fast/home/franz.srambical/jafar/utils/nn.py",2714,0,"",python,selection_command +5078,15808652,"/fast/home/franz.srambical/jafar/utils/nn.py",2680,0,"",python,selection_command +5079,15808686,"/fast/home/franz.srambical/jafar/utils/nn.py",2642,0,"",python,selection_command +5080,15808720,"/fast/home/franz.srambical/jafar/utils/nn.py",2584,0,"",python,selection_command +5081,15808844,"/fast/home/franz.srambical/jafar/utils/nn.py",2574,0,"",python,selection_command +5082,15809106,"/fast/home/franz.srambical/jafar/utils/nn.py",2584,0,"",python,selection_command +5083,15809195,"/fast/home/franz.srambical/jafar/utils/nn.py",2588,0,"",python,selection_command +5084,15809381,"/fast/home/franz.srambical/jafar/utils/nn.py",2589,0,"",python,selection_command +5085,15809531,"/fast/home/franz.srambical/jafar/utils/nn.py",2608,0,"",python,selection_command +5086,15809676,"/fast/home/franz.srambical/jafar/utils/nn.py",2610,0,"",python,selection_command +5087,15809827,"/fast/home/franz.srambical/jafar/utils/nn.py",2613,0,"",python,selection_command +5088,15810032,"/fast/home/franz.srambical/jafar/utils/nn.py",2614,0,"",python,selection_command +5089,15810529,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +5090,15810529,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9464,0,"",python,selection_command +5091,15811549,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",12332,0,"",python,selection_command +5092,15811760,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14092,0,"",python,selection_command +5093,15814465,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14175,0,"",python,selection_command +5094,15814970,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14187,0,"",python,selection_command +5095,15815159,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14210,0,"",python,selection_command +5096,15815299,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14226,0,"",python,selection_command +5097,15815449,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14249,0,"",python,selection_command +5098,15815565,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14260,0,"",python,selection_command +5099,15815817,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15027,0,"",python,selection_command +5100,15815843,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15042,0,"",python,selection_command +5101,15815876,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15063,0,"",python,selection_command 
+5102,15815905,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15077,0,"",python,selection_command +5103,15815936,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15835,0,"",python,selection_command +5104,15815970,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16004,0,"",python,selection_command +5105,15816531,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15835,0,"",python,selection_command +5106,15816704,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15077,0,"",python,selection_command +5107,15816948,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15063,0,"",python,selection_command +5108,15816978,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15042,0,"",python,selection_command +5109,15817004,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15027,0,"",python,selection_command +5110,15817262,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14260,0,"",python,selection_command +5111,15817434,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14249,0,"",python,selection_command +5112,15817589,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14226,0,"",python,selection_command +5113,15817821,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14210,0,"",python,selection_command +5114,15818137,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14187,0,"",python,selection_command +5115,15818470,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14175,0,"",python,selection_command +5116,15818817,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13108,0,"",python,selection_command +5117,15820157,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14245,0,"",python,selection_command +5118,15821921,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14256,0,"",python,selection_command +5119,15822776,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15059,0,"",python,selection_command +5120,15834215,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15073,0,"",python,selection_command +5121,15834364,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15831,0,"",python,selection_command +5122,15834799,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16000,0,"",python,selection_command +5123,15839532,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15977,0,"",python,selection_command +5124,15839780,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15946,0,"",python,selection_command +5125,15839815,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15936,0,"",python,selection_command +5126,15839833,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15919,0,"",python,selection_command +5127,15839866,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15881,0,"",python,selection_command +5128,15839900,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15855,0,"",python,selection_command +5129,15839939,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15831,0,"",python,selection_command +5130,15839972,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15808,0,"",python,selection_command +5131,15840004,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15775,0,"",python,selection_command +5132,15840039,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15699,0,"",python,selection_command 
+5133,15840074,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15621,0,"",python,selection_command +5134,15840220,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15595,0,"",python,selection_command +5135,15840390,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15598,0,"",python,selection_command +5136,15840551,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15599,0,"",python,selection_command +5137,15840685,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15611,0,"",python,selection_command +5138,15841054,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15599,0,"",python,selection_command +5139,15841228,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14336,0,"",python,selection_command +5140,15841824,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14349,0,"",python,selection_command +5141,15841956,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14351,0,"",python,selection_command +5142,15842354,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13227,0,"",python,selection_command +5143,15843360,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14351,0,"",python,selection_command +5144,15843519,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15599,0,"",python,selection_command +5145,15843782,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15831,0,"",python,selection_command +5146,15845879,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16000,0,"",python,selection_command +5147,15846378,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16430,0,"",python,selection_command +5148,15850103,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17402,0,"",python,selection_command +5149,15851892,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16430,0,"",python,selection_command +5150,15852131,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15516,0,"",python,selection_command +5151,15854241,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14368,0,"",python,selection_command +5152,15855169,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14331,0,"",python,selection_command +5153,15855345,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14306,0,"",python,selection_command +5154,15855689,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14269,0,"",python,selection_command +5155,15856384,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14273,0,"",python,selection_command +5156,15856622,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14274,0,"",python,selection_command +5157,15856643,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14287,0,"",python,selection_command +5158,15857257,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14289,0,"",python,selection_command +5159,15857753,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14287,0,"",python,selection_command +5160,15857920,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14274,0,"",python,selection_command +5161,15858057,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14273,0,"",python,selection_command +5162,15858270,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14244,0,"",python,selection_command +5163,15858386,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14245,0,"",python,selection_command 
+5164,15860681,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14256,0,"",python,selection_command +5165,15861129,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15059,0,"",python,selection_command +5166,15865642,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15073,0,"",python,selection_command +5167,15865948,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",15831,0,"",python,selection_command +5168,15867626,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16000,0,"",python,selection_command +5169,15868582,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16430,0,"",python,selection_command +5170,15869474,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16444,0,"",python,selection_command +5171,15869651,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",11597,0,"",python,selection_command +5172,15886626,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",11667,0,"",python,selection_command +5173,15886973,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13131,0,"",python,selection_command +5174,15887829,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13143,0,"",python,selection_command +5175,15888081,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13145,0,"",python,selection_command +5176,15888103,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13153,0,"",python,selection_command +5177,15888136,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13159,0,"",python,selection_command +5178,15888168,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13164,0,"",python,selection_command +5179,15888355,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13166,0,"",python,selection_command +5180,15888521,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13168,0,"",python,selection_command +5181,15888838,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5282,0,"",python,selection_command +5182,15889991,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5309,0,"",python,selection_command +5183,15890110,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5325,0,"",python,selection_command +5184,15890259,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5339,0,"",python,selection_command +5185,15890393,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5355,0,"",python,selection_command +5186,15893403,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6530,0,"",python,selection_command +5187,15894763,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6536,0,"",python,selection_command +5188,15894895,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",6742,0,"",python,selection_command +5189,15895307,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8766,0,"",python,selection_command +5190,15927176,"utils/nn.py",0,0,"",python,tab +5191,15928063,"utils/nn.py",10610,0,"",python,selection_command +5192,15928308,"utils/nn.py",10582,0,"",python,selection_command +5193,15928339,"utils/nn.py",10538,0,"",python,selection_command +5194,15928361,"utils/nn.py",10510,0,"",python,selection_command +5195,15928396,"utils/nn.py",10440,0,"",python,selection_command +5196,15928552,"utils/nn.py",10366,0,"",python,selection_command +5197,15928709,"utils/nn.py",10337,0,"",python,selection_command +5198,15928829,"utils/nn.py",10304,0,"",python,selection_command 
+5199,15928954,"utils/nn.py",10306,0,"",python,selection_command +5200,15929136,"utils/nn.py",10307,0,"",python,selection_command +5201,15929293,"utils/nn.py",10313,0,"",python,selection_command +5202,15929843,"utils/nn.py",10322,0,"",python,selection_command +5203,15930142,"utils/nn.py",10322,1,"",python,content +5204,15931243,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +5205,15944347,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7404,0,"",python,selection_command +5206,15944843,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5763,0,"",python,selection_command +5207,15948388,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7404,0,"",python,selection_command +5208,15949352,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8747,0,"",python,selection_command +5209,15978770,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8799,0,"",python,selection_command +5210,15978912,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8852,0,"",python,selection_command +5211,15979014,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8877,0,"",python,selection_command +5212,15979176,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8912,0,"",python,selection_command +5213,15979351,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8914,0,"",python,selection_command +5214,15979518,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8916,0,"",python,selection_command +5215,15979690,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8919,0,"",python,selection_command +5216,15979800,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8920,0,"",python,selection_command +5217,15979980,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8922,0,"",python,selection_command +5218,15980137,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8923,0,"",python,selection_command +5219,15981013,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +5220,15981013,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30613,0,"",python,selection_command +5221,15982981,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +5222,15982981,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8923,0,"",python,selection_command +5223,15984853,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +5224,15984853,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30613,0,"",python,selection_command +5225,15985406,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +5226,15985406,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8923,0,"",python,selection_command +5227,15985864,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +5228,15985864,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30613,0,"",python,selection_command +5229,15990288,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +5230,15990288,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8923,0,"",python,selection_command +5231,15993495,"utils/nn.py",0,0,"",python,tab +5232,15993495,"utils/nn.py",10322,0,"",python,selection_command +5233,15994266,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +5234,15994266,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8766,0,"",python,selection_command 
+5235,15995391,"utils/nn.py",0,0,"",python,tab +5236,15997527,"utils/nn.py",9290,0,"",python,selection_command +5237,15998199,"utils/nn.py",9281,0,"",python,selection_command +5238,15998428,"utils/nn.py",9223,0,"",python,selection_command +5239,15998460,"utils/nn.py",9198,0,"",python,selection_command +5240,15998492,"utils/nn.py",9152,0,"",python,selection_command +5241,15998528,"utils/nn.py",9118,0,"",python,selection_command +5242,15998565,"utils/nn.py",9067,0,"",python,selection_command +5243,15998676,"utils/nn.py",9017,0,"",python,selection_command +5244,15998969,"utils/nn.py",8944,0,"",python,selection_command +5245,15999117,"utils/nn.py",8956,0,"",python,selection_command +5246,15999380,"utils/nn.py",8957,0,"",python,selection_command +5247,15999396,"utils/nn.py",8962,0,"",python,selection_command +5248,15999428,"utils/nn.py",8964,0,"",python,selection_command +5249,15999461,"utils/nn.py",8967,0,"",python,selection_command +5250,15999494,"utils/nn.py",8969,0,"",python,selection_command +5251,15999773,"utils/nn.py",8974,0,"",python,selection_command +5252,15999937,"utils/nn.py",8976,0,"",python,selection_command +5253,16048661,"utils/nn.py",8935,0,"",python,selection_command +5254,16048890,"utils/nn.py",8933,0,"",python,selection_command +5255,16048922,"utils/nn.py",8925,0,"",python,selection_command +5256,16048955,"utils/nn.py",8829,0,"",python,selection_command +5257,16048978,"utils/nn.py",8744,0,"",python,selection_command +5258,16049142,"utils/nn.py",8703,0,"",python,selection_command +5259,16049294,"utils/nn.py",8664,0,"",python,selection_command +5260,16049415,"utils/nn.py",8568,0,"",python,selection_command +5261,16049572,"utils/nn.py",8527,0,"",python,selection_command +5262,16049714,"utils/nn.py",8496,0,"",python,selection_command +5263,16049857,"utils/nn.py",8454,0,"",python,selection_command +5264,16049992,"utils/nn.py",8400,0,"",python,selection_command +5265,16050230,"utils/nn.py",8360,0,"",python,selection_command +5266,16050435,"utils/nn.py",8364,0,"",python,selection_command +5267,16052023,"utils/nn.py",2877,26,"_create_flash_attention_fn",python,selection_command +5268,16052172,"utils/nn.py",2902,0,"",python,selection_command +5269,16052361,"utils/nn.py",2826,0,"",python,selection_command +5270,16052475,"utils/nn.py",2784,0,"",python,selection_command +5271,16052736,"utils/nn.py",2745,0,"",python,selection_command +5272,16052757,"utils/nn.py",2710,0,"",python,selection_command +5273,16052790,"utils/nn.py",2676,0,"",python,selection_command +5274,16052823,"utils/nn.py",2638,0,"",python,selection_command +5275,16052992,"utils/nn.py",2580,0,"",python,selection_command +5276,16054647,"utils/nn.py",2614,0,"",python,selection_command +5277,16055037,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +5278,16055038,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9464,0,"",python,selection_command +5279,16061975,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",11676,0,"",python,selection_command +5280,16062585,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13131,0,"",python,selection_command +5281,16063761,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14274,0,"",python,selection_command +5282,16064731,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14289,0,"",python,selection_command +5283,16064894,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22697,0,"",python,selection_command 
+5284,16066813,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22722,0,"",python,selection_command +5285,16066955,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22733,0,"",python,selection_command +5286,16067138,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22746,0,"",python,selection_command +5287,16069041,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22733,0,"",python,selection_command +5288,16069338,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22722,0,"",python,selection_command +5289,16069470,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22697,0,"",python,selection_command +5290,16070185,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14274,0,"",python,selection_command +5291,16071055,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14287,0,"",python,selection_command +5292,16071193,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14289,0,"",python,selection_command +5293,16071459,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13131,0,"",python,selection_command +5294,16072982,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13143,0,"",python,selection_command +5295,16073239,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13145,0,"",python,selection_command +5296,16073259,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13153,0,"",python,selection_command +5297,16073291,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13159,0,"",python,selection_command +5298,16073441,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13164,0,"",python,selection_command +5299,16073683,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13166,0,"",python,selection_command +5300,16073851,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13168,0,"",python,selection_command +5301,16074148,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5282,0,"",python,selection_command +5302,16077063,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13168,0,"",python,selection_command +5303,16077217,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14289,0,"",python,selection_command +5304,16077802,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22697,0,"",python,selection_command +5305,16109586,"utils/nn.py",0,0,"",python,tab +5306,16112024,"utils/nn.py",11024,0,"",python,selection_command +5307,16112412,"utils/nn.py",9793,0,"",python,selection_command +5308,16112918,"utils/nn.py",9754,0,"",python,selection_command +5309,16113168,"utils/nn.py",9711,0,"",python,selection_command +5310,16113191,"utils/nn.py",9702,0,"",python,selection_command +5311,16113216,"utils/nn.py",9657,0,"",python,selection_command +5312,16113257,"utils/nn.py",9598,0,"",python,selection_command +5313,16113286,"utils/nn.py",9559,0,"",python,selection_command +5314,16113323,"utils/nn.py",9550,0,"",python,selection_command +5315,16113354,"utils/nn.py",9515,0,"",python,selection_command +5316,16113388,"utils/nn.py",9478,0,"",python,selection_command +5317,16113421,"utils/nn.py",9469,0,"",python,selection_command +5318,16113454,"utils/nn.py",9406,0,"",python,selection_command +5319,16113488,"utils/nn.py",9385,0,"",python,selection_command +5320,16113519,"utils/nn.py",9376,0,"",python,selection_command +5321,16113553,"utils/nn.py",9317,0,"",python,selection_command +5322,16113586,"utils/nn.py",9290,0,"",python,selection_command 
+5323,16113619,"utils/nn.py",9281,0,"",python,selection_command +5324,16113653,"utils/nn.py",9223,0,"",python,selection_command +5325,16113686,"utils/nn.py",9198,0,"",python,selection_command +5326,16113719,"utils/nn.py",9152,0,"",python,selection_command +5327,16113752,"utils/nn.py",9118,0,"",python,selection_command +5328,16113879,"utils/nn.py",9067,0,"",python,selection_command +5329,16114046,"utils/nn.py",9017,0,"",python,selection_command +5330,16114197,"utils/nn.py",8944,0,"",python,selection_command +5331,16114333,"utils/nn.py",8956,0,"",python,selection_command +5332,16114582,"utils/nn.py",8957,0,"",python,selection_command +5333,16114606,"utils/nn.py",8962,0,"",python,selection_command +5334,16114632,"utils/nn.py",8964,0,"",python,selection_command +5335,16114667,"utils/nn.py",8967,0,"",python,selection_command +5336,16114702,"utils/nn.py",8969,0,"",python,selection_command +5337,16114929,"utils/nn.py",8974,0,"",python,selection_command +5338,16115078,"utils/nn.py",8976,0,"",python,selection_command +5339,16115252,"utils/nn.py",8980,0,"",python,selection_command +5340,16115501,"utils/nn.py",8976,0,"",python,selection_command +5341,16116028,"utils/nn.py",10544,0,"",python,selection_command +5342,16118168,"utils/nn.py",10518,0,"",python,selection_command +5343,16119534,"utils/nn.py",10799,0,"",python,selection_command +5344,16120054,"utils/nn.py",10771,0,"",python,selection_command +5345,16120310,"utils/nn.py",10747,0,"",python,selection_command +5346,16120379,"utils/nn.py",10719,0,"",python,selection_command +5347,16120542,"utils/nn.py",10669,0,"",python,selection_command +5348,16120668,"utils/nn.py",10670,0,"",python,selection_command +5349,16120864,"utils/nn.py",10672,0,"",python,selection_command +5350,16121005,"utils/nn.py",10675,0,"",python,selection_command +5351,16121147,"utils/nn.py",10676,0,"",python,selection_command +5352,16121289,"utils/nn.py",10678,0,"",python,selection_command +5353,16121470,"utils/nn.py",10679,0,"",python,selection_command +5354,16124725,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +5355,16124726,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30613,0,"",python,selection_command +5356,16126642,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30640,0,"",python,selection_command +5357,16126896,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30662,0,"",python,selection_command +5358,16126918,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30682,0,"",python,selection_command +5359,16126951,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30704,0,"",python,selection_command +5360,16127133,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30739,0,"",python,selection_command +5361,16127304,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30704,0,"",python,selection_command +5362,16128106,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",32291,0,"",python,selection_command +5363,16135565,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",32307,0,"",python,selection_command +5364,16136887,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",32616,0,"",python,selection_command +5365,16137163,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34566,0,"",python,selection_command +5366,16140151,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",32616,0,"",python,selection_command 
+5367,16140289,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",32307,0,"",python,selection_command +5368,16140425,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30704,0,"",python,selection_command +5369,16140748,"utils/nn.py",0,0,"",python,tab +5370,16140748,"utils/nn.py",10679,0,"",python,selection_command +5371,16141082,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +5372,16141082,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30704,0,"",python,selection_command +5373,16142776,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",32291,0,"",python,selection_command +5374,16143603,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",32307,0,"",python,selection_command +5375,16144480,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",32291,0,"",python,selection_command +5376,16144649,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34566,0,"",python,selection_command +5377,16145140,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",32616,0,"",python,selection_command +5378,16146391,"utils/nn.py",0,0,"",python,tab +5379,16146392,"utils/nn.py",10679,0,"",python,selection_command +5380,16150113,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +5381,16150113,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30613,0,"",python,selection_command +5382,16150655,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30640,0,"",python,selection_command +5383,16150939,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30662,0,"",python,selection_command +5384,16151071,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30682,0,"",python,selection_command +5385,16151204,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30704,0,"",python,selection_command +5386,16152508,"utils/nn.py",0,0,"",python,tab +5387,16152509,"utils/nn.py",10679,0,"",python,selection_command +5388,16159496,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +5389,16159496,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30704,0,"",python,selection_command +5390,16160580,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",32291,0,"",python,selection_command +5391,16161553,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",32307,0,"",python,selection_command +5392,16161723,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",32616,0,"",python,selection_command +5393,16162130,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34566,0,"",python,selection_command +5394,16167037,"TERMINAL",0,0,"c",,terminal_output +5395,16167226,"TERMINAL",0,0,"\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 231, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 134, in __call__\r\n z = self.temporal_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 333, in attention_fn\r\n output_4d = jax.nn.dot_product_attention(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py"", line 1204, in dot_product_attention\r\n out = cudnn_dot_product_attention(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py"", line 1981, in dot_product_attention\r\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py"", line 332, in check_layout\r\n raise ValueError(\r\nValueError: Bias must have same seq length as QKV, got 1 and 1\r\n",,terminal_output +5396,16167984,"TERMINAL",0,0,"(Pdb) ",,terminal_output +5397,16168209,"TERMINAL",0,0,"srun: error: hai007: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +5398,16178950,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34584,0,"",python,selection_command +5399,16179352,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34593,0,"",python,selection_command 
+5400,16180117,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",35801,0,"",python,selection_command +5401,16182590,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",35776,0,"",python,selection_command +5402,16185423,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",35803,0,"",python,selection_mouse +5403,16186292,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",35776,0,"",python,selection_command +5404,16187939,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",35803,0,"",python,selection_mouse +5405,16189323,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34593,0,"",python,selection_command +5406,16190424,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34584,0,"",python,selection_command +5407,16191285,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34583,0,"",python,selection_command +5408,16191374,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34573,0,"",python,selection_command +5409,16191610,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34303,0,"",python,selection_command +5410,16200501,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34324,0,"",python,selection_command +5411,16200747,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34347,0,"",python,selection_command +5412,16200821,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34376,0,"",python,selection_command +5413,16200821,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34400,0,"",python,selection_command +5414,16200829,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34464,0,"",python,selection_command +5415,16200862,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34471,0,"",python,selection_command +5416,16200896,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34478,0,"",python,selection_command +5417,16200929,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34510,0,"",python,selection_command +5418,16200963,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34538,0,"",python,selection_command +5419,16200998,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34570,0,"",python,selection_command +5420,16202019,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34566,0,"",python,selection_command +5421,16202961,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34584,0,"",python,selection_command +5422,16203481,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34593,0,"",python,selection_command +5423,16203939,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",35801,0,"",python,selection_command +5424,16208780,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",35800,0,"",python,selection_command +5425,16208905,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",35778,0,"",python,selection_command +5426,16209325,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34981,0,"",python,selection_command +5427,16212704,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",35778,0,"",python,selection_command +5428,16213043,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",35800,0,"",python,selection_command +5429,16213177,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",35801,0,"",python,selection_command +5430,16213784,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",35824,0,"",python,selection_command 
+5431,16214546,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36422,0,"",python,selection_command +5432,16222302,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36635,0,"",python,selection_command +5433,16224573,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36661,0,"",python,selection_command +5434,16224776,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36691,0,"",python,selection_command +5435,16225069,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36726,0,"",python,selection_command +5436,16225351,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36745,0,"",python,selection_command +5437,16225680,".venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37919,0,"",python,selection_command +5438,16234190,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +5439,16234191,"/fast/home/franz.srambical/jafar/utils/nn.py",4090,0,"",python,selection_command +5440,16234304,"/fast/home/franz.srambical/jafar/utils/nn.py",10277,48," mask_4d = mask_4d.astype(jnp.bool)\n",python,content +5441,16235563,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"# Copyright 2024 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n""""""Attention core modules for Flax.""""""\n\nfrom __future__ import annotations\n\nimport functools\nfrom typing import Any\nfrom collections.abc import Callable\nimport math\n\nimport jax\nimport jax.numpy as jnp\nfrom jax import lax, random\n\nfrom flax import nnx\nfrom flax.nnx import rnglib\nfrom flax.nnx.module import Module, first_from\nfrom flax.nnx.nn import initializers\nfrom flax.nnx.nn import dtypes\nfrom flax.nnx.nn.linear import (\n LinearGeneral,\n default_kernel_init,\n)\nfrom flax.nnx.nn.normalization import LayerNorm\nfrom flax.typing import (\n Dtype,\n PromoteDtypeFn,\n Shape,\n Initializer,\n PrecisionLike,\n DotGeneralT,\n)\n\nArray = jax.Array\n\n\ndef dot_product_attention_weights(\n query: Array,\n key: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention weights given query and key.\n\n Used by :func:`dot_product_attention`, which is what you'll most likely use.\n But if you want access to the attention weights for introspection, then\n you can directly call this function and call einsum yourself.\n\n Args:\n query: queries for calculating attention with shape of `[batch..., q_length,\n num_heads, qk_depth_per_head]`.\n key: keys for calculating attention with shape of `[batch..., kv_length,\n num_heads, qk_depth_per_head]`.\n bias: bias for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. 
This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is `False`.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs and params)\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n ``nnx.Intermediate`` collection. If ``module`` is None, the attention\n weights will not be sowed.\n promote_dtype: function to promote the dtype of the arrays to the desired\n dtype. The function should accept a tuple of ``(query, key)`` and a ``dtype``\n keyword argument, and return a tuple of arrays with the promoted dtype.\n\n Returns:\n Output of shape `[batch..., num_heads, q_length, kv_length]`.\n """"""\n query, key = promote_dtype((query, key), dtype=dtype) # type: ignore[bad-unpacking]\n dtype = query.dtype\n\n assert query.ndim == key.ndim, 'q, k must have same rank.'\n assert query.shape[:-3] == key.shape[:-3], 'q, k batch dims must match.'\n assert query.shape[-2] == key.shape[-2], 'q, k num_heads must match.'\n assert query.shape[-1] == key.shape[-1], 'q, k depths must match.'\n\n # calculate attention matrix\n depth = query.shape[-1]\n query = query / jnp.sqrt(depth).astype(dtype)\n # attn weight shape is (batch..., num_heads, q_length, kv_length)\n attn_weights = jnp.einsum(\n '...qhd,...khd->...hqk', query, key, precision=precision\n )\n\n # apply attention bias: masking, dropout, proximity bias, etc.\n if bias is not None:\n attn_weights = attn_weights + bias\n # apply attention mask\n if mask is not None:\n big_neg = jnp.finfo(dtype).min\n attn_weights = jnp.where(mask, attn_weights, big_neg)\n\n # normalize the attention weights\n attn_weights = jax.nn.softmax(attn_weights).astype(dtype)\n\n if module:\n module.sow(nnx.Intermediate, 'attention_weights', attn_weights)\n\n # apply attention dropout\n if not deterministic and dropout_rate > 0.0:\n keep_prob = 1.0 - dropout_rate\n if broadcast_dropout:\n # dropout is broadcast across the batch + head dimensions\n dropout_shape = tuple([1] * (key.ndim - 2)) + attn_weights.shape[-2:]\n keep = random.bernoulli(dropout_rng, keep_prob, dropout_shape) # type: ignore\n else:\n keep = random.bernoulli(dropout_rng, keep_prob, attn_weights.shape) # type: ignore\n multiplier = keep.astype(dtype) / jnp.asarray(keep_prob, dtype=dtype)\n attn_weights = attn_weights * multiplier\n\n return attn_weights\n\n\ndef dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. 
It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n Will use the more optimized `jax.nn.dot_product_attention` if dropout is\n not activated and `module=None`.\n\n .. note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n\n Args:\n query: queries for calculating attention with shape of ``[batch..., q_length,\n num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n value: values to be used in attention with shape of ``[batch..., kv_length,\n num_heads, v_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is `False`.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs)\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n ``nnx.Intermediate`` collection. If ``module`` is None, the attention\n weights will not be sowed.\n promote_dtype: function to promote the dtype of the arrays to the desired\n dtype. The function should accept a tuple of ``(query, key, value)`` and a\n ``dtype`` keyword argument, and return a tuple of arrays with the promoted\n dtype.\n\n Returns:\n Output of shape `[batch..., q_length, num_heads, v_depth_per_head]`.\n """"""\n query, key, value = promote_dtype((query, key, value), dtype=dtype) # type: ignore[bad-unpacking]\n dtype = query.dtype\n assert key.ndim == query.ndim == value.ndim, 'q, k, v must have same rank.'\n assert (\n query.shape[:-3] == key.shape[:-3] == value.shape[:-3]\n ), 'q, k, v batch dims must match.'\n assert (\n query.shape[-2] == key.shape[-2] == value.shape[-2]\n ), 'q, k, v num_heads must match.'\n assert key.shape[-3] == value.shape[-3], 'k, v lengths must match.'\n\n # Criteria that invoke the more optimized dot product attention\n if dropout_rate == 0.0 and module == None:\n # make sure qkv batch are compressed to one dim\n query_shape = query.shape\n if len(query_shape) > 4:\n def reshape_4d(x):\n return jnp.reshape(x, (math.prod(x.shape[:-3]), *x.shape[-3:]))\n query, key, value, bias, mask = jax.tree.map(\n reshape_4d, (query, key, value, bias, mask))\n if mask is not None:\n mask = mask.astype(jnp.bool)\n out = jax.nn.dot_product_attention(query, key, value, bias, mask)\n if len(query_shape) > 4:\n out = jnp.reshape(out, query_shape)\n return out\n\n # compute attention weights\n attn_weights = dot_product_attention_weights(\n query,\n key,\n bias,\n mask,\n broadcast_dropout,\n dropout_rng,\n dropout_rate,\n deterministic,\n dtype,\n precision,\n module,\n )\n\n # return weighted sum over values for each query position\n return jnp.einsum(\n '...hqk,...khd->...qhd', attn_weights, value, precision=precision\n )\n\n\nclass MultiHeadAttention(Module):\n """"""Multi-head attention.\n\n Example usage::\n\n >>> from 
flax import nnx\n >>> import jax\n\n >>> layer = nnx.MultiHeadAttention(num_heads=8, in_features=5, qkv_features=16,\n ... decode=False, rngs=nnx.Rngs(0))\n >>> key1, key2, key3 = jax.random.split(jax.random.key(0), 3)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = (\n ... jax.random.uniform(key1, shape),\n ... jax.random.uniform(key2, shape),\n ... jax.random.uniform(key3, shape),\n ... )\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer(q, k, v)\n >>> # equivalent output when inferring v\n >>> assert (layer(q, k) == layer(q, k, k)).all()\n >>> # equivalent output when inferring k and v\n >>> assert (layer(q) == layer(q, q)).all()\n >>> assert (layer(q) == layer(q, q, q)).all()\n\n Args:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n in_features: int or tuple with number of input features.\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection.\n in_kv_features: number of input features for computing key and value.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n out_kernel_init: optional initializer for the kernel of the output Dense layer,\n if None, the kernel_init is used.\n bias_init: initializer for the bias of the Dense layers.\n out_bias_init: optional initializer for the bias of the output Dense layer,\n if None, the bias_init is used.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape `[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n rngs: rng key.\n keep_rngs: whether to store the input rngs as attribute (i.e. `self.rngs = rngs`)\n (default: True). 
If rngs is stored, we should split the module as\n `graphdef, params, nondiff = nnx.split(module, nnx.Param, ...)` where `nondiff`\n contains RNG object associated with stored `self.rngs`.\n """"""\n\n __data__ = (\n 'query',\n 'key',\n 'value',\n 'out',\n 'query_ln',\n 'key_ln',\n 'cached_key',\n 'cached_value',\n 'cache_index',\n 'rngs',\n )\n\n def __init__(\n self,\n num_heads: int,\n in_features: int,\n qkv_features: int | None = None,\n out_features: int | None = None,\n in_kv_features: int | None = None,\n *,\n dtype: Dtype | None = None,\n param_dtype: Dtype = jnp.float32,\n broadcast_dropout: bool = True,\n dropout_rate: float = 0.0,\n deterministic: bool | None = None,\n precision: PrecisionLike = None,\n kernel_init: Initializer = default_kernel_init,\n out_kernel_init: Initializer | None = None,\n bias_init: Initializer = initializers.zeros_init(),\n out_bias_init: Initializer | None = None,\n use_bias: bool = True,\n attention_fn: Callable[..., Array] = dot_product_attention,\n decode: bool | None = None,\n normalize_qk: bool = False,\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None,\n out_dot_general: DotGeneralT | None = None,\n qkv_dot_general_cls: Any = None,\n out_dot_general_cls: Any = None,\n rngs: rnglib.Rngs,\n keep_rngs: bool = True,\n ):\n self.num_heads = num_heads\n self.in_features = in_features\n self.qkv_features = (\n qkv_features if qkv_features is not None else in_features\n )\n self.out_features = (\n out_features if out_features is not None else in_features\n )\n self.in_kv_features = (\n in_kv_features if in_kv_features is not None else in_features\n )\n self.dtype = dtype\n self.param_dtype = param_dtype\n self.broadcast_dropout = broadcast_dropout\n self.dropout_rate = dropout_rate\n self.deterministic = deterministic\n self.precision = precision\n self.kernel_init = kernel_init\n self.out_kernel_init = out_kernel_init\n self.bias_init = bias_init\n self.out_bias_init = out_bias_init\n self.use_bias = use_bias\n self.attention_fn = attention_fn\n self.decode = decode\n self.normalize_qk = normalize_qk\n self.qkv_dot_general = qkv_dot_general\n self.out_dot_general = out_dot_general\n self.qkv_dot_general_cls = qkv_dot_general_cls\n self.out_dot_general_cls = out_dot_general_cls\n\n if self.qkv_features % self.num_heads != 0:\n raise ValueError(\n f'Memory dimension ({self.qkv_features}) must be divisible by '\n f""'num_heads' heads ({self.num_heads}).""\n )\n\n self.head_dim = self.qkv_features // self.num_heads\n\n linear_general = functools.partial(\n LinearGeneral,\n out_features=(self.num_heads, self.head_dim),\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n self.query = linear_general(self.in_features, rngs=rngs)\n self.key = linear_general(self.in_kv_features, rngs=rngs)\n self.value = linear_general(self.in_kv_features, rngs=rngs)\n\n self.query_ln: LayerNorm | None\n self.key_ln: LayerNorm | None\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. 
See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n self.query_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n self.key_ln = LayerNorm(\n self.head_dim,\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n rngs=rngs,\n )\n else:\n self.query_ln = None\n self.key_ln = None\n\n self.out = LinearGeneral(\n in_features=(self.num_heads, self.head_dim),\n out_features=self.out_features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n rngs=rngs,\n )\n self.rngs = rngs if keep_rngs and dropout_rate > 0 else None\n\n self.cached_key: nnx.Cache[Array] | None = None\n self.cached_value: nnx.Cache[Array] | None = None\n self.cache_index: nnx.Cache[Array] | None = None\n\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n rngs: rnglib.Rngs | None = None,\n sow_weights: bool = False,\n decode: bool | None = None,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape `[batch_sizes..., length, features]`.\n inputs_k: key of shape `[batch_sizes..., length, features]`. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape `[batch_sizes..., length, features]`. If None,\n inputs_v will copy the value of inputs_k.\n mask: attention mask of shape `[batch_sizes..., num_heads, query_length,\n key/value_length]`. Attention weights are masked out if their\n corresponding mask value is `False`.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic. The\n ``deterministic`` flag passed into the call method will take precedence\n over the ``deterministic`` flag passed into the constructor.\n rngs: rng key. The rng key passed into the call method will take\n precedence over the rng key passed into the constructor.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection.\n decode: whether to prepare and use an autoregressive cache. The ``decode``\n flag passed into the call method will take precedence over the ``decode``\n flag passed into the constructor.\n\n Returns:\n output of shape `[batch_sizes..., length, features]`.\n """"""\n if rngs is None:\n rngs = self.rngs\n\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. 
'\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n\n if inputs_q.shape[-1] != self.in_features:\n raise ValueError(\n f'Incompatible input dimension, got {inputs_q.shape[-1]} '\n f'but module expects {self.in_features}.'\n )\n\n query = self.query(inputs_q)\n key = self.key(inputs_k)\n value = self.value(inputs_v)\n\n if self.normalize_qk:\n assert self.query_ln is not None and self.key_ln is not None\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = self.query_ln(query)\n key = self.key_ln(key)\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n decode = first_from(\n decode,\n self.decode,\n error_msg=""""""No `decode` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n\n if decode:\n if (\n self.cached_key is None\n or self.cached_value is None\n or self.cache_index is None\n ):\n raise ValueError(\n 'Autoregressive cache not initialized, call ``init_cache`` first.'\n )\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = self.cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = self.cache_index[...]\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices = (zero,) * len(batch_dims) + (cur_index, zero, zero)\n key = lax.dynamic_update_slice(self.cached_key[...], key, indices)\n value = lax.dynamic_update_slice(self.cached_value[...], value, indices)\n self.cached_key[...] = key\n self.cached_value[...] = value\n self.cache_index[...] 
+= 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n deterministic = first_from(\n deterministic,\n self.deterministic,\n error_msg=""""""No `deterministic` argument was provided to MultiHeadAttention\n as either a __call__ argument, class attribute, or nnx.flag."""""",\n )\n if not deterministic:\n if rngs is None:\n raise ValueError(\n ""'rngs' must be provided to __call__ method if ""\n ""MultiHeadAttention instance is defined with keep_rngs=False.""\n )\n dropout_rng = rngs.dropout()\n else:\n dropout_rng = None\n else:\n deterministic = True\n dropout_rng = None\n\n # apply attention\n x = self.attention_fn(\n query,\n key,\n value,\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=deterministic,\n dtype=self.dtype,\n precision=self.precision,\n module=self if sow_weights else None,\n )\n # back to the original inputs dimensions\n out = self.out(x)\n return out\n\n def init_cache(self, input_shape: Shape, dtype: Dtype = jnp.float32):\n """"""Initializes cache for fast autoregressive decoding. When\n ``decode=True``, this method must be called first before performing\n forward inference. When in decode mode, only one token must be passed\n at a time.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax.numpy as jnp\n ...\n >>> batch_size = 5\n >>> embed_dim = 3\n >>> x = jnp.ones((batch_size, 1, embed_dim)) # single token\n ...\n >>> model_nnx = nnx.MultiHeadAttention(\n ... num_heads=2,\n ... in_features=3,\n ... qkv_features=6,\n ... out_features=6,\n ... decode=True,\n ... rngs=nnx.Rngs(42),\n ... 
)\n ...\n >>> # out_nnx = model_nnx(x) <-- throws an error because cache isn't initialized\n ...\n >>> model_nnx.init_cache(x.shape)\n >>> out_nnx = model_nnx(x)\n """"""\n cache_shape = (*input_shape[:-1], self.num_heads, self.head_dim)\n self.cached_key = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cached_value = nnx.Cache(jnp.zeros(cache_shape, dtype))\n self.cache_index = nnx.Cache(jnp.array(0, dtype=jnp.int32))\n\n\n# mask-making utility functions\n\n\ndef make_attention_mask(\n query_input: Array,\n key_input: Array,\n pairwise_fn: Callable[..., Any] = jnp.multiply,\n extra_batch_dims: int = 0,\n dtype: Dtype = jnp.float32,\n):\n """"""Mask-making helper for attention weights.\n\n In case of 1d inputs (i.e., `[batch..., len_q]`, `[batch..., len_kv]`, the\n attention weights will be `[batch..., heads, len_q, len_kv]` and this\n function will produce `[batch..., 1, len_q, len_kv]`.\n\n Args:\n query_input: a batched, flat input of query_length size\n key_input: a batched, flat input of key_length size\n pairwise_fn: broadcasting elementwise comparison function\n extra_batch_dims: number of extra batch dims to add singleton axes for, none\n by default\n dtype: mask return dtype\n\n Returns:\n A `[batch..., 1, len_q, len_kv]` shaped mask for 1d attention.\n """"""\n mask = pairwise_fn(\n jnp.expand_dims(query_input, axis=-1), jnp.expand_dims(key_input, axis=-2)\n )\n mask = jnp.expand_dims(mask, axis=-3)\n mask = jnp.expand_dims(mask, axis=tuple(range(extra_batch_dims)))\n return mask.astype(dtype)\n\n\ndef make_causal_mask(\n x: Array, extra_batch_dims: int = 0, dtype: Dtype = jnp.float32\n) -> Array:\n """"""Make a causal mask for self-attention.\n\n In case of 1d inputs (i.e., `[batch..., len]`, the self-attention weights\n will be `[batch..., heads, len, len]` and this function will produce a\n causal mask of shape `[batch..., 1, len, len]`.\n\n Args:\n x: input array of shape `[batch..., len]`\n extra_batch_dims: number of batch dims to add singleton axes for, none by\n default\n dtype: mask return dtype\n\n Returns:\n A `[batch..., 1, len, len]` shaped causal mask for 1d attention.\n """"""\n idxs = jnp.broadcast_to(jnp.arange(x.shape[-1], dtype=jnp.int32), x.shape)\n return make_attention_mask(\n idxs,\n idxs,\n jnp.greater_equal,\n extra_batch_dims=extra_batch_dims,\n dtype=dtype,\n )\n\n\ndef combine_masks(\n *masks: Array | None, dtype: Dtype = jnp.float32\n) -> Array | None:\n """"""Combine attention masks.\n\n Args:\n *masks: set of attention mask arguments to combine, some can be None.\n dtype: dtype for the returned mask.\n\n Returns:\n Combined mask, reduced by logical and, returns None if no masks given.\n """"""\n masks_list = [m for m in masks if m is not None]\n if not masks_list:\n return None\n assert all(\n map(lambda x: x.ndim == masks_list[0].ndim, masks_list)\n ), f'masks must have same rank: {tuple(map(lambda x: x.ndim, masks_list))}'\n mask, *other_masks = masks_list\n for other_mask in other_masks:\n mask = jnp.logical_and(mask, other_mask)\n return mask.astype(dtype)\n",python,tab +5442,16235564,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22684,0,"",python,selection_command +5443,16237867,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +5444,16237867,"/fast/home/franz.srambical/jafar/utils/nn.py",10652,0,"",python,selection_command +5445,16242372,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab 
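A hedged sketch tying together the pieces of the flax/nnx attention source viewed above (make_causal_mask, combine_masks, and the init_cache/decode path); the shapes and hyperparameters below are illustrative assumptions, not values taken from this recording.

import jax.numpy as jnp
from flax import nnx
from flax.nnx.nn.attention import make_causal_mask, combine_masks

batch, seq_len, embed = 2, 4, 8

# make_causal_mask builds a (batch, 1, len, len) lower-triangular mask;
# combine_masks logical-ands any number of broadcast-compatible masks.
causal = make_causal_mask(jnp.ones((batch, seq_len)))      # (2, 1, 4, 4)
padding = jnp.ones((batch, 1, seq_len, seq_len))           # dummy all-valid mask
mask = combine_masks(causal, padding)                      # (2, 1, 4, 4)

# Autoregressive decoding: init_cache allocates cached_key/cached_value of shape
# (*batch_dims, max_length, num_heads, head_dim); each subsequent call consumes a
# single token, updates the cache in place, and masks future positions internally.
layer = nnx.MultiHeadAttention(
    num_heads=2, in_features=embed, qkv_features=8, out_features=embed,
    decode=True, rngs=nnx.Rngs(0),
)
layer.init_cache((batch, seq_len, embed))
tokens = jnp.ones((batch, seq_len, embed))
for t in range(seq_len):
    out_t = layer(tokens[:, t:t + 1, :])                   # (2, 1, 8) per step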
+5446,16242372,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37837,0,"",python,selection_command +5447,16250351,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37836,0,"",python,selection_command +5448,16250608,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37776,0,"",python,selection_command +5449,16250636,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37698,0,"",python,selection_command +5450,16250661,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37684,0,"",python,selection_command +5451,16250695,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37644,0,"",python,selection_command +5452,16250728,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37586,0,"",python,selection_command +5453,16250761,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37539,0,"",python,selection_command +5454,16250793,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37499,0,"",python,selection_command +5455,16250830,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37471,0,"",python,selection_command +5456,16250864,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37439,0,"",python,selection_command +5457,16250895,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37363,0,"",python,selection_command +5458,16250928,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37326,0,"",python,selection_command +5459,16250963,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37302,0,"",python,selection_command +5460,16250999,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37266,0,"",python,selection_command +5461,16251030,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37244,0,"",python,selection_command +5462,16251064,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37200,0,"",python,selection_command +5463,16251096,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37164,0,"",python,selection_command +5464,16251128,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37129,0,"",python,selection_command +5465,16251164,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37128,0,"",python,selection_command +5466,16251198,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37059,0,"",python,selection_command +5467,16251230,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37017,0,"",python,selection_command +5468,16251263,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36952,0,"",python,selection_command +5469,16251297,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36919,0,"",python,selection_command 
+5470,16251331,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36881,0,"",python,selection_command +5471,16251363,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36859,0,"",python,selection_command +5472,16251397,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36851,0,"",python,selection_command +5473,16251431,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36773,0,"",python,selection_command +5474,16251464,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36751,0,"",python,selection_command +5475,16251497,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36718,0,"",python,selection_command +5476,16251658,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36653,0,"",python,selection_command +5477,16251796,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36661,0,"",python,selection_command +5478,16252126,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36726,0,"",python,selection_command +5479,16252688,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36745,0,"",python,selection_command +5480,16252911,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37919,0,"",python,selection_command +5481,16255476,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",0,0,"",python,tab +5482,16255477,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77888,0,"",python,selection_command +5483,16262043,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +5484,16262043,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37919,0,"",python,selection_command +5485,16262919,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37876,0,"",python,selection_command +5486,16263116,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37849,0,"",python,selection_command +5487,16263627,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",0,0,"",python,tab +5488,16263628,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72096,0,"",python,selection_command +5489,16264166,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72123,0,"",python,selection_command +5490,16264322,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72141,0,"",python,selection_command +5491,16264464,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72157,0,"",python,selection_command +5492,16264602,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72175,0,"",python,selection_command +5493,16264770,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72206,0,"",python,selection_command +5494,16264936,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72175,0,"",python,selection_command 
+5495,16265236,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",73725,0,"",python,selection_command +5496,16267006,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",73731,0,"",python,selection_command +5497,16267457,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",76699,0,"",python,selection_command +5498,16283774,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",76790,0,"",python,selection_command +5499,16284355,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",76850,0,"",python,selection_command +5500,16284724,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",76856,0,"",python,selection_command +5501,16285053,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",76997,0,"",python,selection_command +5502,16285370,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77778,0,"",python,selection_command +5503,16288840,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77807,0,"",python,selection_command +5504,16289003,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77924,0,"",python,selection_command +5505,16289135,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78015,0,"",python,selection_command +5506,16289289,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78089,0,"",python,selection_command +5507,16289764,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78248,0,"",python,selection_command +5508,16293275,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78089,0,"",python,selection_command +5509,16293414,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78015,0,"",python,selection_command +5510,16293562,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77924,0,"",python,selection_command +5511,16293981,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77807,0,"",python,selection_command +5512,16294501,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77778,0,"",python,selection_command +5513,16304401,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77783,0,"",python,selection_command +5514,16304537,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77785,0,"",python,selection_command +5515,16306332,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67527,0,"",python,selection_command +5516,16328149,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67573,0,"",python,selection_command +5517,16328547,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67574,0,"",python,selection_command +5518,16340718,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67606,0,"",python,selection_command +5519,16341024,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67632,0,"",python,selection_command +5520,16341303,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67639,0,"",python,selection_command 
+5521,16341531,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67668,0,"",python,selection_command +5522,16341735,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67683,0,"",python,selection_command +5523,16342010,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68032,0,"",python,selection_command +5524,16342371,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68051,0,"",python,selection_command +5525,16567253,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68070,0,"",python,selection_command +5526,16568202,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68065,15," bias = mask",python,selection_command +5527,16592870,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68070,0,"",python,selection_command +5528,16719099,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68051,0,"",python,selection_command +5529,16719531,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68046,18," if bias is None:",python,selection_command +5530,16719700,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68046,34," if bias is None:\n bias = mask",python,selection_command +5531,16719845,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68046,42," if bias is None:\n bias = mask\n else:",python,selection_command +5532,16720490,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68086,0,"",python,selection_command +5533,16748976,"utils/nn.py",0,0,"",python,tab +5534,16748976,"utils/nn.py",10338,0,"",python,selection_command +5535,16750057,"utils/nn.py",10338,105," # FIXME (f.srambical): Investigate whether/why this is needed\n mask_4d = attention_mask[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)",python,content +5536,16750589,"utils/nn.py",10507,0,"",python,selection_command +5537,16751208,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",0,0,"",python,tab +5538,16762548,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68070,0,"",python,selection_command +5539,16762693,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68051,0,"",python,selection_command +5540,16762847,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68025,0,"",python,selection_command +5541,16763026,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68051,0,"",python,selection_command +5542,16763457,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68046,18," if bias is None:",python,selection_command +5543,16763566,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68046,34," if bias is None:\n bias = mask",python,selection_command +5544,16763735,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68046,42," if bias is None:\n bias = mask\n else:",python,selection_command +5545,16763866,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68046,67," if bias is None:\n bias = mask\n else:\n if mask is not None:",python,selection_command +5546,16764016,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68046,109," if bias is None:\n bias = mask\n else:\n if mask is 
not None:\n # should be broadcast to same shape",python,selection_command +5547,16764150,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68046,134," if bias is None:\n bias = mask\n else:\n if mask is not None:\n # should be broadcast to same shape\n bias = bias + mask",python,selection_command +5548,16817961,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68161,0,"",python,selection_command +5549,16819715,"utils/nn.py",0,0,"",python,tab +5550,16820496,"utils/nn.py",10511,0,"",python,selection_command +5551,16844033,"utils/nn.py",10338,605," # FIXME (f.srambical): Investigate whether/why this is needed\n mask_4d = attention_mask[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)",python,content +5552,16844792,"utils/nn.py",10507,0,"",python,selection_command +5553,16845053,"utils/nn.py",10410,0,"",python,selection_command +5554,16845471,"utils/nn.py",9410,0,"",python,selection_command +5555,16846299,"utils/nn.py",9389,0,"",python,selection_command +5556,16846545,"utils/nn.py",9376,0,"",python,selection_command +5557,16846582,"utils/nn.py",9321,0,"",python,selection_command +5558,16846607,"utils/nn.py",9294,0,"",python,selection_command +5559,16846709,"utils/nn.py",9281,0,"",python,selection_command +5560,16846897,"utils/nn.py",9227,0,"",python,selection_command +5561,16848694,"TERMINAL",0,0,"[franz.srambical@hai007.haicore.berlin:~/jafar] $ bash experiments/sample.sh ",,terminal_output +5562,16848961,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +5563,16850808,"utils/nn.py",9657,0,"",python,selection_keyboard +5564,16851104,"utils/nn.py",9702,0,"",python,selection_command +5565,16851340,"utils/nn.py",9711,0,"",python,selection_command +5566,16851367,"utils/nn.py",9754,0,"",python,selection_command +5567,16851402,"utils/nn.py",9793,0,"",python,selection_command +5568,16851426,"utils/nn.py",9828,0,"",python,selection_command +5569,16851466,"utils/nn.py",9837,0,"",python,selection_command +5570,16851492,"utils/nn.py",9922,0,"",python,selection_command +5571,16851630,"utils/nn.py",9998,0,"",python,selection_command +5572,16851803,"utils/nn.py",10066,0,"",python,selection_command +5573,16860306,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +5574,16868284,"TERMINAL",0,0,"2025-07-27 13:59:48.083577: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5575,16869748,"TERMINAL",0,0,"2025-07-27 13:59:49.549339: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5576,16873220,"TERMINAL",0,0,"2025-07-27 13:59:52.999207: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5577,16873939,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/utils/nn.py(298)attention_fn()\r\n-> implementation = ""cudnn"" if use_flash_attention else None\r\n",,terminal_output +5578,16878858,"TERMINAL",0,0,"c",,terminal_output +5579,16879320,"TERMINAL",0,0,"\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 231, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 134, in __call__\r\n z = self.temporal_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 333, in attention_fn\r\n output_4d = jax.nn.dot_product_attention(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py"", line 1204, in dot_product_attention\r\n out = cudnn_dot_product_attention(\r\n File 
""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py"", line 1981, in dot_product_attention\r\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py"", line 332, in check_layout\r\n raise ValueError(\r\nValueError: Bias must have same seq length as QKV, got 1 and 1\r\n",,terminal_output +5580,16880126,"TERMINAL",0,0,"(Pdb) ",,terminal_output +5581,16880341,"TERMINAL",0,0,"srun: error: hai007: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +5582,16883334,"utils/nn.py",10075,0,"",python,selection_command +5583,16883532,"utils/nn.py",10168,0,"",python,selection_command +5584,16883729,"utils/nn.py",10075,0,"",python,selection_command +5585,16883918,"utils/nn.py",10168,0,"",python,selection_command +5586,16884233,"utils/nn.py",10160,28," if mask is not None:",python,selection_command +5587,16884345,"utils/nn.py",10160,71," if mask is not None:\n # mask.shape (1, 921, 1, 1, 1)",python,selection_command +5588,16884512,"utils/nn.py",10160,116," if mask is not None:\n # mask.shape (1, 921, 1, 1, 1)\n mask_4d = _pad(_rearrange(mask))",python,selection_command +5589,16884644,"utils/nn.py",10160,163," if mask is not None:\n # mask.shape (1, 921, 1, 1, 1)\n mask_4d = _pad(_rearrange(mask))\n mask_4d = mask_4d.astype(jnp.bool)",python,selection_command +5590,16884948,"utils/nn.py",10285,0,"",python,selection_command +5591,16887626,"utils/nn.py",10240,0,"",python,selection_command +5592,16887879,"utils/nn.py",10197,0,"",python,selection_command +5593,16887899,"utils/nn.py",10168,0,"",python,selection_command +5594,16887929,"utils/nn.py",10075,0,"",python,selection_command +5595,16887963,"utils/nn.py",10066,0,"",python,selection_command +5596,16887997,"utils/nn.py",9998,0,"",python,selection_command +5597,16888031,"utils/nn.py",9922,0,"",python,selection_command +5598,16888064,"utils/nn.py",9837,0,"",python,selection_command +5599,16888099,"utils/nn.py",9828,0,"",python,selection_command +5600,16888135,"utils/nn.py",9793,0,"",python,selection_command +5601,16888171,"utils/nn.py",9754,0,"",python,selection_command +5602,16888203,"utils/nn.py",9711,0,"",python,selection_command +5603,16888236,"utils/nn.py",9702,0,"",python,selection_command +5604,16888271,"utils/nn.py",9657,0,"",python,selection_command +5605,16888303,"utils/nn.py",9598,0,"",python,selection_command +5606,16888348,"utils/nn.py",9559,0,"",python,selection_command +5607,16888373,"utils/nn.py",9550,0,"",python,selection_command +5608,16888405,"utils/nn.py",9515,0,"",python,selection_command +5609,16888437,"utils/nn.py",9478,0,"",python,selection_command +5610,16888472,"utils/nn.py",9469,0,"",python,selection_command +5611,16888505,"utils/nn.py",9406,0,"",python,selection_command +5612,16888539,"utils/nn.py",9385,0,"",python,selection_command +5613,16888574,"utils/nn.py",9376,0,"",python,selection_command +5614,16888606,"utils/nn.py",9317,0,"",python,selection_command +5615,16888640,"utils/nn.py",9290,0,"",python,selection_command +5616,16888682,"utils/nn.py",9281,0,"",python,selection_command +5617,16888703,"utils/nn.py",9223,0,"",python,selection_command +5618,16888831,"utils/nn.py",9198,0,"",python,selection_command +5619,16889024,"utils/nn.py",9152,0,"",python,selection_command +5620,16889273,"utils/nn.py",9144,45," if query.shape 
== (1, 921, 1, 8, 64):",python,selection_command +5621,16889417,"utils/nn.py",9144,70," if query.shape == (1, 921, 1, 8, 64):\n breakpoint()",python,selection_command +5622,16889493,"utils/nn.py",9144,71,"",python,content +5623,16889505,"utils/nn.py",9152,0,"",python,selection_command +5624,16889647,"utils/nn.py",9210,0,"",python,selection_command +5625,16889901,"utils/nn.py",9219,0,"",python,selection_command +5626,16889931,"utils/nn.py",9246,0,"",python,selection_command +5627,16889963,"utils/nn.py",9305,0,"",python,selection_command +5628,16889997,"utils/nn.py",9314,0,"",python,selection_command +5629,16890030,"utils/nn.py",9335,0,"",python,selection_command +5630,16890065,"utils/nn.py",9398,0,"",python,selection_command +5631,16890096,"utils/nn.py",9407,0,"",python,selection_command +5632,16890129,"utils/nn.py",9444,0,"",python,selection_command +5633,16890188,"utils/nn.py",9479,0,"",python,selection_command +5634,16890227,"utils/nn.py",9488,0,"",python,selection_command +5635,16890236,"utils/nn.py",9527,0,"",python,selection_command +5636,16890268,"utils/nn.py",9586,0,"",python,selection_command +5637,16890297,"utils/nn.py",9631,0,"",python,selection_command +5638,16890330,"utils/nn.py",9640,0,"",python,selection_command +5639,16890365,"utils/nn.py",9683,0,"",python,selection_command +5640,16890442,"utils/nn.py",9722,0,"",python,selection_command +5641,16890443,"utils/nn.py",9757,0,"",python,selection_command +5642,16890462,"utils/nn.py",9766,0,"",python,selection_command +5643,16890496,"utils/nn.py",9851,0,"",python,selection_command +5644,16890531,"utils/nn.py",9927,0,"",python,selection_command +5645,16890563,"utils/nn.py",9995,0,"",python,selection_command +5646,16890594,"utils/nn.py",10004,0,"",python,selection_command +5647,16890630,"utils/nn.py",10097,0,"",python,selection_command +5648,16890664,"utils/nn.py",10126,0,"",python,selection_command +5649,16890694,"utils/nn.py",10169,0,"",python,selection_command +5650,16890729,"utils/nn.py",10214,0,"",python,selection_command +5651,16890762,"utils/nn.py",10261,0,"",python,selection_command +5652,16890796,"utils/nn.py",10275,0,"",python,selection_command +5653,16890828,"utils/nn.py",10349,0,"",python,selection_command +5654,16890862,"utils/nn.py",10438,0,"",python,selection_command +5655,16890896,"utils/nn.py",10447,0,"",python,selection_command +5656,16891121,"utils/nn.py",10438,0,"",python,selection_command +5657,16891288,"utils/nn.py",10349,0,"",python,selection_command +5658,16891540,"utils/nn.py",10437,0,"\n if query.shape == (1, 921, 1, 8, 64):\n breakpoint()",python,content +5659,16891555,"utils/nn.py",10446,0,"",python,selection_command +5660,16893212,"TERMINAL",0,0,"[franz.srambical@hai007.haicore.berlin:~/jafar] $ bash experiments/sample.sh ",,terminal_output +5661,16893432,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +5662,16904341,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +5663,16912368,"TERMINAL",0,0,"2025-07-27 14:00:32.171060: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5664,16913998,"TERMINAL",0,0,"2025-07-27 14:00:33.670173: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5665,16916704,".venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",0,0,"",python,tab +5666,16917385,"TERMINAL",0,0,"2025-07-27 14:00:37.188053: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5667,16918148,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/utils/nn.py(330)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +5668,16918940,"utils/nn.py",0,0,"",python,tab +5669,16920153,"TERMINAL",0,0,"m",,terminal_output +5670,16920635,"TERMINAL",0,0,"a",,terminal_output +5671,16920714,"TERMINAL",0,0,"s",,terminal_output +5672,16920798,"TERMINAL",0,0,"k",,terminal_output +5673,16920997,"TERMINAL",0,0,".",,terminal_output +5674,16921441,"TERMINAL",0,0,"s",,terminal_output +5675,16921505,"TERMINAL",0,0,"h",,terminal_output +5676,16921561,"TERMINAL",0,0,"a",,terminal_output +5677,16921669,"TERMINAL",0,0,"p",,terminal_output +5678,16921762,"TERMINAL",0,0,"e",,terminal_output +5679,16922041,"TERMINAL",0,0,"\r\n(Pdb) (1, 921, 1, 1, 1)\r\n",,terminal_output +5680,16929149,"TERMINAL",0,0,"m",,terminal_output +5681,16929240,"TERMINAL",0,0,"as",,terminal_output +5682,16929313,"TERMINAL",0,0,"k",,terminal_output +5683,16929996,"TERMINAL",0,0,"_",,terminal_output +5684,16930383,"TERMINAL",0,0,"4",,terminal_output +5685,16930564,"TERMINAL",0,0,"d",,terminal_output +5686,16930703,"TERMINAL",0,0,".",,terminal_output +5687,16930777,"TERMINAL",0,0,"s",,terminal_output +5688,16930963,"TERMINAL",0,0,"ha",,terminal_output +5689,16931061,"TERMINAL",0,0,"p",,terminal_output +5690,16931146,"TERMINAL",0,0,"e",,terminal_output +5691,16931263,"TERMINAL",0,0,"\r\n(Pdb) (921, 4, 1, 1)\r\n",,terminal_output +5692,17104436,"utils/nn.py",9398,0,"",python,selection_command +5693,17105918,"utils/nn.py",9327,0,"",python,selection_command +5694,17106145,"utils/nn.py",9306,0,"",python,selection_command +5695,17106178,"utils/nn.py",9305,0,"",python,selection_command +5696,17106212,"utils/nn.py",9238,0,"",python,selection_command +5697,17106243,"utils/nn.py",9211,0,"",python,selection_command +5698,17106276,"utils/nn.py",9210,0,"",python,selection_command +5699,17106432,"utils/nn.py",9144,0,"",python,selection_command +5700,17106962,"utils/nn.py",9152,0,"",python,selection_command +5701,17107620,"utils/nn.py",9152,0,"#",python,content +5702,17107621,"utils/nn.py",9153,0,"",python,selection_keyboard +5703,17107694,"utils/nn.py",9153,0," ",python,content +5704,17107695,"utils/nn.py",9154,0,"",python,selection_keyboard +5705,17107953,"utils/nn.py",9153,0,"",python,selection_command +5706,17108282,"utils/nn.py",9211,0,"\n # implementation = ""cudnn"" if use_flash_attention else None",python,content +5707,17108282,"utils/nn.py",9220,0,"",python,selection_command 
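The run recorded above fails inside the cuDNN fused-attention layout check ("Bias must have same seq length as QKV"). Below is a minimal sketch of the jax.nn.dot_product_attention call being debugged, with toy shapes assumed for illustration; the cuDNN line is left commented out since it needs a compatible GPU.

import jax
import jax.numpy as jnp

B, T, N, H = 2, 8, 4, 16                               # batch, seq len, heads, head dim
q = jnp.ones((B, T, N, H), jnp.bfloat16)
k = jnp.ones((B, T, N, H), jnp.bfloat16)
v = jnp.ones((B, T, N, H), jnp.bfloat16)

# The mask must be boolean and broadcastable to (B, N, T, S). A (1, 1, T, T) causal
# mask broadcasts on the XLA path; the cuDNN path additionally requires the mask/bias
# sequence dimensions to match the actual Q/KV lengths, which is the layout check
# that raises in the recorded run.
causal = jnp.tril(jnp.ones((T, T), dtype=bool))[None, None, :, :]

out_xla = jax.nn.dot_product_attention(q, k, v, mask=causal, implementation=None)
# out_cudnn = jax.nn.dot_product_attention(q, k, v, mask=causal, implementation="cudnn")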
+5708,17108629,"utils/nn.py",9220,1,"",python,content +5709,17108799,"utils/nn.py",9220,1,"",python,content +5710,17109029,"utils/nn.py",9235,0,"",python,selection_command +5711,17109302,"utils/nn.py",9237,0,"",python,selection_command +5712,17109335,"utils/nn.py",9238,0,"",python,selection_command +5713,17109370,"utils/nn.py",9243,0,"",python,selection_command +5714,17109686,"utils/nn.py",9238,0,"",python,selection_command +5715,17109838,"utils/nn.py",9237,0,"",python,selection_command +5716,17110198,"utils/nn.py",9237,40,"",python,content +5717,17110422,"utils/nn.py",9237,0,"N",python,content +5718,17110422,"utils/nn.py",9238,0,"",python,selection_keyboard +5719,17110619,"utils/nn.py",9238,0,"o",python,content +5720,17110620,"utils/nn.py",9239,0,"",python,selection_keyboard +5721,17110679,"utils/nn.py",9239,0,"n",python,content +5722,17110679,"utils/nn.py",9240,0,"",python,selection_keyboard +5723,17110711,"utils/nn.py",9240,0,"e",python,content +5724,17110712,"utils/nn.py",9241,0,"",python,selection_keyboard +5725,17110920,"utils/nn.py",9240,0,"",python,selection_command +5726,17111387,"utils/nn.py",9212,0,"",python,selection_command +5727,17112050,"utils/nn.py",9220,0,"",python,selection_command +5728,17119900,"utils/nn.py",9242,0,"",python,selection_command +5729,17120128,"utils/nn.py",9251,0,"",python,selection_command +5730,17120155,"utils/nn.py",9278,0,"",python,selection_command +5731,17120187,"utils/nn.py",9337,0,"",python,selection_command +5732,17120219,"utils/nn.py",9346,0,"",python,selection_command +5733,17120251,"utils/nn.py",9367,0,"",python,selection_command +5734,17120285,"utils/nn.py",9430,0,"",python,selection_command +5735,17120327,"utils/nn.py",9439,0,"",python,selection_command +5736,17121751,"utils/nn.py",9430,0,"",python,selection_command +5737,17121898,"utils/nn.py",9367,0,"",python,selection_command +5738,17122039,"utils/nn.py",9346,0,"",python,selection_command +5739,17122171,"utils/nn.py",9337,0,"",python,selection_command +5740,17122327,"utils/nn.py",9278,0,"",python,selection_command +5741,17122480,"utils/nn.py",9251,0,"",python,selection_command +5742,17124250,"utils/nn.py",9278,0,"",python,selection_command +5743,17124415,"utils/nn.py",9337,0,"",python,selection_command +5744,17124563,"utils/nn.py",9346,0,"",python,selection_command +5745,17125217,"utils/nn.py",9367,0,"",python,selection_command +5746,17125450,"utils/nn.py",9430,0,"",python,selection_command +5747,17125476,"utils/nn.py",9439,0,"",python,selection_command +5748,17125508,"utils/nn.py",9476,0,"",python,selection_command +5749,17125534,"utils/nn.py",9511,0,"",python,selection_command +5750,17125567,"utils/nn.py",9520,0,"",python,selection_command +5751,17125602,"utils/nn.py",9559,0,"",python,selection_command +5752,17125660,"utils/nn.py",9618,0,"",python,selection_command +5753,17125692,"utils/nn.py",9663,0,"",python,selection_command +5754,17125722,"utils/nn.py",9672,0,"",python,selection_command +5755,17126352,"utils/nn.py",9672,1,"q",python,selection_command +5756,17126433,"utils/nn.py",9672,1,"q",python,selection_command +5757,17126604,"utils/nn.py",9672,1,"q",python,selection_command +5758,17126971,"utils/nn.py",9672,0,"",python,selection_command +5759,17127034,"utils/nn.py",9754,0,"#",python,content +5760,17127034,"utils/nn.py",9715,0,"#",python,content +5761,17127034,"utils/nn.py",9672,0,"#",python,content +5762,17127034,"utils/nn.py",9673,0,"",python,selection_keyboard +5763,17127062,"utils/nn.py",9757,0," ",python,content +5764,17127062,"utils/nn.py",9717,0," ",python,content 
+5765,17127062,"utils/nn.py",9673,0," ",python,content +5766,17127062,"utils/nn.py",9674,0,"",python,selection_keyboard +5767,17127269,"utils/nn.py",9673,0,"",python,selection_command +5768,17128655,"utils/nn.py",9664,44," # query_4d = _pad(_rearrange(query))",python,selection_command +5769,17128795,"utils/nn.py",9664,85," # query_4d = _pad(_rearrange(query))\n # key_4d = _pad(_rearrange(key))",python,selection_command +5770,17128938,"utils/nn.py",9664,130," # query_4d = _pad(_rearrange(query))\n # key_4d = _pad(_rearrange(key))\n # value_4d = _pad(_rearrange(value))",python,selection_command +5771,17129135,"utils/nn.py",9664,0,"",python,selection_command +5772,17129315,"utils/nn.py",9709,0,"",python,selection_command +5773,17129467,"utils/nn.py",9750,0,"",python,selection_command +5774,17129687,"utils/nn.py",9794,0,"\n ",python,content +5775,17129929,"utils/nn.py",9795,8,"",python,content +5776,17130016,"utils/nn.py",9795,0,"\n # query_4d = _pad(_rearrange(query))\n # key_4d = _pad(_rearrange(key))\n # value_4d = _pad(_rearrange(value))",python,content +5777,17130021,"utils/nn.py",9804,0,"",python,selection_command +5778,17130268,"utils/nn.py",9795,0,"",python,selection_command +5779,17131010,"utils/nn.py",9804,0,"",python,selection_command +5780,17131290,"utils/nn.py",9795,0,"",python,selection_command +5781,17131542,"utils/nn.py",9795,1,"",python,content +5782,17131542,"utils/nn.py",9803,0,"",python,selection_command +5783,17132102,"utils/nn.py",9803,1,"#",python,selection_command +5784,17132210,"utils/nn.py",9803,2,"# ",python,selection_command +5785,17132250,"utils/nn.py",9803,2,"# ",python,selection_command +5786,17132421,"utils/nn.py",9803,2,"# ",python,selection_command +5787,17132617,"utils/nn.py",9889,2,"",python,content +5788,17132617,"utils/nn.py",9848,2,"",python,content +5789,17132617,"utils/nn.py",9803,2,"",python,content +5790,17132624,"utils/nn.py",9803,0,"",python,selection_command +5791,17133166,"utils/nn.py",9812,0,"",python,selection_command +5792,17133466,"utils/nn.py",9814,0,"",python,selection_command +5793,17133949,"utils/nn.py",9814,1,"_",python,selection_command +5794,17134004,"utils/nn.py",9814,4,"_pad",python,selection_command +5795,17134542,"utils/nn.py",9814,5,"_pad(",python,selection_command +5796,17134909,"utils/nn.py",9814,5,"",python,content +5797,17135088,"utils/nn.py",9852,0,"",python,selection_command +5798,17135388,"utils/nn.py",9853,0,"",python,selection_command +5799,17135601,"utils/nn.py",9852,0,"",python,selection_command +5800,17135770,"utils/nn.py",9850,0,"",python,selection_command +5801,17136165,"utils/nn.py",9850,1,"_",python,selection_command +5802,17136187,"utils/nn.py",9850,4,"_pad",python,selection_command +5803,17136310,"utils/nn.py",9850,5,"_pad(",python,selection_command +5804,17136619,"utils/nn.py",9850,5,"",python,content +5805,17136743,"utils/nn.py",9884,0,"",python,selection_command +5806,17137088,"utils/nn.py",9885,0,"",python,selection_command +5807,17137259,"utils/nn.py",9886,0,"",python,selection_command +5808,17137518,"utils/nn.py",9886,1,"_",python,selection_command +5809,17137572,"utils/nn.py",9886,4,"_pad",python,selection_command +5810,17137897,"utils/nn.py",9886,5,"_pad(",python,selection_command +5811,17138052,"utils/nn.py",9886,5,"",python,content +5812,17138595,"utils/nn.py",9904,0,"",python,selection_command +5813,17138707,"utils/nn.py",9903,1,"",python,content +5814,17138824,"utils/nn.py",9902,0,"",python,selection_command +5815,17138928,"utils/nn.py",9865,0,"",python,selection_command 
+5816,17139089,"utils/nn.py",9866,0,"",python,selection_command +5817,17139210,"utils/nn.py",9865,1,"",python,content +5818,17139309,"utils/nn.py",9864,0,"",python,selection_command +5819,17139414,"utils/nn.py",9826,0,"",python,selection_command +5820,17139557,"utils/nn.py",9832,0,"",python,selection_command +5821,17139695,"utils/nn.py",9831,1,"",python,content +5822,17139787,"utils/nn.py",9830,0,"",python,selection_command +5823,17141597,"utils/nn.py",9863,0,"",python,selection_command +5824,17141846,"utils/nn.py",9900,0,"",python,selection_command +5825,17141872,"utils/nn.py",9902,0,"",python,selection_command +5826,17141908,"utils/nn.py",9938,0,"",python,selection_command +5827,17141936,"utils/nn.py",10023,0,"",python,selection_command +5828,17141972,"utils/nn.py",10099,0,"",python,selection_command +5829,17146698,"utils/nn.py",10140,0,"",python,selection_command +5830,17146943,"utils/nn.py",10176,0,"",python,selection_command +5831,17146965,"utils/nn.py",10261,0,"",python,selection_command +5832,17146995,"utils/nn.py",10298,0,"",python,selection_command +5833,17147029,"utils/nn.py",10341,0,"",python,selection_command +5834,17147738,"utils/nn.py",10318,0,"#",python,content +5835,17147765,"utils/nn.py",10319,0," ",python,content +5836,17147966,"utils/nn.py",10319,0,"",python,selection_command +5837,17149149,"utils/nn.py",10352,0,"\n # mask_4d = _pad(_rearrange(mask))",python,content +5838,17149153,"utils/nn.py",10365,0,"",python,selection_command +5839,17149807,"utils/nn.py",10365,1,"",python,content +5840,17149946,"utils/nn.py",10365,1,"",python,content +5841,17150187,"utils/nn.py",10373,0,"",python,selection_command +5842,17150351,"utils/nn.py",10375,0,"",python,selection_command +5843,17150704,"utils/nn.py",10375,1,"_",python,selection_command +5844,17150749,"utils/nn.py",10375,4,"_pad",python,selection_command +5845,17151020,"utils/nn.py",10375,5,"_pad(",python,selection_command +5846,17151359,"utils/nn.py",10375,5,"",python,content +5847,17151603,"utils/nn.py",10392,0,"",python,selection_command +5848,17151739,"utils/nn.py",10391,1,"",python,content +5849,17151833,"utils/nn.py",10390,0,"",python,selection_command +5850,17152966,"utils/nn.py",10353,0,"",python,selection_command +5851,17153032,"utils/nn.py",10392,0,"",python,selection_command +5852,17153455,"utils/nn.py",10439,0,"",python,selection_command +5853,17153784,"utils/nn.py",10453,0,"",python,selection_command +5854,17153953,"utils/nn.py",10527,0,"",python,selection_command +5855,17161627,"utils/nn.py",10624,0,"",python,selection_command +5856,17161766,"utils/nn.py",10670,0,"",python,selection_command +5857,17161932,"utils/nn.py",10695,0,"",python,selection_command +5858,17162042,"utils/nn.py",10696,0,"",python,selection_command +5859,17162381,"utils/nn.py",10704,0,"",python,selection_command +5860,17162462,"utils/nn.py",10704,0,"#",python,content +5861,17162462,"utils/nn.py",10705,0,"",python,selection_keyboard +5862,17162462,"utils/nn.py",10705,0," ",python,content +5863,17162463,"utils/nn.py",10706,0,"",python,selection_keyboard +5864,17162634,"utils/nn.py",10705,0,"",python,selection_command +5865,17163029,"utils/nn.py",10768,0,"\n # bias_4d = _pad(_rearrange(bias)) if bias is not None else None",python,content +5866,17163029,"utils/nn.py",10777,0,"",python,selection_command +5867,17163418,"utils/nn.py",10777,1,"",python,content +5868,17163526,"utils/nn.py",10777,1,"",python,content +5869,17163777,"utils/nn.py",10785,0,"",python,selection_command +5870,17163982,"utils/nn.py",10787,0,"",python,selection_command 
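The edits recorded around this point replace the _pad/_rearrange handling in utils/nn.py with a plain broadcast of the 2D attention mask. A small stand-in sketch of that broadcast follows; the real attention_mask construction in utils/nn.py is not shown in this recording, so the mask here is a hypothetical causal one.

import jax.numpy as jnp

seq_len = 61
# Stand-in 2D causal mask; the actual attention_mask in utils/nn.py is built elsewhere.
attention_mask = jnp.tril(jnp.ones((seq_len, seq_len), dtype=bool))

# Lift it to (1, 1, seq_len, seq_len) so it broadcasts against attention weights of
# shape (batch, num_heads, q_len, kv_len); the later broadcasting error in the terminal
# ((2, 8, 61, 61) vs (1, 1, 64, 64)) suggests the mask length must match the un-padded
# sequence length.
mask_4d = attention_mask[jnp.newaxis, jnp.newaxis, :, :]
mask_4d = mask_4d.astype(jnp.bool)   # mirrors the astype in the recorded edit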
+5871,17164371,"utils/nn.py",10787,1,"_",python,selection_command +5872,17164372,"utils/nn.py",10787,4,"_pad",python,selection_command +5873,17164564,"utils/nn.py",10787,5,"_pad(",python,selection_command +5874,17164713,"utils/nn.py",10787,6,"_pad(_",python,selection_command +5875,17165018,"utils/nn.py",10787,5,"_pad(",python,selection_command +5876,17165117,"utils/nn.py",10787,5,"",python,content +5877,17165328,"utils/nn.py",10797,0,"",python,selection_command +5878,17165494,"utils/nn.py",10798,0,"",python,selection_command +5879,17165813,"utils/nn.py",10802,0,"",python,selection_command +5880,17166119,"utils/nn.py",10803,0,"",python,selection_command +5881,17166268,"utils/nn.py",10803,1,"",python,content +5882,17169581,"TERMINAL",0,0,"^L",,terminal_output +5883,17169647,"TERMINAL",0,0,"^P",,terminal_output +5884,17170462,"TERMINAL",0,0,"q",,terminal_output +5885,17170608,"TERMINAL",0,0,"ui",,terminal_output +5886,17170973,"TERMINAL",0,0," ",,terminal_output +5887,17171092,"TERMINAL",0,0," ",,terminal_output +5888,17171358,"TERMINAL",0,0,"     ",,terminal_output +5889,17171528,"TERMINAL",0,0,"q",,terminal_output +5890,17171755,"TERMINAL",0,0,"uit",,terminal_output +5891,17171980,"TERMINAL",0,0,"()",,terminal_output +5892,17172211,"TERMINAL",0,0,"\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 231, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 134, in __call__\r\n z = self.temporal_attention(z)\r\n File 
""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 330, in attention_fn\r\n # FIXME (f.srambical): Investigate whether/why this is needed\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 330, in attention_fn\r\n # FIXME (f.srambical): Investigate whether/why this is needed\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 90, in trace_dispatch\r\n return self.dispatch_line(frame)\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 115, in dispatch_line\r\n if self.quitting: raise BdbQuit\r\nbdb.BdbQuit\r\n",,terminal_output +5893,17172666,"TERMINAL",0,0,"^L",,terminal_output +5894,17172730,"TERMINAL",0,0,"^P",,terminal_output +5895,17172995,"TERMINAL",0,0,"\r\n(Pdb) ",,terminal_output +5896,17173253,"TERMINAL",0,0,"srun: error: hai007: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +5897,17174017,"TERMINAL",0,0,"[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +5898,17174077,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +5899,17174318,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +5900,17185787,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +5901,17190533,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 193, in \r\n action_batch = jasmine.vq_encode(batch, training=False)\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 391, in vq_encode\r\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\r\n File ""/fast/home/franz.srambical/jafar/models/lam.py"", line 133, in vq_encode\r\n z = self.encoder(padded_patches) # (B, T, N, E)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 231, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 126, in __call__\r\n z = self.spatial_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 339, in attention_fn\r\n output_4d = jax.nn.dot_product_attention(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py"", line 1212, in dot_product_attention\r\n out = _dot_product_attention_xla(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py"", line 974, in _dot_product_attention_xla\r\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py"", line 928, in _dot_product_attention_core\r\n padded_logits = _apply_masks(logits, mask, is_causal, q_seqlen, kv_seqlen,\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py"", line 868, in _apply_masks\r\n combined_mask = jnp.logical_and(combined_mask, mask)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufunc_api.py"", line 182, in __call__\r\n return call(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/ufuncs.py"", line 1888, in logical_and\r\n return lax.bitwise_and(*map(_to_bool, promote_args(""logical_and"", x, y)))\r\nTypeError: and got incompatible shapes for broadcasting: (2, 8, 61, 61), (1, 1, 64, 64).\r\n",,terminal_output +5902,17191304,"TERMINAL",0,0,"srun: error: hai007: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +5903,17199335,"/fast/home/franz.srambical/jafar/sample.py",0,0,"from dataclasses 
import dataclass\nimport time\nimport os\nimport optax\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nimport orbax.checkpoint as ocp\nfrom PIL import Image, ImageDraw\nimport tyro\nfrom flax import nnx\n\nfrom jasmine import Jasmine\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_co_train: bool = False\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n dynamics_type: str = ""maskgit""\n\n\nargs = tyro.cli(Args)\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Load Dynamics model checkpoint ---\n rngs = nnx.Rngs(rng)\n jasmine = Jasmine(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=args.lam_co_train,\n # Dynamics\n dynamics_type=args.dynamics_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=True,\n rngs=rngs,\n )\n\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n dummy_tx = optax.adamw(\n learning_rate=optax.linear_schedule(0.0001, 0.0001, 10000),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n dummy_optimizer = nnx.Optimizer(jasmine, dummy_tx)\n\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n 
model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(dummy_optimizer, restored_optimizer_state)\n\n # --- Define sampling function ---\n def _sampling_fn(model: Jasmine, batch: dict) -> jax.Array:\n """"""Runs Jasmine.sample with pre-defined generation hyper-parameters.""""""\n if args.dynamics_type == ""maskgit"":\n return model.sample_maskgit(\n batch,\n args.seq_len,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n )\n else:\n return model.sample_causal(\n batch,\n args.seq_len,\n args.temperature,\n args.sample_argmax,\n )\n\n # --- Define autoregressive sampling loop ---\n # @nnx.jit\n def _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = _sampling_fn(jasmine, batch)\n return generated_vid\n\n # --- Get video + latent actions ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n # We don't use workers in order to avoid grain shutdown issues (https://github.com/google/grain/issues/398)\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n dataloader = iter(dataloader)\n video_batch = next(dataloader)\n video_batch = video_batch.astype(args.dtype) / 255.0\n # Get latent actions for all videos in the batch\n batch = dict(videos=video_batch)\n action_batch = jasmine.vq_encode(batch, training=False)\n action_batch = jnp.asarray(action_batch).reshape(\n video_batch.shape[0], args.seq_len - 1, 1\n )\n\n # --- Sample + evaluate video ---\n # The autoregressive cache needs to be initialized with the shape of the tokenized inputs, not the raw video.\n # The number of spatial tokens is derived from the image dimensions and patch size.\n # It appears the 90x160 image is padded to 92x160, and a CLS token is added.\n # (92 // args.patch_size) * (160 // args.patch_size) + 1 = 23 * 40 + 1 = 921\n num_patches = ((args.image_height + 3) // 4 * 4 // args.patch_size) * (\n args.image_width // args.patch_size\n ) + 1\n # Shape for spatial attention: (batch, time, patches, num_heads, head_dim)\n spatial_token_shape = (\n args.batch_size,\n 1,\n num_patches,\n args.dyna_dim,\n )\n # Shape for temporal attention: (batch, patches, time, num_heads, head_dim)\n temporal_token_shape = (\n args.batch_size,\n num_patches,\n 1,\n args.dyna_dim,\n )\n if args.dynamics_type == ""causal"":\n transformer_blocks = jasmine.dynamics.transformer.blocks\n for block in transformer_blocks:\n block.spatial_attention.init_cache(spatial_token_shape, dtype=args.dtype)\n block.temporal_attention.init_cache(temporal_token_shape, dtype=args.dtype)\n vid = _autoreg_sample(rng, video_batch, action_batch)\n gt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\n recon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\n ssim = jnp.asarray(\n pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :])\n ).mean()\n print(f""SSIM: {ssim}"")\n\n # --- Construct video ---\n true_videos = (video_batch * 255).astype(np.uint8)\n pred_videos = (vid * 255).astype(np.uint8)\n video_comparison = np.zeros((2, *vid.shape), dtype=np.uint8)\n video_comparison[0] = true_videos[:, : 
args.seq_len]\n video_comparison[1] = pred_videos\n frames = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n # --- Save video ---\n imgs = [Image.fromarray(img) for img in frames]\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(action_batch.shape[0]):\n action = action_batch[row, t, 0]\n y_offset = row * video_batch.shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\n imgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n )\n",python,tab +5904,17199336,"/fast/home/franz.srambical/jafar/sample.py",5926,0,"",python,selection_command +5905,17201996,"/fast/home/franz.srambical/jafar/jasmine.py",0,0,"from typing import Dict\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics_causal import DynamicsCausal\nfrom models.dynamics_maskgit import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Jasmine(nnx.Module):\n """"""World model with three components: a tokenizer, a latent action model (LAM), and a dynamics model for predicting future tokens.""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: int,\n lam_ffn_dim: int,\n latent_action_dim: int,\n num_latent_actions: int,\n lam_patch_size: int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n dynamics_type: str,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim = lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_latent_actions = num_latent_actions\n self.lam_patch_size = lam_patch_size\n self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n # --- Dynamics ---\n self.dynamics_type = dynamics_type\n self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n self.dropout = dropout\n self.mask_limit = mask_limit\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.lam = LatentActionModel(\n 
in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n if self.dynamics_type == ""maskgit"":\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n elif self.dynamics_type == ""causal"":\n self.dynamics = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=rngs,\n )\n else:\n raise ValueError(f""Invalid dynamics type: {self.dynamics_type}"")\n\n def __call__(\n self, batch: Dict[str, jax.Array], training: bool = True\n ) -> Dict[str, jax.Array]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_logits, dyna_mask = self.dynamics(outputs, training)\n outputs[""token_logits""] = dyna_logits\n if dyna_mask is not None:\n outputs[""mask""] = dyna_mask\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n H, W = batch[""videos""].shape[2:4]\n outputs[""recon""] = self.tokenizer.decode(mle_indices, (H, W))\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n def sample_maskgit(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> jax.Array:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: patches per frame\n S: sequence length\n A: action space\n D: model latent dimension\n """"""\n assert self.dynamics_type == ""maskgit""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, 
N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n def maskgit_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array], step: jax.Array\n ) -> tuple[tuple[jax.Array, jax.Array, jax.Array, jax.Array], None]:\n rng, token_idxs, mask, action_tokens = carry\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n if not isinstance(self.dynamics, DynamicsMaskGIT):\n raise TypeError(""`sample_maskgit` requires `DynamicsMaskGIT`."")\n mask_token = self.dynamics.mask_token.value # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1)\n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.transformer(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(\n jax.nn.softmax(final_logits), sampled_token_idxs\n )\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[tuple[jax.Array, jax.Array], None]:\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)).astype(\n bool\n ) # (B, S, N)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = jax.lax.scan(\n maskgit_step_fn, init_carry_maskgit, jnp.arange(steps)\n )\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn, initial_carry, timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=(H, W),\n )\n return final_frames\n\n def sample_causal(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n 
temperature: float = 1,\n sample_argmax: bool = False,\n ) -> jax.Array:\n """"""\n Autoregressively samples up to `seq_len` future frames using the causal transformer backend.\n - Input frames are tokenized once.\n - Future frames are generated one at a time, each conditioned on all previous frames.\n - All frames are detokenized in a single pass at the end.\n Args:\n batch: Dict with at least ""videos"" (B, T, H, W, C)\n seq_len: total number of frames to generate (including context)\n temperature: sampling temperature\n sample_argmax: if True, use argmax instead of sampling\n Returns:\n Generated video frames (B, seq_len, H, W, C)\n """"""\n # --- Encode context frames ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n\n # --- Prepare initial token sequence ---\n # Pad with zeros for future frames\n pad_shape = (B, seq_len - T, N)\n token_idxs_full = jnp.concatenate(\n [token_idxs, jnp.zeros(pad_shape, dtype=token_idxs.dtype)], axis=1\n ) # (B, seq_len, N)\n\n # --- Prepare latent actions ---\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) # (B, S-1, )\n\n def token_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], token_idx: jax.Array\n ) -> tuple[tuple[jax.Array, jax.Array, jax.Array], None]:\n rng, token_idxs_full, action_tokens = carry\n t = token_idx // N\n n = token_idx % N\n\n # For autoregressive decoding, we only need to pass the token from the previous step.\n # The model internally uses a KV cache to remember previous tokens.\n current_token_sequence = jax.lax.dynamic_slice(\n token_idxs_full, (0, t, 0), (B, 1, N)\n )\n\n dyna_inputs = {\n ""video_tokens"": current_token_sequence,\n ""latent_actions"": action_tokens,\n }\n # The model will output logits for all patches in the sequence (which is just one frame).\n # FIXME: do we need the model to output logits for a single frame in order to use kv caching?\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\n # We select the logits for the specific patch `n` we are currently generating.\n next_token_logits = next_token_logits[:, 0, n, :].astype(\n jnp.float32\n ) # (B, vocab_size)\n\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B,)\n else:\n rng, step_rng = jax.random.split(rng)\n next_token = jax.random.categorical(\n step_rng, next_token_logits / temperature, axis=-1\n ) # (B,)\n\n # Insert the generated token into the full sequence.\n token_idxs_full = token_idxs_full.at[:, t, n].set(next_token)\n\n new_carry = (rng, token_idxs_full, action_tokens)\n return new_carry, None\n\n # --- Autoregressive generation ---\n future_frames = seq_len - T\n total_future_tokens = future_frames * N\n start_token_idx = T * N\n step_indices = jnp.arange(start_token_idx, start_token_idx + total_future_tokens)\n\n initial_carry = (batch[""rng""], token_idxs_full, action_tokens)\n final_carry, _ = jax.lax.scan(\n token_step_fn, initial_carry, step_indices\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames = self.tokenizer.decode(final_token_idxs, video_hw=(H, W))\n return final_frames\n\n def vq_encode(self, batch: Dict[str, jax.Array], training: bool) -> jax.Array:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\n# FIXME (f.srambical): add conversion script for old 
checkpoints\ndef restore_components(\n optimizer: nnx.Optimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n) -> nnx.Optimizer:\n """"""Restore pre-trained Genie components""""""\n rngs = nnx.Rngs(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n dummy_tokenizer_optimizer = nnx.Optimizer(dummy_tokenizer, dummy_tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_tokenizer_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n optimizer.model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n dummy_lam_optimizer = nnx.Optimizer(dummy_lam, dummy_tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n restored_lam_optimizer = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_lam_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_lam_optimizer.model, restored_lam_optimizer.model)\n optimizer.model.lam = dummy_lam_optimizer.model\n # Remove the LAM decoder to save memory and avoid unnecessary computation.\n del optimizer.model.lam.decoder\n lam_checkpoint_manager.close()\n\n return optimizer\n\n\ndef 
_create_abstract_sharded_pytree(\n pytree_template: nnx.GraphState, sharding_spec: jax.sharding.NamedSharding\n) -> jax.Array:\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab +5906,17201996,"/fast/home/franz.srambical/jafar/jasmine.py",15968,0,"",python,selection_command +5907,17204186,"/fast/home/franz.srambical/jafar/models/lam.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass LatentActionModel(nnx.Module):\n """"""Latent Action ST-ViVit VQ-VAE""""""\n\n def __init__(\n self,\n in_dim: int,\n model_dim: int,\n ffn_dim: int,\n latent_dim: int,\n num_latents: int,\n patch_size: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n codebook_dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.in_dim = in_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.patch_size = patch_size\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.codebook_dropout = codebook_dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.patch_token_dim = self.in_dim * self.patch_size**2\n self.encoder = STTransformer(\n self.in_dim * self.patch_size**2,\n self.model_dim,\n self.ffn_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n spatial_causal=False,\n decode=False,\n rngs=rngs,\n )\n self.action_in = nnx.Param(\n nnx.initializers.lecun_uniform()(\n rngs.params(), (1, 1, 1, self.patch_token_dim)\n )\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n rngs=rngs,\n )\n self.patch_up = nnx.Linear(\n self.patch_token_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.action_up = nnx.Linear(\n self.latent_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.decoder = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.patch_token_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n spatial_causal=False,\n decode=False,\n rngs=rngs,\n )\n\n def __call__(\n self, batch: Dict[str, jax.Array], training: bool = True\n ) -> Dict[str, jax.Array]:\n # --- Encode + VQ ---\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n video_action_patches = self.action_up(outputs[""z_q""]) + self.patch_up(\n outputs[""patches""][:, :-1]\n )\n del outputs[""patches""]\n\n # --- Decode ---\n video_recon = self.decoder(video_action_patches)\n video_recon = video_recon.astype(jnp.float32)\n video_recon = nnx.sigmoid(video_recon)\n video_recon = video_recon.astype(self.dtype)\n outputs[""recon""] = unpatchify(video_recon, self.patch_size, H, W)\n return outputs\n\n def vq_encode(\n self, 
videos: jax.Array, training: bool = True\n ) -> Dict[str, jax.Array]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches = patchify(videos, self.patch_size)\n action_pad = jnp.broadcast_to(\n self.action_in.value, (B, T, 1, self.patch_token_dim)\n )\n padded_patches = jnp.concatenate((action_pad, patches), axis=2)\n\n # --- Encode ---\n z = self.encoder(padded_patches) # (B, T, N, E)\n # Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n\n # --- Vector quantize ---\n z = z.reshape(B * (T - 1), self.latent_dim)\n z_q, z, emb, indices = self.vq(z, training)\n z_q = z_q.reshape(B, T - 1, 1, self.latent_dim)\n return dict(patches=patches, z_q=z_q, z=z, emb=emb, indices=indices)\n",python,tab +5908,17204186,"/fast/home/franz.srambical/jafar/models/lam.py",4026,0,"",python,selection_command +5909,17206693,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +5910,17206694,"/fast/home/franz.srambical/jafar/utils/nn.py",6905,0,"",python,selection_command +5911,17206781,"/fast/home/franz.srambical/jafar/utils/nn.py",9144,1437," # implementation = ""cudnn"" if use_flash_attention else None\n implementation = None\n\n def _rearrange(x):\n return einops.rearrange(x, ""... l h d -> (...) l h d"")\n\n def _pad(x):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n original_shape = query.shape\n original_seq_len = query.shape[-3]\n\n # Pad to nearest multiple of 4\n target_seq_len = ((original_seq_len + 3) // 4) * 4\n pad_size = target_seq_len - original_seq_len\n\n # query_4d = _pad(_rearrange(query))\n # key_4d = _pad(_rearrange(key))\n # value_4d = _pad(_rearrange(value))\n query_4d = _rearrange(query)\n key_4d = _rearrange(key)\n value_4d = _rearrange(value)\n\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n # Handle causal mask for cached decoder self-attention (from nnx.MultiHeadAttention)\n if mask is not None:\n # mask.shape (1, 921, 1, 1, 1)\n # mask_4d = _pad(_rearrange(mask))\n mask_4d = _rearrange(mask)\n mask_4d = mask_4d.astype(jnp.bool)\n else:\n # FIXME (f.srambical): Investigate whether/why this is needed\n mask_4d = attention_mask[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n if query.shape == (1, 921, 1, 8, 64):\n breakpoint()\n\n # bias_4d = _pad(_rearrange(bias)) if bias is not None else None\n bias_4d = _rearrange(bias) if bias is not None else None\n",python,content +5912,17215030,"/fast/home/franz.srambical/jafar/utils/nn.py",3861,0,"",python,selection_command +5913,17218963,"/fast/home/franz.srambical/jafar/utils/nn.py",10905,0,"",python,selection_command +5914,17239194,"/fast/home/franz.srambical/jafar/utils/nn.py",10835,0,"",python,selection_command +5915,17239439,"/fast/home/franz.srambical/jafar/utils/nn.py",10834,0,"",python,selection_command +5916,17239456,"/fast/home/franz.srambical/jafar/utils/nn.py",10769,0,"",python,selection_command +5917,17239485,"/fast/home/franz.srambical/jafar/utils/nn.py",10696,0,"",python,selection_command +5918,17239516,"/fast/home/franz.srambical/jafar/utils/nn.py",10695,0,"",python,selection_command +5919,17239553,"/fast/home/franz.srambical/jafar/utils/nn.py",10670,0,"",python,selection_command +5920,17239583,"/fast/home/franz.srambical/jafar/utils/nn.py",10624,0,"",python,selection_command +5921,17239617,"/fast/home/franz.srambical/jafar/utils/nn.py",10527,0,"",python,selection_command 
+5922,17239650,"/fast/home/franz.srambical/jafar/utils/nn.py",10453,0,"",python,selection_command +5923,17239684,"/fast/home/franz.srambical/jafar/utils/nn.py",10439,0,"",python,selection_command +5924,17239718,"/fast/home/franz.srambical/jafar/utils/nn.py",10392,0,"",python,selection_command +5925,17239752,"/fast/home/franz.srambical/jafar/utils/nn.py",10353,0,"",python,selection_command +5926,17239786,"/fast/home/franz.srambical/jafar/utils/nn.py",10306,0,"",python,selection_command +5927,17239819,"/fast/home/franz.srambical/jafar/utils/nn.py",10263,0,"",python,selection_command +5928,17239853,"/fast/home/franz.srambical/jafar/utils/nn.py",10234,0,"",python,selection_command +5929,17239886,"/fast/home/franz.srambical/jafar/utils/nn.py",10141,0,"",python,selection_command +5930,17239920,"/fast/home/franz.srambical/jafar/utils/nn.py",10140,0,"",python,selection_command +5931,17240037,"/fast/home/franz.srambical/jafar/utils/nn.py",10064,0,"",python,selection_command +5932,17240204,"/fast/home/franz.srambical/jafar/utils/nn.py",9988,0,"",python,selection_command +5933,17240357,"/fast/home/franz.srambical/jafar/utils/nn.py",9903,0,"",python,selection_command +5934,17240476,"/fast/home/franz.srambical/jafar/utils/nn.py",9911,0,"",python,selection_command +5935,17240834,"/fast/home/franz.srambical/jafar/utils/nn.py",9911,1,"a",python,selection_command +5936,17240996,"/fast/home/franz.srambical/jafar/utils/nn.py",9911,1,"a",python,selection_command +5937,17241154,"/fast/home/franz.srambical/jafar/utils/nn.py",9911,1,"a",python,selection_command +5938,17241502,"/fast/home/franz.srambical/jafar/utils/nn.py",9911,0,"",python,selection_command +5939,17241701,"/fast/home/franz.srambical/jafar/utils/nn.py",10072,0,"#",python,content +5940,17241701,"/fast/home/franz.srambical/jafar/utils/nn.py",9996,0,"#",python,content +5941,17241701,"/fast/home/franz.srambical/jafar/utils/nn.py",9911,0,"#",python,content +5942,17241701,"/fast/home/franz.srambical/jafar/utils/nn.py",9912,0,"",python,selection_keyboard +5943,17241711,"/fast/home/franz.srambical/jafar/utils/nn.py",10075,0," ",python,content +5944,17241711,"/fast/home/franz.srambical/jafar/utils/nn.py",9998,0," ",python,content +5945,17241711,"/fast/home/franz.srambical/jafar/utils/nn.py",9912,0," ",python,content +5946,17241711,"/fast/home/franz.srambical/jafar/utils/nn.py",9913,0,"",python,selection_keyboard +5947,17241942,"/fast/home/franz.srambical/jafar/utils/nn.py",9912,0,"",python,selection_command +5948,17244048,"/fast/home/franz.srambical/jafar/utils/nn.py",9903,86," # attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)",python,selection_command +5949,17244178,"/fast/home/franz.srambical/jafar/utils/nn.py",9903,164," # attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n # attention_mask = attention_mask.at[original_seq_len:, :].set(False)",python,selection_command +5950,17244342,"/fast/home/franz.srambical/jafar/utils/nn.py",9903,242," # attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n # attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n # attention_mask = attention_mask.at[:, original_seq_len:].set(False)",python,selection_command +5951,17244575,"/fast/home/franz.srambical/jafar/utils/nn.py",9903,0,"",python,selection_command +5952,17244814,"/fast/home/franz.srambical/jafar/utils/nn.py",9990,0,"",python,selection_command +5953,17244980,"/fast/home/franz.srambical/jafar/utils/nn.py",10068,0,"",python,selection_command 
+5954,17245396,"/fast/home/franz.srambical/jafar/utils/nn.py",10145,0,"\n ",python,content +5955,17245649,"/fast/home/franz.srambical/jafar/utils/nn.py",10146,8,"",python,content +5956,17245748,"/fast/home/franz.srambical/jafar/utils/nn.py",10146,0,"\n # attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n # attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n # attention_mask = attention_mask.at[:, original_seq_len:].set(False)",python,content +5957,17245764,"/fast/home/franz.srambical/jafar/utils/nn.py",10155,0,"",python,selection_command +5958,17246061,"/fast/home/franz.srambical/jafar/utils/nn.py",10146,0,"",python,selection_command +5959,17246417,"/fast/home/franz.srambical/jafar/utils/nn.py",10146,1,"",python,content +5960,17246417,"/fast/home/franz.srambical/jafar/utils/nn.py",10154,0,"",python,selection_command +5961,17247899,"/fast/home/franz.srambical/jafar/utils/nn.py",10146,86," # attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)",python,selection_command +5962,17248065,"/fast/home/franz.srambical/jafar/utils/nn.py",10146,164," # attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n # attention_mask = attention_mask.at[original_seq_len:, :].set(False)",python,selection_command +5963,17248223,"/fast/home/franz.srambical/jafar/utils/nn.py",10146,242," # attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n # attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n # attention_mask = attention_mask.at[:, original_seq_len:].set(False)",python,selection_command +5964,17248754,"/fast/home/franz.srambical/jafar/utils/nn.py",10146,243,"",python,content +5965,17250265,"/fast/home/franz.srambical/jafar/utils/nn.py",10068,0,"",python,selection_command +5966,17250470,"/fast/home/franz.srambical/jafar/utils/nn.py",10146,0,"",python,selection_command +5967,17250708,"/fast/home/franz.srambical/jafar/utils/nn.py",10147,0,"",python,selection_command +5968,17250732,"/fast/home/franz.srambical/jafar/utils/nn.py",10240,0,"",python,selection_command +5969,17250767,"/fast/home/franz.srambical/jafar/utils/nn.py",10269,0,"",python,selection_command +5970,17250800,"/fast/home/franz.srambical/jafar/utils/nn.py",10312,0,"",python,selection_command +5971,17250832,"/fast/home/franz.srambical/jafar/utils/nn.py",10359,0,"",python,selection_command +5972,17251001,"/fast/home/franz.srambical/jafar/utils/nn.py",10398,0,"",python,selection_command +5973,17251189,"/fast/home/franz.srambical/jafar/utils/nn.py",10445,0,"",python,selection_command +5974,17256243,"/fast/home/franz.srambical/jafar/utils/nn.py",10545,0,"",python,selection_command +5975,17256540,"/fast/home/franz.srambical/jafar/utils/nn.py",10545,0," ",python,content +5976,17256540,"/fast/home/franz.srambical/jafar/utils/nn.py",10546,0,"",python,selection_keyboard +5977,17256973,"/fast/home/franz.srambical/jafar/utils/nn.py",10545,1,"",python,content +5978,17257025,"/fast/home/franz.srambical/jafar/utils/nn.py",10545,0,"#",python,content +5979,17257026,"/fast/home/franz.srambical/jafar/utils/nn.py",10546,0,"",python,selection_keyboard +5980,17257100,"/fast/home/franz.srambical/jafar/utils/nn.py",10546,0," ",python,content +5981,17257100,"/fast/home/franz.srambical/jafar/utils/nn.py",10547,0,"",python,selection_keyboard +5982,17257308,"/fast/home/franz.srambical/jafar/utils/nn.py",10546,0,"",python,selection_command +5983,17258542,"/fast/home/franz.srambical/jafar/utils/nn.py",10631,0,"\n # mask_4d = 
attention_mask[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)",python,content +5984,17258546,"/fast/home/franz.srambical/jafar/utils/nn.py",10644,0,"",python,selection_command +5985,17259047,"/fast/home/franz.srambical/jafar/utils/nn.py",10644,1,"",python,content +5986,17259188,"/fast/home/franz.srambical/jafar/utils/nn.py",10644,1,"",python,content +5987,17259758,"/fast/home/franz.srambical/jafar/utils/nn.py",10652,0,"",python,selection_command +5988,17259962,"/fast/home/franz.srambical/jafar/utils/nn.py",10654,0,"",python,selection_command +5989,17260124,"/fast/home/franz.srambical/jafar/utils/nn.py",10668,0,"",python,selection_command +5990,17260275,"/fast/home/franz.srambical/jafar/utils/nn.py",10669,0,"",python,selection_command +5991,17260563,"/fast/home/franz.srambical/jafar/utils/nn.py",10672,0,"",python,selection_command +5992,17261214,"/fast/home/franz.srambical/jafar/utils/nn.py",10673,0,"",python,selection_command +5993,17261461,"/fast/home/franz.srambical/jafar/utils/nn.py",10680,0,"",python,selection_command +5994,17262019,"/fast/home/franz.srambical/jafar/utils/nn.py",10673,0,"",python,selection_command +5995,17262191,"/fast/home/franz.srambical/jafar/utils/nn.py",10672,0,"",python,selection_command +5996,17262304,"/fast/home/franz.srambical/jafar/utils/nn.py",10669,0,"",python,selection_command +5997,17262681,"/fast/home/franz.srambical/jafar/utils/nn.py",10668,0,"",python,selection_command +5998,17263112,"/fast/home/franz.srambical/jafar/utils/nn.py",10654,0,"",python,selection_command +5999,17270192,"/fast/home/franz.srambical/jafar/utils/nn.py",10654,74,"",python,content +6000,17270420,"/fast/home/franz.srambical/jafar/utils/nn.py",10654,0,"j",python,content +6001,17270420,"/fast/home/franz.srambical/jafar/utils/nn.py",10655,0,"",python,selection_keyboard +6002,17270561,"/fast/home/franz.srambical/jafar/utils/nn.py",10655,0,"n",python,content +6003,17270562,"/fast/home/franz.srambical/jafar/utils/nn.py",10656,0,"",python,selection_keyboard +6004,17270614,"/fast/home/franz.srambical/jafar/utils/nn.py",10656,0,"p",python,content +6005,17270614,"/fast/home/franz.srambical/jafar/utils/nn.py",10657,0,"",python,selection_keyboard +6006,17271110,"/fast/home/franz.srambical/jafar/utils/nn.py",10657,0,".",python,content +6007,17271110,"/fast/home/franz.srambical/jafar/utils/nn.py",10658,0,"",python,selection_keyboard +6008,17271463,"/fast/home/franz.srambical/jafar/utils/nn.py",10658,0,"ones((1, 1, target_seq_len, target_seq_len), dtype=jnp.bool)",python,content +6009,17271730,"/fast/home/franz.srambical/jafar/utils/nn.py",10717,0,"",python,selection_command +6010,17271916,"/fast/home/franz.srambical/jafar/utils/nn.py",10713,0,"",python,selection_command +6011,17272165,"/fast/home/franz.srambical/jafar/utils/nn.py",10712,0,"",python,selection_command +6012,17272200,"/fast/home/franz.srambical/jafar/utils/nn.py",10709,0,"",python,selection_command +6013,17272252,"/fast/home/franz.srambical/jafar/utils/nn.py",10708,0,"",python,selection_command +6014,17272356,"/fast/home/franz.srambical/jafar/utils/nn.py",10703,0,"",python,selection_command +6015,17272517,"/fast/home/franz.srambical/jafar/utils/nn.py",10700,0,"",python,selection_command +6016,17272653,"/fast/home/franz.srambical/jafar/utils/nn.py",10686,0,"",python,selection_command +6017,17272803,"/fast/home/franz.srambical/jafar/utils/nn.py",10684,0,"",python,selection_command +6018,17273115,"/fast/home/franz.srambical/jafar/utils/nn.py",10670,0,"",python,selection_command 
+6019,17274993,"/fast/home/franz.srambical/jafar/utils/nn.py",10670,14,"",python,content +6020,17275351,"/fast/home/franz.srambical/jafar/utils/nn.py",10670,0,"s",python,content +6021,17275351,"/fast/home/franz.srambical/jafar/utils/nn.py",10671,0,"",python,selection_keyboard +6022,17275664,"/fast/home/franz.srambical/jafar/utils/nn.py",10671,0,"e",python,content +6023,17275664,"/fast/home/franz.srambical/jafar/utils/nn.py",10672,0,"",python,selection_keyboard +6024,17275767,"/fast/home/franz.srambical/jafar/utils/nn.py",10672,0,"q",python,content +6025,17275768,"/fast/home/franz.srambical/jafar/utils/nn.py",10673,0,"",python,selection_keyboard +6026,17276136,"/fast/home/franz.srambical/jafar/utils/nn.py",10672,1,"",python,content +6027,17276268,"/fast/home/franz.srambical/jafar/utils/nn.py",10671,1,"",python,content +6028,17276631,"/fast/home/franz.srambical/jafar/utils/nn.py",10671,0,"o",python,content +6029,17276631,"/fast/home/franz.srambical/jafar/utils/nn.py",10672,0,"",python,selection_keyboard +6030,17276885,"/fast/home/franz.srambical/jafar/utils/nn.py",10671,1,"",python,content +6031,17277024,"/fast/home/franz.srambical/jafar/utils/nn.py",10670,1,"",python,content +6032,17277171,"/fast/home/franz.srambical/jafar/utils/nn.py",10670,0,"o",python,content +6033,17277171,"/fast/home/franz.srambical/jafar/utils/nn.py",10671,0,"",python,selection_keyboard +6034,17277285,"/fast/home/franz.srambical/jafar/utils/nn.py",10671,0,"r",python,content +6035,17277285,"/fast/home/franz.srambical/jafar/utils/nn.py",10672,0,"",python,selection_keyboard +6036,17277353,"/fast/home/franz.srambical/jafar/utils/nn.py",10672,0,"i",python,content +6037,17277354,"/fast/home/franz.srambical/jafar/utils/nn.py",10673,0,"",python,selection_keyboard +6038,17277438,"/fast/home/franz.srambical/jafar/utils/nn.py",10673,0,"g",python,content +6039,17277438,"/fast/home/franz.srambical/jafar/utils/nn.py",10674,0,"",python,selection_keyboard +6040,17277503,"/fast/home/franz.srambical/jafar/utils/nn.py",10674,0,"i",python,content +6041,17277503,"/fast/home/franz.srambical/jafar/utils/nn.py",10675,0,"",python,selection_keyboard +6042,17278004,"/fast/home/franz.srambical/jafar/utils/nn.py",10675,0,"nal_seq_len",python,content +6043,17278255,"/fast/home/franz.srambical/jafar/utils/nn.py",10685,0,"",python,selection_command +6044,17278456,"/fast/home/franz.srambical/jafar/utils/nn.py",10686,0,"",python,selection_command +6045,17278619,"/fast/home/franz.srambical/jafar/utils/nn.py",10688,0,"",python,selection_command +6046,17278827,"/fast/home/franz.srambical/jafar/utils/nn.py",10688,14,"",python,content +6047,17278921,"/fast/home/franz.srambical/jafar/utils/nn.py",10688,0,"o",python,content +6048,17278922,"/fast/home/franz.srambical/jafar/utils/nn.py",10689,0,"",python,selection_keyboard +6049,17279037,"/fast/home/franz.srambical/jafar/utils/nn.py",10689,0,"r",python,content +6050,17279037,"/fast/home/franz.srambical/jafar/utils/nn.py",10690,0,"",python,selection_keyboard +6051,17279118,"/fast/home/franz.srambical/jafar/utils/nn.py",10690,0,"i",python,content +6052,17279118,"/fast/home/franz.srambical/jafar/utils/nn.py",10691,0,"",python,selection_keyboard +6053,17279252,"/fast/home/franz.srambical/jafar/utils/nn.py",10691,0,"g",python,content +6054,17279253,"/fast/home/franz.srambical/jafar/utils/nn.py",10692,0,"",python,selection_keyboard +6055,17279654,"/fast/home/franz.srambical/jafar/utils/nn.py",10691,0,"",python,selection_command 
+6056,17280909,"/fast/home/franz.srambical/jafar/utils/nn.py",10692,0,"",python,selection_command +6057,17281041,"/fast/home/franz.srambical/jafar/utils/nn.py",10692,0,"i",python,content +6058,17281041,"/fast/home/franz.srambical/jafar/utils/nn.py",10693,0,"",python,selection_keyboard +6059,17281261,"/fast/home/franz.srambical/jafar/utils/nn.py",10693,0,"g",python,content +6060,17281261,"/fast/home/franz.srambical/jafar/utils/nn.py",10694,0,"",python,selection_keyboard +6061,17281565,"/fast/home/franz.srambical/jafar/utils/nn.py",10693,1,"",python,content +6062,17281690,"/fast/home/franz.srambical/jafar/utils/nn.py",10693,0,"n",python,content +6063,17281690,"/fast/home/franz.srambical/jafar/utils/nn.py",10694,0,"",python,selection_keyboard +6064,17281757,"/fast/home/franz.srambical/jafar/utils/nn.py",10694,0,"a",python,content +6065,17281757,"/fast/home/franz.srambical/jafar/utils/nn.py",10695,0,"",python,selection_keyboard +6066,17281874,"/fast/home/franz.srambical/jafar/utils/nn.py",10695,0,"l",python,content +6067,17281875,"/fast/home/franz.srambical/jafar/utils/nn.py",10696,0,"",python,selection_keyboard +6068,17282279,"/fast/home/franz.srambical/jafar/utils/nn.py",10695,0,"",python,selection_command +6069,17283326,"/fast/home/franz.srambical/jafar/utils/nn.py",10696,0,"",python,selection_command +6070,17283585,"/fast/home/franz.srambical/jafar/utils/nn.py",10696,0,"_",python,content +6071,17283585,"/fast/home/franz.srambical/jafar/utils/nn.py",10697,0,"",python,selection_keyboard +6072,17284530,"/fast/home/franz.srambical/jafar/utils/nn.py",10688,9,"original_seq_len",python,content +6073,17284743,"/fast/home/franz.srambical/jafar/utils/nn.py",10703,0,"",python,selection_command +6074,17284974,"/fast/home/franz.srambical/jafar/utils/nn.py",10632,0,"",python,selection_command +6075,17286640,"TERMINAL",0,0,"[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +6076,17286723,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +6077,17287008,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +6078,17298399,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +6079,17308916,"TERMINAL",0,0,"2025-07-27 14:07:08.718423: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6080,17309399,"TERMINAL",0,0,"2025-07-27 14:07:09.201462: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6081,17312058,"TERMINAL",0,0,"2025-07-27 14:07:11.861113: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6082,17313545,"TERMINAL",0,0,"2025-07-27 14:07:13.347575: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6083,17318575,"TERMINAL",0,0,"2025-07-27 14:07:18.375137: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6084,17319265,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/utils/nn.py(337)attention_fn()\r\n-> bias_4d = _rearrange(bias) if bias is not None else None\r\n",,terminal_output +6085,17323302,"TERMINAL",0,0,"c",,terminal_output +6086,17323384,"TERMINAL",0,0,"\r\n",,terminal_output +6087,17323437,"TERMINAL",0,0,"(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(337)attention_fn()\r\n-> bias_4d = _rearrange(bias) if bias is not None else None\r\n",,terminal_output +6088,17326495,"TERMINAL",0,0,"\r\n",,terminal_output +6089,17332282,"TERMINAL",0,0,"c",,terminal_output +6090,17332884,"TERMINAL",0,0,"\r\n(Pdb) (Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(337)attention_fn()\r\n-> bias_4d = _rearrange(bias) if bias is not None else None\r\n",,terminal_output +6091,17334802,"TERMINAL",0,0,"c\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(337)attention_fn()\r\n-> bias_4d = _rearrange(bias) if bias is not None else None\r\n",,terminal_output +6092,17335305,"TERMINAL",0,0,"c",,terminal_output +6093,17335447,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(337)attention_fn()\r\n-> bias_4d = _rearrange(bias) if bias is not None else None\r\n",,terminal_output +6094,17335704,"TERMINAL",0,0,"c",,terminal_output +6095,17335803,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(337)attention_fn()\r\n-> bias_4d = _rearrange(bias) if bias is not None else None\r\n",,terminal_output +6096,17336064,"TERMINAL",0,0,"c",,terminal_output +6097,17336129,"TERMINAL",0,0,"\r\n",,terminal_output +6098,17336345,"TERMINAL",0,0,"c",,terminal_output +6099,17336426,"TERMINAL",0,0,"\r\n",,terminal_output +6100,17336564,"TERMINAL",0,0,"c",,terminal_output +6101,17336669,"TERMINAL",0,0,"2025-07-27 14:07:36.432751: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n\r\n",,terminal_output +6102,17336792,"TERMINAL",0,0,"c",,terminal_output +6103,17336935,"TERMINAL",0,0,"\r\n",,terminal_output +6104,17337054,"TERMINAL",0,0,"c",,terminal_output +6105,17337197,"TERMINAL",0,0,"\r\n",,terminal_output +6106,17339233,"TERMINAL",0,0,"(Pdb) SSIM: 0.01034794095903635\r\n",,terminal_output +6107,17340457,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +6108,17380390,"/fast/home/franz.srambical/jafar/utils/nn.py",10632,91,"",python,content +6109,17380413,"/fast/home/franz.srambical/jafar/utils/nn.py",10640,0,"",python,selection_command +6110,17380795,"/fast/home/franz.srambical/jafar/utils/nn.py",10541,0,"",python,selection_command +6111,17380907,"/fast/home/franz.srambical/jafar/utils/nn.py",10545,0,"",python,selection_command +6112,17381121,"/fast/home/franz.srambical/jafar/utils/nn.py",10547,0,"",python,selection_command +6113,17381716,"/fast/home/franz.srambical/jafar/utils/nn.py",10545,1,"",python,content +6114,17381877,"/fast/home/franz.srambical/jafar/utils/nn.py",10545,1,"",python,content +6115,17382360,"/fast/home/franz.srambical/jafar/utils/nn.py",10471,0,"",python,selection_command +6116,17382508,"/fast/home/franz.srambical/jafar/utils/nn.py",10457,0,"",python,selection_command +6117,17382664,"/fast/home/franz.srambical/jafar/utils/nn.py",10410,0,"",python,selection_command +6118,17382790,"/fast/home/franz.srambical/jafar/utils/nn.py",10371,0,"",python,selection_command +6119,17382962,"/fast/home/franz.srambical/jafar/utils/nn.py",10324,0,"",python,selection_command +6120,17383127,"/fast/home/franz.srambical/jafar/utils/nn.py",10281,0,"",python,selection_command +6121,17383481,"/fast/home/franz.srambical/jafar/utils/nn.py",10324,0,"",python,selection_command +6122,17383644,"/fast/home/franz.srambical/jafar/utils/nn.py",10371,0,"",python,selection_command +6123,17384100,"/fast/home/franz.srambical/jafar/utils/nn.py",10359,39,"",python,content +6124,17384133,"/fast/home/franz.srambical/jafar/utils/nn.py",10371,0,"",python,selection_command +6125,17384479,"/fast/home/franz.srambical/jafar/utils/nn.py",10359,47,"",python,content +6126,17384497,"/fast/home/franz.srambical/jafar/utils/nn.py",10367,0,"",python,selection_command +6127,17384723,"/fast/home/franz.srambical/jafar/utils/nn.py",10320,0,"",python,selection_command +6128,17385080,"/fast/home/franz.srambical/jafar/utils/nn.py",10324,0,"",python,selection_command +6129,17385619,"/fast/home/franz.srambical/jafar/utils/nn.py",10367,0," mask_4d = mask_4d.astype(jnp.bool)\n ",python,content +6130,17385619,"/fast/home/franz.srambical/jafar/utils/nn.py",10371,0,"",python,selection_command +6131,17386012,"/fast/home/franz.srambical/jafar/utils/nn.py",10381,0,"_rearrange(mask)\n mask_4d = ",python,content +6132,17388064,"/fast/home/franz.srambical/jafar/utils/nn.py",10359,39,"",python,content +6133,17388086,"/fast/home/franz.srambical/jafar/utils/nn.py",10371,0,"",python,selection_command +6134,17388177,"/fast/home/franz.srambical/jafar/utils/nn.py",10324,0,"",python,selection_command +6135,17388652,"/fast/home/franz.srambical/jafar/utils/nn.py",10324,1,"",python,content +6136,17388775,"/fast/home/franz.srambical/jafar/utils/nn.py",10324,1,"",python,content +6137,17392433,"/fast/home/franz.srambical/jafar/utils/nn.py",10281,0,"",python,selection_command 
+6138,17393319,"/fast/home/franz.srambical/jafar/utils/nn.py",10252,0,"",python,selection_command +6139,17393453,"/fast/home/franz.srambical/jafar/utils/nn.py",10159,0,"",python,selection_command +6140,17393600,"/fast/home/franz.srambical/jafar/utils/nn.py",10146,0,"",python,selection_command +6141,17393793,"/fast/home/franz.srambical/jafar/utils/nn.py",10080,0,"",python,selection_command +6142,17394590,"/fast/home/franz.srambical/jafar/utils/nn.py",10068,0,"",python,selection_command +6143,17395087,"/fast/home/franz.srambical/jafar/utils/nn.py",10076,0,"",python,selection_command +6144,17395325,"/fast/home/franz.srambical/jafar/utils/nn.py",10076,1,"#",python,selection_command +6145,17395437,"/fast/home/franz.srambical/jafar/utils/nn.py",10076,2,"# ",python,selection_command +6146,17395608,"/fast/home/franz.srambical/jafar/utils/nn.py",9998,2,"# ",python,selection_command +6147,17395790,"/fast/home/franz.srambical/jafar/utils/nn.py",9911,2,"# ",python,selection_command +6148,17396024,"/fast/home/franz.srambical/jafar/utils/nn.py",10076,2,"",python,content +6149,17396024,"/fast/home/franz.srambical/jafar/utils/nn.py",9998,2,"",python,content +6150,17396024,"/fast/home/franz.srambical/jafar/utils/nn.py",9911,2,"",python,content +6151,17396047,"/fast/home/franz.srambical/jafar/utils/nn.py",9911,0,"",python,selection_command +6152,17396394,"/fast/home/franz.srambical/jafar/utils/nn.py",9902,0,"",python,selection_command +6153,17396671,"/fast/home/franz.srambical/jafar/utils/nn.py",9873,0,"",python,selection_command +6154,17396800,"/fast/home/franz.srambical/jafar/utils/nn.py",9840,0,"",python,selection_command +6155,17396988,"/fast/home/franz.srambical/jafar/utils/nn.py",9803,0,"",python,selection_command +6156,17397259,"/fast/home/franz.srambical/jafar/utils/nn.py",9795,37,"",python,content +6157,17397292,"/fast/home/franz.srambical/jafar/utils/nn.py",9803,0,"",python,selection_command +6158,17397586,"/fast/home/franz.srambical/jafar/utils/nn.py",9795,33,"",python,content +6159,17397595,"/fast/home/franz.srambical/jafar/utils/nn.py",9803,0,"",python,selection_command +6160,17397994,"/fast/home/franz.srambical/jafar/utils/nn.py",9795,37,"",python,content +6161,17398186,"/fast/home/franz.srambical/jafar/utils/nn.py",9750,0,"",python,selection_command +6162,17398469,"/fast/home/franz.srambical/jafar/utils/nn.py",9758,0,"",python,selection_command +6163,17398824,"/fast/home/franz.srambical/jafar/utils/nn.py",9758,1,"#",python,selection_command +6164,17398956,"/fast/home/franz.srambical/jafar/utils/nn.py",9758,2,"# ",python,selection_command +6165,17399161,"/fast/home/franz.srambical/jafar/utils/nn.py",9717,2,"# ",python,selection_command +6166,17399326,"/fast/home/franz.srambical/jafar/utils/nn.py",9672,2,"# ",python,selection_command +6167,17399649,"/fast/home/franz.srambical/jafar/utils/nn.py",9758,2,"",python,content +6168,17399649,"/fast/home/franz.srambical/jafar/utils/nn.py",9717,2,"",python,content +6169,17399649,"/fast/home/franz.srambical/jafar/utils/nn.py",9672,2,"",python,content +6170,17399657,"/fast/home/franz.srambical/jafar/utils/nn.py",9672,0,"",python,selection_command +6171,17400108,"/fast/home/franz.srambical/jafar/utils/nn.py",9663,0,"",python,selection_command +6172,17400274,"/fast/home/franz.srambical/jafar/utils/nn.py",9618,0,"",python,selection_command +6173,17400425,"/fast/home/franz.srambical/jafar/utils/nn.py",9559,0,"",python,selection_command +6174,17403731,"/fast/home/franz.srambical/jafar/utils/nn.py",9618,0,"",python,selection_command 
+6175,17404898,"/fast/home/franz.srambical/jafar/utils/nn.py",9559,0,"",python,selection_command +6176,17405123,"/fast/home/franz.srambical/jafar/utils/nn.py",9520,0,"",python,selection_command +6177,17405155,"/fast/home/franz.srambical/jafar/utils/nn.py",9511,0,"",python,selection_command +6178,17405192,"/fast/home/franz.srambical/jafar/utils/nn.py",9476,0,"",python,selection_command +6179,17405225,"/fast/home/franz.srambical/jafar/utils/nn.py",9439,0,"",python,selection_command +6180,17405258,"/fast/home/franz.srambical/jafar/utils/nn.py",9430,0,"",python,selection_command +6181,17405302,"/fast/home/franz.srambical/jafar/utils/nn.py",9367,0,"",python,selection_command +6182,17405338,"/fast/home/franz.srambical/jafar/utils/nn.py",9346,0,"",python,selection_command +6183,17405366,"/fast/home/franz.srambical/jafar/utils/nn.py",9337,0,"",python,selection_command +6184,17405390,"/fast/home/franz.srambical/jafar/utils/nn.py",9278,0,"",python,selection_command +6185,17405416,"/fast/home/franz.srambical/jafar/utils/nn.py",9251,0,"",python,selection_command +6186,17405450,"/fast/home/franz.srambical/jafar/utils/nn.py",9242,0,"",python,selection_command +6187,17405641,"/fast/home/franz.srambical/jafar/utils/nn.py",9220,0,"",python,selection_command +6188,17405812,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,0,"",python,selection_command +6189,17405921,"/fast/home/franz.srambical/jafar/utils/nn.py",9118,0,"",python,selection_command +6190,17406936,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,0,"",python,selection_command +6191,17408934,"/fast/home/franz.srambical/jafar/utils/nn.py",9220,0,"",python,selection_command +6192,17409117,"/fast/home/franz.srambical/jafar/utils/nn.py",9242,0,"",python,selection_command +6193,17409249,"/fast/home/franz.srambical/jafar/utils/nn.py",9220,0,"",python,selection_command +6194,17409669,"/fast/home/franz.srambical/jafar/utils/nn.py",9212,0,"",python,selection_command +6195,17411549,"/fast/home/franz.srambical/jafar/utils/nn.py",9242,0,"",python,selection_command +6196,17411784,"/fast/home/franz.srambical/jafar/utils/nn.py",9243,0,"",python,selection_command +6197,17411813,"/fast/home/franz.srambical/jafar/utils/nn.py",9270,0,"",python,selection_command +6198,17411847,"/fast/home/franz.srambical/jafar/utils/nn.py",9337,0,"",python,selection_command +6199,17411879,"/fast/home/franz.srambical/jafar/utils/nn.py",9338,0,"",python,selection_command +6200,17411915,"/fast/home/franz.srambical/jafar/utils/nn.py",9359,0,"",python,selection_command +6201,17411949,"/fast/home/franz.srambical/jafar/utils/nn.py",9430,0,"",python,selection_command +6202,17411980,"/fast/home/franz.srambical/jafar/utils/nn.py",9431,0,"",python,selection_command +6203,17412014,"/fast/home/franz.srambical/jafar/utils/nn.py",9468,0,"",python,selection_command +6204,17412048,"/fast/home/franz.srambical/jafar/utils/nn.py",9511,0,"",python,selection_command +6205,17412080,"/fast/home/franz.srambical/jafar/utils/nn.py",9512,0,"",python,selection_command +6206,17412114,"/fast/home/franz.srambical/jafar/utils/nn.py",9551,0,"",python,selection_command +6207,17412148,"/fast/home/franz.srambical/jafar/utils/nn.py",9610,0,"",python,selection_command +6208,17412181,"/fast/home/franz.srambical/jafar/utils/nn.py",9663,0,"",python,selection_command +6209,17413944,"/fast/home/franz.srambical/jafar/utils/nn.py",9664,0,"",python,selection_command +6210,17414203,"/fast/home/franz.srambical/jafar/utils/nn.py",9707,0,"",python,selection_command 
+6211,17414203,"/fast/home/franz.srambical/jafar/utils/nn.py",9746,0,"",python,selection_command +6212,17414230,"/fast/home/franz.srambical/jafar/utils/nn.py",9789,0,"",python,selection_command +6213,17414261,"/fast/home/franz.srambical/jafar/utils/nn.py",9790,0,"",python,selection_command +6214,17414334,"/fast/home/franz.srambical/jafar/utils/nn.py",9875,0,"",python,selection_command +6215,17414334,"/fast/home/franz.srambical/jafar/utils/nn.py",9951,0,"",python,selection_command +6216,17414362,"/fast/home/franz.srambical/jafar/utils/nn.py",10027,0,"",python,selection_command +6217,17414395,"/fast/home/franz.srambical/jafar/utils/nn.py",10028,0,"",python,selection_command +6218,17414463,"/fast/home/franz.srambical/jafar/utils/nn.py",10121,0,"",python,selection_command +6219,17414464,"/fast/home/franz.srambical/jafar/utils/nn.py",10150,0,"",python,selection_command +6220,17414495,"/fast/home/franz.srambical/jafar/utils/nn.py",10193,0,"",python,selection_command +6221,17414528,"/fast/home/franz.srambical/jafar/utils/nn.py",10238,0,"",python,selection_command +6222,17414593,"/fast/home/franz.srambical/jafar/utils/nn.py",10285,0,"",python,selection_command +6223,17414594,"/fast/home/franz.srambical/jafar/utils/nn.py",10299,0,"",python,selection_command +6224,17414628,"/fast/home/franz.srambical/jafar/utils/nn.py",10373,0,"",python,selection_command +6225,17414662,"/fast/home/franz.srambical/jafar/utils/nn.py",10470,0,"",python,selection_command +6226,17414723,"/fast/home/franz.srambical/jafar/utils/nn.py",10516,0,"",python,selection_command +6227,17414870,"/fast/home/franz.srambical/jafar/utils/nn.py",10541,0,"",python,selection_command +6228,17414995,"/fast/home/franz.srambical/jafar/utils/nn.py",10542,0,"",python,selection_command +6229,17415139,"/fast/home/franz.srambical/jafar/utils/nn.py",10615,0,"",python,selection_command +6230,17415485,"/fast/home/franz.srambical/jafar/utils/nn.py",10615,65,"",python,content +6231,17415600,"/fast/home/franz.srambical/jafar/utils/nn.py",10542,0,"",python,selection_command +6232,17415736,"/fast/home/franz.srambical/jafar/utils/nn.py",10550,0,"",python,selection_command +6233,17415982,"/fast/home/franz.srambical/jafar/utils/nn.py",10550,1,"",python,content +6234,17416136,"/fast/home/franz.srambical/jafar/utils/nn.py",10550,1,"",python,content +6235,17419393,"TERMINAL",0,0,"[franz.srambical@hai007.haicore.berlin:~/jafar] $ bash experiments/sample.sh ",,terminal_output +6236,17419703,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +6237,17423057,"/fast/home/franz.srambical/jafar/utils/nn.py",9520,0,"",python,selection_command +6238,17424464,"/fast/home/franz.srambical/jafar/utils/nn.py",10826,0,"",python,selection_command +6239,17426345,"/fast/home/franz.srambical/jafar/utils/nn.py",11056,0,"",python,selection_command +6240,17431050,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +6241,17495932,"TERMINAL",0,0,"2025-07-27 14:10:15.730276: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6242,17496474,"TERMINAL",0,0,"2025-07-27 14:10:16.279737: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6243,17499351,"TERMINAL",0,0,"2025-07-27 14:10:19.154960: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6244,17500823,"TERMINAL",0,0,"2025-07-27 14:10:20.627983: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6245,17505845,"TERMINAL",0,0,"2025-07-27 14:10:25.645641: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6246,17506509,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/utils/nn.py(331)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +6247,17513180,"TERMINAL",0,0,"c",,terminal_output +6248,17513727,"TERMINAL",0,0,"\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 231, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 134, in __call__\r\n z = self.temporal_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 334, in attention_fn\r\n output_4d = jax.nn.dot_product_attention(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py"", line 1212, in dot_product_attention\r\n out = _dot_product_attention_xla(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py"", line 968, in _dot_product_attention_xla\r\n mask = _reshape_to_grouped(mask)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py"", line 964, in _reshape_to_grouped\r\n assert tN == N\r\nAssertionError\r\n",,terminal_output +6249,17514616,"TERMINAL",0,0,"(Pdb) ",,terminal_output +6250,17514865,"TERMINAL",0,0,"srun: error: hai007: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +6251,17530380,"/fast/home/franz.srambical/jafar/utils/nn.py",4090,0,"",python,selection_command +6252,17532453,"/fast/home/franz.srambical/jafar/utils/nn.py",10684,0,"",python,selection_command +6253,17545092,"/fast/home/franz.srambical/jafar/utils/nn.py",4090,0,"",python,selection_command 
+6254,17547117,"/fast/home/franz.srambical/jafar/utils/nn.py",10684,0,"",python,selection_command +6255,17549826,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +6256,17549827,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",38193,0,"",python,selection_command +6257,17552901,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27853,0,"",python,selection_command +6258,17567377,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27735,0,"",python,selection_command +6259,17603715,"utils/nn.py",0,0,"",python,tab +6260,17616362,"utils/nn.py",10210,0,"",python,selection_mouse +6261,17616734,"utils/nn.py",10219,0,"",python,selection_mouse +6262,17617122,"utils/nn.py",10227,0,"",python,selection_mouse +6263,17618270,"utils/nn.py",10175,0,"",python,selection_mouse +6264,17618355,"utils/nn.py",10175,1,"(",python,selection_mouse +6265,17618383,"utils/nn.py",10175,2,"(1",python,selection_mouse +6266,17618402,"utils/nn.py",10175,3,"(1,",python,selection_mouse +6267,17618431,"utils/nn.py",10175,4,"(1, ",python,selection_mouse +6268,17618466,"utils/nn.py",10175,5,"(1, 9",python,selection_mouse +6269,17618499,"utils/nn.py",10175,6,"(1, 92",python,selection_mouse +6270,17618532,"utils/nn.py",10175,7,"(1, 921",python,selection_mouse +6271,17618567,"utils/nn.py",10175,8,"(1, 921,",python,selection_mouse +6272,17618616,"utils/nn.py",10175,9,"(1, 921, ",python,selection_mouse +6273,17618666,"utils/nn.py",10175,10,"(1, 921, 1",python,selection_mouse +6274,17618699,"utils/nn.py",10175,11,"(1, 921, 1,",python,selection_mouse +6275,17618717,"utils/nn.py",10175,12,"(1, 921, 1, ",python,selection_mouse +6276,17618782,"utils/nn.py",10175,13,"(1, 921, 1, 1",python,selection_mouse +6277,17618867,"utils/nn.py",10175,14,"(1, 921, 1, 1,",python,selection_mouse +6278,17618933,"utils/nn.py",10175,15,"(1, 921, 1, 1, ",python,selection_mouse +6279,17619002,"utils/nn.py",10175,16,"(1, 921, 1, 1, 1",python,selection_mouse +6280,17619082,"utils/nn.py",10175,17,"(1, 921, 1, 1, 1)",python,selection_mouse +6281,17639930,"utils/nn.py",10175,61,"(1, 921, 1, 1, 1)\n mask_4d = _pad(_rearrange(mask)",python,selection_command +6282,17640061,"utils/nn.py",10175,106,"(1, 921, 1, 1, 1)\n mask_4d = _pad(_rearrange(mask))\n mask_4d = mask_4d.astype(jnp.bo",python,selection_command +6283,17640425,"utils/nn.py",10280,0,"",python,selection_command +6284,17640696,"utils/nn.py",10284,0,"\n ",python,content +6285,17640955,"utils/nn.py",10297,0,"#",python,content +6286,17640955,"utils/nn.py",10298,0,"",python,selection_keyboard +6287,17641065,"utils/nn.py",10298,0," ",python,content +6288,17641065,"utils/nn.py",10299,0,"",python,selection_keyboard +6289,17641440,"utils/nn.py",10299,0,"m",python,content +6290,17641440,"utils/nn.py",10300,0,"",python,selection_keyboard +6291,17641489,"utils/nn.py",10300,0,"a",python,content +6292,17641489,"utils/nn.py",10301,0,"",python,selection_keyboard +6293,17641571,"utils/nn.py",10301,0,"s",python,content +6294,17641572,"utils/nn.py",10302,0,"",python,selection_keyboard +6295,17641655,"utils/nn.py",10302,0,"k",python,content +6296,17641655,"utils/nn.py",10303,0,"",python,selection_keyboard +6297,17642042,"utils/nn.py",10303,0,"4",python,content +6298,17642042,"utils/nn.py",10304,0,"",python,selection_keyboard +6299,17642388,"utils/nn.py",10303,1,"",python,content +6300,17642590,"utils/nn.py",10303,0,"_",python,content 
+6301,17642590,"utils/nn.py",10304,0,"",python,selection_keyboard +6302,17642775,"utils/nn.py",10304,0,"4",python,content +6303,17642775,"utils/nn.py",10305,0,"",python,selection_keyboard +6304,17642977,"utils/nn.py",10305,0,"d",python,content +6305,17642978,"utils/nn.py",10306,0,"",python,selection_keyboard +6306,17643261,"utils/nn.py",10306,0,".",python,content +6307,17643262,"utils/nn.py",10307,0,"",python,selection_keyboard +6308,17643357,"utils/nn.py",10307,0,"s",python,content +6309,17643357,"utils/nn.py",10308,0,"",python,selection_keyboard +6310,17643475,"utils/nn.py",10308,0,"h",python,content +6311,17643476,"utils/nn.py",10309,0,"",python,selection_keyboard +6312,17643524,"utils/nn.py",10309,0,"a",python,content +6313,17643524,"utils/nn.py",10310,0,"",python,selection_keyboard +6314,17643610,"utils/nn.py",10310,0,"p",python,content +6315,17643610,"utils/nn.py",10311,0,"",python,selection_keyboard +6316,17643705,"utils/nn.py",10311,0,"e",python,content +6317,17643706,"utils/nn.py",10312,0,"",python,selection_keyboard +6318,17643807,"utils/nn.py",10312,0," ",python,content +6319,17643807,"utils/nn.py",10313,0,"",python,selection_keyboard +6320,17644006,"utils/nn.py",10313,0,"()",python,content +6321,17644007,"utils/nn.py",10314,0,"",python,selection_keyboard +6322,17645663,"utils/nn.py",10314,0,"9",python,content +6323,17645663,"utils/nn.py",10315,0,"",python,selection_keyboard +6324,17645777,"utils/nn.py",10315,0,"2",python,content +6325,17645777,"utils/nn.py",10316,0,"",python,selection_keyboard +6326,17645842,"utils/nn.py",10316,0,"1",python,content +6327,17645842,"utils/nn.py",10317,0,"",python,selection_keyboard +6328,17645974,"utils/nn.py",10317,0,",",python,content +6329,17645975,"utils/nn.py",10318,0,"",python,selection_keyboard +6330,17646107,"utils/nn.py",10318,0," ",python,content +6331,17646108,"utils/nn.py",10319,0,"",python,selection_keyboard +6332,17646574,"utils/nn.py",10319,0,"4",python,content +6333,17646574,"utils/nn.py",10320,0,"",python,selection_keyboard +6334,17646688,"utils/nn.py",10320,0,",",python,content +6335,17646689,"utils/nn.py",10321,0,"",python,selection_keyboard +6336,17646825,"utils/nn.py",10321,0," ",python,content +6337,17646826,"utils/nn.py",10322,0,"",python,selection_keyboard +6338,17648236,"utils/nn.py",10322,0,"1",python,content +6339,17648237,"utils/nn.py",10323,0,"",python,selection_keyboard +6340,17648305,"utils/nn.py",10323,0,",",python,content +6341,17648305,"utils/nn.py",10324,0,"",python,selection_keyboard +6342,17648462,"utils/nn.py",10324,0," ",python,content +6343,17648462,"utils/nn.py",10325,0,"",python,selection_keyboard +6344,17648577,"utils/nn.py",10325,0,"1",python,content +6345,17648577,"utils/nn.py",10326,0,"",python,selection_keyboard +6346,17648912,"utils/nn.py",10325,0,"",python,selection_command +6347,17649543,"utils/nn.py",10285,0,"",python,selection_command +6348,17649657,"utils/nn.py",10297,0,"",python,selection_command +6349,17649861,"utils/nn.py",10299,0,"",python,selection_command +6350,17650199,"utils/nn.py",10306,0,"",python,selection_command +6351,17650396,"utils/nn.py",10307,0,"",python,selection_command +6352,17650520,"utils/nn.py",10313,0,"",python,selection_command +6353,17650650,"utils/nn.py",10314,0,"",python,selection_command +6354,17651281,"utils/nn.py",10327,0,"",python,selection_command +6355,17651532,"utils/nn.py",10326,0,"",python,selection_command +6356,17652318,"utils/nn.py",10285,0,"",python,selection_command +6357,17663561,"TERMINAL",0,0,"[franz.srambical@hai007.haicore.berlin:~/jafar] $ bash 
experiments/sample.sh ",,terminal_output +6358,17663806,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +6359,17675146,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +6360,17725284,"TERMINAL",0,0,"2025-07-27 14:14:05.079655: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6361,17725748,"TERMINAL",0,0,"2025-07-27 14:14:05.550031: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6362,17728630,"TERMINAL",0,0,"2025-07-27 14:14:08.433944: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6363,17730110,"TERMINAL",0,0,"2025-07-27 14:14:09.910876: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6364,17735138,"TERMINAL",0,0,"2025-07-27 14:14:14.939424: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6365,17735810,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/utils/nn.py(332)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +6366,17739725,"TERMINAL",0,0,"m",,terminal_output +6367,17739824,"TERMINAL",0,0,"as",,terminal_output +6368,17739960,"TERMINAL",0,0,"k",,terminal_output +6369,17740190,"TERMINAL",0,0,"_",,terminal_output +6370,17740402,"TERMINAL",0,0,"4",,terminal_output +6371,17740677,"TERMINAL",0,0,"d",,terminal_output +6372,17741155,"TERMINAL",0,0,".",,terminal_output +6373,17741299,"TERMINAL",0,0,"s",,terminal_output +6374,17741415,"TERMINAL",0,0,"h",,terminal_output +6375,17741504,"TERMINAL",0,0,"a",,terminal_output +6376,17741644,"TERMINAL",0,0,"p",,terminal_output +6377,17741744,"TERMINAL",0,0,"e",,terminal_output +6378,17741852,"TERMINAL",0,0,"\r\n(Pdb) (921, 4, 1, 1)\r\n",,terminal_output +6379,17744867,"TERMINAL",0,0,"q",,terminal_output +6380,17744998,"TERMINAL",0,0,"u",,terminal_output +6381,17745058,"TERMINAL",0,0,"e",,terminal_output +6382,17745169,"TERMINAL",0,0,"r",,terminal_output +6383,17745503,"TERMINAL",0,0,"y",,terminal_output +6384,17745755,"TERMINAL",0,0,"_",,terminal_output +6385,17746105,"TERMINAL",0,0,"4",,terminal_output +6386,17746424,"TERMINAL",0,0,"d",,terminal_output +6387,17746603,"TERMINAL",0,0,".",,terminal_output +6388,17746701,"TERMINAL",0,0,"s",,terminal_output +6389,17746812,"TERMINAL",0,0,"h",,terminal_output +6390,17746867,"TERMINAL",0,0,"a",,terminal_output +6391,17746986,"TERMINAL",0,0,"p",,terminal_output +6392,17747054,"TERMINAL",0,0,"e",,terminal_output +6393,17747187,"TERMINAL",0,0,"\r\n(Pdb) (921, 4, 8, 64)\r\n",,terminal_output +6394,17748467,"utils/nn.py",10238,0,"",python,selection_command +6395,17748681,"utils/nn.py",10193,0,"",python,selection_command +6396,17748712,"utils/nn.py",10150,0,"",python,selection_command +6397,17748745,"utils/nn.py",10121,0,"",python,selection_command +6398,17748781,"utils/nn.py",10028,0,"",python,selection_command +6399,17748816,"utils/nn.py",10027,0,"",python,selection_command +6400,17748848,"utils/nn.py",9951,0,"",python,selection_command +6401,17748882,"utils/nn.py",9875,0,"",python,selection_command +6402,17748914,"utils/nn.py",9790,0,"",python,selection_command +6403,17748950,"utils/nn.py",9789,0,"",python,selection_command +6404,17748983,"utils/nn.py",9746,0,"",python,selection_command +6405,17749125,"utils/nn.py",9707,0,"",python,selection_command +6406,17749334,"utils/nn.py",9746,0,"",python,selection_command +6407,17749648,"utils/nn.py",9788,0,"\n ",python,content +6408,17751217,"utils/nn.py",9797,0,"#",python,content +6409,17751217,"utils/nn.py",9798,0,"",python,selection_keyboard +6410,17751648,"utils/nn.py",9798,0," ",python,content +6411,17751648,"utils/nn.py",9799,0,"",python,selection_keyboard +6412,17752581,"utils/nn.py",9799,0,"query_4d.shape (921, 4, 1, 8, 64)",python,content +6413,17752794,"utils/nn.py",9831,0,"",python,selection_command +6414,17754312,"utils/nn.py",9829,0,"",python,selection_command +6415,17754487,"utils/nn.py",9827,0,"",python,selection_command +6416,17754611,"utils/nn.py",9826,0,"",python,selection_command +6417,17754750,"utils/nn.py",9824,0,"",python,selection_command +6418,17754887,"utils/nn.py",9826,0,"",python,selection_command +6419,17755044,"utils/nn.py",9827,0,"",python,selection_command 
+6420,17755601,"utils/nn.py",9789,0,"",python,selection_command +6421,17808295,"TERMINAL",0,0,"c",,terminal_output +6422,17808833,"TERMINAL",0,0,"\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 231, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 134, in __call__\r\n z = self.temporal_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 335, in attention_fn\r\n # NOTE: jax.nn.dot_product_attention does not support dropout\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py"", line 1212, in dot_product_attention\r\n out = _dot_product_attention_xla(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py"", line 968, in _dot_product_attention_xla\r\n mask = _reshape_to_grouped(mask)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py"", line 964, in _reshape_to_grouped\r\n assert tN == N\r\nAssertionError\r\n",,terminal_output +6423,17809710,"TERMINAL",0,0,"(Pdb) ",,terminal_output +6424,17809931,"TERMINAL",0,0,"srun: error: hai007: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output 
+6425,17811974,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +6426,17811974,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27735,0,"",python,selection_command +6427,17817326,"utils/nn.py",0,0,"",python,tab +6428,17820104,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +6429,17822310,"utils/nn.py",0,0,"",python,tab +6430,17822948,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +6431,17824996,"utils/nn.py",0,0,"",python,tab +6432,17827114,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +6433,17828553,"utils/nn.py",0,0,"",python,tab +6434,17851849,"utils/nn.py",9797,0,"",python,selection_command +6435,17852104,"utils/nn.py",9799,0,"",python,selection_command +6436,17852131,"utils/nn.py",9807,0,"",python,selection_command +6437,17852157,"utils/nn.py",9808,0,"",python,selection_command +6438,17852192,"utils/nn.py",9814,0,"",python,selection_command +6439,17852226,"utils/nn.py",9815,0,"",python,selection_command +6440,17852259,"utils/nn.py",9818,0,"",python,selection_command +6441,17852292,"utils/nn.py",9820,0,"",python,selection_command +6442,17852570,"utils/nn.py",9821,0,"",python,selection_command +6443,17852920,"utils/nn.py",9823,0,"",python,selection_command +6444,17853810,"utils/nn.py",9823,1,"",python,content +6445,17853950,"utils/nn.py",9823,1,"",python,content +6446,17854295,"utils/nn.py",9823,1,"",python,content +6447,17865642,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +6448,17868045,"utils/nn.py",0,0,"",python,tab +6449,17878315,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +6450,17879751,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27723,0,"",python,selection_command +6451,17879989,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27652,0,"",python,selection_command +6452,17880027,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27634,0,"",python,selection_command +6453,17880052,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27603,0,"",python,selection_command +6454,17880086,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27581,0,"",python,selection_command +6455,17880122,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27551,0,"",python,selection_command +6456,17880156,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27505,0,"",python,selection_command +6457,17880190,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27504,0,"",python,selection_command +6458,17880224,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27491,0,"",python,selection_command +6459,17880257,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27466,0,"",python,selection_command 
+6460,17880290,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27439,0,"",python,selection_command +6461,17880524,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27466,0,"",python,selection_command +6462,17880785,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27491,0,"",python,selection_command +6463,17880802,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27504,0,"",python,selection_command +6464,17880824,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27505,0,"",python,selection_command +6465,17880858,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27551,0,"",python,selection_command +6466,17880892,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27581,0,"",python,selection_command +6467,17880925,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27603,0,"",python,selection_command +6468,17881437,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27581,0,"",python,selection_command +6469,17881611,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27603,0,"",python,selection_command +6470,17885261,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27634,0,"",python,selection_command +6471,17885525,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27652,0,"",python,selection_command +6472,17885535,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27723,0,"",python,selection_command +6473,17885570,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27735,0,"",python,selection_command +6474,17896085,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27723,0,"",python,selection_command +6475,17896333,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27652,0,"",python,selection_command +6476,17896348,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27634,0,"",python,selection_command +6477,17896373,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27603,0,"",python,selection_command +6478,17896404,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27581,0,"",python,selection_command +6479,17896768,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27603,0,"",python,selection_command +6480,17896937,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27634,0,"",python,selection_command +6481,17897236,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27603,0,"",python,selection_command +6482,17897351,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27609,0,"",python,selection_command +6483,17897607,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27611,0,"",python,selection_command 
+6484,17897633,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27613,0,"",python,selection_command +6485,17897657,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27615,0,"",python,selection_command +6486,17897689,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27617,0,"",python,selection_command +6487,17897886,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27619,0,"",python,selection_command +6488,17898089,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27621,0,"",python,selection_command +6489,17898222,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27624,0,"",python,selection_command +6490,17898398,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27626,0,"",python,selection_command +6491,17901428,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27588,0,"",python,selection_command +6492,17901803,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27577,0,"",python,selection_command +6493,17902174,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27538,0,"",python,selection_command +6494,17903446,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27444,0,"",python,selection_command +6495,17904014,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",25973,0,"",python,selection_command +6496,17905298,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27444,0,"",python,selection_command +6497,17906791,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27538,0,"",python,selection_command +6498,17907306,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27579,0,"",python,selection_command +6499,17908058,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27578,0,"",python,selection_command +6500,17908463,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27577,0,"",python,selection_command +6501,17909068,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27576,0,"",python,selection_command +6502,17909428,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27557,0,"",python,selection_command +6503,17910364,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27587,0,"",python,selection_command +6504,17910614,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27609,0,"",python,selection_command +6505,17910628,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27640,0,"",python,selection_command +6506,17910659,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27658,0,"",python,selection_command +6507,17910693,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27729,0,"",python,selection_command 
+6508,17910724,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27741,0,"",python,selection_command +6509,17910756,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27764,0,"",python,selection_command +6510,17910788,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27811,0,"",python,selection_command +6511,17910957,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27824,0,"",python,selection_command +6512,17911182,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27859,0,"",python,selection_command +6513,17911274,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27860,0,"",python,selection_command +6514,17911379,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27825,0,"",python,selection_command +6515,17911677,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27827,0,"",python,selection_command +6516,17911963,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27846,0,"",python,selection_command +6517,17914165,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27853,0,"",python,selection_command +6518,17916249,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27735,0,"",python,selection_command +6519,17918629,"utils/nn.py",0,0,"",python,tab +6520,17919355,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +6521,17920034,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27723,0,"",python,selection_command +6522,17920168,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27652,0,"",python,selection_command +6523,17920315,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27634,0,"",python,selection_command +6524,17920443,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27603,0,"",python,selection_command +6525,17920620,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27609,0,"",python,selection_command +6526,17921404,"utils/nn.py",0,0,"",python,tab +6527,17922503,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +6528,17926516,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27611,0,"",python,selection_command +6529,17926607,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27613,0,"",python,selection_command +6530,17927761,"utils/nn.py",0,0,"",python,tab +6531,17950936,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +6532,17952239,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27644,0,"",python,selection_command +6533,17952463,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27662,0,"",python,selection_command 
+6534,17952623,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27733,0,"",python,selection_command +6535,17952749,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27745,0,"",python,selection_command +6536,17952894,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27768,0,"",python,selection_command +6537,17953186,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27745,0,"",python,selection_command +6538,17956311,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27735,22," assert tN == N",python,selection_command +6539,17980986,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27745,0,"",python,selection_command +6540,18753073,"TERMINAL",0,0,"salloc: Job 13998 has exceeded its time limit and its allocation has been revoked.\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\n[2025-07-27T14:31:11.013] error: *** STEP 13998.interactive ON hai007 CANCELLED AT 2025-07-27T14:31:11 DUE TO TIME LIMIT ***\r\n",,terminal_output +6541,18813068,"TERMINAL",0,0,"srun: error: hai007: task 0: Killed\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +6542,19084534,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27625,0,"",python,selection_mouse +6543,19086123,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27603,30," tB, tN, tT, tS = t.shape",python,selection_command +6544,19087664,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27625,0,"",python,selection_command +6545,19089778,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27603,30," tB, tN, tT, tS = t.shape",python,selection_command +6546,19089992,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27603,48," tB, tN, tT, tS = t.shape\n if tN == 1:",python,selection_command +6547,19090172,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27603,119," tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))",python,selection_command +6548,19090307,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27603,131," tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:",python,selection_command +6549,19090443,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27603,154," tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N",python,selection_command +6550,19090588,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27603,201," tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))",python,selection_command +6551,19094043,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27780,0,"",python,selection_command 
+6552,19117129,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27756,0,"",python,selection_command +6553,19117378,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27733,0,"",python,selection_command +6554,19117405,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27674,0,"",python,selection_command +6555,19117435,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27650,0,"",python,selection_command +6556,19117468,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27625,0,"",python,selection_command +6557,19117502,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27601,0,"",python,selection_command +6558,19117535,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27573,0,"",python,selection_command +6559,19117567,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27527,0,"",python,selection_command +6560,19117601,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27504,0,"",python,selection_command +6561,19117633,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27502,0,"",python,selection_command +6562,19117667,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27488,0,"",python,selection_command +6563,19117700,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27461,0,"",python,selection_command +6564,19117735,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27438,0,"",python,selection_command +6565,19117770,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27412,0,"",python,selection_command +6566,19117801,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27383,0,"",python,selection_command +6567,19117835,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27355,0,"",python,selection_command +6568,19117868,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27331,0,"",python,selection_command +6569,19117901,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27313,0,"",python,selection_command +6570,19117934,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27292,0,"",python,selection_command +6571,19117968,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27268,0,"",python,selection_command +6572,19118001,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27244,0,"",python,selection_command +6573,19118034,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27226,0,"",python,selection_command +6574,19118068,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27210,0,"",python,selection_command +6575,19118100,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27184,0,"",python,selection_command 
+6576,19118134,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27161,0,"",python,selection_command +6577,19118445,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27184,0,"",python,selection_command +6578,19118719,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,31,"def _dot_product_attention_xla(",python,selection_command +6579,19119646,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,276,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n",python,selection_command +6580,19119782,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,342,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n",python,selection_command +6581,19120030,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,1074,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n",python,selection_command +6582,19120057,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,1530,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = 
jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n",python,selection_command +6583,19120088,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,2037,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n",python,selection_command +6584,19120115,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,2190,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, 
value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n\nbias_fwd_p = core.Primitive('bias_fwd')\nbias_fwd_p.multiple_results = False\nbias_bwd_p = core.Primitive('bias_bwd')\nbias_bwd_p.multiple_results = False\n",python,selection_command +6585,19120150,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,2362,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n\nbias_fwd_p = core.Primitive('bias_fwd')\nbias_fwd_p.multiple_results = False\nbias_bwd_p = core.Primitive('bias_bwd')\nbias_bwd_p.multiple_results = False\n\ndef bias_fwd_impl(a, query_head_num):\n return a\ndef bias_bwd_impl(g, a, 
query_head_num):\n return g\nbias_fwd_p.def_impl(bias_fwd_impl)\nbias_bwd_p.def_impl(bias_bwd_impl)\n",python,selection_command +6586,19120182,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,2654,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n\nbias_fwd_p = core.Primitive('bias_fwd')\nbias_fwd_p.multiple_results = False\nbias_bwd_p = core.Primitive('bias_bwd')\nbias_bwd_p.multiple_results = False\n\ndef bias_fwd_impl(a, query_head_num):\n return a\ndef bias_bwd_impl(g, a, query_head_num):\n return g\nbias_fwd_p.def_impl(bias_fwd_impl)\nbias_bwd_p.def_impl(bias_bwd_impl)\n\ndef bias_fwd_abstract_eval(a, query_head_num):\n return core.ShapedArray(a.shape, a.dtype)\ndef bias_bwd_abstract_eval(g, a, query_head_num):\n return core.ShapedArray(g.shape, g.dtype)\nbias_fwd_p.def_abstract_eval(bias_fwd_abstract_eval)\nbias_bwd_p.def_abstract_eval(bias_bwd_abstract_eval)\n",python,selection_command +6587,19120215,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,2886,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, 
tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n\nbias_fwd_p = core.Primitive('bias_fwd')\nbias_fwd_p.multiple_results = False\nbias_bwd_p = core.Primitive('bias_bwd')\nbias_bwd_p.multiple_results = False\n\ndef bias_fwd_impl(a, query_head_num):\n return a\ndef bias_bwd_impl(g, a, query_head_num):\n return g\nbias_fwd_p.def_impl(bias_fwd_impl)\nbias_bwd_p.def_impl(bias_bwd_impl)\n\ndef bias_fwd_abstract_eval(a, query_head_num):\n return core.ShapedArray(a.shape, a.dtype)\ndef bias_bwd_abstract_eval(g, a, query_head_num):\n return core.ShapedArray(g.shape, g.dtype)\nbias_fwd_p.def_abstract_eval(bias_fwd_abstract_eval)\nbias_bwd_p.def_abstract_eval(bias_bwd_abstract_eval)\n\ndef bias_fwd_lowering(ctx, a, query_head_num):\n return [a]\ndef bias_bwd_lowering(ctx, g, a, query_head_num):\n return [g]\nmlir.register_lowering(bias_fwd_p, bias_fwd_lowering)\nmlir.register_lowering(bias_bwd_p, bias_bwd_lowering)\n",python,selection_command +6588,19120262,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,3446,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise 
ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n\nbias_fwd_p = core.Primitive('bias_fwd')\nbias_fwd_p.multiple_results = False\nbias_bwd_p = core.Primitive('bias_bwd')\nbias_bwd_p.multiple_results = False\n\ndef bias_fwd_impl(a, query_head_num):\n return a\ndef bias_bwd_impl(g, a, query_head_num):\n return g\nbias_fwd_p.def_impl(bias_fwd_impl)\nbias_bwd_p.def_impl(bias_bwd_impl)\n\ndef bias_fwd_abstract_eval(a, query_head_num):\n return core.ShapedArray(a.shape, a.dtype)\ndef bias_bwd_abstract_eval(g, a, query_head_num):\n return core.ShapedArray(g.shape, g.dtype)\nbias_fwd_p.def_abstract_eval(bias_fwd_abstract_eval)\nbias_bwd_p.def_abstract_eval(bias_bwd_abstract_eval)\n\ndef bias_fwd_lowering(ctx, a, query_head_num):\n return [a]\ndef bias_bwd_lowering(ctx, g, a, query_head_num):\n return [g]\nmlir.register_lowering(bias_fwd_p, bias_fwd_lowering)\nmlir.register_lowering(bias_bwd_p, bias_bwd_lowering)\n\ndef bias_fwd_batch_rule(batched_args, batch_dims):\n x, query_head_num = batched_args\n a = batch_dims[0]\n output, _ = bias_fwd_rule(x, query_head_num)\n return output, a\ndef bias_bwd_batch_rule(batched_args, batch_dims):\n g, x, query_head_num = batched_args\n b = batch_dims[0]\n *Bs, _, _, _ = x.shape\n B = math.prod(Bs)\n x = jnp.reshape(x, (B,) + x.shape[-3:])\n output, = bias_bwd_rule(query_head_num, x, g)\n return output, b\nbatching.primitive_batchers[bias_fwd_p] = bias_fwd_batch_rule\nbatching.primitive_batchers[bias_bwd_p] = bias_bwd_batch_rule\n",python,selection_command +6589,19120292,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,3950,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias 
gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n\nbias_fwd_p = core.Primitive('bias_fwd')\nbias_fwd_p.multiple_results = False\nbias_bwd_p = core.Primitive('bias_bwd')\nbias_bwd_p.multiple_results = False\n\ndef bias_fwd_impl(a, query_head_num):\n return a\ndef bias_bwd_impl(g, a, query_head_num):\n return g\nbias_fwd_p.def_impl(bias_fwd_impl)\nbias_bwd_p.def_impl(bias_bwd_impl)\n\ndef bias_fwd_abstract_eval(a, query_head_num):\n return core.ShapedArray(a.shape, a.dtype)\ndef bias_bwd_abstract_eval(g, a, query_head_num):\n return core.ShapedArray(g.shape, g.dtype)\nbias_fwd_p.def_abstract_eval(bias_fwd_abstract_eval)\nbias_bwd_p.def_abstract_eval(bias_bwd_abstract_eval)\n\ndef bias_fwd_lowering(ctx, a, query_head_num):\n return [a]\ndef bias_bwd_lowering(ctx, g, a, query_head_num):\n return [g]\nmlir.register_lowering(bias_fwd_p, bias_fwd_lowering)\nmlir.register_lowering(bias_bwd_p, bias_bwd_lowering)\n\ndef bias_fwd_batch_rule(batched_args, batch_dims):\n x, query_head_num = batched_args\n a = batch_dims[0]\n output, _ = bias_fwd_rule(x, query_head_num)\n return output, a\ndef bias_bwd_batch_rule(batched_args, batch_dims):\n g, x, query_head_num = batched_args\n b = batch_dims[0]\n *Bs, _, _, _ = x.shape\n B = math.prod(Bs)\n x = jnp.reshape(x, (B,) + x.shape[-3:])\n output, = bias_bwd_rule(query_head_num, x, g)\n return output, b\nbatching.primitive_batchers[bias_fwd_p] = bias_fwd_batch_rule\nbatching.primitive_batchers[bias_bwd_p] = bias_bwd_batch_rule\n\ndef dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n",python,selection_command +6590,19120330,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,4019,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, 
None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n\nbias_fwd_p = core.Primitive('bias_fwd')\nbias_fwd_p.multiple_results = False\nbias_bwd_p = core.Primitive('bias_bwd')\nbias_bwd_p.multiple_results = False\n\ndef bias_fwd_impl(a, query_head_num):\n return a\ndef bias_bwd_impl(g, a, query_head_num):\n return g\nbias_fwd_p.def_impl(bias_fwd_impl)\nbias_bwd_p.def_impl(bias_bwd_impl)\n\ndef bias_fwd_abstract_eval(a, query_head_num):\n return core.ShapedArray(a.shape, a.dtype)\ndef bias_bwd_abstract_eval(g, a, query_head_num):\n return core.ShapedArray(g.shape, g.dtype)\nbias_fwd_p.def_abstract_eval(bias_fwd_abstract_eval)\nbias_bwd_p.def_abstract_eval(bias_bwd_abstract_eval)\n\ndef bias_fwd_lowering(ctx, a, query_head_num):\n return [a]\ndef bias_bwd_lowering(ctx, g, a, query_head_num):\n return [g]\nmlir.register_lowering(bias_fwd_p, bias_fwd_lowering)\nmlir.register_lowering(bias_bwd_p, bias_bwd_lowering)\n\ndef bias_fwd_batch_rule(batched_args, batch_dims):\n x, query_head_num = batched_args\n a = batch_dims[0]\n output, _ = bias_fwd_rule(x, query_head_num)\n return output, a\ndef bias_bwd_batch_rule(batched_args, batch_dims):\n g, x, query_head_num = batched_args\n b = batch_dims[0]\n *Bs, _, _, _ = x.shape\n B = math.prod(Bs)\n x = jnp.reshape(x, (B,) + x.shape[-3:])\n output, = bias_bwd_rule(query_head_num, x, g)\n return output, b\nbatching.primitive_batchers[bias_fwd_p] = bias_fwd_batch_rule\nbatching.primitive_batchers[bias_bwd_p] = bias_bwd_batch_rule\n\ndef dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n",python,selection_command +6591,19120361,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,4032,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = 
query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n\nbias_fwd_p = core.Primitive('bias_fwd')\nbias_fwd_p.multiple_results = False\nbias_bwd_p = core.Primitive('bias_bwd')\nbias_bwd_p.multiple_results = False\n\ndef bias_fwd_impl(a, query_head_num):\n return a\ndef bias_bwd_impl(g, a, query_head_num):\n return g\nbias_fwd_p.def_impl(bias_fwd_impl)\nbias_bwd_p.def_impl(bias_bwd_impl)\n\ndef bias_fwd_abstract_eval(a, query_head_num):\n return core.ShapedArray(a.shape, a.dtype)\ndef bias_bwd_abstract_eval(g, a, query_head_num):\n return core.ShapedArray(g.shape, g.dtype)\nbias_fwd_p.def_abstract_eval(bias_fwd_abstract_eval)\nbias_bwd_p.def_abstract_eval(bias_bwd_abstract_eval)\n\ndef bias_fwd_lowering(ctx, a, query_head_num):\n return [a]\ndef bias_bwd_lowering(ctx, g, a, query_head_num):\n return [g]\nmlir.register_lowering(bias_fwd_p, bias_fwd_lowering)\nmlir.register_lowering(bias_bwd_p, bias_bwd_lowering)\n\ndef bias_fwd_batch_rule(batched_args, batch_dims):\n x, query_head_num = batched_args\n a = batch_dims[0]\n output, _ = bias_fwd_rule(x, query_head_num)\n return output, a\ndef bias_bwd_batch_rule(batched_args, batch_dims):\n g, x, query_head_num = batched_args\n b = batch_dims[0]\n *Bs, _, _, _ = x.shape\n B = math.prod(Bs)\n x = jnp.reshape(x, (B,) + x.shape[-3:])\n output, = bias_bwd_rule(query_head_num, x, g)\n return output, b\nbatching.primitive_batchers[bias_fwd_p] = bias_fwd_batch_rule\nbatching.primitive_batchers[bias_bwd_p] = bias_bwd_batch_rule\n\ndef dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product 
attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n\n .. math::\n",python,selection_command +6592,19120876,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,4019,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n\nbias_fwd_p = core.Primitive('bias_fwd')\nbias_fwd_p.multiple_results = False\nbias_bwd_p = core.Primitive('bias_bwd')\nbias_bwd_p.multiple_results = False\n\ndef bias_fwd_impl(a, query_head_num):\n return a\ndef bias_bwd_impl(g, a, query_head_num):\n return g\nbias_fwd_p.def_impl(bias_fwd_impl)\nbias_bwd_p.def_impl(bias_bwd_impl)\n\ndef bias_fwd_abstract_eval(a, query_head_num):\n return core.ShapedArray(a.shape, a.dtype)\ndef bias_bwd_abstract_eval(g, a, query_head_num):\n return core.ShapedArray(g.shape, g.dtype)\nbias_fwd_p.def_abstract_eval(bias_fwd_abstract_eval)\nbias_bwd_p.def_abstract_eval(bias_bwd_abstract_eval)\n\ndef bias_fwd_lowering(ctx, a, query_head_num):\n return [a]\ndef bias_bwd_lowering(ctx, g, a, query_head_num):\n return [g]\nmlir.register_lowering(bias_fwd_p, bias_fwd_lowering)\nmlir.register_lowering(bias_bwd_p, bias_bwd_lowering)\n\ndef bias_fwd_batch_rule(batched_args, batch_dims):\n x, query_head_num = batched_args\n a = batch_dims[0]\n output, _ = bias_fwd_rule(x, query_head_num)\n return output, a\ndef bias_bwd_batch_rule(batched_args, batch_dims):\n g, x, query_head_num = batched_args\n b = batch_dims[0]\n *Bs, _, _, _ = x.shape\n B = math.prod(Bs)\n x = jnp.reshape(x, (B,) + x.shape[-3:])\n output, = bias_bwd_rule(query_head_num, x, g)\n return output, b\nbatching.primitive_batchers[bias_fwd_p] = 
bias_fwd_batch_rule\nbatching.primitive_batchers[bias_bwd_p] = bias_bwd_batch_rule\n\ndef dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n",python,selection_command +6593,19121131,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,3950,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n\nbias_fwd_p = core.Primitive('bias_fwd')\nbias_fwd_p.multiple_results = False\nbias_bwd_p = core.Primitive('bias_bwd')\nbias_bwd_p.multiple_results = False\n\ndef bias_fwd_impl(a, query_head_num):\n return a\ndef bias_bwd_impl(g, a, query_head_num):\n return g\nbias_fwd_p.def_impl(bias_fwd_impl)\nbias_bwd_p.def_impl(bias_bwd_impl)\n\ndef bias_fwd_abstract_eval(a, query_head_num):\n return core.ShapedArray(a.shape, a.dtype)\ndef bias_bwd_abstract_eval(g, a, query_head_num):\n return core.ShapedArray(g.shape, g.dtype)\nbias_fwd_p.def_abstract_eval(bias_fwd_abstract_eval)\nbias_bwd_p.def_abstract_eval(bias_bwd_abstract_eval)\n\ndef bias_fwd_lowering(ctx, a, query_head_num):\n return [a]\ndef bias_bwd_lowering(ctx, g, a, query_head_num):\n return [g]\nmlir.register_lowering(bias_fwd_p, 
bias_fwd_lowering)\nmlir.register_lowering(bias_bwd_p, bias_bwd_lowering)\n\ndef bias_fwd_batch_rule(batched_args, batch_dims):\n x, query_head_num = batched_args\n a = batch_dims[0]\n output, _ = bias_fwd_rule(x, query_head_num)\n return output, a\ndef bias_bwd_batch_rule(batched_args, batch_dims):\n g, x, query_head_num = batched_args\n b = batch_dims[0]\n *Bs, _, _, _ = x.shape\n B = math.prod(Bs)\n x = jnp.reshape(x, (B,) + x.shape[-3:])\n output, = bias_bwd_rule(query_head_num, x, g)\n return output, b\nbatching.primitive_batchers[bias_fwd_p] = bias_fwd_batch_rule\nbatching.primitive_batchers[bias_bwd_p] = bias_bwd_batch_rule\n\ndef dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n",python,selection_command +6594,19121156,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,3446,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n\nbias_fwd_p = core.Primitive('bias_fwd')\nbias_fwd_p.multiple_results = False\nbias_bwd_p = core.Primitive('bias_bwd')\nbias_bwd_p.multiple_results = False\n\ndef bias_fwd_impl(a, query_head_num):\n return a\ndef bias_bwd_impl(g, a, query_head_num):\n return g\nbias_fwd_p.def_impl(bias_fwd_impl)\nbias_bwd_p.def_impl(bias_bwd_impl)\n\ndef 
bias_fwd_abstract_eval(a, query_head_num):\n return core.ShapedArray(a.shape, a.dtype)\ndef bias_bwd_abstract_eval(g, a, query_head_num):\n return core.ShapedArray(g.shape, g.dtype)\nbias_fwd_p.def_abstract_eval(bias_fwd_abstract_eval)\nbias_bwd_p.def_abstract_eval(bias_bwd_abstract_eval)\n\ndef bias_fwd_lowering(ctx, a, query_head_num):\n return [a]\ndef bias_bwd_lowering(ctx, g, a, query_head_num):\n return [g]\nmlir.register_lowering(bias_fwd_p, bias_fwd_lowering)\nmlir.register_lowering(bias_bwd_p, bias_bwd_lowering)\n\ndef bias_fwd_batch_rule(batched_args, batch_dims):\n x, query_head_num = batched_args\n a = batch_dims[0]\n output, _ = bias_fwd_rule(x, query_head_num)\n return output, a\ndef bias_bwd_batch_rule(batched_args, batch_dims):\n g, x, query_head_num = batched_args\n b = batch_dims[0]\n *Bs, _, _, _ = x.shape\n B = math.prod(Bs)\n x = jnp.reshape(x, (B,) + x.shape[-3:])\n output, = bias_bwd_rule(query_head_num, x, g)\n return output, b\nbatching.primitive_batchers[bias_fwd_p] = bias_fwd_batch_rule\nbatching.primitive_batchers[bias_bwd_p] = bias_bwd_batch_rule\n",python,selection_command +6595,19121183,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,2886,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n\nbias_fwd_p = core.Primitive('bias_fwd')\nbias_fwd_p.multiple_results = False\nbias_bwd_p = core.Primitive('bias_bwd')\nbias_bwd_p.multiple_results = False\n\ndef bias_fwd_impl(a, query_head_num):\n return a\ndef bias_bwd_impl(g, a, query_head_num):\n return g\nbias_fwd_p.def_impl(bias_fwd_impl)\nbias_bwd_p.def_impl(bias_bwd_impl)\n\ndef bias_fwd_abstract_eval(a, 
query_head_num):\n return core.ShapedArray(a.shape, a.dtype)\ndef bias_bwd_abstract_eval(g, a, query_head_num):\n return core.ShapedArray(g.shape, g.dtype)\nbias_fwd_p.def_abstract_eval(bias_fwd_abstract_eval)\nbias_bwd_p.def_abstract_eval(bias_bwd_abstract_eval)\n\ndef bias_fwd_lowering(ctx, a, query_head_num):\n return [a]\ndef bias_bwd_lowering(ctx, g, a, query_head_num):\n return [g]\nmlir.register_lowering(bias_fwd_p, bias_fwd_lowering)\nmlir.register_lowering(bias_bwd_p, bias_bwd_lowering)\n",python,selection_command +6596,19121215,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,2654,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n\nbias_fwd_p = core.Primitive('bias_fwd')\nbias_fwd_p.multiple_results = False\nbias_bwd_p = core.Primitive('bias_bwd')\nbias_bwd_p.multiple_results = False\n\ndef bias_fwd_impl(a, query_head_num):\n return a\ndef bias_bwd_impl(g, a, query_head_num):\n return g\nbias_fwd_p.def_impl(bias_fwd_impl)\nbias_bwd_p.def_impl(bias_bwd_impl)\n\ndef bias_fwd_abstract_eval(a, query_head_num):\n return core.ShapedArray(a.shape, a.dtype)\ndef bias_bwd_abstract_eval(g, a, query_head_num):\n return core.ShapedArray(g.shape, g.dtype)\nbias_fwd_p.def_abstract_eval(bias_fwd_abstract_eval)\nbias_bwd_p.def_abstract_eval(bias_bwd_abstract_eval)\n",python,selection_command +6597,19121249,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,2362,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n 
kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n\nbias_fwd_p = core.Primitive('bias_fwd')\nbias_fwd_p.multiple_results = False\nbias_bwd_p = core.Primitive('bias_bwd')\nbias_bwd_p.multiple_results = False\n\ndef bias_fwd_impl(a, query_head_num):\n return a\ndef bias_bwd_impl(g, a, query_head_num):\n return g\nbias_fwd_p.def_impl(bias_fwd_impl)\nbias_bwd_p.def_impl(bias_bwd_impl)\n",python,selection_command +6598,19121283,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,2190,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n 
f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n\nbias_fwd_p = core.Primitive('bias_fwd')\nbias_fwd_p.multiple_results = False\nbias_bwd_p = core.Primitive('bias_bwd')\nbias_bwd_p.multiple_results = False\n",python,selection_command +6599,19121317,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,2037,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n",python,selection_command +6600,19121699,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,2190,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = 
jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n\nbias_fwd_p = core.Primitive('bias_fwd')\nbias_fwd_p.multiple_results = False\nbias_bwd_p = core.Primitive('bias_bwd')\nbias_bwd_p.multiple_results = False\n",python,selection_command +6601,19122641,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,2037,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(jax.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return 
output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n",python,selection_command +6602,19123000,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,1530,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n",python,selection_command +6603,19123789,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,1074,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n",python,selection_command +6604,19123921,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,342,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n",python,selection_command +6605,19124169,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,276,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n 
q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n",python,selection_command +6606,19124829,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27161,32,"\ndef _dot_product_attention_xla(",python,selection_command +6607,19125488,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,276,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n",python,selection_command +6608,19125860,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,342,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n",python,selection_command +6609,19127664,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,1074,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n",python,selection_command +6610,19127872,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,1530,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, 
res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n",python,selection_command +6611,19128495,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,1074,"def _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = jax.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n",python,selection_command +6612,19135011,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",28236,0,"",python,selection_command +6613,19138101,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",28692,0,"",python,selection_command +6614,19138408,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27504,0,"",python,selection_command +6615,19138571,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27438,0,"",python,selection_command +6616,19138818,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27161,0,"",python,selection_command +6617,19139051,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",29199,0,"",python,selection_command +6618,19139451,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",29352,0,"",python,selection_command +6619,19139623,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",29524,0,"",python,selection_command +6620,19139807,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",29816,0,"",python,selection_command +6621,19139982,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30048,0,"",python,selection_command +6622,19140130,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30608,0,"",python,selection_command +6623,19140296,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31112,0,"",python,selection_command +6624,19145754,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30608,0,"",python,selection_command +6625,19148490,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30048,0,"",python,selection_command 
+6626,19148951,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",29816,0,"",python,selection_command +6627,19149677,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",29524,0,"",python,selection_command +6628,19150180,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",29352,0,"",python,selection_command +6629,19150631,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",29199,0,"",python,selection_command +6630,19150855,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27161,0,"",python,selection_command +6631,19151014,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27438,0,"",python,selection_command +6632,19152225,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27504,0,"",python,selection_command +6633,19155473,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27491,0,"",python,selection_command +6634,19155724,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27466,0,"",python,selection_command +6635,19155752,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27439,0,"",python,selection_command +6636,19155771,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27438,0,"",python,selection_command +6637,19155806,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27390,0,"",python,selection_command +6638,19155839,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27361,0,"",python,selection_command +6639,19155876,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27333,0,"",python,selection_command +6640,19155911,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27315,0,"",python,selection_command +6641,19155944,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27294,0,"",python,selection_command +6642,19155978,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27270,0,"",python,selection_command +6643,19156011,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27246,0,"",python,selection_command +6644,19156043,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27228,0,"",python,selection_command +6645,19156205,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27212,0,"",python,selection_command +6646,19156344,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27194,0,"",python,selection_command +6647,19156508,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27162,0,"",python,selection_command +6648,19156640,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27166,0,"",python,selection_command +6649,19161897,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36353,26,"_dot_product_attention_xla",python,selection_command 
+6650,19162297,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36378,0,"",python,selection_command +6651,19164532,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34257,0,"",python,selection_command +6652,19165076,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31502,0,"",python,selection_command +6653,19165317,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",29866,0,"",python,selection_command +6654,19166059,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31502,0,"",python,selection_command +6655,19166498,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31497,0,"",python,selection_command +6656,19166734,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31468,0,"",python,selection_command +6657,19166791,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31394,0,"",python,selection_command +6658,19166794,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31389,0,"",python,selection_command +6659,19166828,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31343,0,"",python,selection_command +6660,19166854,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31275,0,"",python,selection_command +6661,19166922,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31270,0,"",python,selection_command +6662,19166927,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31199,0,"",python,selection_command +6663,19166955,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31194,0,"",python,selection_command +6664,19166994,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31186,0,"",python,selection_command +6665,19167052,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31181,0,"",python,selection_command +6666,19167061,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31117,0,"",python,selection_command +6667,19167088,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31112,0,"",python,selection_command +6668,19167181,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31071,0,"",python,selection_command +6669,19167182,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31002,0,"",python,selection_command +6670,19167208,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30942,0,"",python,selection_command +6671,19167233,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30890,0,"",python,selection_command +6672,19167253,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30842,0,"",python,selection_command +6673,19167312,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30813,0,"",python,selection_command 
+6674,19167319,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30781,0,"",python,selection_command +6675,19167356,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30774,0,"",python,selection_command +6676,19167442,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30739,0,"",python,selection_command +6677,19167442,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30704,0,"",python,selection_command +6678,19167454,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30682,0,"",python,selection_command +6679,19167615,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30662,0,"",python,selection_command +6680,19167786,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30640,0,"",python,selection_command +6681,19167931,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30613,0,"",python,selection_command +6682,19168224,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30609,26,"def dot_product_attention(",python,selection_command +6683,19168334,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30609,503,"def dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n",python,selection_command +6684,19168462,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30609,572,"def dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n",python,selection_command +6685,19168775,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30609,585,"def dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n\n .. 
math::\n",python,selection_command +6686,19169035,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30609,661,"def dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n\n .. math::\n\n \mathrm{Attention}(Q, K, V)=\mathrm{softmax}(\frac{QK^T}{\sqrt{d_k}})V\n",python,selection_command +6687,19169047,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30609,780,"def dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n\n .. math::\n\n \mathrm{Attention}(Q, K, V)=\mathrm{softmax}(\frac{QK^T}{\sqrt{d_k}})V\n\n If we define :code:`logits` as the output of :math:`QK^T` and the\n :code:`probs` as the output of :math:`softmax`.\n",python,selection_command +6688,19169161,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30609,888,"def dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n\n .. math::\n\n \mathrm{Attention}(Q, K, V)=\mathrm{softmax}(\frac{QK^T}{\sqrt{d_k}})V\n\n If we define :code:`logits` as the output of :math:`QK^T` and the\n :code:`probs` as the output of :math:`softmax`.\n\n Throughout this function, we utilize the following uppercase letters to\n represent the shape of array::\n",python,selection_command +6689,19169566,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30609,1145,"def dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n\n .. 
math::\n\n \mathrm{Attention}(Q, K, V)=\mathrm{softmax}(\frac{QK^T}{\sqrt{d_k}})V\n\n If we define :code:`logits` as the output of :math:`QK^T` and the\n :code:`probs` as the output of :math:`softmax`.\n\n Throughout this function, we utilize the following uppercase letters to\n represent the shape of array::\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head\n K = number of key/value heads\n G = number of groups, which equals to N // K\n",python,selection_command +6690,19169688,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30609,3553,"def dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n\n .. math::\n\n \mathrm{Attention}(Q, K, V)=\mathrm{softmax}(\frac{QK^T}{\sqrt{d_k}})V\n\n If we define :code:`logits` as the output of :math:`QK^T` and the\n :code:`probs` as the output of :math:`softmax`.\n\n Throughout this function, we utilize the following uppercase letters to\n represent the shape of array::\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head\n K = number of key/value heads\n G = number of groups, which equals to N // K\n\n Args:\n query: query array; shape :code:`(BTNH|TNH)`\n key: key array: shape :code:`(BSKH|SKH)`. When `K` equals `N`, multi-headed\n attention (MHA https://arxiv.org/abs/1706.03762) is performed. Otherwise,\n grouped query attention (GQA https://arxiv.org/abs/2305.13245) is\n performed if `N` is a multiple of `K`, and multi-query attention (MQA\n https://arxiv.org/abs/1911.02150) is performed if `K == 1` (a special case\n of GQA).\n value: value array, should have the same shape as the `key` array.\n bias: optional, bias array to be added to logits; The shape must be 4D and\n be broadcastable to :code:`(BNTS|NTS)`.\n mask: optional, mask array used to filter out logits. It is a boolean mask\n where `True` indicates the element should take part in attention. For an\n additive mask, users should pass it to `bias`. The shape must be 4D and be\n broadcastable to :code:`(BNTS|NTS)`.\n scale: scale for the logits. If None, the scale will be set to 1 divided by\n the square root of query's head dimension (i.e. H).\n is_causal: If true, causal attention will be applied. Note, some\n implementations like `xla` will generate a mask tensor and apply it to the\n logits to mask out the non-causal parts of the attention matrix, but other\n implementations like `cudnn` will avoid computing the non-causal regions,\n providing speedups.\n query_seq_lengths: `int32` array of sequence lengths for query; shape\n :code:`(B)`\n key_value_seq_lengths: `int32` array of sequence lengths for key and value;\n shape :code:`(B)`\n local_window_size: Window sizes to make self attention to attend to each\n token's local window. If set, this specifies the (left_window_size,\n right_window_size) for each token. 
E.g., if local_window_size == (3, 2)\n and the sequence is [0, 1, 2, 3, 4, 5, c, 7, 8, 9], token `c` can attend\n to [3, 4, 5, c, 7, 8]. If a single int is given, it will be interpreted as\n a symmetric window (window_size, window_size).\n implementation: A string to control which implementation backend to use.\n Supported strings are `xla`, `cudnn` (cuDNN flash attention). It defaults\n to `None`, which will automatically select the best available backend.\n Note, `cudnn` supports only a subset of shapes/dtypes, and an exception\n will be thrown if its not supported.\n",python,selection_command +6691,19169942,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30609,3862,"def dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n\n .. math::\n\n \mathrm{Attention}(Q, K, V)=\mathrm{softmax}(\frac{QK^T}{\sqrt{d_k}})V\n\n If we define :code:`logits` as the output of :math:`QK^T` and the\n :code:`probs` as the output of :math:`softmax`.\n\n Throughout this function, we utilize the following uppercase letters to\n represent the shape of array::\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head\n K = number of key/value heads\n G = number of groups, which equals to N // K\n\n Args:\n query: query array; shape :code:`(BTNH|TNH)`\n key: key array: shape :code:`(BSKH|SKH)`. When `K` equals `N`, multi-headed\n attention (MHA https://arxiv.org/abs/1706.03762) is performed. Otherwise,\n grouped query attention (GQA https://arxiv.org/abs/2305.13245) is\n performed if `N` is a multiple of `K`, and multi-query attention (MQA\n https://arxiv.org/abs/1911.02150) is performed if `K == 1` (a special case\n of GQA).\n value: value array, should have the same shape as the `key` array.\n bias: optional, bias array to be added to logits; The shape must be 4D and\n be broadcastable to :code:`(BNTS|NTS)`.\n mask: optional, mask array used to filter out logits. It is a boolean mask\n where `True` indicates the element should take part in attention. For an\n additive mask, users should pass it to `bias`. The shape must be 4D and be\n broadcastable to :code:`(BNTS|NTS)`.\n scale: scale for the logits. If None, the scale will be set to 1 divided by\n the square root of query's head dimension (i.e. H).\n is_causal: If true, causal attention will be applied. Note, some\n implementations like `xla` will generate a mask tensor and apply it to the\n logits to mask out the non-causal parts of the attention matrix, but other\n implementations like `cudnn` will avoid computing the non-causal regions,\n providing speedups.\n query_seq_lengths: `int32` array of sequence lengths for query; shape\n :code:`(B)`\n key_value_seq_lengths: `int32` array of sequence lengths for key and value;\n shape :code:`(B)`\n local_window_size: Window sizes to make self attention to attend to each\n token's local window. If set, this specifies the (left_window_size,\n right_window_size) for each token. 
E.g., if local_window_size == (3, 2)\n and the sequence is [0, 1, 2, 3, 4, 5, c, 7, 8, 9], token `c` can attend\n to [3, 4, 5, c, 7, 8]. If a single int is given, it will be interpreted as\n a symmetric window (window_size, window_size).\n implementation: A string to control which implementation backend to use.\n Supported strings are `xla`, `cudnn` (cuDNN flash attention). It defaults\n to `None`, which will automatically select the best available backend.\n Note, `cudnn` supports only a subset of shapes/dtypes, and an exception\n will be thrown if its not supported.\n\n Returns:\n An array of the attention output with the same shape as :code:`query`.\n """"""\n output_shape = jnp.asarray(query).shape\n def _ensure_4d(t):\n t = jnp.asarray(t)\n dims_to_add = 4 - t.ndim\n if dims_to_add > 0:\n return jnp.expand_dims(t, axis=tuple(range(dims_to_add)))\n return t\n",python,selection_command +6692,19169969,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30609,4365,"def dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n\n .. math::\n\n \mathrm{Attention}(Q, K, V)=\mathrm{softmax}(\frac{QK^T}{\sqrt{d_k}})V\n\n If we define :code:`logits` as the output of :math:`QK^T` and the\n :code:`probs` as the output of :math:`softmax`.\n\n Throughout this function, we utilize the following uppercase letters to\n represent the shape of array::\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head\n K = number of key/value heads\n G = number of groups, which equals to N // K\n\n Args:\n query: query array; shape :code:`(BTNH|TNH)`\n key: key array: shape :code:`(BSKH|SKH)`. When `K` equals `N`, multi-headed\n attention (MHA https://arxiv.org/abs/1706.03762) is performed. Otherwise,\n grouped query attention (GQA https://arxiv.org/abs/2305.13245) is\n performed if `N` is a multiple of `K`, and multi-query attention (MQA\n https://arxiv.org/abs/1911.02150) is performed if `K == 1` (a special case\n of GQA).\n value: value array, should have the same shape as the `key` array.\n bias: optional, bias array to be added to logits; The shape must be 4D and\n be broadcastable to :code:`(BNTS|NTS)`.\n mask: optional, mask array used to filter out logits. It is a boolean mask\n where `True` indicates the element should take part in attention. For an\n additive mask, users should pass it to `bias`. The shape must be 4D and be\n broadcastable to :code:`(BNTS|NTS)`.\n scale: scale for the logits. If None, the scale will be set to 1 divided by\n the square root of query's head dimension (i.e. H).\n is_causal: If true, causal attention will be applied. 
Note, some\n implementations like `xla` will generate a mask tensor and apply it to the\n logits to mask out the non-causal parts of the attention matrix, but other\n implementations like `cudnn` will avoid computing the non-causal regions,\n providing speedups.\n query_seq_lengths: `int32` array of sequence lengths for query; shape\n :code:`(B)`\n key_value_seq_lengths: `int32` array of sequence lengths for key and value;\n shape :code:`(B)`\n local_window_size: Window sizes to make self attention to attend to each\n token's local window. If set, this specifies the (left_window_size,\n right_window_size) for each token. E.g., if local_window_size == (3, 2)\n and the sequence is [0, 1, 2, 3, 4, 5, c, 7, 8, 9], token `c` can attend\n to [3, 4, 5, c, 7, 8]. If a single int is given, it will be interpreted as\n a symmetric window (window_size, window_size).\n implementation: A string to control which implementation backend to use.\n Supported strings are `xla`, `cudnn` (cuDNN flash attention). It defaults\n to `None`, which will automatically select the best available backend.\n Note, `cudnn` supports only a subset of shapes/dtypes, and an exception\n will be thrown if its not supported.\n\n Returns:\n An array of the attention output with the same shape as :code:`query`.\n """"""\n output_shape = jnp.asarray(query).shape\n def _ensure_4d(t):\n t = jnp.asarray(t)\n dims_to_add = 4 - t.ndim\n if dims_to_add > 0:\n return jnp.expand_dims(t, axis=tuple(range(dims_to_add)))\n return t\n\n query_arr = _ensure_4d(query)\n key_arr = _ensure_4d(key)\n value_arr = _ensure_4d(value)\n bias = _ensure_4d(bias) if bias is not None else None\n mask = _ensure_4d(mask) if mask is not None else None\n if query_seq_lengths is not None:\n query_seq_lengths = jnp.asarray(query_seq_lengths)\n if key_value_seq_lengths is not None:\n key_value_seq_lengths = jnp.asarray(key_value_seq_lengths)\n if isinstance(local_window_size, int):\n local_window_size = (local_window_size, local_window_size)\n",python,selection_command +6693,19169999,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30609,4927,"def dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n\n .. math::\n\n \mathrm{Attention}(Q, K, V)=\mathrm{softmax}(\frac{QK^T}{\sqrt{d_k}})V\n\n If we define :code:`logits` as the output of :math:`QK^T` and the\n :code:`probs` as the output of :math:`softmax`.\n\n Throughout this function, we utilize the following uppercase letters to\n represent the shape of array::\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head\n K = number of key/value heads\n G = number of groups, which equals to N // K\n\n Args:\n query: query array; shape :code:`(BTNH|TNH)`\n key: key array: shape :code:`(BSKH|SKH)`. When `K` equals `N`, multi-headed\n attention (MHA https://arxiv.org/abs/1706.03762) is performed. 
Otherwise,\n grouped query attention (GQA https://arxiv.org/abs/2305.13245) is\n performed if `N` is a multiple of `K`, and multi-query attention (MQA\n https://arxiv.org/abs/1911.02150) is performed if `K == 1` (a special case\n of GQA).\n value: value array, should have the same shape as the `key` array.\n bias: optional, bias array to be added to logits; The shape must be 4D and\n be broadcastable to :code:`(BNTS|NTS)`.\n mask: optional, mask array used to filter out logits. It is a boolean mask\n where `True` indicates the element should take part in attention. For an\n additive mask, users should pass it to `bias`. The shape must be 4D and be\n broadcastable to :code:`(BNTS|NTS)`.\n scale: scale for the logits. If None, the scale will be set to 1 divided by\n the square root of query's head dimension (i.e. H).\n is_causal: If true, causal attention will be applied. Note, some\n implementations like `xla` will generate a mask tensor and apply it to the\n logits to mask out the non-causal parts of the attention matrix, but other\n implementations like `cudnn` will avoid computing the non-causal regions,\n providing speedups.\n query_seq_lengths: `int32` array of sequence lengths for query; shape\n :code:`(B)`\n key_value_seq_lengths: `int32` array of sequence lengths for key and value;\n shape :code:`(B)`\n local_window_size: Window sizes to make self attention to attend to each\n token's local window. If set, this specifies the (left_window_size,\n right_window_size) for each token. E.g., if local_window_size == (3, 2)\n and the sequence is [0, 1, 2, 3, 4, 5, c, 7, 8, 9], token `c` can attend\n to [3, 4, 5, c, 7, 8]. If a single int is given, it will be interpreted as\n a symmetric window (window_size, window_size).\n implementation: A string to control which implementation backend to use.\n Supported strings are `xla`, `cudnn` (cuDNN flash attention). 
It defaults\n to `None`, which will automatically select the best available backend.\n Note, `cudnn` supports only a subset of shapes/dtypes, and an exception\n will be thrown if its not supported.\n\n Returns:\n An array of the attention output with the same shape as :code:`query`.\n """"""\n output_shape = jnp.asarray(query).shape\n def _ensure_4d(t):\n t = jnp.asarray(t)\n dims_to_add = 4 - t.ndim\n if dims_to_add > 0:\n return jnp.expand_dims(t, axis=tuple(range(dims_to_add)))\n return t\n\n query_arr = _ensure_4d(query)\n key_arr = _ensure_4d(key)\n value_arr = _ensure_4d(value)\n bias = _ensure_4d(bias) if bias is not None else None\n mask = _ensure_4d(mask) if mask is not None else None\n if query_seq_lengths is not None:\n query_seq_lengths = jnp.asarray(query_seq_lengths)\n if key_value_seq_lengths is not None:\n key_value_seq_lengths = jnp.asarray(key_value_seq_lengths)\n if isinstance(local_window_size, int):\n local_window_size = (local_window_size, local_window_size)\n\n def _check_shape_and_dtype(t: Array | None, shape: Sequence[int],\n dtype: DType | None, name: str) -> None:\n if t is None:\n return\n if t.ndim != len(shape):\n raise ValueError(f""{name} ndim should be {len(shape)}, but got {t.ndim}"")\n if dtype is not None and t.dtype != dtype:\n raise ValueError(f""{name} dtype should be {dtype}, but got {t.dtype}"")\n for i in range(t.ndim):\n if shape[i] != -1 and t.shape[i] != shape[i]:\n raise ValueError(f""{name} shape should be {shape}: but got {t.shape}"")\n",python,selection_command +6694,19170038,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30609,5629,"def dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n\n .. math::\n\n \mathrm{Attention}(Q, K, V)=\mathrm{softmax}(\frac{QK^T}{\sqrt{d_k}})V\n\n If we define :code:`logits` as the output of :math:`QK^T` and the\n :code:`probs` as the output of :math:`softmax`.\n\n Throughout this function, we utilize the following uppercase letters to\n represent the shape of array::\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head\n K = number of key/value heads\n G = number of groups, which equals to N // K\n\n Args:\n query: query array; shape :code:`(BTNH|TNH)`\n key: key array: shape :code:`(BSKH|SKH)`. When `K` equals `N`, multi-headed\n attention (MHA https://arxiv.org/abs/1706.03762) is performed. Otherwise,\n grouped query attention (GQA https://arxiv.org/abs/2305.13245) is\n performed if `N` is a multiple of `K`, and multi-query attention (MQA\n https://arxiv.org/abs/1911.02150) is performed if `K == 1` (a special case\n of GQA).\n value: value array, should have the same shape as the `key` array.\n bias: optional, bias array to be added to logits; The shape must be 4D and\n be broadcastable to :code:`(BNTS|NTS)`.\n mask: optional, mask array used to filter out logits. It is a boolean mask\n where `True` indicates the element should take part in attention. 
For an\n additive mask, users should pass it to `bias`. The shape must be 4D and be\n broadcastable to :code:`(BNTS|NTS)`.\n scale: scale for the logits. If None, the scale will be set to 1 divided by\n the square root of query's head dimension (i.e. H).\n is_causal: If true, causal attention will be applied. Note, some\n implementations like `xla` will generate a mask tensor and apply it to the\n logits to mask out the non-causal parts of the attention matrix, but other\n implementations like `cudnn` will avoid computing the non-causal regions,\n providing speedups.\n query_seq_lengths: `int32` array of sequence lengths for query; shape\n :code:`(B)`\n key_value_seq_lengths: `int32` array of sequence lengths for key and value;\n shape :code:`(B)`\n local_window_size: Window sizes to make self attention to attend to each\n token's local window. If set, this specifies the (left_window_size,\n right_window_size) for each token. E.g., if local_window_size == (3, 2)\n and the sequence is [0, 1, 2, 3, 4, 5, c, 7, 8, 9], token `c` can attend\n to [3, 4, 5, c, 7, 8]. If a single int is given, it will be interpreted as\n a symmetric window (window_size, window_size).\n implementation: A string to control which implementation backend to use.\n Supported strings are `xla`, `cudnn` (cuDNN flash attention). It defaults\n to `None`, which will automatically select the best available backend.\n Note, `cudnn` supports only a subset of shapes/dtypes, and an exception\n will be thrown if its not supported.\n\n Returns:\n An array of the attention output with the same shape as :code:`query`.\n """"""\n output_shape = jnp.asarray(query).shape\n def _ensure_4d(t):\n t = jnp.asarray(t)\n dims_to_add = 4 - t.ndim\n if dims_to_add > 0:\n return jnp.expand_dims(t, axis=tuple(range(dims_to_add)))\n return t\n\n query_arr = _ensure_4d(query)\n key_arr = _ensure_4d(key)\n value_arr = _ensure_4d(value)\n bias = _ensure_4d(bias) if bias is not None else None\n mask = _ensure_4d(mask) if mask is not None else None\n if query_seq_lengths is not None:\n query_seq_lengths = jnp.asarray(query_seq_lengths)\n if key_value_seq_lengths is not None:\n key_value_seq_lengths = jnp.asarray(key_value_seq_lengths)\n if isinstance(local_window_size, int):\n local_window_size = (local_window_size, local_window_size)\n\n def _check_shape_and_dtype(t: Array | None, shape: Sequence[int],\n dtype: DType | None, name: str) -> None:\n if t is None:\n return\n if t.ndim != len(shape):\n raise ValueError(f""{name} ndim should be {len(shape)}, but got {t.ndim}"")\n if dtype is not None and t.dtype != dtype:\n raise ValueError(f""{name} dtype should be {dtype}, but got {t.dtype}"")\n for i in range(t.ndim):\n if shape[i] != -1 and t.shape[i] != shape[i]:\n raise ValueError(f""{name} shape should be {shape}: but got {t.shape}"")\n\n B, S, K, H = key_arr.shape\n _check_shape_and_dtype(value_arr, [B, S, K, H], key_arr.dtype, 'value')\n _check_shape_and_dtype(query_arr, [B, -1, -1, H], key_arr.dtype, 'query')\n _check_shape_and_dtype(mask, [-1] * 4, jnp.bool_, 'mask')\n _check_shape_and_dtype(bias, [-1] * 4, None, 'bias')\n _check_shape_and_dtype(query_seq_lengths, [B], jnp.int32,\n 'query_seq_lengths')\n _check_shape_and_dtype(key_value_seq_lengths, [B], jnp.int32,\n 'key_value_seq_lengths')\n if query_arr.shape[-2] % K != 0:\n raise ValueError(f""The number of query heads must be a multiple of ""\n f""key/value heads, but got {query_arr.shape[-2]} vs {K}"")\n",python,selection_command 
+6695,19170457,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30609,5691,"def dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n\n .. math::\n\n \mathrm{Attention}(Q, K, V)=\mathrm{softmax}(\frac{QK^T}{\sqrt{d_k}})V\n\n If we define :code:`logits` as the output of :math:`QK^T` and the\n :code:`probs` as the output of :math:`softmax`.\n\n Throughout this function, we utilize the following uppercase letters to\n represent the shape of array::\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head\n K = number of key/value heads\n G = number of groups, which equals to N // K\n\n Args:\n query: query array; shape :code:`(BTNH|TNH)`\n key: key array: shape :code:`(BSKH|SKH)`. When `K` equals `N`, multi-headed\n attention (MHA https://arxiv.org/abs/1706.03762) is performed. Otherwise,\n grouped query attention (GQA https://arxiv.org/abs/2305.13245) is\n performed if `N` is a multiple of `K`, and multi-query attention (MQA\n https://arxiv.org/abs/1911.02150) is performed if `K == 1` (a special case\n of GQA).\n value: value array, should have the same shape as the `key` array.\n bias: optional, bias array to be added to logits; The shape must be 4D and\n be broadcastable to :code:`(BNTS|NTS)`.\n mask: optional, mask array used to filter out logits. It is a boolean mask\n where `True` indicates the element should take part in attention. For an\n additive mask, users should pass it to `bias`. The shape must be 4D and be\n broadcastable to :code:`(BNTS|NTS)`.\n scale: scale for the logits. If None, the scale will be set to 1 divided by\n the square root of query's head dimension (i.e. H).\n is_causal: If true, causal attention will be applied. Note, some\n implementations like `xla` will generate a mask tensor and apply it to the\n logits to mask out the non-causal parts of the attention matrix, but other\n implementations like `cudnn` will avoid computing the non-causal regions,\n providing speedups.\n query_seq_lengths: `int32` array of sequence lengths for query; shape\n :code:`(B)`\n key_value_seq_lengths: `int32` array of sequence lengths for key and value;\n shape :code:`(B)`\n local_window_size: Window sizes to make self attention to attend to each\n token's local window. If set, this specifies the (left_window_size,\n right_window_size) for each token. E.g., if local_window_size == (3, 2)\n and the sequence is [0, 1, 2, 3, 4, 5, c, 7, 8, 9], token `c` can attend\n to [3, 4, 5, c, 7, 8]. If a single int is given, it will be interpreted as\n a symmetric window (window_size, window_size).\n implementation: A string to control which implementation backend to use.\n Supported strings are `xla`, `cudnn` (cuDNN flash attention). 
It defaults\n to `None`, which will automatically select the best available backend.\n Note, `cudnn` supports only a subset of shapes/dtypes, and an exception\n will be thrown if its not supported.\n\n Returns:\n An array of the attention output with the same shape as :code:`query`.\n """"""\n output_shape = jnp.asarray(query).shape\n def _ensure_4d(t):\n t = jnp.asarray(t)\n dims_to_add = 4 - t.ndim\n if dims_to_add > 0:\n return jnp.expand_dims(t, axis=tuple(range(dims_to_add)))\n return t\n\n query_arr = _ensure_4d(query)\n key_arr = _ensure_4d(key)\n value_arr = _ensure_4d(value)\n bias = _ensure_4d(bias) if bias is not None else None\n mask = _ensure_4d(mask) if mask is not None else None\n if query_seq_lengths is not None:\n query_seq_lengths = jnp.asarray(query_seq_lengths)\n if key_value_seq_lengths is not None:\n key_value_seq_lengths = jnp.asarray(key_value_seq_lengths)\n if isinstance(local_window_size, int):\n local_window_size = (local_window_size, local_window_size)\n\n def _check_shape_and_dtype(t: Array | None, shape: Sequence[int],\n dtype: DType | None, name: str) -> None:\n if t is None:\n return\n if t.ndim != len(shape):\n raise ValueError(f""{name} ndim should be {len(shape)}, but got {t.ndim}"")\n if dtype is not None and t.dtype != dtype:\n raise ValueError(f""{name} dtype should be {dtype}, but got {t.dtype}"")\n for i in range(t.ndim):\n if shape[i] != -1 and t.shape[i] != shape[i]:\n raise ValueError(f""{name} shape should be {shape}: but got {t.shape}"")\n\n B, S, K, H = key_arr.shape\n _check_shape_and_dtype(value_arr, [B, S, K, H], key_arr.dtype, 'value')\n _check_shape_and_dtype(query_arr, [B, -1, -1, H], key_arr.dtype, 'query')\n _check_shape_and_dtype(mask, [-1] * 4, jnp.bool_, 'mask')\n _check_shape_and_dtype(bias, [-1] * 4, None, 'bias')\n _check_shape_and_dtype(query_seq_lengths, [B], jnp.int32,\n 'query_seq_lengths')\n _check_shape_and_dtype(key_value_seq_lengths, [B], jnp.int32,\n 'key_value_seq_lengths')\n if query_arr.shape[-2] % K != 0:\n raise ValueError(f""The number of query heads must be a multiple of ""\n f""key/value heads, but got {query_arr.shape[-2]} vs {K}"")\n\n scale_val = (1.0 / np.sqrt(H)) if scale is None else scale\n",python,selection_command +6696,19171146,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30609,6519,"def dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n\n .. 
math::\n\n \mathrm{Attention}(Q, K, V)=\mathrm{softmax}(\frac{QK^T}{\sqrt{d_k}})V\n\n If we define :code:`logits` as the output of :math:`QK^T` and the\n :code:`probs` as the output of :math:`softmax`.\n\n Throughout this function, we utilize the following uppercase letters to\n represent the shape of array::\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head\n K = number of key/value heads\n G = number of groups, which equals to N // K\n\n Args:\n query: query array; shape :code:`(BTNH|TNH)`\n key: key array: shape :code:`(BSKH|SKH)`. When `K` equals `N`, multi-headed\n attention (MHA https://arxiv.org/abs/1706.03762) is performed. Otherwise,\n grouped query attention (GQA https://arxiv.org/abs/2305.13245) is\n performed if `N` is a multiple of `K`, and multi-query attention (MQA\n https://arxiv.org/abs/1911.02150) is performed if `K == 1` (a special case\n of GQA).\n value: value array, should have the same shape as the `key` array.\n bias: optional, bias array to be added to logits; The shape must be 4D and\n be broadcastable to :code:`(BNTS|NTS)`.\n mask: optional, mask array used to filter out logits. It is a boolean mask\n where `True` indicates the element should take part in attention. For an\n additive mask, users should pass it to `bias`. The shape must be 4D and be\n broadcastable to :code:`(BNTS|NTS)`.\n scale: scale for the logits. If None, the scale will be set to 1 divided by\n the square root of query's head dimension (i.e. H).\n is_causal: If true, causal attention will be applied. Note, some\n implementations like `xla` will generate a mask tensor and apply it to the\n logits to mask out the non-causal parts of the attention matrix, but other\n implementations like `cudnn` will avoid computing the non-causal regions,\n providing speedups.\n query_seq_lengths: `int32` array of sequence lengths for query; shape\n :code:`(B)`\n key_value_seq_lengths: `int32` array of sequence lengths for key and value;\n shape :code:`(B)`\n local_window_size: Window sizes to make self attention to attend to each\n token's local window. If set, this specifies the (left_window_size,\n right_window_size) for each token. E.g., if local_window_size == (3, 2)\n and the sequence is [0, 1, 2, 3, 4, 5, c, 7, 8, 9], token `c` can attend\n to [3, 4, 5, c, 7, 8]. If a single int is given, it will be interpreted as\n a symmetric window (window_size, window_size).\n implementation: A string to control which implementation backend to use.\n Supported strings are `xla`, `cudnn` (cuDNN flash attention). 
It defaults\n to `None`, which will automatically select the best available backend.\n Note, `cudnn` supports only a subset of shapes/dtypes, and an exception\n will be thrown if its not supported.\n\n Returns:\n An array of the attention output with the same shape as :code:`query`.\n """"""\n output_shape = jnp.asarray(query).shape\n def _ensure_4d(t):\n t = jnp.asarray(t)\n dims_to_add = 4 - t.ndim\n if dims_to_add > 0:\n return jnp.expand_dims(t, axis=tuple(range(dims_to_add)))\n return t\n\n query_arr = _ensure_4d(query)\n key_arr = _ensure_4d(key)\n value_arr = _ensure_4d(value)\n bias = _ensure_4d(bias) if bias is not None else None\n mask = _ensure_4d(mask) if mask is not None else None\n if query_seq_lengths is not None:\n query_seq_lengths = jnp.asarray(query_seq_lengths)\n if key_value_seq_lengths is not None:\n key_value_seq_lengths = jnp.asarray(key_value_seq_lengths)\n if isinstance(local_window_size, int):\n local_window_size = (local_window_size, local_window_size)\n\n def _check_shape_and_dtype(t: Array | None, shape: Sequence[int],\n dtype: DType | None, name: str) -> None:\n if t is None:\n return\n if t.ndim != len(shape):\n raise ValueError(f""{name} ndim should be {len(shape)}, but got {t.ndim}"")\n if dtype is not None and t.dtype != dtype:\n raise ValueError(f""{name} dtype should be {dtype}, but got {t.dtype}"")\n for i in range(t.ndim):\n if shape[i] != -1 and t.shape[i] != shape[i]:\n raise ValueError(f""{name} shape should be {shape}: but got {t.shape}"")\n\n B, S, K, H = key_arr.shape\n _check_shape_and_dtype(value_arr, [B, S, K, H], key_arr.dtype, 'value')\n _check_shape_and_dtype(query_arr, [B, -1, -1, H], key_arr.dtype, 'query')\n _check_shape_and_dtype(mask, [-1] * 4, jnp.bool_, 'mask')\n _check_shape_and_dtype(bias, [-1] * 4, None, 'bias')\n _check_shape_and_dtype(query_seq_lengths, [B], jnp.int32,\n 'query_seq_lengths')\n _check_shape_and_dtype(key_value_seq_lengths, [B], jnp.int32,\n 'key_value_seq_lengths')\n if query_arr.shape[-2] % K != 0:\n raise ValueError(f""The number of query heads must be a multiple of ""\n f""key/value heads, but got {query_arr.shape[-2]} vs {K}"")\n\n scale_val = (1.0 / np.sqrt(H)) if scale is None else scale\n\n match implementation:\n case 'xla':\n out = _dot_product_attention_xla(\n query_arr, key_arr, value_arr, bias, mask, is_causal=is_causal,\n scale=scale_val, q_seqlen=query_seq_lengths,\n kv_seqlen=key_value_seq_lengths,\n local_window_size=local_window_size,\n )\n case 'cudnn':\n if bias is not None:\n bias = check_valid_bias_batch(bias, query_arr.shape[-2])\n bias = jnp.asarray(bias)\n use_padding = (\n query_seq_lengths is not None or key_value_seq_lengths is not None\n )\n if use_padding:\n if query_seq_lengths is None:\n T = query_arr.shape[1]\n query_seq_lengths = jnp.full((B,), T, dtype=jnp.int32)\n if key_value_seq_lengths is None:\n key_value_seq_lengths = jnp.full((B,), S, dtype=jnp.int32)\n",python,selection_command +6697,19171411,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30609,7227,"def dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention 
function.\n\n Computes the attention function on Query, Key, and Value tensors:\n\n .. math::\n\n \mathrm{Attention}(Q, K, V)=\mathrm{softmax}(\frac{QK^T}{\sqrt{d_k}})V\n\n If we define :code:`logits` as the output of :math:`QK^T` and the\n :code:`probs` as the output of :math:`softmax`.\n\n Throughout this function, we utilize the following uppercase letters to\n represent the shape of array::\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head\n K = number of key/value heads\n G = number of groups, which equals to N // K\n\n Args:\n query: query array; shape :code:`(BTNH|TNH)`\n key: key array: shape :code:`(BSKH|SKH)`. When `K` equals `N`, multi-headed\n attention (MHA https://arxiv.org/abs/1706.03762) is performed. Otherwise,\n grouped query attention (GQA https://arxiv.org/abs/2305.13245) is\n performed if `N` is a multiple of `K`, and multi-query attention (MQA\n https://arxiv.org/abs/1911.02150) is performed if `K == 1` (a special case\n of GQA).\n value: value array, should have the same shape as the `key` array.\n bias: optional, bias array to be added to logits; The shape must be 4D and\n be broadcastable to :code:`(BNTS|NTS)`.\n mask: optional, mask array used to filter out logits. It is a boolean mask\n where `True` indicates the element should take part in attention. For an\n additive mask, users should pass it to `bias`. The shape must be 4D and be\n broadcastable to :code:`(BNTS|NTS)`.\n scale: scale for the logits. If None, the scale will be set to 1 divided by\n the square root of query's head dimension (i.e. H).\n is_causal: If true, causal attention will be applied. Note, some\n implementations like `xla` will generate a mask tensor and apply it to the\n logits to mask out the non-causal parts of the attention matrix, but other\n implementations like `cudnn` will avoid computing the non-causal regions,\n providing speedups.\n query_seq_lengths: `int32` array of sequence lengths for query; shape\n :code:`(B)`\n key_value_seq_lengths: `int32` array of sequence lengths for key and value;\n shape :code:`(B)`\n local_window_size: Window sizes to make self attention to attend to each\n token's local window. If set, this specifies the (left_window_size,\n right_window_size) for each token. E.g., if local_window_size == (3, 2)\n and the sequence is [0, 1, 2, 3, 4, 5, c, 7, 8, 9], token `c` can attend\n to [3, 4, 5, c, 7, 8]. If a single int is given, it will be interpreted as\n a symmetric window (window_size, window_size).\n implementation: A string to control which implementation backend to use.\n Supported strings are `xla`, `cudnn` (cuDNN flash attention). 
It defaults\n to `None`, which will automatically select the best available backend.\n Note, `cudnn` supports only a subset of shapes/dtypes, and an exception\n will be thrown if its not supported.\n\n Returns:\n An array of the attention output with the same shape as :code:`query`.\n """"""\n output_shape = jnp.asarray(query).shape\n def _ensure_4d(t):\n t = jnp.asarray(t)\n dims_to_add = 4 - t.ndim\n if dims_to_add > 0:\n return jnp.expand_dims(t, axis=tuple(range(dims_to_add)))\n return t\n\n query_arr = _ensure_4d(query)\n key_arr = _ensure_4d(key)\n value_arr = _ensure_4d(value)\n bias = _ensure_4d(bias) if bias is not None else None\n mask = _ensure_4d(mask) if mask is not None else None\n if query_seq_lengths is not None:\n query_seq_lengths = jnp.asarray(query_seq_lengths)\n if key_value_seq_lengths is not None:\n key_value_seq_lengths = jnp.asarray(key_value_seq_lengths)\n if isinstance(local_window_size, int):\n local_window_size = (local_window_size, local_window_size)\n\n def _check_shape_and_dtype(t: Array | None, shape: Sequence[int],\n dtype: DType | None, name: str) -> None:\n if t is None:\n return\n if t.ndim != len(shape):\n raise ValueError(f""{name} ndim should be {len(shape)}, but got {t.ndim}"")\n if dtype is not None and t.dtype != dtype:\n raise ValueError(f""{name} dtype should be {dtype}, but got {t.dtype}"")\n for i in range(t.ndim):\n if shape[i] != -1 and t.shape[i] != shape[i]:\n raise ValueError(f""{name} shape should be {shape}: but got {t.shape}"")\n\n B, S, K, H = key_arr.shape\n _check_shape_and_dtype(value_arr, [B, S, K, H], key_arr.dtype, 'value')\n _check_shape_and_dtype(query_arr, [B, -1, -1, H], key_arr.dtype, 'query')\n _check_shape_and_dtype(mask, [-1] * 4, jnp.bool_, 'mask')\n _check_shape_and_dtype(bias, [-1] * 4, None, 'bias')\n _check_shape_and_dtype(query_seq_lengths, [B], jnp.int32,\n 'query_seq_lengths')\n _check_shape_and_dtype(key_value_seq_lengths, [B], jnp.int32,\n 'key_value_seq_lengths')\n if query_arr.shape[-2] % K != 0:\n raise ValueError(f""The number of query heads must be a multiple of ""\n f""key/value heads, but got {query_arr.shape[-2]} vs {K}"")\n\n scale_val = (1.0 / np.sqrt(H)) if scale is None else scale\n\n match implementation:\n case 'xla':\n out = _dot_product_attention_xla(\n query_arr, key_arr, value_arr, bias, mask, is_causal=is_causal,\n scale=scale_val, q_seqlen=query_seq_lengths,\n kv_seqlen=key_value_seq_lengths,\n local_window_size=local_window_size,\n )\n case 'cudnn':\n if bias is not None:\n bias = check_valid_bias_batch(bias, query_arr.shape[-2])\n bias = jnp.asarray(bias)\n use_padding = (\n query_seq_lengths is not None or key_value_seq_lengths is not None\n )\n if use_padding:\n if query_seq_lengths is None:\n T = query_arr.shape[1]\n query_seq_lengths = jnp.full((B,), T, dtype=jnp.int32)\n if key_value_seq_lengths is None:\n key_value_seq_lengths = jnp.full((B,), S, dtype=jnp.int32)\n\n mask_type = MaskType.NO_MASK\n if use_padding and is_causal:\n mask_type = MaskType.PADDING_CAUSAL\n elif is_causal:\n mask_type = MaskType.CAUSAL\n elif use_padding:\n mask_type = MaskType.PADDING\n # CuDNN supports only the left window with an exclusive boundary when\n # causal mask is enabled.\n sliding_window = None\n if local_window_size is not None:\n l_window, r_window = local_window_size\n if r_window == 0 or mask_type == MaskType.CAUSAL:\n sliding_window = l_window + 1\n else:\n raise ValueError(f""cuDNN doesn't support right window: {r_window} ""\n ""when causal mask is not used."")\n",python,selection_command 
+6698,19171924,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30609,7942,"def dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n\n .. math::\n\n \mathrm{Attention}(Q, K, V)=\mathrm{softmax}(\frac{QK^T}{\sqrt{d_k}})V\n\n If we define :code:`logits` as the output of :math:`QK^T` and the\n :code:`probs` as the output of :math:`softmax`.\n\n Throughout this function, we utilize the following uppercase letters to\n represent the shape of array::\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head\n K = number of key/value heads\n G = number of groups, which equals to N // K\n\n Args:\n query: query array; shape :code:`(BTNH|TNH)`\n key: key array: shape :code:`(BSKH|SKH)`. When `K` equals `N`, multi-headed\n attention (MHA https://arxiv.org/abs/1706.03762) is performed. Otherwise,\n grouped query attention (GQA https://arxiv.org/abs/2305.13245) is\n performed if `N` is a multiple of `K`, and multi-query attention (MQA\n https://arxiv.org/abs/1911.02150) is performed if `K == 1` (a special case\n of GQA).\n value: value array, should have the same shape as the `key` array.\n bias: optional, bias array to be added to logits; The shape must be 4D and\n be broadcastable to :code:`(BNTS|NTS)`.\n mask: optional, mask array used to filter out logits. It is a boolean mask\n where `True` indicates the element should take part in attention. For an\n additive mask, users should pass it to `bias`. The shape must be 4D and be\n broadcastable to :code:`(BNTS|NTS)`.\n scale: scale for the logits. If None, the scale will be set to 1 divided by\n the square root of query's head dimension (i.e. H).\n is_causal: If true, causal attention will be applied. Note, some\n implementations like `xla` will generate a mask tensor and apply it to the\n logits to mask out the non-causal parts of the attention matrix, but other\n implementations like `cudnn` will avoid computing the non-causal regions,\n providing speedups.\n query_seq_lengths: `int32` array of sequence lengths for query; shape\n :code:`(B)`\n key_value_seq_lengths: `int32` array of sequence lengths for key and value;\n shape :code:`(B)`\n local_window_size: Window sizes to make self attention to attend to each\n token's local window. If set, this specifies the (left_window_size,\n right_window_size) for each token. E.g., if local_window_size == (3, 2)\n and the sequence is [0, 1, 2, 3, 4, 5, c, 7, 8, 9], token `c` can attend\n to [3, 4, 5, c, 7, 8]. If a single int is given, it will be interpreted as\n a symmetric window (window_size, window_size).\n implementation: A string to control which implementation backend to use.\n Supported strings are `xla`, `cudnn` (cuDNN flash attention). 
It defaults\n to `None`, which will automatically select the best available backend.\n Note, `cudnn` supports only a subset of shapes/dtypes, and an exception\n will be thrown if its not supported.\n\n Returns:\n An array of the attention output with the same shape as :code:`query`.\n """"""\n output_shape = jnp.asarray(query).shape\n def _ensure_4d(t):\n t = jnp.asarray(t)\n dims_to_add = 4 - t.ndim\n if dims_to_add > 0:\n return jnp.expand_dims(t, axis=tuple(range(dims_to_add)))\n return t\n\n query_arr = _ensure_4d(query)\n key_arr = _ensure_4d(key)\n value_arr = _ensure_4d(value)\n bias = _ensure_4d(bias) if bias is not None else None\n mask = _ensure_4d(mask) if mask is not None else None\n if query_seq_lengths is not None:\n query_seq_lengths = jnp.asarray(query_seq_lengths)\n if key_value_seq_lengths is not None:\n key_value_seq_lengths = jnp.asarray(key_value_seq_lengths)\n if isinstance(local_window_size, int):\n local_window_size = (local_window_size, local_window_size)\n\n def _check_shape_and_dtype(t: Array | None, shape: Sequence[int],\n dtype: DType | None, name: str) -> None:\n if t is None:\n return\n if t.ndim != len(shape):\n raise ValueError(f""{name} ndim should be {len(shape)}, but got {t.ndim}"")\n if dtype is not None and t.dtype != dtype:\n raise ValueError(f""{name} dtype should be {dtype}, but got {t.dtype}"")\n for i in range(t.ndim):\n if shape[i] != -1 and t.shape[i] != shape[i]:\n raise ValueError(f""{name} shape should be {shape}: but got {t.shape}"")\n\n B, S, K, H = key_arr.shape\n _check_shape_and_dtype(value_arr, [B, S, K, H], key_arr.dtype, 'value')\n _check_shape_and_dtype(query_arr, [B, -1, -1, H], key_arr.dtype, 'query')\n _check_shape_and_dtype(mask, [-1] * 4, jnp.bool_, 'mask')\n _check_shape_and_dtype(bias, [-1] * 4, None, 'bias')\n _check_shape_and_dtype(query_seq_lengths, [B], jnp.int32,\n 'query_seq_lengths')\n _check_shape_and_dtype(key_value_seq_lengths, [B], jnp.int32,\n 'key_value_seq_lengths')\n if query_arr.shape[-2] % K != 0:\n raise ValueError(f""The number of query heads must be a multiple of ""\n f""key/value heads, but got {query_arr.shape[-2]} vs {K}"")\n\n scale_val = (1.0 / np.sqrt(H)) if scale is None else scale\n\n match implementation:\n case 'xla':\n out = _dot_product_attention_xla(\n query_arr, key_arr, value_arr, bias, mask, is_causal=is_causal,\n scale=scale_val, q_seqlen=query_seq_lengths,\n kv_seqlen=key_value_seq_lengths,\n local_window_size=local_window_size,\n )\n case 'cudnn':\n if bias is not None:\n bias = check_valid_bias_batch(bias, query_arr.shape[-2])\n bias = jnp.asarray(bias)\n use_padding = (\n query_seq_lengths is not None or key_value_seq_lengths is not None\n )\n if use_padding:\n if query_seq_lengths is None:\n T = query_arr.shape[1]\n query_seq_lengths = jnp.full((B,), T, dtype=jnp.int32)\n if key_value_seq_lengths is None:\n key_value_seq_lengths = jnp.full((B,), S, dtype=jnp.int32)\n\n mask_type = MaskType.NO_MASK\n if use_padding and is_causal:\n mask_type = MaskType.PADDING_CAUSAL\n elif is_causal:\n mask_type = MaskType.CAUSAL\n elif use_padding:\n mask_type = MaskType.PADDING\n # CuDNN supports only the left window with an exclusive boundary when\n # causal mask is enabled.\n sliding_window = None\n if local_window_size is not None:\n l_window, r_window = local_window_size\n if r_window == 0 or mask_type == MaskType.CAUSAL:\n sliding_window = l_window + 1\n else:\n raise ValueError(f""cuDNN doesn't support right window: {r_window} ""\n ""when causal mask is not used."")\n\n out = 
cudnn_dot_product_attention(\n query_arr, key_arr, value_arr, bias, mask, query_seq_lengths,\n key_value_seq_lengths, scale=scale_val, mask_type=mask_type,\n sliding_window_length=sliding_window,\n )\n case None:\n # TODO(kaixih@nvidia) Defaults to XLA for now. Will automatically select\n # best backend.\n out = _dot_product_attention_xla(\n query_arr, key_arr, value_arr, bias, mask, is_causal=is_causal,\n scale=scale_val, q_seqlen=query_seq_lengths,\n kv_seqlen=key_value_seq_lengths,\n local_window_size=local_window_size,\n )\n case _:\n raise ValueError(f""Unsupported implementation option: {implementation}"")\n",python,selection_command +6699,19174996,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30609,7982,"def dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n\n .. math::\n\n \mathrm{Attention}(Q, K, V)=\mathrm{softmax}(\frac{QK^T}{\sqrt{d_k}})V\n\n If we define :code:`logits` as the output of :math:`QK^T` and the\n :code:`probs` as the output of :math:`softmax`.\n\n Throughout this function, we utilize the following uppercase letters to\n represent the shape of array::\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head\n K = number of key/value heads\n G = number of groups, which equals to N // K\n\n Args:\n query: query array; shape :code:`(BTNH|TNH)`\n key: key array: shape :code:`(BSKH|SKH)`. When `K` equals `N`, multi-headed\n attention (MHA https://arxiv.org/abs/1706.03762) is performed. Otherwise,\n grouped query attention (GQA https://arxiv.org/abs/2305.13245) is\n performed if `N` is a multiple of `K`, and multi-query attention (MQA\n https://arxiv.org/abs/1911.02150) is performed if `K == 1` (a special case\n of GQA).\n value: value array, should have the same shape as the `key` array.\n bias: optional, bias array to be added to logits; The shape must be 4D and\n be broadcastable to :code:`(BNTS|NTS)`.\n mask: optional, mask array used to filter out logits. It is a boolean mask\n where `True` indicates the element should take part in attention. For an\n additive mask, users should pass it to `bias`. The shape must be 4D and be\n broadcastable to :code:`(BNTS|NTS)`.\n scale: scale for the logits. If None, the scale will be set to 1 divided by\n the square root of query's head dimension (i.e. H).\n is_causal: If true, causal attention will be applied. Note, some\n implementations like `xla` will generate a mask tensor and apply it to the\n logits to mask out the non-causal parts of the attention matrix, but other\n implementations like `cudnn` will avoid computing the non-causal regions,\n providing speedups.\n query_seq_lengths: `int32` array of sequence lengths for query; shape\n :code:`(B)`\n key_value_seq_lengths: `int32` array of sequence lengths for key and value;\n shape :code:`(B)`\n local_window_size: Window sizes to make self attention to attend to each\n token's local window. 
If set, this specifies the (left_window_size,\n right_window_size) for each token. E.g., if local_window_size == (3, 2)\n and the sequence is [0, 1, 2, 3, 4, 5, c, 7, 8, 9], token `c` can attend\n to [3, 4, 5, c, 7, 8]. If a single int is given, it will be interpreted as\n a symmetric window (window_size, window_size).\n implementation: A string to control which implementation backend to use.\n Supported strings are `xla`, `cudnn` (cuDNN flash attention). It defaults\n to `None`, which will automatically select the best available backend.\n Note, `cudnn` supports only a subset of shapes/dtypes, and an exception\n will be thrown if its not supported.\n\n Returns:\n An array of the attention output with the same shape as :code:`query`.\n """"""\n output_shape = jnp.asarray(query).shape\n def _ensure_4d(t):\n t = jnp.asarray(t)\n dims_to_add = 4 - t.ndim\n if dims_to_add > 0:\n return jnp.expand_dims(t, axis=tuple(range(dims_to_add)))\n return t\n\n query_arr = _ensure_4d(query)\n key_arr = _ensure_4d(key)\n value_arr = _ensure_4d(value)\n bias = _ensure_4d(bias) if bias is not None else None\n mask = _ensure_4d(mask) if mask is not None else None\n if query_seq_lengths is not None:\n query_seq_lengths = jnp.asarray(query_seq_lengths)\n if key_value_seq_lengths is not None:\n key_value_seq_lengths = jnp.asarray(key_value_seq_lengths)\n if isinstance(local_window_size, int):\n local_window_size = (local_window_size, local_window_size)\n\n def _check_shape_and_dtype(t: Array | None, shape: Sequence[int],\n dtype: DType | None, name: str) -> None:\n if t is None:\n return\n if t.ndim != len(shape):\n raise ValueError(f""{name} ndim should be {len(shape)}, but got {t.ndim}"")\n if dtype is not None and t.dtype != dtype:\n raise ValueError(f""{name} dtype should be {dtype}, but got {t.dtype}"")\n for i in range(t.ndim):\n if shape[i] != -1 and t.shape[i] != shape[i]:\n raise ValueError(f""{name} shape should be {shape}: but got {t.shape}"")\n\n B, S, K, H = key_arr.shape\n _check_shape_and_dtype(value_arr, [B, S, K, H], key_arr.dtype, 'value')\n _check_shape_and_dtype(query_arr, [B, -1, -1, H], key_arr.dtype, 'query')\n _check_shape_and_dtype(mask, [-1] * 4, jnp.bool_, 'mask')\n _check_shape_and_dtype(bias, [-1] * 4, None, 'bias')\n _check_shape_and_dtype(query_seq_lengths, [B], jnp.int32,\n 'query_seq_lengths')\n _check_shape_and_dtype(key_value_seq_lengths, [B], jnp.int32,\n 'key_value_seq_lengths')\n if query_arr.shape[-2] % K != 0:\n raise ValueError(f""The number of query heads must be a multiple of ""\n f""key/value heads, but got {query_arr.shape[-2]} vs {K}"")\n\n scale_val = (1.0 / np.sqrt(H)) if scale is None else scale\n\n match implementation:\n case 'xla':\n out = _dot_product_attention_xla(\n query_arr, key_arr, value_arr, bias, mask, is_causal=is_causal,\n scale=scale_val, q_seqlen=query_seq_lengths,\n kv_seqlen=key_value_seq_lengths,\n local_window_size=local_window_size,\n )\n case 'cudnn':\n if bias is not None:\n bias = check_valid_bias_batch(bias, query_arr.shape[-2])\n bias = jnp.asarray(bias)\n use_padding = (\n query_seq_lengths is not None or key_value_seq_lengths is not None\n )\n if use_padding:\n if query_seq_lengths is None:\n T = query_arr.shape[1]\n query_seq_lengths = jnp.full((B,), T, dtype=jnp.int32)\n if key_value_seq_lengths is None:\n key_value_seq_lengths = jnp.full((B,), S, dtype=jnp.int32)\n\n mask_type = MaskType.NO_MASK\n if use_padding and is_causal:\n mask_type = MaskType.PADDING_CAUSAL\n elif is_causal:\n mask_type = MaskType.CAUSAL\n elif use_padding:\n 
mask_type = MaskType.PADDING\n # CuDNN supports only the left window with an exclusive boundary when\n # causal mask is enabled.\n sliding_window = None\n if local_window_size is not None:\n l_window, r_window = local_window_size\n if r_window == 0 or mask_type == MaskType.CAUSAL:\n sliding_window = l_window + 1\n else:\n raise ValueError(f""cuDNN doesn't support right window: {r_window} ""\n ""when causal mask is not used."")\n\n out = cudnn_dot_product_attention(\n query_arr, key_arr, value_arr, bias, mask, query_seq_lengths,\n key_value_seq_lengths, scale=scale_val, mask_type=mask_type,\n sliding_window_length=sliding_window,\n )\n case None:\n # TODO(kaixih@nvidia) Defaults to XLA for now. Will automatically select\n # best backend.\n out = _dot_product_attention_xla(\n query_arr, key_arr, value_arr, bias, mask, is_causal=is_causal,\n scale=scale_val, q_seqlen=query_seq_lengths,\n kv_seqlen=key_value_seq_lengths,\n local_window_size=local_window_size,\n )\n case _:\n raise ValueError(f""Unsupported implementation option: {implementation}"")\n\n return jnp.reshape(out, output_shape)",python,selection_command +6700,19175610,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",38552,0,"",python,selection_command +6701,19181182,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36612,0,"",python,selection_command +6702,19181504,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",34462,0,"",python,selection_command +6703,19181953,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31709,0,"",python,selection_command +6704,19182646,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31675,0,"",python,selection_command +6705,19182898,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31633,0,"",python,selection_command +6706,19182924,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31599,0,"",python,selection_command +6707,19182950,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31562,0,"",python,selection_command +6708,19182985,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31521,0,"",python,selection_command +6709,19183019,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31502,0,"",python,selection_command +6710,19183059,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31497,0,"",python,selection_command +6711,19183092,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31468,0,"",python,selection_command +6712,19183124,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31394,0,"",python,selection_command +6713,19183155,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31389,0,"",python,selection_command +6714,19183190,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31343,0,"",python,selection_command +6715,19183224,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31275,0,"",python,selection_command 
+6716,19183258,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31270,0,"",python,selection_command +6717,19183289,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31199,0,"",python,selection_command +6718,19183325,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31194,0,"",python,selection_command +6719,19183358,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31186,0,"",python,selection_command +6720,19183390,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31181,0,"",python,selection_command +6721,19183424,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31117,0,"",python,selection_command +6722,19183458,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31112,0,"",python,selection_command +6723,19183491,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31071,0,"",python,selection_command +6724,19183523,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31002,0,"",python,selection_command +6725,19183554,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30942,0,"",python,selection_command +6726,19183588,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30890,0,"",python,selection_command +6727,19183621,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30842,0,"",python,selection_command +6728,19183655,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30813,0,"",python,selection_command +6729,19183689,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30781,0,"",python,selection_command +6730,19183722,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30774,0,"",python,selection_command +6731,19183756,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30739,0,"",python,selection_command +6732,19183788,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30704,0,"",python,selection_command +6733,19183822,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30682,0,"",python,selection_command +6734,19183856,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30662,0,"",python,selection_command +6735,19183890,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30640,0,"",python,selection_command +6736,19184111,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",30613,0,"",python,selection_command +6737,19227388,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",31027,0,"",python,selection_command +6738,19227731,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",32929,0,"",python,selection_command +6739,19228265,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",33913,0,"",python,selection_command 
+6740,19228611,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36335,0,"",python,selection_command +6741,19229803,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36351,0,"",python,selection_command +6742,19229900,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",36353,0,"",python,selection_command +6743,19230467,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27166,0,"",python,selection_command +6744,19232259,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27198,0,"",python,selection_command +6745,19232504,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27216,0,"",python,selection_command +6746,19232522,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27232,0,"",python,selection_command +6747,19232553,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27250,0,"",python,selection_command +6748,19232586,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27274,0,"",python,selection_command +6749,19232620,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27298,0,"",python,selection_command +6750,19232654,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27319,0,"",python,selection_command +6751,19232686,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27337,0,"",python,selection_command +6752,19232724,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27365,0,"",python,selection_command +6753,19232757,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27394,0,"",python,selection_command +6754,19232792,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27438,0,"",python,selection_command +6755,19232824,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27443,0,"",python,selection_command +6756,19232861,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27470,0,"",python,selection_command +6757,19232894,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27495,0,"",python,selection_command +6758,19232925,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27504,0,"",python,selection_command +6759,19232959,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27509,0,"",python,selection_command +6760,19232988,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27555,0,"",python,selection_command +6761,19233022,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27585,0,"",python,selection_command +6762,19233053,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27607,0,"",python,selection_command +6763,19233094,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27638,0,"",python,selection_command 
+6764,19233125,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27656,0,"",python,selection_command +6765,19233165,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27727,0,"",python,selection_command +6766,19233199,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27739,0,"",python,selection_command +6767,19233441,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27762,0,"",python,selection_command +6768,19233625,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27809,0,"",python,selection_command +6769,19233826,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27822,0,"",python,selection_command +6770,19234051,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27809,0,"",python,selection_command +6771,19234310,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27762,0,"",python,selection_command +6772,19234342,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27739,0,"",python,selection_command +6773,19234373,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27727,0,"",python,selection_command +6774,19234396,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27656,0,"",python,selection_command +6775,19234429,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27638,0,"",python,selection_command +6776,19234759,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27607,0,"",python,selection_command +6777,19368917,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27565,0,"",python,selection_mouse +6778,19370676,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27849,0,"",python,selection_mouse +6779,19376337,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",27181,0,"",python,selection_mouse +6780,19509832,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +6781,19509924,"/fast/home/franz.srambical/jafar/utils/nn.py",9789,496," # query_4d.shape (921, 4, 8, 64)\n\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n # Handle causal mask for cached decoder self-attention (from nnx.MultiHeadAttention)\n if mask is not None:\n # mask.shape (1, 921, 1, 1, 1)\n mask_4d = _pad(_rearrange(mask))\n mask_4d = mask_4d.astype(jnp.bool)\n # mask_4d.shape (921, 4, 1, 1)\n",python,content +6782,19515721,"/fast/home/franz.srambical/jafar/utils/nn.py",10367,0,"",python,selection_mouse +6783,19515853,"/fast/home/franz.srambical/jafar/utils/nn.py",10366,1,"1",python,selection_mouse +6784,19516055,"/fast/home/franz.srambical/jafar/utils/nn.py",10326,43," # mask_4d.shape (921, 4, 1, 1)\n",python,selection_mouse +6785,19676172,"/fast/home/franz.srambical/jafar/utils/nn.py",10367,0,"",python,selection_command +6786,19796086,"/fast/home/franz.srambical/jafar/utils/nn.py",10279,0,"",python,selection_command 
+6787,19796333,"/fast/home/franz.srambical/jafar/utils/nn.py",10234,0,"",python,selection_command +6788,19796970,"/fast/home/franz.srambical/jafar/utils/nn.py",10246,0,"",python,selection_command +6789,19797150,"/fast/home/franz.srambical/jafar/utils/nn.py",10254,0,"",python,selection_command +6790,19797351,"/fast/home/franz.srambical/jafar/utils/nn.py",10256,0,"",python,selection_command +6791,19797924,"/fast/home/franz.srambical/jafar/utils/nn.py",10234,0,"",python,selection_command +6792,19829626,"/fast/home/franz.srambical/jafar/utils/nn.py",10234,44," mask_4d = _pad(_rearrange(mask))",python,selection_command +6793,19833417,"/fast/home/franz.srambical/jafar/utils/nn.py",10234,0,"",python,selection_command +6794,19836540,"/fast/home/franz.srambical/jafar/utils/nn.py",10142,0,"",python,selection_command +6795,19836774,"/fast/home/franz.srambical/jafar/utils/nn.py",8537,0,"",python,selection_command +6796,19837563,"/fast/home/franz.srambical/jafar/utils/nn.py",2614,0,"",python,selection_command +6797,19838846,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +6798,19838846,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9464,0,"",python,selection_command +6799,19841567,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",11667,0,"",python,selection_command +6800,19841881,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13131,0,"",python,selection_command +6801,19842531,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13143,0,"",python,selection_command +6802,19842689,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13145,0,"",python,selection_command +6803,19842826,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13153,0,"",python,selection_command +6804,19842981,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13159,0,"",python,selection_command +6805,19843128,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13164,0,"",python,selection_command +6806,19843260,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13166,0,"",python,selection_command +6807,19843449,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",13168,0,"",python,selection_command +6808,19843811,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5282,0,"",python,selection_command +6809,19844565,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,26,"def dot_product_attention(",python,selection_command +6810,19844668,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,482,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n",python,selection_command +6811,19844830,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,690,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: 
PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n",python,selection_command +6812,19844995,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,801,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n Will use the more optimized `jax.nn.dot_product_attention` if dropout is\n not activated and `module=None`.\n",python,selection_command +6813,19845147,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,883,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n Will use the more optimized `jax.nn.dot_product_attention` if dropout is\n not activated and `module=None`.\n\n .. note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n",python,selection_command +6814,19845302,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,2569,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n Will use the more optimized `jax.nn.dot_product_attention` if dropout is\n not activated and `module=None`.\n\n .. 
note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n\n Args:\n query: queries for calculating attention with shape of ``[batch..., q_length,\n num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n value: values to be used in attention with shape of ``[batch..., kv_length,\n num_heads, v_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is `False`.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs)\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n ``nnx.Intermediate`` collection. If ``module`` is None, the attention\n weights will not be sowed.\n promote_dtype: function to promote the dtype of the arrays to the desired\n dtype. The function should accept a tuple of ``(query, key, value)`` and a\n ``dtype`` keyword argument, and return a tuple of arrays with the promoted\n dtype.\n",python,selection_command +6815,19845410,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,3143,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n Will use the more optimized `jax.nn.dot_product_attention` if dropout is\n not activated and `module=None`.\n\n .. note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n\n Args:\n query: queries for calculating attention with shape of ``[batch..., q_length,\n num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n value: values to be used in attention with shape of ``[batch..., kv_length,\n num_heads, v_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks. 
Attention weights are masked out if their\n corresponding mask value is `False`.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs)\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n ``nnx.Intermediate`` collection. If ``module`` is None, the attention\n weights will not be sowed.\n promote_dtype: function to promote the dtype of the arrays to the desired\n dtype. The function should accept a tuple of ``(query, key, value)`` and a\n ``dtype`` keyword argument, and return a tuple of arrays with the promoted\n dtype.\n\n Returns:\n Output of shape `[batch..., q_length, num_heads, v_depth_per_head]`.\n """"""\n query, key, value = promote_dtype((query, key, value), dtype=dtype) # type: ignore[bad-unpacking]\n dtype = query.dtype\n assert key.ndim == query.ndim == value.ndim, 'q, k, v must have same rank.'\n assert (\n query.shape[:-3] == key.shape[:-3] == value.shape[:-3]\n ), 'q, k, v batch dims must match.'\n assert (\n query.shape[-2] == key.shape[-2] == value.shape[-2]\n ), 'q, k, v num_heads must match.'\n assert key.shape[-3] == value.shape[-3], 'k, v lengths must match.'\n",python,selection_command +6816,19845614,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,3784,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n Will use the more optimized `jax.nn.dot_product_attention` if dropout is\n not activated and `module=None`.\n\n .. note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n\n Args:\n query: queries for calculating attention with shape of ``[batch..., q_length,\n num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n value: values to be used in attention with shape of ``[batch..., kv_length,\n num_heads, v_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks. 
Attention weights are masked out if their\n corresponding mask value is `False`.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs)\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n ``nnx.Intermediate`` collection. If ``module`` is None, the attention\n weights will not be sowed.\n promote_dtype: function to promote the dtype of the arrays to the desired\n dtype. The function should accept a tuple of ``(query, key, value)`` and a\n ``dtype`` keyword argument, and return a tuple of arrays with the promoted\n dtype.\n\n Returns:\n Output of shape `[batch..., q_length, num_heads, v_depth_per_head]`.\n """"""\n query, key, value = promote_dtype((query, key, value), dtype=dtype) # type: ignore[bad-unpacking]\n dtype = query.dtype\n assert key.ndim == query.ndim == value.ndim, 'q, k, v must have same rank.'\n assert (\n query.shape[:-3] == key.shape[:-3] == value.shape[:-3]\n ), 'q, k, v batch dims must match.'\n assert (\n query.shape[-2] == key.shape[-2] == value.shape[-2]\n ), 'q, k, v num_heads must match.'\n assert key.shape[-3] == value.shape[-3], 'k, v lengths must match.'\n\n # Criteria that invoke the more optimized dot product attention\n if dropout_rate == 0.0 and module == None:\n # make sure qkv batch are compressed to one dim\n query_shape = query.shape\n if len(query_shape) > 4:\n def reshape_4d(x):\n return jnp.reshape(x, (math.prod(x.shape[:-3]), *x.shape[-3:]))\n query, key, value, bias, mask = jax.tree.map(\n reshape_4d, (query, key, value, bias, mask))\n if mask is not None:\n mask = mask.astype(jnp.bool)\n out = jax.nn.dot_product_attention(query, key, value, bias, mask)\n if len(query_shape) > 4:\n out = jnp.reshape(out, query_shape)\n return out\n",python,selection_command +6817,19847216,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,4022,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n Will use the more optimized `jax.nn.dot_product_attention` if dropout is\n not activated and `module=None`.\n\n .. note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n\n Args:\n query: queries for calculating attention with shape of ``[batch..., q_length,\n num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n value: values to be used in attention with shape of ``[batch..., kv_length,\n num_heads, v_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. 
This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is `False`.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs)\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n ``nnx.Intermediate`` collection. If ``module`` is None, the attention\n weights will not be sowed.\n promote_dtype: function to promote the dtype of the arrays to the desired\n dtype. The function should accept a tuple of ``(query, key, value)`` and a\n ``dtype`` keyword argument, and return a tuple of arrays with the promoted\n dtype.\n\n Returns:\n Output of shape `[batch..., q_length, num_heads, v_depth_per_head]`.\n """"""\n query, key, value = promote_dtype((query, key, value), dtype=dtype) # type: ignore[bad-unpacking]\n dtype = query.dtype\n assert key.ndim == query.ndim == value.ndim, 'q, k, v must have same rank.'\n assert (\n query.shape[:-3] == key.shape[:-3] == value.shape[:-3]\n ), 'q, k, v batch dims must match.'\n assert (\n query.shape[-2] == key.shape[-2] == value.shape[-2]\n ), 'q, k, v num_heads must match.'\n assert key.shape[-3] == value.shape[-3], 'k, v lengths must match.'\n\n # Criteria that invoke the more optimized dot product attention\n if dropout_rate == 0.0 and module == None:\n # make sure qkv batch are compressed to one dim\n query_shape = query.shape\n if len(query_shape) > 4:\n def reshape_4d(x):\n return jnp.reshape(x, (math.prod(x.shape[:-3]), *x.shape[-3:]))\n query, key, value, bias, mask = jax.tree.map(\n reshape_4d, (query, key, value, bias, mask))\n if mask is not None:\n mask = mask.astype(jnp.bool)\n out = jax.nn.dot_product_attention(query, key, value, bias, mask)\n if len(query_shape) > 4:\n out = jnp.reshape(out, query_shape)\n return out\n\n # compute attention weights\n attn_weights = dot_product_attention_weights(\n query,\n key,\n bias,\n mask,\n broadcast_dropout,\n dropout_rng,\n dropout_rate,\n deterministic,\n dtype,\n precision,\n module,\n )\n",python,selection_command +6818,19848692,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",5278,4178,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: Array | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n promote_dtype: PromoteDtypeFn = dtypes.promote_dtype,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n Will use the more optimized `jax.nn.dot_product_attention` if dropout is\n not activated and `module=None`.\n\n .. 
note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n\n Args:\n query: queries for calculating attention with shape of ``[batch..., q_length,\n num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n value: values to be used in attention with shape of ``[batch..., kv_length,\n num_heads, v_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape `[batch..., num_heads, q_length, kv_length]`. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is `False`.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs)\n precision: numerical precision of the computation see `jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n ``nnx.Intermediate`` collection. If ``module`` is None, the attention\n weights will not be sowed.\n promote_dtype: function to promote the dtype of the arrays to the desired\n dtype. The function should accept a tuple of ``(query, key, value)`` and a\n ``dtype`` keyword argument, and return a tuple of arrays with the promoted\n dtype.\n\n Returns:\n Output of shape `[batch..., q_length, num_heads, v_depth_per_head]`.\n """"""\n query, key, value = promote_dtype((query, key, value), dtype=dtype) # type: ignore[bad-unpacking]\n dtype = query.dtype\n assert key.ndim == query.ndim == value.ndim, 'q, k, v must have same rank.'\n assert (\n query.shape[:-3] == key.shape[:-3] == value.shape[:-3]\n ), 'q, k, v batch dims must match.'\n assert (\n query.shape[-2] == key.shape[-2] == value.shape[-2]\n ), 'q, k, v num_heads must match.'\n assert key.shape[-3] == value.shape[-3], 'k, v lengths must match.'\n\n # Criteria that invoke the more optimized dot product attention\n if dropout_rate == 0.0 and module == None:\n # make sure qkv batch are compressed to one dim\n query_shape = query.shape\n if len(query_shape) > 4:\n def reshape_4d(x):\n return jnp.reshape(x, (math.prod(x.shape[:-3]), *x.shape[-3:]))\n query, key, value, bias, mask = jax.tree.map(\n reshape_4d, (query, key, value, bias, mask))\n if mask is not None:\n mask = mask.astype(jnp.bool)\n out = jax.nn.dot_product_attention(query, key, value, bias, mask)\n if len(query_shape) > 4:\n out = jnp.reshape(out, query_shape)\n return out\n\n # compute attention weights\n attn_weights = dot_product_attention_weights(\n query,\n key,\n bias,\n mask,\n broadcast_dropout,\n dropout_rng,\n dropout_rate,\n deterministic,\n dtype,\n precision,\n module,\n )\n\n # return weighted sum over values for each query position\n return jnp.einsum(\n '...hqk,...khd->...qhd', attn_weights, value, precision=precision\n )\n",python,selection_command +6819,19857836,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9456,0,"",python,selection_command +6820,19865360,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8063,0,"",python,selection_keyboard 
+6821,19865538,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",8041,0,"",python,selection_command +6822,19866063,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",7940,0,"",python,selection_command +6823,19884110,"train_tokenizer.py",0,0,"from dataclasses import dataclass, field\nimport os\nfrom typing import cast\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n init_lr: float = 0.0\n max_lr: float = 3e-4\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 4\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\n\ndef tokenizer_loss_fn(\n model: TokenizerVQVAE, inputs: dict\n) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n # --- Compute loss ---\n # FIXME (f.srambical): Can we even do native int8 training without casting the video at all?\n # FIXME (f.srambical): If the tokenizer is the reason for the dynamics model being memory-bound,\n # should we at least train the tokenizer natively in int8?\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n model.train()\n outputs = model(inputs, training=True)\n mse = jnp.square(inputs[""videos""] - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt, recon)).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n 
commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n\n@nnx.jit\ndef train_step(\n tokenizer: TokenizerVQVAE, optimizer: nnx.Optimizer, inputs: dict\n) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n return tokenizer_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n tokenizer\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n\n _, params, _ = nnx.split(tokenizer, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n optimizer = nnx.Optimizer(tokenizer, tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = 
jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n\n # --- TRAIN LOOP ---\n dataloader = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in grain_iterator\n )\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n inputs = dict(videos=videos)\n loss, recon, metrics = train_step(tokenizer, optimizer, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately 
happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +6824,19886691,"train_tokenizer.py",0,0,"",python,selection_command +6825,19887460,"train_tokenizer.py",1999,0,"",python,selection_command +6826,19888464,"train_tokenizer.py",2022,0,"",python,selection_command +6827,19888716,"train_tokenizer.py",2062,0,"",python,selection_command +6828,19888743,"train_tokenizer.py",2109,0,"",python,selection_command +6829,19888777,"train_tokenizer.py",2136,0,"",python,selection_command +6830,19888812,"train_tokenizer.py",2233,0,"",python,selection_command +6831,19888846,"train_tokenizer.py",2334,0,"",python,selection_command +6832,19888880,"train_tokenizer.py",2397,0,"",python,selection_command +6833,19888918,"train_tokenizer.py",2464,0,"",python,selection_command +6834,19888951,"train_tokenizer.py",2482,0,"",python,selection_command +6835,19888983,"train_tokenizer.py",2525,0,"",python,selection_command +6836,19889017,"train_tokenizer.py",2590,0,"",python,selection_command +6837,19889079,"train_tokenizer.py",2675,0,"",python,selection_command +6838,19889103,"train_tokenizer.py",2709,0,"",python,selection_command +6839,19889274,"train_tokenizer.py",2767,0,"",python,selection_command +6840,19889463,"train_tokenizer.py",2783,0,"",python,selection_command +6841,19891666,"train_tokenizer.py",2773,0,"",python,selection_command +6842,19894269,"train_tokenizer.py",2760,0,"",python,selection_command +6843,19894515,"train_tokenizer.py",2699,0,"",python,selection_command +6844,19894547,"train_tokenizer.py",2665,0,"",python,selection_command +6845,19894572,"train_tokenizer.py",2580,0,"",python,selection_command +6846,19894605,"train_tokenizer.py",2515,0,"",python,selection_command +6847,19894639,"train_tokenizer.py",2472,0,"",python,selection_command +6848,19894676,"train_tokenizer.py",2454,0,"",python,selection_command +6849,19894706,"train_tokenizer.py",2387,0,"",python,selection_command +6850,19894738,"train_tokenizer.py",2324,0,"",python,selection_command +6851,19894771,"train_tokenizer.py",2223,0,"",python,selection_command +6852,19894807,"train_tokenizer.py",2126,0,"",python,selection_command +6853,19894839,"train_tokenizer.py",2099,0,"",python,selection_command +6854,19894873,"train_tokenizer.py",2052,0,"",python,selection_command +6855,19894906,"train_tokenizer.py",2012,0,"",python,selection_command +6856,19895029,"train_tokenizer.py",1989,0,"",python,selection_command +6857,19898169,"train_tokenizer.py",3783,0,"",python,selection_command +6858,19901275,"train_tokenizer.py",0,0,"",python,selection_command +6859,19902275,"train_tokenizer.py",3783,0,"",python,selection_command +6860,19903335,"train_tokenizer.py",1989,0,"",python,selection_command 
+6861,19903965,"train_tokenizer.py",2012,0,"",python,selection_command +6862,19904211,"train_tokenizer.py",2052,0,"",python,selection_command +6863,19904255,"train_tokenizer.py",2099,0,"",python,selection_command +6864,19904278,"train_tokenizer.py",2126,0,"",python,selection_command +6865,19904312,"train_tokenizer.py",2223,0,"",python,selection_command +6866,19904346,"train_tokenizer.py",2324,0,"",python,selection_command +6867,19904379,"train_tokenizer.py",2387,0,"",python,selection_command +6868,19904412,"train_tokenizer.py",2454,0,"",python,selection_command +6869,19904445,"train_tokenizer.py",2472,0,"",python,selection_command +6870,19904478,"train_tokenizer.py",2515,0,"",python,selection_command +6871,19904513,"train_tokenizer.py",2580,0,"",python,selection_command +6872,19904555,"train_tokenizer.py",2665,0,"",python,selection_command +6873,19904581,"train_tokenizer.py",2699,0,"",python,selection_command +6874,19904614,"train_tokenizer.py",2760,0,"",python,selection_command +6875,19904754,"train_tokenizer.py",2773,0,"",python,selection_command +6876,19912189,"train_lam.py",0,0,"from dataclasses import dataclass, field\nimport os\nfrom typing import cast\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.lam import LatentActionModel\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n vq_beta: float = 0.25\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n vq_reset_thresh: int = 50\n # LAM\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 6\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_lam""\n tags: list[str] = field(default_factory=lambda: [""lam""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_checkpoint_keep_period: int = 20000\n wandb_id: str = """"\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\n\ndef lam_loss_fn(\n model: LatentActionModel, inputs: dict\n) -> tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n # --- Compute loss ---\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n model.train()\n outputs = model(inputs, training=True)\n gt_future_frames = inputs[""videos""][:, 1:]\n mse = jnp.square(gt_future_frames - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - 
jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = gt_future_frames.clip(0, 1).reshape(-1, *gt_future_frames.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt, recon)).mean()\n count_fn = jax.vmap(lambda i: (outputs[""indices""] == i).sum())\n index_counts = count_fn(jnp.arange(args.num_latents))\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),\n )\n return loss, (outputs[""recon""], index_counts, metrics)\n\n\n@nnx.jit\ndef train_step(\n lam: LatentActionModel,\n optimizer: nnx.Optimizer,\n inputs: dict,\n action_last_active: jax.Array,\n rng: jax.Array,\n) -> tuple[jax.Array, jax.Array, jax.Array, dict]:\n def loss_fn(\n model: LatentActionModel,\n ) -> tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n return lam_loss_fn(model, inputs)\n\n # --- Update model ---\n (loss, (recon, idx_counts, metrics)), grads = nnx.value_and_grad(\n loss_fn, has_aux=True\n )(lam)\n optimizer.update(grads)\n\n # --- Reset inactive latent actions ---\n codebook = lam.vq.codebook\n num_codes = len(codebook)\n active_codes = idx_counts != 0.0\n action_last_active = jnp.where(active_codes, 0, action_last_active + 1)\n p_code = active_codes / active_codes.sum()\n reset_idxs = jax.random.choice(rng, num_codes, shape=(num_codes,), p=p_code)\n do_reset = action_last_active >= args.vq_reset_thresh\n new_codebook = jnp.where(\n jnp.expand_dims(do_reset, -1), codebook[reset_idxs], codebook.value\n )\n lam.vq.codebook.value = new_codebook\n action_last_active = jnp.where(do_reset, 0, action_last_active)\n return loss, recon, action_last_active, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n\n # Count parameters\n _, params, _ = nnx.split(lam, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- 
Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n optimizer = nnx.Optimizer(lam, tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step 
{step}"")\n\n # --- TRAIN LOOP ---\n dataloader = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in grain_iterator\n )\n print(f""Starting training from step {step}..."")\n action_last_active = jnp.zeros(args.num_latents, dtype=jnp.int32)\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng = jax.random.split(rng)\n\n inputs = dict(videos=videos, rng=_rng)\n rng, _rng = jax.random.split(rng)\n loss, recon, action_last_active, metrics = train_step(\n lam, optimizer, inputs, action_last_active, _rng\n )\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +6877,19913233,"train_lam.py",0,0,"",python,selection_command +6878,19914219,"train_lam.py",2900,0,"",python,selection_command +6879,19914766,"train_lam.py",4506,0,"",python,selection_keyboard +6880,19915119,"train_lam.py",4505,0,"",python,selection_command +6881,19915364,"train_lam.py",4504,0,"",python,selection_command +6882,19915397,"train_lam.py",4452,0,"",python,selection_command +6883,19915429,"train_lam.py",4384,0,"",python,selection_command +6884,19915461,"train_lam.py",4343,0,"",python,selection_command +6885,19915498,"train_lam.py",4337,0,"",python,selection_command +6886,19915529,"train_lam.py",4261,0,"",python,selection_command +6887,19915563,"train_lam.py",4231,0,"",python,selection_command +6888,19915603,"train_lam.py",4173,0,"",python,selection_command +6889,19915654,"train_lam.py",4092,0,"",python,selection_command +6890,19915682,"train_lam.py",4045,0,"",python,selection_command +6891,19916119,"train_lam.py",4092,0,"",python,selection_command +6892,19916369,"train_lam.py",4173,0,"",python,selection_command +6893,19916533,"train_lam.py",4231,0,"",python,selection_command +6894,19916783,"train_lam.py",4261,0,"",python,selection_command +6895,19916814,"train_lam.py",4337,0,"",python,selection_command +6896,19916947,"train_lam.py",4343,0,"",python,selection_command +6897,19917133,"train_lam.py",4384,0,"",python,selection_command +6898,19917317,"train_lam.py",4452,0,"",python,selection_command 
+6899,19917667,"train_lam.py",4456,0,"",python,selection_command +6900,19917850,"train_lam.py",4463,0,"",python,selection_command +6901,19918001,"train_lam.py",4467,0,"",python,selection_command +6902,19918465,"train_lam.py",4463,0,"",python,selection_command +6903,19919137,"train_lam.py",3692,0,"",python,selection_command +6904,19920988,"train_lam.py",3762,0,"",python,selection_command +6905,19921104,"train_lam.py",3765,0,"",python,selection_command +6906,19921666,"train_lam.py",3512,0,"",python,selection_command +6907,19922191,"train_lam.py",3529,0,"",python,selection_command +6908,19922338,"train_lam.py",3563,0,"",python,selection_command +6909,19922486,"train_lam.py",3625,0,"",python,selection_command +6910,19922686,"train_lam.py",3632,0,"",python,selection_command +6911,19923005,"train_lam.py",3643,0,"",python,selection_command +6912,19923262,"train_lam.py",3632,0,"",python,selection_command +6913,19923539,"train_lam.py",1962,0,"",python,selection_command +6914,19926478,"train_lam.py",1979,0,"",python,selection_command +6915,19926726,"train_lam.py",2022,0,"",python,selection_command +6916,19926754,"train_lam.py",2080,0,"",python,selection_command +6917,19926788,"train_lam.py",2107,0,"",python,selection_command +6918,19926820,"train_lam.py",2174,0,"",python,selection_command +6919,19926855,"train_lam.py",2192,0,"",python,selection_command +6920,19926989,"train_lam.py",2235,0,"",python,selection_command +6921,19927144,"train_lam.py",2282,0,"",python,selection_command +6922,19927729,"train_lam.py",2235,0,"",python,selection_command +6923,19929511,"train_lam.py",2282,0,"",python,selection_command +6924,19933651,"train_lam.py",2286,0,"",python,selection_command +6925,19933895,"train_lam.py",2288,0,"",python,selection_command +6926,19933925,"train_lam.py",2291,0,"",python,selection_command +6927,19933954,"train_lam.py",2292,0,"",python,selection_command +6928,19933988,"train_lam.py",2298,0,"",python,selection_command +6929,19934023,"train_lam.py",2299,0,"",python,selection_command +6930,19934054,"train_lam.py",2316,0,"",python,selection_command +6931,19934268,"train_lam.py",2318,0,"",python,selection_command +6932,19936820,"train_lam.py",2282,0,"",python,selection_command +6933,19937056,"train_lam.py",2235,0,"",python,selection_command +6934,19937232,"train_lam.py",2192,0,"",python,selection_command +6935,19937435,"train_lam.py",2200,0,"",python,selection_command +6936,19937587,"train_lam.py",2202,0,"",python,selection_command +6937,19937999,"train_lam.py",1979,0,"",python,selection_command +6938,19938686,"train_lam.py",1984,0,"",python,selection_command +6939,19938837,"train_lam.py",1986,0,"",python,selection_command +6940,19939224,"models/lam.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass LatentActionModel(nnx.Module):\n """"""Latent Action ST-ViVit VQ-VAE""""""\n\n def __init__(\n self,\n in_dim: int,\n model_dim: int,\n ffn_dim: int,\n latent_dim: int,\n num_latents: int,\n patch_size: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n codebook_dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.in_dim = in_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.patch_size = patch_size\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n 
self.codebook_dropout = codebook_dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.patch_token_dim = self.in_dim * self.patch_size**2\n self.encoder = STTransformer(\n self.in_dim * self.patch_size**2,\n self.model_dim,\n self.ffn_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n spatial_causal=False,\n decode=False,\n rngs=rngs,\n )\n self.action_in = nnx.Param(\n nnx.initializers.lecun_uniform()(\n rngs.params(), (1, 1, 1, self.patch_token_dim)\n )\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n rngs=rngs,\n )\n self.patch_up = nnx.Linear(\n self.patch_token_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.action_up = nnx.Linear(\n self.latent_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.decoder = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.patch_token_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n spatial_causal=False,\n decode=False,\n rngs=rngs,\n )\n\n def __call__(\n self, batch: Dict[str, jax.Array], training: bool = True\n ) -> Dict[str, jax.Array]:\n # --- Encode + VQ ---\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n video_action_patches = self.action_up(outputs[""z_q""]) + self.patch_up(\n outputs[""patches""][:, :-1]\n )\n del outputs[""patches""]\n\n # --- Decode ---\n video_recon = self.decoder(video_action_patches)\n video_recon = video_recon.astype(jnp.float32)\n video_recon = nnx.sigmoid(video_recon)\n video_recon = video_recon.astype(self.dtype)\n outputs[""recon""] = unpatchify(video_recon, self.patch_size, H, W)\n return outputs\n\n def vq_encode(\n self, videos: jax.Array, training: bool = True\n ) -> Dict[str, jax.Array]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches = patchify(videos, self.patch_size)\n action_pad = jnp.broadcast_to(\n self.action_in.value, (B, T, 1, self.patch_token_dim)\n )\n padded_patches = jnp.concatenate((action_pad, patches), axis=2)\n\n # --- Encode ---\n z = self.encoder(padded_patches) # (B, T, N, E)\n # Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n\n # --- Vector quantize ---\n z = z.reshape(B * (T - 1), self.latent_dim)\n z_q, z, emb, indices = self.vq(z, training)\n z_q = z_q.reshape(B, T - 1, 1, self.latent_dim)\n return dict(patches=patches, z_q=z_q, z=z, emb=emb, indices=indices)\n",python,tab +6941,19939225,"models/lam.py",194,0,"",python,selection_command +6942,19939992,"models/lam.py",211,0,"",python,selection_command +6943,19940205,"models/lam.py",212,0,"",python,selection_command +6944,19941037,"models/lam.py",215,0,"",python,selection_command +6945,19941842,"models/lam.py",212,0,"",python,selection_command +6946,19942003,"models/lam.py",211,0,"",python,selection_command +6947,19942122,"models/lam.py",194,0,"",python,selection_command +6948,19942691,"models/lam.py",3185,0,"",python,selection_command +6949,19944859,"models/lam.py",3220,0,"",python,selection_command +6950,19945106,"models/lam.py",3234,0,"",python,selection_command +6951,19945138,"models/lam.py",3253,0,"",python,selection_command +6952,19945163,"models/lam.py",3266,0,"",python,selection_command 
+6953,19945195,"models/lam.py",3291,0,"",python,selection_command +6954,19945230,"models/lam.py",3348,0,"",python,selection_command +6955,19945264,"models/lam.py",3402,0,"",python,selection_command +6956,19945303,"models/lam.py",3449,0,"",python,selection_command +6957,19945556,"models/lam.py",3502,0,"",python,selection_command +6958,19946627,"models/lam.py",3505,0,"",python,selection_command +6959,19946875,"models/lam.py",3507,0,"",python,selection_command +6960,19946907,"models/lam.py",3512,0,"",python,selection_command +6961,19946937,"models/lam.py",3515,0,"",python,selection_command +6962,19946971,"models/lam.py",3517,0,"",python,selection_command +6963,19947003,"models/lam.py",3527,0,"",python,selection_command +6964,19947208,"models/lam.py",3528,0,"",python,selection_command +6965,19948056,"models/lam.py",3459,0,"",python,selection_command +6966,19949309,"models/lam.py",3445,0,"",python,selection_command +6967,19949660,"models/lam.py",3424,0,"",python,selection_command +6968,19949947,"models/lam.py",3398,0,"",python,selection_command +6969,19950228,"models/lam.py",3358,0,"",python,selection_command +6970,19950609,"models/lam.py",3344,0,"",python,selection_command +6971,19952568,"train_lam.py",0,0,"",python,tab +6972,19957617,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +6973,19986653,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +6974,19987883,"/fast/home/franz.srambical/jafar/utils/nn.py",11140,0,"",python,selection_command +6975,19988663,"/fast/home/franz.srambical/jafar/utils/nn.py",9251,0,"",python,selection_command +6976,19989747,"/fast/home/franz.srambical/jafar/utils/nn.py",9242,0,"",python,selection_command +6977,19989995,"/fast/home/franz.srambical/jafar/utils/nn.py",9220,0,"",python,selection_command +6978,19990023,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,0,"",python,selection_command +6979,19990059,"/fast/home/franz.srambical/jafar/utils/nn.py",9118,0,"",python,selection_command +6980,19990093,"/fast/home/franz.srambical/jafar/utils/nn.py",9067,0,"",python,selection_command +6981,19990126,"/fast/home/franz.srambical/jafar/utils/nn.py",9017,0,"",python,selection_command +6982,19990157,"/fast/home/franz.srambical/jafar/utils/nn.py",8944,0,"",python,selection_command +6983,19990195,"/fast/home/franz.srambical/jafar/utils/nn.py",8935,0,"",python,selection_command +6984,19990225,"/fast/home/franz.srambical/jafar/utils/nn.py",8933,0,"",python,selection_command +6985,19990259,"/fast/home/franz.srambical/jafar/utils/nn.py",8895,0,"",python,selection_command +6986,19990289,"/fast/home/franz.srambical/jafar/utils/nn.py",8797,0,"",python,selection_command +6987,19990324,"/fast/home/franz.srambical/jafar/utils/nn.py",8712,0,"",python,selection_command +6988,19990441,"/fast/home/franz.srambical/jafar/utils/nn.py",8797,0,"",python,selection_command +6989,19990694,"/fast/home/franz.srambical/jafar/utils/nn.py",8895,0,"",python,selection_command +6990,19990726,"/fast/home/franz.srambical/jafar/utils/nn.py",8933,0,"",python,selection_command +6991,19990759,"/fast/home/franz.srambical/jafar/utils/nn.py",8935,0,"",python,selection_command +6992,19990796,"/fast/home/franz.srambical/jafar/utils/nn.py",8944,0,"",python,selection_command +6993,19991279,"/fast/home/franz.srambical/jafar/utils/nn.py",9017,0,"",python,selection_command +6994,19991446,"/fast/home/franz.srambical/jafar/utils/nn.py",9067,0,"",python,selection_command +6995,20300297,"/fast/home/franz.srambical/jafar/utils/nn.py",9817,0,"",python,selection_mouse 
+6996,20300455,"/fast/home/franz.srambical/jafar/utils/nn.py",9815,3,"921",python,selection_mouse +6997,20307672,"/fast/home/franz.srambical/jafar/utils/nn.py",9820,0,"",python,selection_mouse +6998,20308281,"/fast/home/franz.srambical/jafar/utils/nn.py",9823,0,"",python,selection_mouse +6999,20308660,"/fast/home/franz.srambical/jafar/utils/nn.py",9828,0,"",python,selection_mouse +7000,20309762,"/fast/home/franz.srambical/jafar/utils/nn.py",9816,0,"",python,selection_mouse +7001,20309914,"/fast/home/franz.srambical/jafar/utils/nn.py",9815,3,"921",python,selection_mouse +7002,20316528,"/fast/home/franz.srambical/jafar/utils/nn.py",10221,0,"",python,selection_mouse +7003,20316702,"/fast/home/franz.srambical/jafar/utils/nn.py",10220,3,"921",python,selection_mouse +7004,20319512,"/fast/home/franz.srambical/jafar/utils/nn.py",10357,0,"",python,selection_mouse +7005,20319674,"/fast/home/franz.srambical/jafar/utils/nn.py",10355,3,"921",python,selection_mouse +7006,20320370,"/fast/home/franz.srambical/jafar/utils/nn.py",10360,0,"",python,selection_mouse +7007,20325407,"/fast/home/franz.srambical/jafar/utils/nn.py",10256,0,"",python,selection_mouse +7008,20326507,"/fast/home/franz.srambical/jafar/utils/nn.py",10256,1,"_",python,selection_command +7009,20326576,"/fast/home/franz.srambical/jafar/utils/nn.py",10256,4,"_pad",python,selection_command +7010,20326774,"/fast/home/franz.srambical/jafar/utils/nn.py",10256,5,"_pad(",python,selection_command +7011,20327199,"/fast/home/franz.srambical/jafar/utils/nn.py",10256,5,"",python,content +7012,20327573,"/fast/home/franz.srambical/jafar/utils/nn.py",10273,0,"",python,selection_command +7013,20327720,"/fast/home/franz.srambical/jafar/utils/nn.py",10272,1,"",python,content +7014,20327837,"/fast/home/franz.srambical/jafar/utils/nn.py",10271,0,"",python,selection_command +7015,20328179,"/fast/home/franz.srambical/jafar/utils/nn.py",11134,0,"",python,selection_command +7016,20330585,"TERMINAL",0,0,"salloc --gpus=1 --ntasks-per-node=1 --cpus-per-task=1 --mem=100G --time=01:00:00",,terminal_command +7017,20330641,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 14001\r\n",,terminal_output +7018,20330749,"TERMINAL",0,0,"salloc: Nodes hai007 are ready for job\r\n",,terminal_output +7019,20331056,"TERMINAL",0,0,"Running inside SLURM, Job ID 14001.\r\n",,terminal_output +7020,20331133,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +7021,20332815,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +7022,20332990,"TERMINAL",0,0,"s': salloc --gpus=1 --ntasks-per-node=1 --cpus-per-task=1 --mem=100G ",,terminal_output +7023,20333072,"TERMINAL",0,0,"\ra': salloc --gpus=1 --ntasks-per-node=1 --cpus-per-task=1 --mem=100Gm': bash experiments/sample.sh \r\n\r",,terminal_output +7024,20333154,"TERMINAL",0,0,"[1@p': bash experiments/samp",,terminal_output +7025,20333220,"TERMINAL",0,0,"[1@l': bash experiments/sampl",,terminal_output +7026,20333508,"TERMINAL",0,0,"\r[23@[franz.srambical@hai007.haicore.berlin:~/jafar] $ bash experiments/sampl\r\n[?2004l\r",,terminal_output +7027,20344825,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +7028,20355503,"TERMINAL",0,0,"2025-07-27 14:57:55.299127: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7029,20355964,"TERMINAL",0,0,"2025-07-27 14:57:55.766396: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7030,20358825,"TERMINAL",0,0,"2025-07-27 14:57:58.624824: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7031,20360290,"TERMINAL",0,0,"2025-07-27 14:58:00.092173: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7032,20365295,"TERMINAL",0,0,"2025-07-27 14:58:05.097182: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7033,20365961,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/utils/nn.py(333)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +7034,20381202,"TERMINAL",0,0,"c",,terminal_output +7035,20381366,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(333)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +7036,20382350,"TERMINAL",0,0,"c",,terminal_output +7037,20382698,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(333)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +7038,20383118,"TERMINAL",0,0,"c",,terminal_output +7039,20383241,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(333)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +7040,20383589,"TERMINAL",0,0,"c",,terminal_output +7041,20383712,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(333)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +7042,20383984,"TERMINAL",0,0,"c",,terminal_output +7043,20384105,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(333)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +7044,20384364,"TERMINAL",0,0,"c",,terminal_output +7045,20384436,"TERMINAL",0,0,"\r\n",,terminal_output +7046,20384702,"TERMINAL",0,0,"c",,terminal_output +7047,20384771,"TERMINAL",0,0,"\r\n",,terminal_output +7048,20384927,"TERMINAL",0,0,"2025-07-27 14:58:24.727898: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7049,20387439,"TERMINAL",0,0,"(Pdb) SSIM: 0.01034794095903635\r\n",,terminal_output +7050,20388633,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +7051,20391495,"/fast/home/franz.srambical/jafar/utils/nn.py",11110,0,"",python,selection_command +7052,20391728,"/fast/home/franz.srambical/jafar/utils/nn.py",11109,0,"",python,selection_command +7053,20391763,"/fast/home/franz.srambical/jafar/utils/nn.py",11030,0,"",python,selection_command +7054,20391797,"/fast/home/franz.srambical/jafar/utils/nn.py",11020,0,"",python,selection_command +7055,20391850,"/fast/home/franz.srambical/jafar/utils/nn.py",10987,0,"",python,selection_command +7056,20391863,"/fast/home/franz.srambical/jafar/utils/nn.py",10944,0,"",python,selection_command +7057,20391896,"/fast/home/franz.srambical/jafar/utils/nn.py",10918,0,"",python,selection_command +7058,20391924,"/fast/home/franz.srambical/jafar/utils/nn.py",10892,0,"",python,selection_command +7059,20391967,"/fast/home/franz.srambical/jafar/utils/nn.py",10864,0,"",python,selection_command +7060,20392012,"/fast/home/franz.srambical/jafar/utils/nn.py",10840,0,"",python,selection_command +7061,20392032,"/fast/home/franz.srambical/jafar/utils/nn.py",10812,0,"",python,selection_command +7062,20392056,"/fast/home/franz.srambical/jafar/utils/nn.py",10762,0,"",python,selection_command +7063,20392089,"/fast/home/franz.srambical/jafar/utils/nn.py",10692,0,"",python,selection_command +7064,20392120,"/fast/home/franz.srambical/jafar/utils/nn.py",10691,0,"",python,selection_command +7065,20392155,"/fast/home/franz.srambical/jafar/utils/nn.py",10620,0,"",python,selection_command +7066,20392188,"/fast/home/franz.srambical/jafar/utils/nn.py",10619,0,"",python,selection_command +7067,20392221,"/fast/home/franz.srambical/jafar/utils/nn.py",10594,0,"",python,selection_command +7068,20392257,"/fast/home/franz.srambical/jafar/utils/nn.py",10548,0,"",python,selection_command +7069,20392287,"/fast/home/franz.srambical/jafar/utils/nn.py",10451,0,"",python,selection_command +7070,20396942,"/fast/home/franz.srambical/jafar/utils/nn.py",10546,0,"",python,selection_command +7071,20397251,"/fast/home/franz.srambical/jafar/utils/nn.py",10451,0,"",python,selection_command +7072,20397568,"/fast/home/franz.srambical/jafar/utils/nn.py",10377,0,"",python,selection_command +7073,20397723,"/fast/home/franz.srambical/jafar/utils/nn.py",10363,0,"",python,selection_command +7074,20397883,"/fast/home/franz.srambical/jafar/utils/nn.py",10320,0,"",python,selection_command +7075,20397987,"/fast/home/franz.srambical/jafar/utils/nn.py",10273,0,"",python,selection_command +7076,20398138,"/fast/home/franz.srambical/jafar/utils/nn.py",10234,0,"",python,selection_command +7077,20398285,"/fast/home/franz.srambical/jafar/utils/nn.py",10191,0,"",python,selection_command +7078,20398426,"/fast/home/franz.srambical/jafar/utils/nn.py",10162,0,"",python,selection_command +7079,20398572,"/fast/home/franz.srambical/jafar/utils/nn.py",10069,0,"",python,selection_command +7080,20403092,"/fast/home/franz.srambical/jafar/utils/nn.py",10162,0,"",python,selection_command +7081,20403329,"/fast/home/franz.srambical/jafar/utils/nn.py",10191,0,"",python,selection_command +7082,20403368,"/fast/home/franz.srambical/jafar/utils/nn.py",10234,0,"",python,selection_command 
+7083,20403391,"/fast/home/franz.srambical/jafar/utils/nn.py",10273,0,"",python,selection_command +7084,20403421,"/fast/home/franz.srambical/jafar/utils/nn.py",10320,0,"",python,selection_command +7085,20403446,"/fast/home/franz.srambical/jafar/utils/nn.py",10363,0,"",python,selection_command +7086,20404124,"/fast/home/franz.srambical/jafar/utils/nn.py",10320,0,"",python,selection_command +7087,20404471,"/fast/home/franz.srambical/jafar/utils/nn.py",10332,0,"",python,selection_command +7088,20404770,"/fast/home/franz.srambical/jafar/utils/nn.py",10334,0,"",python,selection_command +7089,20405018,"/fast/home/franz.srambical/jafar/utils/nn.py",10341,0,"",python,selection_command +7090,20405173,"/fast/home/franz.srambical/jafar/utils/nn.py",10342,0,"",python,selection_command +7091,20406339,"/fast/home/franz.srambical/jafar/utils/nn.py",10348,0,"",python,selection_command +7092,20406451,"/fast/home/franz.srambical/jafar/utils/nn.py",10349,0,"",python,selection_command +7093,20406630,"/fast/home/franz.srambical/jafar/utils/nn.py",10352,0,"",python,selection_command +7094,20406806,"/fast/home/franz.srambical/jafar/utils/nn.py",10354,0,"",python,selection_command +7095,20407124,"/fast/home/franz.srambical/jafar/utils/nn.py",10354,1,"1",python,content +7096,20408561,"/fast/home/franz.srambical/jafar/utils/nn.py",10307,0,"",python,selection_command +7097,20408695,"/fast/home/franz.srambical/jafar/utils/nn.py",10268,0,"",python,selection_command +7098,20408853,"/fast/home/franz.srambical/jafar/utils/nn.py",10225,0,"",python,selection_command +7099,20409224,"/fast/home/franz.srambical/jafar/utils/nn.py",10268,0,"",python,selection_command +7100,20409389,"/fast/home/franz.srambical/jafar/utils/nn.py",10307,0,"",python,selection_command +7101,20409521,"/fast/home/franz.srambical/jafar/utils/nn.py",10354,0,"",python,selection_command +7102,20411078,"/fast/home/franz.srambical/jafar/utils/nn.py",10362,0,"",python,selection_command +7103,20411149,"/fast/home/franz.srambical/jafar/utils/nn.py",10362,0," ",python,content +7104,20411150,"/fast/home/franz.srambical/jafar/utils/nn.py",10363,0,"",python,selection_keyboard +7105,20411290,"/fast/home/franz.srambical/jafar/utils/nn.py",10363,0,"()",python,content +7106,20411290,"/fast/home/franz.srambical/jafar/utils/nn.py",10364,0,"",python,selection_keyboard +7107,20411339,"/fast/home/franz.srambical/jafar/utils/nn.py",10364,1,")",python,content +7108,20411339,"/fast/home/franz.srambical/jafar/utils/nn.py",10365,0,"",python,selection_keyboard +7109,20411610,"/fast/home/franz.srambical/jafar/utils/nn.py",10364,0,"",python,selection_command +7110,20411965,"/fast/home/franz.srambical/jafar/utils/nn.py",10364,0,"B",python,content +7111,20411965,"/fast/home/franz.srambical/jafar/utils/nn.py",10365,0,"",python,selection_keyboard +7112,20412209,"/fast/home/franz.srambical/jafar/utils/nn.py",10365,0,",",python,content +7113,20412209,"/fast/home/franz.srambical/jafar/utils/nn.py",10366,0,"",python,selection_keyboard +7114,20412328,"/fast/home/franz.srambical/jafar/utils/nn.py",10366,0," ",python,content +7115,20412328,"/fast/home/franz.srambical/jafar/utils/nn.py",10367,0,"",python,selection_keyboard +7116,20414870,"/fast/home/franz.srambical/jafar/utils/nn.py",10367,0,"H",python,content +7117,20414871,"/fast/home/franz.srambical/jafar/utils/nn.py",10368,0,"",python,selection_keyboard +7118,20416196,"/fast/home/franz.srambical/jafar/utils/nn.py",10367,1,"",python,content +7119,20416337,"/fast/home/franz.srambical/jafar/utils/nn.py",10367,0,"1",python,content 
+7120,20416337,"/fast/home/franz.srambical/jafar/utils/nn.py",10368,0,"",python,selection_keyboard +7121,20416444,"/fast/home/franz.srambical/jafar/utils/nn.py",10368,0,",",python,content +7122,20416444,"/fast/home/franz.srambical/jafar/utils/nn.py",10369,0,"",python,selection_keyboard +7123,20417234,"/fast/home/franz.srambical/jafar/utils/nn.py",10369,0," ",python,content +7124,20417234,"/fast/home/franz.srambical/jafar/utils/nn.py",10370,0,"",python,selection_keyboard +7125,20417903,"/fast/home/franz.srambical/jafar/utils/nn.py",10370,0,"1",python,content +7126,20417903,"/fast/home/franz.srambical/jafar/utils/nn.py",10371,0,"",python,selection_keyboard +7127,20417981,"/fast/home/franz.srambical/jafar/utils/nn.py",10371,0,",",python,content +7128,20417982,"/fast/home/franz.srambical/jafar/utils/nn.py",10372,0,"",python,selection_keyboard +7129,20418149,"/fast/home/franz.srambical/jafar/utils/nn.py",10372,0," ",python,content +7130,20418150,"/fast/home/franz.srambical/jafar/utils/nn.py",10373,0,"",python,selection_keyboard +7131,20418234,"/fast/home/franz.srambical/jafar/utils/nn.py",10373,0,"1",python,content +7132,20418234,"/fast/home/franz.srambical/jafar/utils/nn.py",10374,0,"",python,selection_keyboard +7133,20418333,"/fast/home/franz.srambical/jafar/utils/nn.py",10374,0,",",python,content +7134,20418333,"/fast/home/franz.srambical/jafar/utils/nn.py",10375,0,"",python,selection_keyboard +7135,20418480,"/fast/home/franz.srambical/jafar/utils/nn.py",10374,0,"",python,selection_command +7136,20418863,"/fast/home/franz.srambical/jafar/utils/nn.py",10373,0,"",python,selection_command +7137,20419388,"/fast/home/franz.srambical/jafar/utils/nn.py",10371,0,"",python,selection_command +7138,20419561,"/fast/home/franz.srambical/jafar/utils/nn.py",10370,0,"",python,selection_command +7139,20419678,"/fast/home/franz.srambical/jafar/utils/nn.py",10368,0,"",python,selection_command +7140,20419813,"/fast/home/franz.srambical/jafar/utils/nn.py",10367,0,"",python,selection_command +7141,20420066,"/fast/home/franz.srambical/jafar/utils/nn.py",10367,9,"",python,content +7142,20420210,"/fast/home/franz.srambical/jafar/utils/nn.py",10367,0,")",python,content +7143,20420210,"/fast/home/franz.srambical/jafar/utils/nn.py",10368,0,"",python,selection_keyboard +7144,20420537,"/fast/home/franz.srambical/jafar/utils/nn.py",10367,0,"",python,selection_command +7145,20420898,"/fast/home/franz.srambical/jafar/utils/nn.py",10367,0,"H",python,content +7146,20420899,"/fast/home/franz.srambical/jafar/utils/nn.py",10368,0,"",python,selection_keyboard +7147,20421086,"/fast/home/franz.srambical/jafar/utils/nn.py",10368,0,",",python,content +7148,20421086,"/fast/home/franz.srambical/jafar/utils/nn.py",10369,0,"",python,selection_keyboard +7149,20421167,"/fast/home/franz.srambical/jafar/utils/nn.py",10369,0," ",python,content +7150,20421167,"/fast/home/franz.srambical/jafar/utils/nn.py",10370,0,"",python,selection_keyboard +7151,20421587,"/fast/home/franz.srambical/jafar/utils/nn.py",10370,0,"Q",python,content +7152,20421588,"/fast/home/franz.srambical/jafar/utils/nn.py",10371,0,"",python,selection_keyboard +7153,20421874,"/fast/home/franz.srambical/jafar/utils/nn.py",10371,0,",",python,content +7154,20421874,"/fast/home/franz.srambical/jafar/utils/nn.py",10372,0,"",python,selection_keyboard +7155,20422073,"/fast/home/franz.srambical/jafar/utils/nn.py",10372,0," ",python,content +7156,20422073,"/fast/home/franz.srambical/jafar/utils/nn.py",10373,0,"",python,selection_keyboard 
+7157,20422235,"/fast/home/franz.srambical/jafar/utils/nn.py",10373,0,"K",python,content +7158,20422236,"/fast/home/franz.srambical/jafar/utils/nn.py",10374,0,"",python,selection_keyboard +7159,20422432,"/fast/home/franz.srambical/jafar/utils/nn.py",10373,0,"",python,selection_command +7160,20422830,"/fast/home/franz.srambical/jafar/utils/nn.py",10375,0,"",python,selection_command +7161,20423420,"/fast/home/franz.srambical/jafar/utils/nn.py",10374,0,"",python,selection_command +7162,20423567,"/fast/home/franz.srambical/jafar/utils/nn.py",10373,0,"",python,selection_command +7163,20423770,"/fast/home/franz.srambical/jafar/utils/nn.py",10371,0,"",python,selection_command +7164,20423935,"/fast/home/franz.srambical/jafar/utils/nn.py",10370,0,"",python,selection_command +7165,20424035,"/fast/home/franz.srambical/jafar/utils/nn.py",10368,0,"",python,selection_command +7166,20424174,"/fast/home/franz.srambical/jafar/utils/nn.py",10367,0,"",python,selection_command +7167,20424722,"/fast/home/franz.srambical/jafar/utils/nn.py",10368,0,"",python,selection_command +7168,20425015,"/fast/home/franz.srambical/jafar/utils/nn.py",10368,0,"-",python,content +7169,20425015,"/fast/home/franz.srambical/jafar/utils/nn.py",10369,0,"",python,selection_keyboard +7170,20425219,"/fast/home/franz.srambical/jafar/utils/nn.py",10369,0,"b",python,content +7171,20425220,"/fast/home/franz.srambical/jafar/utils/nn.py",10370,0,"",python,selection_keyboard +7172,20425377,"/fast/home/franz.srambical/jafar/utils/nn.py",10370,0,"r",python,content +7173,20425377,"/fast/home/franz.srambical/jafar/utils/nn.py",10371,0,"",python,selection_keyboard +7174,20425484,"/fast/home/franz.srambical/jafar/utils/nn.py",10371,0,"o",python,content +7175,20425485,"/fast/home/franz.srambical/jafar/utils/nn.py",10372,0,"",python,selection_keyboard +7176,20425569,"/fast/home/franz.srambical/jafar/utils/nn.py",10372,0,"a",python,content +7177,20425570,"/fast/home/franz.srambical/jafar/utils/nn.py",10373,0,"",python,selection_keyboard +7178,20425655,"/fast/home/franz.srambical/jafar/utils/nn.py",10373,0,"d",python,content +7179,20425655,"/fast/home/franz.srambical/jafar/utils/nn.py",10374,0,"",python,selection_keyboard +7180,20425906,"/fast/home/franz.srambical/jafar/utils/nn.py",10374,0,"c",python,content +7181,20425907,"/fast/home/franz.srambical/jafar/utils/nn.py",10375,0,"",python,selection_keyboard +7182,20425971,"/fast/home/franz.srambical/jafar/utils/nn.py",10375,0,"a",python,content +7183,20425972,"/fast/home/franz.srambical/jafar/utils/nn.py",10376,0,"",python,selection_keyboard +7184,20426093,"/fast/home/franz.srambical/jafar/utils/nn.py",10376,0,"s",python,content +7185,20426093,"/fast/home/franz.srambical/jafar/utils/nn.py",10377,0,"",python,selection_keyboard +7186,20426292,"/fast/home/franz.srambical/jafar/utils/nn.py",10377,0,"t",python,content +7187,20426292,"/fast/home/franz.srambical/jafar/utils/nn.py",10378,0,"",python,selection_keyboard +7188,20426687,"/fast/home/franz.srambical/jafar/utils/nn.py",10377,0,"",python,selection_command +7189,20426925,"/fast/home/franz.srambical/jafar/utils/nn.py",10378,0,"",python,selection_command +7190,20427170,"/fast/home/franz.srambical/jafar/utils/nn.py",10379,0,"",python,selection_command +7191,20427356,"/fast/home/franz.srambical/jafar/utils/nn.py",10380,0,"",python,selection_command +7192,20427479,"/fast/home/franz.srambical/jafar/utils/nn.py",10381,0,"",python,selection_command +7193,20427693,"/fast/home/franz.srambical/jafar/utils/nn.py",10381,0,"-",python,content 
+7194,20427693,"/fast/home/franz.srambical/jafar/utils/nn.py",10382,0,"",python,selection_keyboard +7195,20427840,"/fast/home/franz.srambical/jafar/utils/nn.py",10382,0,"b",python,content +7196,20427840,"/fast/home/franz.srambical/jafar/utils/nn.py",10383,0,"",python,selection_keyboard +7197,20427908,"/fast/home/franz.srambical/jafar/utils/nn.py",10383,0,"r",python,content +7198,20427908,"/fast/home/franz.srambical/jafar/utils/nn.py",10384,0,"",python,selection_keyboard +7199,20427993,"/fast/home/franz.srambical/jafar/utils/nn.py",10384,0,"a",python,content +7200,20427994,"/fast/home/franz.srambical/jafar/utils/nn.py",10385,0,"",python,selection_keyboard +7201,20428053,"/fast/home/franz.srambical/jafar/utils/nn.py",10385,0,"o",python,content +7202,20428054,"/fast/home/franz.srambical/jafar/utils/nn.py",10386,0,"",python,selection_keyboard +7203,20428243,"/fast/home/franz.srambical/jafar/utils/nn.py",10386,0,"a",python,content +7204,20428243,"/fast/home/franz.srambical/jafar/utils/nn.py",10387,0,"",python,selection_keyboard +7205,20428303,"/fast/home/franz.srambical/jafar/utils/nn.py",10387,0,"d",python,content +7206,20428304,"/fast/home/franz.srambical/jafar/utils/nn.py",10388,0,"",python,selection_keyboard +7207,20428523,"/fast/home/franz.srambical/jafar/utils/nn.py",10387,1,"",python,content +7208,20428660,"/fast/home/franz.srambical/jafar/utils/nn.py",10386,1,"",python,content +7209,20428792,"/fast/home/franz.srambical/jafar/utils/nn.py",10385,1,"",python,content +7210,20428919,"/fast/home/franz.srambical/jafar/utils/nn.py",10384,1,"",python,content +7211,20429107,"/fast/home/franz.srambical/jafar/utils/nn.py",10384,0,"o",python,content +7212,20429108,"/fast/home/franz.srambical/jafar/utils/nn.py",10385,0,"",python,selection_keyboard +7213,20429171,"/fast/home/franz.srambical/jafar/utils/nn.py",10385,0,"a",python,content +7214,20429172,"/fast/home/franz.srambical/jafar/utils/nn.py",10386,0,"",python,selection_keyboard +7215,20429240,"/fast/home/franz.srambical/jafar/utils/nn.py",10386,0,"d",python,content +7216,20429241,"/fast/home/franz.srambical/jafar/utils/nn.py",10387,0,"",python,selection_keyboard +7217,20429359,"/fast/home/franz.srambical/jafar/utils/nn.py",10387,0,"c",python,content +7218,20429359,"/fast/home/franz.srambical/jafar/utils/nn.py",10388,0,"",python,selection_keyboard +7219,20429455,"/fast/home/franz.srambical/jafar/utils/nn.py",10388,0,"a",python,content +7220,20429455,"/fast/home/franz.srambical/jafar/utils/nn.py",10389,0,"",python,selection_keyboard +7221,20429538,"/fast/home/franz.srambical/jafar/utils/nn.py",10389,0,"s",python,content +7222,20429539,"/fast/home/franz.srambical/jafar/utils/nn.py",10390,0,"",python,selection_keyboard +7223,20429640,"/fast/home/franz.srambical/jafar/utils/nn.py",10390,0,"t",python,content +7224,20429640,"/fast/home/franz.srambical/jafar/utils/nn.py",10391,0,"",python,selection_keyboard +7225,20429827,"/fast/home/franz.srambical/jafar/utils/nn.py",10390,0,"",python,selection_command +7226,20430077,"/fast/home/franz.srambical/jafar/utils/nn.py",10395,0,"",python,selection_command +7227,20430523,"/fast/home/franz.srambical/jafar/utils/nn.py",10394,0,"",python,selection_command +7228,20430927,"/fast/home/franz.srambical/jafar/utils/nn.py",10394,0,"-",python,content +7229,20430928,"/fast/home/franz.srambical/jafar/utils/nn.py",10395,0,"",python,selection_keyboard +7230,20431062,"/fast/home/franz.srambical/jafar/utils/nn.py",10395,0,"b",python,content 
+7231,20431062,"/fast/home/franz.srambical/jafar/utils/nn.py",10396,0,"",python,selection_keyboard +7232,20431194,"/fast/home/franz.srambical/jafar/utils/nn.py",10396,0,"o",python,content +7233,20431194,"/fast/home/franz.srambical/jafar/utils/nn.py",10397,0,"",python,selection_keyboard +7234,20431311,"/fast/home/franz.srambical/jafar/utils/nn.py",10397,0,"r",python,content +7235,20431312,"/fast/home/franz.srambical/jafar/utils/nn.py",10398,0,"",python,selection_keyboard +7236,20431563,"/fast/home/franz.srambical/jafar/utils/nn.py",10397,1,"",python,content +7237,20431659,"/fast/home/franz.srambical/jafar/utils/nn.py",10396,1,"",python,content +7238,20431711,"/fast/home/franz.srambical/jafar/utils/nn.py",10396,0,"r",python,content +7239,20431712,"/fast/home/franz.srambical/jafar/utils/nn.py",10397,0,"",python,selection_keyboard +7240,20431857,"/fast/home/franz.srambical/jafar/utils/nn.py",10397,0,"o",python,content +7241,20431857,"/fast/home/franz.srambical/jafar/utils/nn.py",10398,0,"",python,selection_keyboard +7242,20431974,"/fast/home/franz.srambical/jafar/utils/nn.py",10398,0,"a",python,content +7243,20431974,"/fast/home/franz.srambical/jafar/utils/nn.py",10399,0,"",python,selection_keyboard +7244,20432417,"/fast/home/franz.srambical/jafar/utils/nn.py",10399,0,"d",python,content +7245,20432417,"/fast/home/franz.srambical/jafar/utils/nn.py",10400,0,"",python,selection_keyboard +7246,20432565,"/fast/home/franz.srambical/jafar/utils/nn.py",10400,0,"c",python,content +7247,20432565,"/fast/home/franz.srambical/jafar/utils/nn.py",10401,0,"",python,selection_keyboard +7248,20432663,"/fast/home/franz.srambical/jafar/utils/nn.py",10401,0,"s",python,content +7249,20432663,"/fast/home/franz.srambical/jafar/utils/nn.py",10402,0,"",python,selection_keyboard +7250,20432677,"/fast/home/franz.srambical/jafar/utils/nn.py",10402,0,"a",python,content +7251,20432678,"/fast/home/franz.srambical/jafar/utils/nn.py",10403,0,"",python,selection_keyboard +7252,20432827,"/fast/home/franz.srambical/jafar/utils/nn.py",10403,0,"t",python,content +7253,20432828,"/fast/home/franz.srambical/jafar/utils/nn.py",10404,0,"",python,selection_keyboard +7254,20433033,"/fast/home/franz.srambical/jafar/utils/nn.py",10403,0,"",python,selection_command +7255,20433535,"/fast/home/franz.srambical/jafar/utils/nn.py",10402,0,"",python,selection_command +7256,20433861,"/fast/home/franz.srambical/jafar/utils/nn.py",10401,0,"",python,selection_command +7257,20434077,"/fast/home/franz.srambical/jafar/utils/nn.py",10401,3,"",python,content +7258,20434347,"/fast/home/franz.srambical/jafar/utils/nn.py",10401,0,"a",python,content +7259,20434348,"/fast/home/franz.srambical/jafar/utils/nn.py",10402,0,"",python,selection_keyboard +7260,20434565,"/fast/home/franz.srambical/jafar/utils/nn.py",10402,0,"s",python,content +7261,20434565,"/fast/home/franz.srambical/jafar/utils/nn.py",10403,0,"",python,selection_keyboard +7262,20434629,"/fast/home/franz.srambical/jafar/utils/nn.py",10403,0,"t",python,content +7263,20434629,"/fast/home/franz.srambical/jafar/utils/nn.py",10404,0,"",python,selection_keyboard +7264,20434810,"/fast/home/franz.srambical/jafar/utils/nn.py",10403,0,"",python,selection_command +7265,20434930,"/fast/home/franz.srambical/jafar/utils/nn.py",10320,0,"",python,selection_command +7266,20478445,"experiments/sample.sh",0,0,"source .venv/bin/activate\n\ndata_dir=""$PWD/data_arrayrecord/dummy""\nckpt_dir=""$PWD/checkpoints/dynamics_openai_grain_tok_lam_restore""\n\nexport XLA_FLAGS=--xla_gpu_autotune_level=0\nsrun python sample.py \\n 
--dynamics_type ""causal"" \\n --batch_size 1 \\n --seq_len 2 \\n --start_frame 1 \\n --checkpoint $ckpt_dir \\n --data_dir $data_dir",shellscript,tab +7267,20479016,"experiments/sample.sh",251,0,"",shellscript,selection_command +7268,20479182,"experiments/sample.sh",269,0,"",shellscript,selection_command +7269,20480069,"experiments/sample.sh",267,0,"",shellscript,selection_command +7270,20485500,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +7271,20488554,"/fast/home/franz.srambical/jafar/utils/nn.py",10273,0,"",python,selection_command +7272,20488795,"/fast/home/franz.srambical/jafar/utils/nn.py",10234,0,"",python,selection_command +7273,20488826,"/fast/home/franz.srambical/jafar/utils/nn.py",10191,0,"",python,selection_command +7274,20488856,"/fast/home/franz.srambical/jafar/utils/nn.py",10162,0,"",python,selection_command +7275,20488889,"/fast/home/franz.srambical/jafar/utils/nn.py",10069,0,"",python,selection_command +7276,20488922,"/fast/home/franz.srambical/jafar/utils/nn.py",10068,0,"",python,selection_command +7277,20488956,"/fast/home/franz.srambical/jafar/utils/nn.py",9992,0,"",python,selection_command +7278,20488989,"/fast/home/franz.srambical/jafar/utils/nn.py",9916,0,"",python,selection_command +7279,20489024,"/fast/home/franz.srambical/jafar/utils/nn.py",9831,0,"",python,selection_command +7280,20489059,"/fast/home/franz.srambical/jafar/utils/nn.py",9830,0,"",python,selection_command +7281,20489092,"/fast/home/franz.srambical/jafar/utils/nn.py",9789,0,"",python,selection_command +7282,20489126,"/fast/home/franz.srambical/jafar/utils/nn.py",9746,0,"",python,selection_command +7283,20489165,"/fast/home/franz.srambical/jafar/utils/nn.py",9707,0,"",python,selection_command +7284,20489196,"/fast/home/franz.srambical/jafar/utils/nn.py",9664,0,"",python,selection_command +7285,20489230,"/fast/home/franz.srambical/jafar/utils/nn.py",9663,0,"",python,selection_command +7286,20489266,"/fast/home/franz.srambical/jafar/utils/nn.py",9610,0,"",python,selection_command +7287,20489299,"/fast/home/franz.srambical/jafar/utils/nn.py",9551,0,"",python,selection_command +7288,20489332,"/fast/home/franz.srambical/jafar/utils/nn.py",9512,0,"",python,selection_command +7289,20489363,"/fast/home/franz.srambical/jafar/utils/nn.py",9511,0,"",python,selection_command +7290,20489397,"/fast/home/franz.srambical/jafar/utils/nn.py",9468,0,"",python,selection_command +7291,20489429,"/fast/home/franz.srambical/jafar/utils/nn.py",9431,0,"",python,selection_command +7292,20489463,"/fast/home/franz.srambical/jafar/utils/nn.py",9430,0,"",python,selection_command +7293,20489504,"/fast/home/franz.srambical/jafar/utils/nn.py",9359,0,"",python,selection_command +7294,20489530,"/fast/home/franz.srambical/jafar/utils/nn.py",9338,0,"",python,selection_command +7295,20489563,"/fast/home/franz.srambical/jafar/utils/nn.py",9337,0,"",python,selection_command +7296,20489599,"/fast/home/franz.srambical/jafar/utils/nn.py",9270,0,"",python,selection_command +7297,20489634,"/fast/home/franz.srambical/jafar/utils/nn.py",9243,0,"",python,selection_command +7298,20489664,"/fast/home/franz.srambical/jafar/utils/nn.py",9242,0,"",python,selection_command +7299,20489704,"/fast/home/franz.srambical/jafar/utils/nn.py",9212,0,"",python,selection_command +7300,20489732,"/fast/home/franz.srambical/jafar/utils/nn.py",9144,0,"",python,selection_command +7301,20489771,"/fast/home/franz.srambical/jafar/utils/nn.py",9110,0,"",python,selection_command 
+7302,20489798,"/fast/home/franz.srambical/jafar/utils/nn.py",9059,0,"",python,selection_command +7303,20489832,"/fast/home/franz.srambical/jafar/utils/nn.py",9009,0,"",python,selection_command +7304,20489863,"/fast/home/franz.srambical/jafar/utils/nn.py",8936,0,"",python,selection_command +7305,20489897,"/fast/home/franz.srambical/jafar/utils/nn.py",8935,0,"",python,selection_command +7306,20489931,"/fast/home/franz.srambical/jafar/utils/nn.py",8927,0,"",python,selection_command +7307,20489962,"/fast/home/franz.srambical/jafar/utils/nn.py",8887,0,"",python,selection_command +7308,20489997,"/fast/home/franz.srambical/jafar/utils/nn.py",8789,0,"",python,selection_command +7309,20490031,"/fast/home/franz.srambical/jafar/utils/nn.py",8704,0,"",python,selection_command +7310,20490063,"/fast/home/franz.srambical/jafar/utils/nn.py",8703,0,"",python,selection_command +7311,20490097,"/fast/home/franz.srambical/jafar/utils/nn.py",8624,0,"",python,selection_command +7312,20490131,"/fast/home/franz.srambical/jafar/utils/nn.py",8528,0,"",python,selection_command +7313,20490167,"/fast/home/franz.srambical/jafar/utils/nn.py",8527,0,"",python,selection_command +7314,20490198,"/fast/home/franz.srambical/jafar/utils/nn.py",8456,0,"",python,selection_command +7315,20490543,"/fast/home/franz.srambical/jafar/utils/nn.py",8448,0,"",python,selection_command +7316,20490725,"/fast/home/franz.srambical/jafar/utils/nn.py",8360,0,"",python,selection_command +7317,20491442,"/fast/home/franz.srambical/jafar/utils/nn.py",8364,0,"",python,selection_command +7318,20493954,"experiments/sample.sh",0,0,"",shellscript,tab +7319,20497621,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +7320,20498835,"/fast/home/franz.srambical/jafar/utils/nn.py",8452,0,"",python,selection_command +7321,20499102,"/fast/home/franz.srambical/jafar/utils/nn.py",8460,0,"",python,selection_command +7322,20499114,"/fast/home/franz.srambical/jafar/utils/nn.py",8527,0,"",python,selection_command +7323,20499143,"/fast/home/franz.srambical/jafar/utils/nn.py",8532,0,"",python,selection_command +7324,20499177,"/fast/home/franz.srambical/jafar/utils/nn.py",8628,0,"",python,selection_command +7325,20499210,"/fast/home/franz.srambical/jafar/utils/nn.py",8703,0,"",python,selection_command +7326,20499244,"/fast/home/franz.srambical/jafar/utils/nn.py",8708,0,"",python,selection_command +7327,20499277,"/fast/home/franz.srambical/jafar/utils/nn.py",8793,0,"",python,selection_command +7328,20499309,"/fast/home/franz.srambical/jafar/utils/nn.py",8891,0,"",python,selection_command +7329,20499342,"/fast/home/franz.srambical/jafar/utils/nn.py",8931,0,"",python,selection_command +7330,20499376,"/fast/home/franz.srambical/jafar/utils/nn.py",8935,0,"",python,selection_command +7331,20499411,"/fast/home/franz.srambical/jafar/utils/nn.py",8940,0,"",python,selection_command +7332,20499443,"/fast/home/franz.srambical/jafar/utils/nn.py",9013,0,"",python,selection_command +7333,20499476,"/fast/home/franz.srambical/jafar/utils/nn.py",9063,0,"",python,selection_command +7334,20499511,"/fast/home/franz.srambical/jafar/utils/nn.py",9114,0,"",python,selection_command +7335,20499544,"/fast/home/franz.srambical/jafar/utils/nn.py",9148,0,"",python,selection_command +7336,20499576,"/fast/home/franz.srambical/jafar/utils/nn.py",9216,0,"",python,selection_command +7337,20499866,"/fast/home/franz.srambical/jafar/utils/nn.py",9148,0,"",python,selection_command +7338,20500004,"/fast/home/franz.srambical/jafar/utils/nn.py",9216,0,"",python,selection_command 
+7339,20501073,"/fast/home/franz.srambical/jafar/utils/nn.py",9212,30,"",python,content +7340,20501147,"/fast/home/franz.srambical/jafar/utils/nn.py",9144,0,"",python,selection_command +7341,20501407,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,0,"",python,selection_command +7342,20501658,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,1,"",python,content +7343,20501765,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,1,"",python,content +7344,20504923,"TERMINAL",0,0,"[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +7345,20504977,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +7346,20505267,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +7347,20508822,"/fast/home/franz.srambical/jafar/utils/nn.py",9144,0,"",python,selection_command +7348,20516796,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +7349,20524956,"TERMINAL",0,0,"2025-07-27 15:00:44.760759: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7350,20526469,"TERMINAL",0,0,"2025-07-27 15:00:46.273026: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7351,20530025,"TERMINAL",0,0,"2025-07-27 15:00:49.829528: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7352,20530701,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/utils/nn.py(332)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +7353,20534119,"TERMINAL",0,0,"c",,terminal_output +7354,20534526,"TERMINAL",0,0,"\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 231, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 134, in __call__\r\n z = self.temporal_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 335, in attention_fn\r\n output_4d = jax.nn.dot_product_attention(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py"", line 1204, in dot_product_attention\r\n out = cudnn_dot_product_attention(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py"", line 1981, in dot_product_attention\r\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py"", line 332, in check_layout\r\n raise ValueError(\r\nValueError: Bias must have same seq length as QKV, got 1 and 1\r\n",,terminal_output +7355,20535343,"TERMINAL",0,0,"(Pdb) ",,terminal_output +7356,20535570,"TERMINAL",0,0,"srun: error: hai007: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +7357,20575462,"/fast/home/franz.srambical/jafar/utils/nn.py",6905,0,"",python,selection_command +7358,20583083,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py",0,0,"# Copyright 2024 The Flax 
Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# pytype: skip-file\nfrom __future__ import annotations\n\nfrom abc import abstractmethod\nimport dataclasses\nimport functools\nimport inspect\nimport typing as tp\n\nfrom jax._src import checkify as checkify_lib\n\nfrom flax.nnx import (\n extract,\n graph,\n)\nfrom flax.nnx.module import Module\nfrom flax.nnx.proxy_caller import (\n CallableProxy,\n DelayedAccessor,\n)\nfrom flax.nnx.transforms import general\nfrom flax.typing import MISSING, Leaf, Missing\nimport jax\nimport jax.core\nimport jax.stages\n\nA = tp.TypeVar('A')\nC = tp.TypeVar('C')\nB = tp.TypeVar('B')\nF = tp.TypeVar('F', bound=tp.Callable[..., tp.Any])\nG = tp.TypeVar('G', bound=tp.Callable[..., tp.Any])\nM = tp.TypeVar('M', bound=Module)\nMA = tp.TypeVar('MA', bound=Module)\nN = tp.TypeVar('N', bound=Module)\nStrInt = tp.TypeVar('StrInt', str, int)\nAxisName = tp.Hashable\nLeaves = tp.List[Leaf]\nIndex = int\n\n\n@tp.overload\ndef resolve_kwargs(\n fun: tp.Callable[..., tp.Any],\n args: tuple,\n kwargs: dict[str, tp.Any],\n) -> tuple: ...\n@tp.overload\ndef resolve_kwargs() -> tp.Callable[[F], F]: ...\ndef resolve_kwargs(\n fun: tp.Callable[..., tp.Any] | Missing = MISSING,\n args: tuple | Missing = MISSING,\n kwargs: dict[str, tp.Any] | Missing = MISSING,\n) -> tuple | tp.Callable[[F], F]:\n if isinstance(fun, Missing):\n\n def resolve_kwargs_decorator(f):\n @functools.wraps(f)\n def resolve_kwargs_wrapper(*args, **kwargs):\n args = resolve_kwargs(f, args, kwargs)\n return f(*args)\n\n return resolve_kwargs_wrapper\n\n return resolve_kwargs_decorator # type: ignore\n\n if isinstance(args, Missing):\n raise ValueError('args must be provided')\n if isinstance(kwargs, Missing):\n raise ValueError('kwargs must be provided')\n\n if isinstance(fun, functools.partial):\n # functools.partial should have an opaque signature.\n fun = lambda *args, **kwargs: None\n ba = inspect.signature(fun).bind(*args, **kwargs)\n ba.apply_defaults()\n if ba.kwargs:\n raise TypeError('keyword arguments could not be resolved to positions')\n else:\n return ba.args\n\n\n\nclass LiftedModule(tp.Generic[M], Module): # type: ignore[ignored-abstractmethod]\n @abstractmethod\n def _call(self, accessor: DelayedAccessor, *args, **kwargs) -> tp.Any:\n pass\n\n @property\n @abstractmethod\n def _submodule(self) -> M:\n pass # type: ignore[bad-return-type] # why pytype?\n\n def __call__(self, *args, **kwargs) -> tp.Any:\n return self.call(*args, **kwargs) # type: ignore\n\n @property\n def call(self) -> tp.Any:\n module = self\n\n def check_and_call(accessor: DelayedAccessor, *args, **kwargs):\n return self._call(accessor, *args, **kwargs)\n\n proxy = CallableProxy(check_and_call) # type: ignore[arg-type]\n\n while isinstance(module._submodule, LiftedModule):\n module = module._submodule\n proxy = proxy.call\n\n return proxy # type: ignore\n\n\n# -------------------------------\n# simple transforms\n# -------------------------------\n\n\ndef eval_shape(\n f: tp.Callable[..., A],\n *args: tp.Any,\n 
**kwargs: tp.Any,\n) -> A:\n """"""A ""lifted"" version of `jax.eval_shape `_\n that can handle `flax.nnx.Module `_\n / graph nodes as arguments.\n\n Similar to ``jax.eval_shape``, it computes the shape/dtype of a function `f` without\n performing any floating point operations (FLOPs) which can be expensive. This can be\n useful for performing shape inference, for example.\n """"""\n args, kwargs = extract.to_tree((args, kwargs))\n\n @functools.wraps(f)\n def _eval_shape_fn(*args, **kwargs):\n args, kwargs = extract.from_tree((args, kwargs))\n out = f(*args, **kwargs)\n return extract.to_tree(graph.freeze(out))\n\n out = jax.eval_shape(_eval_shape_fn, *args, **kwargs)\n return extract.from_tree(out)\n\n@dataclasses.dataclass(eq=False)\nclass CheckifyFn:\n f: tp.Callable[..., tp.Any]\n\n def __post_init__(self):\n functools.update_wrapper(self, self.f)\n\n def __call__(self, *pure_args, **pure_kwargs):\n args, kwargs = extract.from_tree(\n (pure_args, pure_kwargs), ctxtag='checkify', is_inner=True\n )\n out = self.f(*args, **kwargs)\n\n args_out, kwargs_out = extract.clear_non_graph_nodes((args, kwargs))\n pure_args_out, pure_kwargs_out, pure_out = extract.to_tree(\n (args, kwargs, out), ctxtag='checkify'\n )\n return pure_args_out, pure_kwargs_out, pure_out\n\ndef checkify(\n f: tp.Callable[..., checkify_lib.Out],\n errors: frozenset[type[checkify_lib.JaxException]] = checkify_lib.user_checks, # type: ignore\n) -> tp.Callable[..., tuple[checkify_lib.Error, checkify_lib.Out]]:\n """"""Reference-aware version of `jax.experimental.checkify\n `_.\n\n Example::\n\n >>> import jax\n >>> import jax.numpy as jnp\n >>> from jax.experimental import checkify\n >>> import dataclasses\n >>> from flax import nnx\n ...\n >>> class Foo(nnx.Module):\n ... def __init__(self, a):\n ... self.a = nnx.Param(a)\n ...\n >>> @nnx.jit\n ... def f(m):\n ... y = jnp.sin(m.a.value) # error\n ... 
return m.a + y\n ...\n >>> m = Foo(a=jnp.inf)\n >>> err, out = nnx.checkify(f, errors=checkify.float_checks)(m)\n >>> # err.throw()\n >>> print(err)\n Error(nan generated by primitive: sin.)\n """"""\n checkify_fn = checkify_lib.checkify(CheckifyFn(f), errors)\n\n @functools.wraps(f)\n @graph.update_context('checkify')\n def jit_wrapper(*args, **kwargs):\n pure_args, pure_kwargs = extract.to_tree(\n (args, kwargs),\n ctxtag='checkify',\n )\n error, (pure_args_out, pure_kwargs_out, pure_out) = checkify_fn(\n *pure_args, **pure_kwargs\n )\n\n args_out, kwargs_out, out = extract.from_tree(\n (pure_args_out, pure_kwargs_out, pure_out),\n ctxtag='checkify',\n is_inner=False,\n )\n\n return error, out\n\n return jit_wrapper # type: ignore\n\n\n@general.split_inputs(ctxtag='cond')\ndef cond(\n pred,\n true_fun: tp.Callable[..., A],\n false_fun: tp.Callable[..., A],\n *operands,\n **kwargs,\n) -> A:\n return jax.lax.cond(\n pred,\n general.merge_inputs(true_fun, ctxtag='cond'),\n general.merge_inputs(false_fun, ctxtag='cond'),\n *operands,\n **kwargs,\n )\n\n\n@general.split_inputs(ctxtag='switch')\ndef switch(\n index,\n branches: tp.Sequence[tp.Callable[..., A]],\n *operands,\n) -> A:\n return jax.lax.switch(\n index,\n [general.merge_inputs(f, ctxtag='switch') for f in branches],\n *operands,\n )\n\n",python,tab +7359,20583083,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py",2014,0,"",python,selection_command +7360,20585110,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py",0,0,"# Copyright 2024 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import annotations\n\nimport contextlib\nimport dataclasses\nimport functools\nimport threading\nimport typing as tp\n\nfrom flax import config\nfrom flax.nnx import filterlib, reprlib, traversals, variablelib\nfrom flax.nnx import statelib\nfrom flax.nnx.proxy_caller import (\n ApplyCaller,\n CallableProxy,\n DelayedAccessor,\n)\nfrom flax.nnx.statelib import EmptyState, FlatState, State\nfrom flax.nnx.variablelib import Variable, VariableState\nfrom flax.typing import Key, PathParts, is_key_like\nimport jax\nimport numpy as np\nimport treescope # type: ignore[import-not-found,import-untyped]\nimport typing_extensions as tpe\n\nA = tp.TypeVar('A')\nB = tp.TypeVar('B')\nC = tp.TypeVar('C')\nF = tp.TypeVar('F', bound=tp.Callable)\n\nHA = tp.TypeVar('HA', bound=tp.Hashable)\nHB = tp.TypeVar('HB', bound=tp.Hashable)\nKeyT = tp.TypeVar('KeyT', bound=Key)\n\nIndex = int\nNames = tp.Sequence[int]\nNode = tp.TypeVar('Node')\nLeaf = tp.TypeVar('Leaf')\nAuxData = tp.TypeVar('AuxData')\n\n\n@jax.tree_util.register_static\n@dataclasses.dataclass(frozen=True, slots=True)\nclass NoUpdate: ...\n\n\nNO_UPDATE = NoUpdate()\n\n\n@jax.tree_util.register_static\n@dataclasses.dataclass(frozen=True, slots=True)\nclass Repeated: ...\n\n\nREPEATED = Repeated()\n\n\n@jax.tree_util.register_dataclass\n@dataclasses.dataclass(frozen=True, slots=True, 
repr=False)\nclass MutableArrayOutput(reprlib.Representable):\n value: jax.Array | NoUpdate | Repeated\n\n def __nnx_repr__(self):\n yield reprlib.Object(type=type(self))\n yield reprlib.Attr('value', self.value)\n\n def __treescope_repr__(self, path, subtree_renderer):\n return treescope.repr_lib.render_object_constructor(\n object_type=type(self),\n attributes={\n 'value': self.value,\n },\n path=path,\n subtree_renderer=subtree_renderer,\n )\n\n\nLeafType = tp.Union[\n Variable,\n VariableState,\n jax.Array,\n np.ndarray,\n variablelib.MutableArray,\n MutableArrayOutput,\n NoUpdate,\n]\nGraphState = State[Key, LeafType]\nGraphFlatState = FlatState[LeafType]\n\n\ndef is_node_leaf(x: tp.Any) -> tpe.TypeGuard[LeafType]:\n return isinstance(x, LeafType) or variablelib.is_mutable_array(x) # type: ignore[misc, arg-type]\n\n\nclass IndexMap(dict[Index, tp.Any]):\n @staticmethod\n def from_refmap(refmap: RefMap) -> IndexMap:\n return IndexMap((index, value) for value, index in refmap.items())\n\n\nif config.flax_use_flaxlib:\n import flaxlib # type: ignore[import]\n\n globals()['IndexMap'] = flaxlib.IndexMap\n\n\n# RefMap = dict\nclass RefMap(tp.MutableMapping[tp.Any, int], reprlib.MappingReprMixin):\n """"""A mapping that hashes keys by their identity.""""""\n\n def __init__(\n self,\n mapping: tp.Mapping[tp.Any, int]\n | tp.Iterable[tuple[tp.Any, int]]\n | None = None,\n /,\n ):\n self._mapping: dict[int, tuple[tp.Any, int]] = dict()\n if mapping is not None:\n self.update(mapping)\n\n @staticmethod\n def from_indexmap(indexmap: IndexMap) -> RefMap:\n refmap = RefMap()\n refmap.update((value, index) for index, value in indexmap.items())\n return refmap\n\n def get(self, key: tp.Any, default: int | None = None) -> int | None: # type: ignore[override]\n return self._mapping.get(id(key), (None, default))[1]\n\n def __getitem__(self, key: tp.Any) -> int:\n return self._mapping[id(key)][1]\n\n def __setitem__(self, key: tp.Any, value: int):\n self._mapping[id(key)] = (key, value)\n\n def __delitem__(self, key: tp.Any):\n del self._mapping[id(key)]\n\n def __len__(self) -> int:\n return len(self._mapping)\n\n def __contains__(self, key: tp.Any) -> bool:\n return id(key) in self._mapping\n\n def __iter__(self) -> tp.Iterator[tp.Any]:\n for key, _ in self._mapping.values():\n yield key\n\n def items(self) -> tp.ItemsView[tp.Any, int]:\n return self._mapping.values() # type: ignore\n\n\n# save python version\nPythonRefMap = RefMap\n\nif config.flax_use_flaxlib:\n import flaxlib # type: ignore[import]\n\n globals()['RefMap'] = flaxlib.RefMap\n\n\n@dataclasses.dataclass(frozen=True, slots=True)\nclass NodeImplBase(tp.Generic[Node, Leaf, AuxData]):\n type: type[Node]\n flatten: tp.Callable[[Node], tuple[tp.Sequence[tuple[Key, Leaf]], AuxData]]\n\n def node_dict(self, node: Node) -> dict[Key, Leaf]:\n nodes, _ = self.flatten(node)\n return dict(nodes)\n\n\n@dataclasses.dataclass(frozen=True, slots=True)\nclass GraphNodeImpl(NodeImplBase[Node, Leaf, AuxData]):\n set_key: tp.Callable[[Node, Key, Leaf], None]\n pop_key: tp.Callable[[Node, Key], Leaf]\n create_empty: tp.Callable[[AuxData], Node]\n clear: tp.Callable[[Node], None]\n init: tp.Callable[[Node, tp.Iterable[tuple[Key, Leaf]]], None]\n\n\n@dataclasses.dataclass(frozen=True, slots=True)\nclass PytreeNodeImpl(NodeImplBase[Node, Leaf, AuxData]):\n unflatten: tp.Callable[[tp.Sequence[tuple[Key, Leaf]], AuxData], Node]\n\n\nNodeImpl = tp.Union[\n GraphNodeImpl[Node, Leaf, AuxData], PytreeNodeImpl[Node, Leaf, AuxData]\n]\n\n\nGRAPH_REGISTRY: dict[type, 
NodeImpl[tp.Any, tp.Any, tp.Any]] = {}\nPYTREE_REGISTRY: dict[type, PytreeNodeImpl[tp.Any, tp.Any, tp.Any]] = {}\n\n\ndef register_graph_node_type(\n type: type,\n flatten: tp.Callable[[Node], tuple[tp.Sequence[tuple[Key, Leaf]], AuxData]],\n set_key: tp.Callable[[Node, Key, Leaf], None],\n pop_key: tp.Callable[[Node, Key], Leaf],\n create_empty: tp.Callable[[AuxData], Node],\n clear: tp.Callable[[Node], None],\n init: tp.Callable[[Node, tp.Iterable[tuple[Key, Leaf]]], None],\n):\n if type in GRAPH_REGISTRY:\n raise ValueError(f'Node type {type} is already registered.')\n\n GRAPH_REGISTRY[type] = GraphNodeImpl(\n type=type,\n flatten=flatten,\n set_key=set_key,\n pop_key=pop_key,\n create_empty=create_empty,\n clear=clear,\n init=init,\n )\n\n\ndef register_pytree_node_type(\n type: type,\n flatten: tp.Callable[[Node], tuple[tp.Sequence[tuple[Key, Leaf]], AuxData]],\n unflatten: tp.Callable[[tp.Sequence[tuple[Key, Leaf]], AuxData], Node],\n):\n if type in PYTREE_REGISTRY:\n raise ValueError(f'Node type {type} is already registered.')\n\n PYTREE_REGISTRY[type] = PytreeNodeImpl(\n type=type, flatten=flatten, unflatten=unflatten\n )\n\n\ndef is_node(x: tp.Any) -> bool:\n if isinstance(x, Variable):\n return False\n if type(x) in GRAPH_REGISTRY:\n return True\n return is_pytree_node(x)\n\n\ndef is_graph_node(x: tp.Any) -> bool:\n return type(x) in GRAPH_REGISTRY or variablelib.is_mutable_array(x)\n\n\ndef is_node_type(x: type[tp.Any]) -> bool:\n return x in GRAPH_REGISTRY or x in PYTREE_REGISTRY or x is GenericPytree\n\n\ndef get_node_impl(x: Node) -> NodeImpl[Node, tp.Any, tp.Any] | None:\n if isinstance(x, Variable):\n return None\n\n node_type = type(x)\n\n if node_type in GRAPH_REGISTRY:\n return GRAPH_REGISTRY[node_type]\n elif node_type in PYTREE_REGISTRY:\n return PYTREE_REGISTRY[node_type]\n elif node_type in JAX_PYTREE_REGISTRY or issubclass(node_type, tuple):\n return PYTREE_NODE_IMPL # type: ignore\n else:\n return None\n\n\ndef get_node_impl_for_type(\n x: type[Node],\n) -> NodeImpl[Node, tp.Any, tp.Any] | None:\n if x is GenericPytree:\n return PYTREE_NODE_IMPL # type: ignore\n elif x in PYTREE_REGISTRY:\n return PYTREE_REGISTRY[x]\n elif x in GRAPH_REGISTRY:\n return GRAPH_REGISTRY[x]\n else:\n return None\n\n\nclass HashableMapping(tp.Mapping[HA, HB], tp.Hashable):\n _mapping: dict[HA, HB] | tp.Mapping[HA, HB]\n\n def __init__(self, mapping: tp.Mapping[HA, HB], copy: bool = True):\n self._mapping = dict(mapping) if copy else mapping\n\n def __contains__(self, key: object) -> bool:\n return key in self._mapping\n\n def __getitem__(self, key: HA) -> HB:\n return self._mapping[key]\n\n def __iter__(self) -> tp.Iterator[HA]:\n return iter(self._mapping)\n\n def __len__(self) -> int:\n return len(self._mapping)\n\n def __hash__(self) -> int:\n return hash(tuple(sorted(self._mapping.items())))\n\n def __eq__(self, other: tp.Any) -> bool:\n return (\n isinstance(other, HashableMapping) and self._mapping == other._mapping\n )\n\n def __repr__(self) -> str:\n return repr(self._mapping)\n\n\n@jax.tree_util.register_static\n@dataclasses.dataclass(frozen=True, repr=False)\nclass NodeRef(tp.Generic[Node], reprlib.Representable):\n index: int\n\n def __nnx_repr__(self):\n yield reprlib.Object(type=type(self))\n yield reprlib.Attr('index', self.index)\n\n def __treescope_repr__(self, path, subtree_renderer):\n return treescope.repr_lib.render_object_constructor(\n object_type=type(self),\n attributes={'index': self.index},\n path=path,\n subtree_renderer=subtree_renderer,\n )\n\n\nif 
config.flax_use_flaxlib:\n import flaxlib # type: ignore[import]\n\n jax.tree_util.register_static(flaxlib.NodeRef)\n globals()['NodeRef'] = flaxlib.NodeRef\n\n\n@dataclasses.dataclass(frozen=True, repr=False)\nclass VariableDef(reprlib.Representable, tp.Generic[Node]):\n type: type[Node]\n index: int\n outer_index: int | None\n metadata: HashableMapping[str, tp.Any]\n mutable_arraydef: MutableArrayDef | NodeRef | None\n\n def with_no_outer_index(self) -> VariableDef:\n return VariableDef(\n type=self.type,\n index=self.index,\n outer_index=None,\n metadata=self.metadata,\n mutable_arraydef=self.mutable_arraydef.with_no_outer_index()\n if isinstance(self.mutable_arraydef, MutableArrayDef)\n else self.mutable_arraydef,\n )\n\n def with_same_outer_index(self) -> VariableDef:\n return VariableDef(\n type=self.type,\n index=self.index,\n outer_index=self.index,\n metadata=self.metadata,\n mutable_arraydef=self.mutable_arraydef.with_same_outer_index()\n if isinstance(self.mutable_arraydef, MutableArrayDef)\n else self.mutable_arraydef,\n )\n\n def __nnx_repr__(self):\n yield reprlib.Object(type=type(self))\n yield reprlib.Attr('type', self.type.__name__)\n yield reprlib.Attr('index', self.index)\n yield reprlib.Attr('outer_index', self.outer_index)\n yield reprlib.Attr('metadata', reprlib.PrettyMapping(self.metadata))\n\n def __treescope_repr__(self, path, subtree_renderer):\n return treescope.repr_lib.render_object_constructor(\n object_type=type(self),\n attributes={\n 'type': self.type,\n 'index': self.index,\n 'outer_index': self.outer_index,\n 'metadata': self.metadata,\n },\n path=path,\n subtree_renderer=subtree_renderer,\n )\n\n\nif config.flax_use_flaxlib:\n import flaxlib # type: ignore[import]\n\n jax.tree_util.register_static(flaxlib.VariableDef)\n globals()['VariableDef'] = flaxlib.VariableDef\n\n\n@dataclasses.dataclass(frozen=True, repr=False)\nclass MutableArrayDef(reprlib.Representable):\n index: int\n outer_index: int | None\n\n def with_no_outer_index(self):\n return MutableArrayDef(\n index=self.index,\n outer_index=None,\n )\n\n def with_same_outer_index(self):\n return MutableArrayDef(\n index=self.index,\n outer_index=self.index,\n )\n\n def __nnx_repr__(self):\n yield reprlib.Object(type=type(self))\n yield reprlib.Attr('index', self.index)\n yield reprlib.Attr('outer_index', self.outer_index)\n\n def __treescope_repr__(self, path, subtree_renderer):\n return treescope.repr_lib.render_object_constructor(\n object_type=type(self),\n attributes={\n 'index': self.index,\n 'outer_index': self.outer_index,\n },\n path=path,\n subtree_renderer=subtree_renderer,\n )\n\n\n@jax.tree_util.register_static\n@dataclasses.dataclass(frozen=True, repr=False, slots=True)\nclass NodeDef(tp.Generic[Node], reprlib.Representable):\n """"""A dataclass that denotes the tree structure of a\n :class:`Module`. 
A ``GraphDef`` can be generated by either\n calling :func:`split` or :func:`graphdef` on the :class:`Module`.""""""\n\n type: tp.Type[Node]\n index: int | None\n outer_index: int | None\n num_attributes: int\n metadata: tp.Any\n\n def with_no_outer_index(self) -> NodeDef[Node]:\n return NodeDef(\n type=self.type,\n index=self.index,\n outer_index=None,\n num_attributes=self.num_attributes,\n metadata=self.metadata,\n )\n\n def with_same_outer_index(self) -> NodeDef[Node]:\n return NodeDef(\n type=self.type,\n index=self.index,\n outer_index=self.index,\n num_attributes=self.num_attributes,\n metadata=self.metadata,\n )\n\n def __nnx_repr__(self):\n yield reprlib.Object(type=type(self))\n\n yield reprlib.Attr('type', self.type.__name__)\n yield reprlib.Attr('index', self.index)\n yield reprlib.Attr('outer_index', self.outer_index)\n yield reprlib.Attr('num_attributes', self.num_attributes)\n yield reprlib.Attr('metadata', self.metadata)\n\n def __treescope_repr__(self, path, subtree_renderer):\n return treescope.repr_lib.render_object_constructor(\n object_type=type(self),\n attributes={\n 'type': self.type,\n 'index': self.index,\n 'outer_index': self.outer_index,\n 'num_attributes': self.num_attributes,\n 'metadata': self.metadata,\n },\n path=path,\n subtree_renderer=subtree_renderer,\n )\n\n\nif config.flax_use_flaxlib:\n import flaxlib # type: ignore[import]\n\n jax.tree_util.register_static(flaxlib.NodeDef)\n globals()['NodeDef'] = flaxlib.NodeDef\n\nNodeDefType = tp.Union[\n NodeDef[Node],\n NodeRef[Node],\n VariableDef[Node],\n MutableArrayDef,\n]\n\n\n@dataclasses.dataclass(frozen=True, slots=True)\nclass ArrayAttr:\n pass\n\n\nARRAY_ATTR = ArrayAttr()\n\n@dataclasses.dataclass(frozen=True, slots=True)\nclass MutableArrayAttr:\n pass\n\n\nMUTABLE_ARRAY_ATTR = MutableArrayAttr()\n\n\n@dataclasses.dataclass(frozen=True, slots=True)\nclass NodeAttr:\n pass\n\n\nNODE_ATTR = NodeAttr()\n\nAttrType = tp.Union[\n NodeAttr,\n ArrayAttr,\n MutableArrayAttr,\n 'Static[tp.Any]',\n]\n\n# GraphDef = tp.Union[NodeDef[Node], NodeRef[Node], VariableDef[Node]]\n@jax.tree_util.register_static\n@dataclasses.dataclass(frozen=True, slots=True)\nclass GraphDef(tp.Generic[Node]):\n nodes: list[NodeDefType[tp.Any]]\n attributes: list[tuple[Key, AttrType]]\n num_leaves: int\n\n def __hash__(self) -> int:\n return hash((tuple(self.nodes), tuple(self.attributes)))\n\n def with_no_outer_index(self) -> GraphDef[Node]:\n return GraphDef(\n nodes=[\n node.with_no_outer_index() if not isinstance(node, NodeRef) else node\n for node in self.nodes\n ],\n attributes=self.attributes,\n num_leaves=self.num_leaves,\n )\n\n def with_same_outer_index(self) -> GraphDef[Node]:\n return GraphDef(\n nodes=[\n node.with_same_outer_index() if not isinstance(node, NodeRef) else node\n for node in self.nodes\n ],\n attributes=self.attributes,\n num_leaves=self.num_leaves,\n )\n\n # TODO(cgarciae): remove this method\n def apply(\n self, state: GraphState, *states: GraphState\n ) -> ApplyCaller[tuple[GraphDef[Node], GraphState]]:\n accessor = DelayedAccessor()\n\n def _apply(\n accessor: DelayedAccessor, *args, **kwargs\n ) -> tuple[tp.Any, tuple[GraphDef[Node], GraphState]]:\n module = merge(self, state, *states)\n fn = accessor(module)\n out = fn(*args, **kwargs)\n graphdef, flat_state = flatten(module)\n state_ = statelib.from_flat_state(flat_state)\n return out, (graphdef, state_)\n\n return CallableProxy(_apply, accessor) # type: ignore\n\n\nPureState = tuple[GraphDef[Node], GraphState]\n\n\n@tp.overload\ndef flatten( # type: 
ignore[invalid-annotation]\n node: Node,\n /,\n *,\n ref_index: RefMap | None = None,\n ref_outer_index: RefMap | None = None,\n) -> tuple[GraphDef[Node], FlatState[VariableState[tp.Any]]]: ...\n@tp.overload\ndef flatten( # type: ignore[invalid-annotation]\n node: Node,\n /,\n *,\n with_paths: tp.Literal[True],\n return_variables: tp.Literal[True],\n ref_index: RefMap | None = None,\n ref_outer_index: RefMap | None = None,\n) -> tuple[\n GraphDef[Node],\n FlatState[Variable[tp.Any]],\n]: ...\n@tp.overload\ndef flatten( # type: ignore[invalid-annotation]\n node: Node,\n /,\n *,\n with_paths: tp.Literal[False],\n return_variables: tp.Literal[True],\n ref_index: RefMap | None = None,\n ref_outer_index: RefMap | None = None,\n) -> tuple[\n GraphDef[Node],\n list[Variable[tp.Any]],\n]: ...\n@tp.overload\ndef flatten( # type: ignore[invalid-annotation]\n node: Node,\n /,\n *,\n return_variables: tp.Literal[True],\n ref_index: RefMap | None = None,\n ref_outer_index: RefMap | None = None,\n) -> tuple[\n GraphDef[Node],\n FlatState[Variable[tp.Any]],\n]: ...\n@tp.overload\ndef flatten( # type: ignore[invalid-annotation]\n node: Node,\n /,\n *,\n with_paths: bool,\n ref_index: RefMap | None = None,\n ref_outer_index: RefMap | None = None,\n) -> tuple[\n GraphDef[Node],\n FlatState[VariableState[tp.Any]] | list[tp.Any],\n]: ...\ndef flatten( # type: ignore[invalid-annotation]\n node: Node,\n /,\n *,\n with_paths: bool = True,\n return_variables: bool = False,\n ref_index: RefMap | None = None,\n ref_outer_index: RefMap | None = None,\n) -> tuple[\n GraphDef[Node],\n FlatState[VariableState[tp.Any]] | FlatState[Variable[tp.Any]] | list[tp.Any],\n]:\n """"""Flattens a graph node into a (graphdef, state) pair.\n\n Args:\n x: A graph node.\n ref_index: A mapping from nodes to indexes, defaults to None. If not provided, a new\n empty dictionary is created. 
This argument can be used to flatten a sequence of graph\n nodes that share references.\n with_paths: A boolean that indicates whether to return a FlatState object that includes\n the paths to VariableState objects, or just a list of the Variable's inner values.\n """"""\n if ref_index is None:\n ref_index = RefMap()\n\n leaves: list[LeafType] = []\n path: list[Key] | None = [] if with_paths else None\n paths: list[PathParts] | None = [] if with_paths else None\n nodes: list[NodeDefType[tp.Any]] = []\n attributes: list[tuple[Key, AttrType]] = []\n node_impl = get_node_impl(node)\n if node_impl is None and not (\n isinstance(node, Variable) or variablelib.is_mutable_array(node)\n ):\n raise RuntimeError(f'Unsupported type: {type(node)}, this is a bug.')\n _graph_flatten(\n node,\n node_impl,\n path,\n ref_index,\n ref_outer_index,\n nodes,\n attributes,\n leaves,\n paths,\n return_variables,\n )\n graphdef: GraphDef = GraphDef(\n nodes=nodes, attributes=attributes, num_leaves=len(leaves)\n )\n\n if paths is not None:\n return graphdef, FlatState.from_sorted_keys_values(tuple(paths), leaves) # type: ignore[return-value]\n else:\n return graphdef, leaves\n\n\ndef _graph_flatten(\n node: Node,\n node_impl: NodeImpl[Node, Leaf, AuxData] | None,\n path: list[Key] | None,\n ref_index: RefMap,\n ref_outer_index: RefMap | None,\n nodes: list[NodeDefType[tp.Any]],\n attributes: list[tuple[Key, AttrType]],\n leaves: list[LeafType],\n paths: list[PathParts] | None,\n return_variables: bool,\n) -> None:\n is_pytree_node_ = type(node_impl) is PytreeNodeImpl\n\n index: int | None\n if not is_pytree_node_ and node in ref_index:\n nodes.append(NodeRef(index := ref_index[node]))\n return\n\n is_graph_node_ = type(node_impl) is GraphNodeImpl\n is_variable = isinstance(node, Variable)\n is_mutable_array = variablelib.is_mutable_array(node)\n\n # only cache graph nodes, we don't add mutable arrays here\n # as they are added in the make_mutable_arraydef function\n if is_graph_node_ or is_variable:\n index = len(ref_index)\n ref_index[node] = index\n else:\n index = None\n\n def make_mutable_arraydef(value: variablelib.MutableArray):\n if value in ref_index:\n index = ref_index[value]\n return NodeRef(index), REPEATED\n else:\n index = len(ref_index)\n ref_index[value] = index\n output_value: NoUpdate | MutableArrayOutput | variablelib.MutableArray\n if ref_outer_index is not None:\n if value in ref_outer_index:\n outer_index = ref_outer_index[value]\n output_value = NO_UPDATE\n mutable_arraydef = MutableArrayDef(index=index, outer_index=outer_index)\n else:\n output_value = MutableArrayOutput(value[...])\n mutable_arraydef = MutableArrayDef(index=index, outer_index=None)\n else:\n output_value = value\n mutable_arraydef = MutableArrayDef(index=index, outer_index=None)\n return mutable_arraydef, output_value\n\n if is_variable:\n assert isinstance(node, Variable)\n assert index is not None\n inner_value = node.raw_value\n if variablelib.is_mutable_array(inner_value):\n mutable_arraydef, inner_value = make_mutable_arraydef(inner_value)\n else:\n mutable_arraydef = None\n if return_variables:\n leaf = node\n leaf.raw_value = inner_value\n elif path is None:\n leaf = inner_value\n else:\n leaf = node.to_state() # type: ignore[assignment]\n leaf.raw_value = inner_value\n\n variabledef = VariableDef(\n type=type(node),\n index=index,\n outer_index=ref_outer_index.get(node, None) if ref_outer_index else None,\n metadata=HashableMapping(node._var_metadata),\n mutable_arraydef=mutable_arraydef,\n )\n if 
type(inner_value) is not Repeated:\n assert not isinstance(leaf, Repeated)\n leaves.append(leaf)\n if path is not None:\n assert paths is not None\n paths.append(tuple(path))\n nodes.append(variabledef)\n return\n elif is_mutable_array:\n mutable_arraydef, leaf = make_mutable_arraydef(node) # type: ignore[arg-type]\n if not isinstance(leaf, Repeated):\n leaves.append(leaf)\n if path is not None:\n assert paths is not None\n paths.append(tuple(path))\n nodes.append(mutable_arraydef)\n return\n\n if node_impl is None:\n raise RuntimeError(f'Unsupported type: {type(node)}, this is a bug.')\n\n values, metadata = node_impl.flatten(node)\n num_attributes = len(values)\n nodedef = NodeDef(\n node_impl.type,\n index,\n ref_outer_index[node]\n if is_graph_node_ and ref_outer_index and node in ref_outer_index\n else None,\n num_attributes,\n metadata,\n )\n nodes.append(nodedef)\n\n for key, value in values:\n value_node_impl = get_node_impl(value)\n if path is not None:\n path.append(key)\n if value_node_impl is not None or isinstance(value, Variable):\n attributes.append((key, NODE_ATTR))\n _graph_flatten(\n value,\n value_node_impl,\n path,\n ref_index,\n ref_outer_index,\n nodes,\n attributes,\n leaves,\n paths,\n return_variables,\n )\n elif variablelib.is_mutable_array(value):\n attributes.append((key, MUTABLE_ARRAY_ATTR))\n mutable_arraydef, leaf = make_mutable_arraydef(value)\n if not isinstance(leaf, Repeated):\n leaves.append(leaf)\n if paths is not None:\n paths.append(tuple(path)) # type: ignore\n nodes.append(mutable_arraydef)\n elif isinstance(value, (jax.Array, np.ndarray)):\n attributes.append((key, ARRAY_ATTR))\n if paths is not None:\n paths.append(tuple(path)) # type: ignore\n leaves.append(value)\n else:\n attributes.append((key, Static(value)))\n\n if path is not None:\n path.pop()\n\n return\n\n\n@dataclasses.dataclass(slots=True)\nclass FingerprintContext:\n next_index: int\n\n\n# TODO(cgarciae): the actual fingerprint object is not being used,\n# only the traversal process is still relevant\ndef fingerprint(\n node,\n /,\n *,\n ref_index: RefMap | None = None,\n new_ref_index: RefMap | None = None,\n) -> list[tp.Hashable]:\n """""" """"""\n if ref_index is None:\n ref_index = RefMap()\n\n if new_ref_index is None:\n new_ref_index = RefMap()\n node_impl = get_node_impl(node)\n if node_impl is None:\n raise RuntimeError(f'Unsupported type: {type(node)}, this is a bug.')\n ctx = FingerprintContext(len(ref_index) + len(new_ref_index))\n fp: list[tp.Hashable] = []\n _graph_fingerprint(ctx, fp.append, node, node_impl, ref_index, new_ref_index)\n return fp\n\n\ndef _graph_fingerprint(\n ctx: FingerprintContext,\n append_fn: tp.Callable[[tp.Any], None],\n node,\n node_impl: NodeImpl[Node, Leaf, AuxData],\n ref_index: RefMap,\n new_ref_index: RefMap,\n):\n is_pytree_node_ = type(node_impl) is PytreeNodeImpl\n is_graph_node_ = type(node_impl) is GraphNodeImpl\n\n append_fn(type(node))\n\n if is_graph_node_:\n append_fn(id(node))\n if node in ref_index:\n append_fn(ref_index[node])\n return\n elif node in new_ref_index:\n append_fn(new_ref_index[node])\n return\n index = new_ref_index[node] = ctx.next_index\n ctx.next_index += 1\n else:\n index = -1\n\n values, metadata = node_impl.flatten(node)\n\n append_fn(index)\n append_fn(metadata)\n\n for key, value in values:\n value_node_impl = get_node_impl(value)\n append_fn(key)\n if value_node_impl is not None:\n _graph_fingerprint(\n ctx,\n append_fn,\n value,\n value_node_impl,\n ref_index,\n new_ref_index,\n )\n elif isinstance(value, 
Variable):\n append_fn(id(value))\n append_fn(type(value))\n if value in ref_index:\n append_fn(ref_index[value])\n elif value in new_ref_index:\n append_fn(new_ref_index[value])\n else:\n variable_index = new_ref_index[value] = ctx.next_index\n ctx.next_index += 1\n append_fn(variable_index)\n for key_value in value._var_metadata.items():\n append_fn(key_value)\n elif not isinstance(value, (jax.Array, np.ndarray)):\n append_fn(value)\n\n\ndef check_fingerprint(\n node,\n fp: list[tp.Hashable],\n /,\n *,\n ref_index: RefMap | None = None,\n new_ref_index: RefMap | None = None,\n) -> bool:\n """""" """"""\n if ref_index is None:\n ref_index = RefMap()\n\n if new_ref_index is None:\n new_ref_index = RefMap()\n node_impl = get_node_impl(node)\n if node_impl is None:\n raise RuntimeError(f'Unsupported type: {type(node)}, this is a bug.')\n ctx = FingerprintContext(len(ref_index) + len(new_ref_index))\n fp_matches = _check_graph_fingerprint(\n ctx, iter(fp), node, node_impl, ref_index, new_ref_index\n )\n return fp_matches\n\n\ndef _check_graph_fingerprint(\n ctx: FingerprintContext,\n fp_iterator: tp.Iterator[tp.Hashable],\n node,\n node_impl: NodeImpl[Node, Leaf, AuxData],\n ref_index: RefMap,\n new_ref_index: RefMap,\n) -> bool:\n is_pytree_node_ = type(node_impl) is PytreeNodeImpl\n is_graph_node_ = type(node_impl) is GraphNodeImpl\n\n if type(node) != next(fp_iterator):\n return False\n\n if is_graph_node_:\n # append_fn(id(node))\n if id(node) != next(fp_iterator):\n return False\n if node in ref_index:\n # append_fn(ref_index[node])\n return ref_index[node] == next(fp_iterator)\n elif node in new_ref_index:\n # append_fn(new_ref_index[node])\n return new_ref_index[node] == next(fp_iterator)\n index = new_ref_index[node] = ctx.next_index\n ctx.next_index += 1\n else:\n index = -1\n\n values, metadata = node_impl.flatten(node)\n\n # append_fn(index)\n if index != next(fp_iterator):\n return False\n # append_fn(metadata)\n if metadata != next(fp_iterator):\n return False\n\n for key, value in values:\n value_node_impl = get_node_impl(value)\n # append_fn(key)\n if key != next(fp_iterator):\n return False\n if value_node_impl is not None:\n if not _check_graph_fingerprint(\n ctx,\n fp_iterator,\n value,\n value_node_impl,\n ref_index,\n new_ref_index,\n ):\n return False\n elif isinstance(value, Variable):\n # append_fn(id(value))\n if id(value) != next(fp_iterator):\n return False\n # append_fn(type(value))\n if type(value) != next(fp_iterator):\n return False\n if value in ref_index:\n # append_fn(ref_index[value])\n if ref_index[value] != next(fp_iterator):\n return False\n elif value in new_ref_index:\n # append_fn(new_ref_index[value])\n if new_ref_index[value] != next(fp_iterator):\n return False\n else:\n variable_index = new_ref_index[value] = ctx.next_index\n ctx.next_index += 1\n # append_fn(variable_index)\n if variable_index != next(fp_iterator):\n return False\n for key_value in value._var_metadata.items():\n # append_fn(key_value)\n if key_value != next(fp_iterator):\n return False\n else:\n if isinstance(value, (jax.Array, np.ndarray)):\n raise ValueError(f'Arrays leaves are not supported: {value}')\n # append_fn(value)\n if value != next(fp_iterator):\n return False\n\n return True\n\n\ndef _get_sorted_leaves(\n xs: tp.Mapping[tp.Any, tp.Any],\n) -> list[tp.Any]:\n if not isinstance(xs, tp.Mapping): # type: ignore\n raise TypeError(f'expected Mapping; got {type(xs).__qualname__}')\n leaves: list[tp.Any] = []\n\n def _flatten(xs):\n if not isinstance(xs, tp.Mapping):\n 
leaves.append(xs)\n else:\n for _, value in sorted(xs.items()):\n _flatten(value)\n\n _flatten(xs)\n return leaves\n\n\ndef unflatten( # type: ignore[invalid-annotation]\n graphdef: GraphDef[Node],\n state: State[Key, tp.Any] | FlatState[tp.Any] | list[tp.Any],\n /,\n *,\n index_ref: IndexMap | None = None,\n outer_index_outer_ref: IndexMap | None = None,\n) -> Node:\n """"""Unflattens a graphdef into a node with the given state.\n\n Args:\n graphdef: A GraphDef instance.\n state: A State instance.\n index_ref: A mapping from indexes to nodes references found during the graph\n traversal, defaults to None. If not provided, a new empty dictionary is\n created. This argument can be used to unflatten a sequence of (graphdef, state)\n pairs that share the same index space.\n index_ref_cache: A mapping from indexes to existing nodes that can be reused.\n When an reference is reused, ``GraphNodeImpl.clear`` is called to leave the\n object in an empty state and then filled by the unflatten process, as a result\n existing graph nodes are mutated to have the new content/topology\n specified by the graphdef.\n """"""\n if isinstance(state, (State, dict)):\n leaves = _get_sorted_leaves(state)\n elif isinstance(state, FlatState):\n leaves = state.leaves\n elif isinstance(state, list): # type: ignore\n leaves = state\n else:\n raise ValueError(f'Unsupported state type: {type(state)}')\n if index_ref is None:\n index_ref = IndexMap()\n\n if len(leaves) != graphdef.num_leaves:\n raise ValueError(\n f'Incorrect number of leaves, expected {graphdef.num_leaves} leaves, but got {len(leaves)}.'\n )\n\n if isinstance(nodedef := graphdef.nodes[0], NodeRef):\n node = index_ref[nodedef.index]\n else:\n node_iter = iter(graphdef.nodes)\n attribute_iter = iter(graphdef.attributes)\n leaves_iter = iter(leaves)\n nodedef = next(node_iter)\n assert not isinstance(nodedef, NodeRef)\n if isinstance(nodedef, MutableArrayDef):\n node_impl = None\n else:\n node_impl = get_node_impl_for_type(nodedef.type)\n node = _graph_unflatten(\n nodedef,\n node_impl,\n node_iter,\n attribute_iter,\n leaves_iter,\n index_ref,\n outer_index_outer_ref,\n )\n\n try:\n next(leaves_iter)\n except StopIteration:\n pass\n else:\n raise ValueError('Incorrect number of leaves in state.')\n\n return node\n\n\ndef _graph_unflatten(\n nodedef: NodeDefType[Node],\n node_impl: NodeImpl[Node, Leaf, AuxData] | None,\n node_iter: tp.Iterator[NodeDefType[Node]],\n attribute_iter: tp.Iterator[tuple[Key, AttrType]],\n leaves_iter: tp.Iterator[tp.Any],\n index_ref: IndexMap,\n outer_index_outer_ref: IndexMap | None,\n) -> Node:\n """"""Recursive helper for graph_unflatten.\n\n Args:\n nodedef: A GraphDef instance or an index to a node in the cache.\n state: A mapping from attribute names to variables or subgraphs.\n index_to_ref: A mapping from indexes to nodes that have been traversed.\n If a node is already in the cache, it won't be traversed again.\n index_ref_cache: A mapping from indexes to existing nodes that can be reused.\n When an reference is reused, ``GraphNodeImpl.clear`` is called to leave the\n object in an empty state and then filled by the unflatten process, as a result\n existing graph nodes are mutated to have the new content/topology\n specified by the nodedef.\n """"""\n\n def get_mutable_array(mutable_arraydef: MutableArrayDef, leaf):\n assert type(mutable_arraydef) is MutableArrayDef\n if (\n outer_index_outer_ref is not None\n and mutable_arraydef.outer_index is not None\n and mutable_arraydef.outer_index in outer_index_outer_ref\n 
):\n # if mutable array exists, update it\n mutable_array = outer_index_outer_ref[mutable_arraydef.outer_index]\n if not variablelib.is_mutable_array(mutable_array):\n raise RuntimeError(\n f'Expected a MutableArray type but got {mutable_array}.'\n )\n if type(leaf) is not NoUpdate:\n raise RuntimeError(\n f'Expected a no update for MutableArray but got {leaf}.'\n )\n elif type(leaf) in (NoUpdate, Repeated):\n raise ValueError(\n 'Expected a MutableArrayOutput type but got ' f""'{leaf.value}.'""\n )\n elif type(leaf) is MutableArrayOutput:\n mutable_array = variablelib.mutable_array(leaf.value)\n elif variablelib.is_mutable_array(leaf):\n mutable_array = leaf\n elif isinstance(leaf, jax.Array):\n # here we allow merging frozen arrays and will not create a new mutable array\n mutable_array = leaf\n else:\n raise ValueError(f'Found unexpected type for MutableArray, got {leaf}')\n\n index_ref[mutable_arraydef.index] = mutable_array\n return mutable_array\n\n if type(nodedef) is NodeRef:\n return index_ref[nodedef.index]\n\n if type(nodedef) is VariableDef:\n variabledef = tp.cast(VariableDef[Variable], nodedef)\n # its a unseen variable, create a new one\n\n if variabledef.mutable_arraydef is not None:\n if type(variabledef.mutable_arraydef) is NodeRef:\n value = index_ref[variabledef.mutable_arraydef.index]\n else:\n value = next(leaves_iter)\n assert type(variabledef.mutable_arraydef) is MutableArrayDef\n if isinstance(value, Variable | VariableState):\n inner_value = value.raw_value\n mutable_array = get_mutable_array(\n variabledef.mutable_arraydef, inner_value\n )\n value.raw_value = mutable_array\n else:\n # if value is an array or mutable array, we need call get_mutable_array\n # to register it in the index_ref\n value = get_mutable_array(variabledef.mutable_arraydef, value)\n else:\n value = next(leaves_iter)\n\n # when idxmap is present, check if the Varable exists there\n # and update existing variables if it does\n if (\n outer_index_outer_ref is not None\n and variabledef.outer_index is not None\n and variabledef.outer_index in outer_index_outer_ref\n ):\n # if variable exists, update it\n variable = outer_index_outer_ref[variabledef.outer_index]\n if not isinstance(variable, Variable):\n raise ValueError(f'Expected a Variable type but got {type(variable)}.')\n elif isinstance(value, Variable):\n raise ValueError(\n f'Cannot unflatten flat_state containing Variables when using `outer_index_outer_ref`. 
'\n f'Got {value!r}'\n )\n elif isinstance(value, VariableState):\n variable.update_from_state(value)\n else:\n variable.raw_value = value\n else: # variabledef.index not in index_ref_cache\n # variable reference does not exist outside, create a new one\n if isinstance(value, Variable):\n variable = value\n elif isinstance(value, VariableState):\n variable = value.to_variable()\n else:\n variable = variabledef.type.from_metadata(\n value, dict(variabledef.metadata)\n )\n index_ref[variabledef.index] = variable\n return variable # type: ignore[return-value]\n\n if type(nodedef) is MutableArrayDef:\n leaf = next(leaves_iter)\n mutable_array = get_mutable_array(nodedef, leaf)\n return mutable_array # type: ignore[return-value]\n\n assert type(nodedef) is NodeDef\n if node_impl is None:\n raise RuntimeError(f'Unsupported type: {nodedef.type}, this is a bug.')\n if nodedef.index is not None and nodedef.index in index_ref:\n raise RuntimeError(f'GraphDef index {nodedef.index} already used.')\n\n def _get_children() -> list[tuple[Key, tp.Any]]:\n children: list[tuple[Key, LeafType | Node]] = [] # type: ignore[invalid-annotation]\n\n assert type(nodedef) is NodeDef\n for _ in range(nodedef.num_attributes):\n key, value = next(attribute_iter)\n if type(value) is Static:\n children.append((key, value.value)) # type: ignore[attribute-error]\n elif type(value) is MutableArrayAttr:\n mutable_arraydef = next(node_iter)\n assert (\n type(mutable_arraydef) is MutableArrayDef\n or type(mutable_arraydef) is NodeRef\n )\n if type(mutable_arraydef) is NodeRef:\n mutable_array = index_ref[mutable_arraydef.index]\n else:\n assert type(mutable_arraydef) is MutableArrayDef\n leaf = next(leaves_iter)\n mutable_array = get_mutable_array(mutable_arraydef, leaf)\n children.append((key, mutable_array))\n elif type(value) is ArrayAttr:\n array = next(leaves_iter)\n children.append((key, array))\n elif type(value) is NodeRef:\n children.append((key, index_ref[value.index])) # type: ignore[attribute-error]\n elif type(value) is NodeAttr:\n # if the key is a subgraph we create an empty node\n subgraphdef = next(node_iter)\n if type(subgraphdef) is NodeDef:\n value_node_impl = get_node_impl_for_type(subgraphdef.type) # type: ignore[attribute-error]\n else:\n value_node_impl = None\n subnode = _graph_unflatten(\n subgraphdef,\n value_node_impl,\n node_iter,\n attribute_iter,\n leaves_iter,\n index_ref,\n outer_index_outer_ref,\n )\n children.append((key, subnode))\n else:\n raise RuntimeError(f'Unknown static field: {key!r}')\n\n return children\n\n if isinstance(node_impl, GraphNodeImpl):\n # we create an empty node first and add it to the index\n # this avoids infinite recursion when there is a reference cycle\n assert type(nodedef) is NodeDef\n if (\n outer_index_outer_ref is not None\n and nodedef.outer_index is not None\n and nodedef.outer_index in outer_index_outer_ref\n ):\n node = outer_index_outer_ref[nodedef.outer_index]\n if type(node) != nodedef.type:\n raise ValueError(\n f'Expected a node of type {nodedef.type} for index '\n f'{nodedef.index}, but got a node of type {type(node)}.'\n )\n node_impl.clear(node)\n else:\n node = node_impl.create_empty(nodedef.metadata)\n assert nodedef.index is not None\n index_ref[nodedef.index] = node\n node_impl.init(node, _get_children())\n else:\n # if the node type does not support the creation of an empty object it means\n # that it cannot reference itself, so we can create its children first\n node = node_impl.unflatten(_get_children(), nodedef.metadata)\n\n return 
node\n\n\ndef graph_pop(\n node: tp.Any,\n filters: tuple[filterlib.Filter, ...],\n) -> tuple[GraphState, ...]:\n id_to_index: dict[int, Index] = {}\n path_parts: PathParts = ()\n predicates = tuple(filterlib.to_predicate(filter) for filter in filters)\n flat_states: tuple[dict[PathParts, LeafType], ...] = tuple(\n {} for _ in predicates\n )\n _graph_pop(node, id_to_index, path_parts, flat_states, predicates)\n return tuple(\n statelib.from_flat_state(flat_state) for flat_state in flat_states\n )\n\n\ndef _graph_pop(\n node: tp.Any,\n id_to_index: dict[int, Index],\n path_parts: PathParts,\n flat_states: tuple[dict[PathParts, LeafType], ...],\n predicates: tuple[filterlib.Predicate, ...],\n) -> None:\n if not is_node(node):\n raise RuntimeError(f'Unsupported type: {type(node)}, this is a bug.')\n\n if id(node) in id_to_index:\n return\n\n id_to_index[id(node)] = len(id_to_index)\n node_impl = get_node_impl(node)\n if node_impl is None:\n raise TypeError(f'Unknown node type: {type(node)}')\n node_dict = node_impl.node_dict(node)\n\n for name, value in node_dict.items():\n if is_node(value):\n _graph_pop(\n node=value,\n id_to_index=id_to_index,\n path_parts=(*path_parts, name),\n flat_states=flat_states,\n predicates=predicates,\n )\n continue\n elif not is_node_leaf(value):\n continue\n elif id(value) in id_to_index:\n continue\n\n node_path = (*path_parts, name)\n node_impl = get_node_impl(node)\n if node_impl is None:\n raise TypeError(f'Unknown node type: {type(node)}')\n\n for state, predicate in zip(flat_states, predicates):\n if predicate(node_path, value):\n if isinstance(node_impl, PytreeNodeImpl):\n raise ValueError(\n f'Cannot pop key {name!r} from node of type {type(node).__name__}'\n )\n id_to_index[id(value)] = len(id_to_index)\n node_impl.pop_key(node, name)\n if isinstance(value, Variable):\n value = value.to_state()\n state[node_path] = value # type: ignore[index] # mypy is wrong here?\n break\n else:\n # NOTE: should we raise an error here?\n pass\n\n\ndef _graph_update_dynamic(node: tp.Any, state: tp.Mapping[KeyT, tp.Any]):\n def _update_variable(node: Variable, value):\n if isinstance(value, VariableState):\n # updated from VariableState\n node.update_from_state(value)\n else:\n # updated from raw value\n if isinstance(value, State) and not value:\n # NOTE: this is a special case when trying to update a Variable from state\n # created when flattening into a NodeRef, which creates an empty State. 
This\n # can happen when using standalone Variables with `grad`\n pass\n else:\n node.raw_value = value\n\n if isinstance(node, Variable):\n _update_variable(node, state)\n return\n\n if not is_node(node):\n raise RuntimeError(f'Unsupported type: {type(node)}')\n\n node_impl = get_node_impl(node)\n if node_impl is None:\n raise TypeError(f'Unknown node type: {type(node)}')\n node_dict = node_impl.node_dict(node)\n for key, value in state.items():\n # case 1: new state is being added\n if key not in node_dict:\n if isinstance(node_impl, PytreeNodeImpl):\n raise ValueError(\n f'Cannot set key {key!r} on immutable node of '\n f'type {type(node).__name__}'\n )\n if isinstance(value, Variable):\n value = value.copy()\n node_impl.set_key(node, key, value)\n continue\n\n current_value = node_dict[key]\n\n # case 2: subgraph is being updated\n if is_node(current_value):\n if is_node_leaf(value):\n raise ValueError(f'Expected a subgraph for {key!r}, but got: {value!r}')\n _graph_update_dynamic(current_value, value)\n else:\n if isinstance(current_value, jax.Array | np.ndarray):\n if isinstance(node_impl, PytreeNodeImpl):\n raise ValueError(\n f'Cannot set key {key!r} on immutable node of '\n f'type {type(node).__name__}'\n )\n node_impl.set_key(node, key, value)\n continue\n elif not isinstance(current_value, Variable):\n # case 3: state leaf is being updated\n raise ValueError(\n f'Trying to update a non-Variable attribute {key!r} with a Variable: '\n f'{value!r}'\n )\n _update_variable(current_value, value)\n\n\n# --------------------------------------------------------\n# UpdateContext\n# --------------------------------------------------------\n\n\nclass StaticCache(tp.NamedTuple):\n graphdef: GraphDef[tp.Any]\n final_graphdef: GraphDef[tp.Any]\n paths: tuple[PathParts, ...]\n variables: list[Variable[tp.Any]]\n new_ref_index: RefMap\n new_index_ref: IndexMap\n\n @staticmethod\n def create(\n graphdef: GraphDef[tp.Any],\n paths: tuple[PathParts, ...],\n variables: list[Variable[tp.Any]],\n new_ref_index: RefMap,\n ):\n new_index_ref = IndexMap.from_refmap(new_ref_index)\n final_graphdef: GraphDef[tp.Any]\n final_graphdef = graphdef.with_same_outer_index()\n return StaticCache(\n graphdef=graphdef,\n final_graphdef=final_graphdef,\n paths=paths,\n variables=variables,\n new_ref_index=new_ref_index,\n new_index_ref=new_index_ref,\n )\n\n\n@dataclasses.dataclass\nclass GraphContext(threading.local):\n update_context_stacks: dict[tp.Hashable, list[UpdateContext]] = (\n dataclasses.field(default_factory=dict)\n )\n ref_index_stack: list[SplitContext] = dataclasses.field(default_factory=list)\n index_ref_stack: list[MergeContext] = dataclasses.field(default_factory=list)\n tmp_static_cache: tp.MutableMapping[tp.Any, StaticCache] | None = None\n caching: bool = False\n\n\nGRAPH_CONTEXT = GraphContext()\n\n\n@contextlib.contextmanager\ndef static_cache(static_cache: tp.MutableMapping[tp.Any, StaticCache]):\n if GRAPH_CONTEXT.caching:\n yield\n return\n\n GRAPH_CONTEXT.tmp_static_cache = static_cache\n\n try:\n yield\n finally:\n if GRAPH_CONTEXT.tmp_static_cache is not None:\n raise ValueError(\n 'GRAPH_CONTEXT.tmp_static_cache should be None, no context consumed it.'\n )\n\n\ndef _cached_partial(f: tp.Callable[..., tp.Any], *cached_args):\n """"""Create a partial from a NNX transformed function alog with some cached input arguments\n and reduces the python overhead by caching the traversal of NNX graph nodes. 
This is useful\n for speed up function that are called repeatedly with the same subset of inputs e.g. a\n ``train_step`` with a ``model`` and ``optimizer``::\n\n >>> from flax import nnx\n >>> import jax.numpy as jnp\n >>> import optax\n ...\n >>> model = nnx.Linear(2, 3, rngs=nnx.Rngs(0))\n >>> optimizer = nnx.Optimizer(model, optax.adamw(1e-3))\n ...\n >>> @nnx.jit\n ... def train_step(model, optimizer, x, y):\n ... def loss_fn(model):\n ... return jnp.mean((model(x) - y) ** 2)\n ...\n ... loss, grads = nnx.value_and_grad(loss_fn)(model)\n ... optimizer.update(grads)\n ... return loss\n ...\n >>> cached_train_step = nnx.cached_partial(train_step, model, optimizer)\n ...\n >>> for step in range(total_steps:=2):\n ... x, y = jnp.ones((10, 2)), jnp.ones((10, 3))\n ... # loss = train_step(model, optimizer, x, y)\n ... loss = cached_train_step(x, y)\n ... print(f'Step {step}: loss={loss:.3f}')\n Step 0: loss=2.669\n Step 1: loss=2.660\n\n Note that ``cached_partial`` will clone all cached graph nodes to gurantee the validity\n of the cache, and these clones will contain references to the same Variable objects\n which guarantees that state is propagated correctly back to the original graph nodes.\n Because of the previous, the final structure of all graph nodes must be the same\n after each call to the cached function, otherswise an error will be raised. Temporary\n mutations are allowed (e.g. the use of ``Module.sow``) as long as they are cleaned up before\n the function returns (e.g. via ``nnx.pop``).\n\n Args:\n f: A function to cache.\n *cached_args: A subset of the input arguments containing the graph nodes to cache.\n\n Returns:\n A partial function expecting the remaining arguments to the original function.\n """"""\n cache: tp.MutableMapping[tp.Any, StaticCache] = PythonRefMap() # type: ignore\n original_ref_index: RefMap = RefMap()\n index_ref: IndexMap = IndexMap()\n cached_ref_index: RefMap = RefMap()\n\n def create_static_cache(x):\n # TODO(cgarciae): support Array attribute updates for graph nodes\n if is_graph_node(x) or isinstance(x, Variable):\n graphdef, flat_state = flatten(\n x, with_paths=True, return_variables=True, ref_index=original_ref_index\n )\n paths = flat_state.paths\n variables = flat_state.leaves\n # clone but keep the same variable references\n node_cache = unflatten(graphdef, flat_state, index_ref=index_ref)\n cached_new_ref_index = RefMap()\n _fp = fingerprint(\n node_cache,\n ref_index=cached_ref_index,\n new_ref_index=cached_new_ref_index,\n )\n cached_ref_index.update(cached_new_ref_index)\n cache[node_cache] = StaticCache.create(\n graphdef, paths, variables, cached_new_ref_index\n )\n return node_cache\n return x\n\n cached_args = jax.tree.map(create_static_cache, cached_args)\n\n @functools.wraps(f)\n def cache_args_wrapper(*args, **kwargs):\n with static_cache(cache):\n return f(*cached_args, *args, **kwargs)\n\n return cache_args_wrapper\n\n\nif tp.TYPE_CHECKING:\n cached_partial = functools.partial\nelse:\n cached_partial = _cached_partial\n\n\n@dataclasses.dataclass\nclass SplitContext:\n ctxtag: tp.Hashable | None\n ref_index: RefMap\n is_inner: bool | None\n\n @tp.overload\n def split(self, graph_node: A, /) -> tuple[GraphDef[A], GraphState]: ... 
# type: ignore[invalid-annotation]\n\n @tp.overload\n def split( # type: ignore[invalid-annotation]\n self, graph_node: A, first: filterlib.Filter, /\n ) -> tuple[GraphDef[A], GraphState]: ...\n\n @tp.overload\n def split(\n self,\n graph_node: A,\n first: filterlib.Filter,\n second: filterlib.Filter,\n /,\n *filters: filterlib.Filter,\n ) -> tuple[GraphDef[A], GraphState, tpe.Unpack[tuple[GraphState, ...]]]: ... # type: ignore[not-supported-yet]\n\n def split(\n self, node: A, *filters: filterlib.Filter\n ) -> tuple[GraphDef[A], tpe.Unpack[tuple[GraphState, ...]]]: # type: ignore[not-supported-yet]\n ctx = (\n current_update_context(self.ctxtag) if self.ctxtag is not None else None\n )\n inner_ref_outer_index = (\n ctx.inner_ref_outer_index if ctx and ctx.inner_ref_outer_index else None\n )\n graphdef, flat_state = flatten(\n node, ref_index=self.ref_index, ref_outer_index=inner_ref_outer_index\n )\n flat_states = _split_state(flat_state, filters)\n states = _to_nested_state(graphdef, flat_states)\n\n return graphdef, *states\n\n @tp.overload\n def flatten( # type: ignore[invalid-annotation]\n self,\n graph_node: A,\n /,\n *,\n with_paths: tp.Literal[False],\n ) -> tuple[GraphDef[A], list[tp.Any]]: ...\n\n @tp.overload\n def flatten( # type: ignore[invalid-annotation]\n self,\n graph_node: A,\n /,\n ) -> tuple[GraphDef[A], FlatState[VariableState[tp.Any]]]: ...\n\n @tp.overload\n def flatten( # type: ignore[invalid-annotation]\n self,\n graph_node: A,\n first: filterlib.Filter,\n /,\n ) -> tuple[GraphDef[A], FlatState[VariableState[tp.Any]]]: ...\n\n @tp.overload\n def flatten( # type: ignore[invalid-annotation]\n self,\n graph_node: A,\n first: filterlib.Filter,\n second: filterlib.Filter,\n /,\n *filters: filterlib.Filter,\n ) -> tuple[\n GraphDef[A],\n FlatState[VariableState[tp.Any]],\n tpe.Unpack[tuple[FlatState[VariableState[tp.Any]], ...]],\n ]: ...\n\n def flatten( # type: ignore[invalid-annotation]\n self,\n node: A,\n *filters: filterlib.Filter,\n with_paths: bool = True,\n ) -> tuple[\n GraphDef[A],\n FlatState[VariableState[tp.Any]] | list[tp.Any],\n tpe.Unpack[tuple[FlatState[VariableState[tp.Any]], ...]],\n ]:\n if not with_paths and filters:\n raise ValueError('Cannot use filters with with_paths=False')\n\n ctx = (\n current_update_context(self.ctxtag) if self.ctxtag is not None else None\n )\n static_cache = (\n ctx.static_cache if ctx is not None and self.is_inner is False else None\n )\n ref_outer_index = (\n ctx.inner_ref_outer_index if ctx and ctx.inner_ref_outer_index else None\n )\n flat_state: (\n FlatState[VariableState[tp.Any]]\n | FlatState[Variable[tp.Any]]\n | list[tp.Any]\n )\n leaves: list[tp.Any]\n if node in self.ref_index:\n # node is already in the ref_index, call flatten which will return a NodeRef\n graphdef, flat_state = flatten(\n node,\n ref_index=self.ref_index,\n ref_outer_index=ref_outer_index,\n with_paths=with_paths,\n )\n if with_paths:\n assert isinstance(flat_state, FlatState)\n paths = flat_state.paths\n leaves = flat_state.leaves\n else:\n assert isinstance(flat_state, list)\n paths = None\n leaves = flat_state\n elif static_cache is not None and node in static_cache:\n node_static_cache = static_cache[node]\n graphdef = node_static_cache.graphdef\n # add the new references to the ref_index\n self.ref_index.update(node_static_cache.new_ref_index)\n\n if with_paths:\n paths = node_static_cache.paths\n leaves = [\n variable.to_state() for variable in node_static_cache.variables\n ]\n else:\n paths = None\n leaves = [\n variable.raw_value for 
variable in node_static_cache.variables\n ]\n else:\n graphdef, flat_state = flatten(\n node,\n ref_index=self.ref_index,\n ref_outer_index=ref_outer_index,\n with_paths=with_paths,\n )\n if with_paths:\n assert isinstance(flat_state, FlatState)\n paths = flat_state.paths\n leaves = flat_state.leaves\n else:\n assert isinstance(flat_state, list)\n paths = None\n leaves = flat_state\n\n if with_paths:\n assert paths is not None\n flat_state = FlatState.from_sorted_keys_values(paths, leaves)\n flat_states = _split_state(flat_state, filters)\n return graphdef, *flat_states # type: ignore[bad-return-type]\n else:\n return graphdef, leaves\n\n\n@contextlib.contextmanager\ndef split_context(ctxtag: tp.Hashable | None = None):\n ctx = current_update_context(ctxtag) if ctxtag is not None else None\n is_inner = ctx.outer_ref_outer_index is not None if ctx is not None else None\n GRAPH_CONTEXT.ref_index_stack.append(SplitContext(ctxtag, RefMap(), is_inner))\n\n try:\n yield GRAPH_CONTEXT.ref_index_stack[-1]\n finally:\n flatten_ctx = GRAPH_CONTEXT.ref_index_stack.pop()\n if ctxtag is not None:\n ctx = current_update_context(ctxtag)\n ctx.flatten_end(flatten_ctx.ref_index)\n del flatten_ctx.ref_index\n del flatten_ctx.ctxtag\n\n\n@dataclasses.dataclass\nclass MergeContext:\n ctxtag: tp.Hashable | None\n index_ref: IndexMap\n is_inner: bool | None\n\n def merge( # type: ignore[invalid-annotation]\n self,\n graphdef: GraphDef[A],\n state: GraphState | VariableState,\n /,\n *states: GraphState | VariableState,\n ) -> A:\n ctx = (\n current_update_context(self.ctxtag) if self.ctxtag is not None else None\n )\n outer_index_outer_ref = (\n ctx.outer_index_outer_ref if ctx and ctx.outer_index_outer_ref else None\n )\n\n _state = _merge_to_flat_state((state, *states))\n node = unflatten(\n graphdef,\n _state,\n index_ref=self.index_ref,\n outer_index_outer_ref=outer_index_outer_ref,\n )\n return node\n\n def unflatten( # type: ignore[invalid-annotation]\n self,\n graphdef: GraphDef[A],\n flat_state: GraphFlatState | list[tp.Any],\n /,\n *flat_states: GraphFlatState,\n ) -> A:\n ctx = (\n current_update_context(self.ctxtag) if self.ctxtag is not None else None\n )\n static_cache = (\n ctx.static_cache if ctx is not None and self.is_inner is False else None\n )\n state: FlatState[tp.Any] | list[tp.Any]\n if type(flat_state) is list:\n if flat_states:\n raise ValueError(\n 'Cannot use multiple flat_states when flat_state is a list, '\n f'got flat_state: {flat_state!r}, flat_states: {flat_states!r}'\n )\n state = flat_state\n else:\n state = FlatState.merge(flat_state, *flat_states)\n\n if type(graphdef.nodes[0]) is NodeRef:\n node = unflatten(\n graphdef,\n state,\n index_ref=self.index_ref,\n )\n\n elif static_cache is not None:\n assert isinstance(graphdef.nodes[0], NodeDef)\n assert ctx is not None\n if (outer_index := graphdef.nodes[0].outer_index) is not None:\n outer_index_outer_ref = ctx.outer_index_outer_ref\n assert outer_index_outer_ref is not None\n node = outer_index_outer_ref[outer_index]\n\n if node in static_cache:\n static_cache_node = static_cache[node]\n if static_cache_node.final_graphdef != graphdef:\n raise ValueError(\n 'The graph structure of a node added to cached_partial was mutated inside the transformation, '\n f'this is not allowed.\nNode: {node}\nOuput graphdef: {graphdef}\nExpected graphdef: {static_cache_node.final_graphdef}'\n )\n if type(state) is list:\n leaves = state\n elif type(state) is FlatState:\n leaves = state.leaves\n else:\n raise ValueError(f'Unsupported state type: 
{type(state)}')\n\n if len(leaves) != len(static_cache_node.variables):\n raise ValueError(\n f'Incorrect number of leaves: expected {len(static_cache_node.variables)} '\n f'leaves in the state, got {len(leaves)}'\n )\n for variable, leaf in zip(static_cache_node.variables, leaves):\n if type(leaf) is VariableState:\n variable.update_from_state(leaf)\n else:\n variable.raw_value = leaf\n self.index_ref.update(static_cache_node.new_index_ref)\n else:\n # uncached node, create it\n node = unflatten(\n graphdef,\n state,\n index_ref=self.index_ref,\n outer_index_outer_ref=outer_index_outer_ref,\n )\n else: # graphdef.outer_index is None\n # its a new node, create it\n node = unflatten(\n graphdef,\n state,\n index_ref=self.index_ref,\n )\n else:\n outer_index_outer_ref = (\n ctx.outer_index_outer_ref if ctx and ctx.outer_index_outer_ref else None\n )\n node = unflatten(\n graphdef,\n state,\n index_ref=self.index_ref,\n outer_index_outer_ref=outer_index_outer_ref,\n )\n return node\n\n\n@tp.overload\n@contextlib.contextmanager\ndef merge_context() -> tp.Generator[MergeContext, None, None]: ... # type: ignore[bad-return-type]\n@tp.overload\n@contextlib.contextmanager\ndef merge_context(\n ctxtag: tp.Hashable | None, inner: bool | None\n) -> tp.Generator[MergeContext, None, None]: ... # type: ignore[bad-return-type]\n@contextlib.contextmanager\ndef merge_context(ctxtag: tp.Hashable | None = None, inner: bool | None = None):\n GRAPH_CONTEXT.index_ref_stack.append(MergeContext(ctxtag, IndexMap(), inner))\n\n try:\n yield GRAPH_CONTEXT.index_ref_stack[-1]\n finally:\n unflatten_ctx = GRAPH_CONTEXT.index_ref_stack.pop()\n index_ref = unflatten_ctx.index_ref\n if ctxtag is not None:\n if inner is None:\n raise ValueError('inner_merge must be specified when using ctxtag')\n ctx = current_update_context(ctxtag)\n ctx.unflatten_end(index_ref, inner)\n del unflatten_ctx.index_ref\n del unflatten_ctx.ctxtag\n\n\n@jax.tree_util.register_static\n@dataclasses.dataclass\nclass UpdateContext:\n """"""A context manager for handling complex state updates.""""""\n\n tag: tp.Hashable\n outer_ref_outer_index: RefMap | None\n outer_index_inner_ref: IndexMap | None\n # reverse caches\n outer_index_outer_ref: IndexMap | None\n inner_ref_outer_index: RefMap | None\n static_cache: tp.MutableMapping[tp.Any, StaticCache] | None\n\n # define hash and eq to make this an opaque object\n def __hash__(self):\n return 0\n\n def __eq__(self, other):\n return isinstance(other, UpdateContext)\n\n def flatten_end(self, ref_index: RefMap):\n if self.outer_ref_outer_index is None:\n # outer split (1), store the references\n self.outer_ref_outer_index = ref_index\n self.outer_index_outer_ref = IndexMap.from_refmap(\n self.outer_ref_outer_index\n )\n else:\n # inner split (3), clear index_ref\n self.outer_index_inner_ref = None\n self.inner_ref_outer_index = None\n\n def unflatten_end(self, index_ref: IndexMap, inner_merge: bool):\n if inner_merge:\n # inner merge (2)\n self.outer_index_inner_ref = index_ref\n self.inner_ref_outer_index = RefMap.from_indexmap(index_ref)\n\n\n@dataclasses.dataclass\nclass UpdateContextManager:\n tag: tp.Hashable\n\n def __enter__(self):\n if GRAPH_CONTEXT.tmp_static_cache is not None:\n # take current static cache\n static_cache = GRAPH_CONTEXT.tmp_static_cache\n GRAPH_CONTEXT.tmp_static_cache = None\n else:\n static_cache = None\n ctx = UpdateContext(\n tag=self.tag,\n outer_ref_outer_index=None,\n outer_index_inner_ref=None,\n outer_index_outer_ref=None,\n inner_ref_outer_index=None,\n 
static_cache=static_cache,\n )\n if self.tag not in GRAPH_CONTEXT.update_context_stacks:\n GRAPH_CONTEXT.update_context_stacks[self.tag] = [ctx]\n else:\n GRAPH_CONTEXT.update_context_stacks[self.tag].append(ctx)\n return ctx\n\n def __exit__(self, *args):\n if self.tag not in GRAPH_CONTEXT.update_context_stacks:\n raise RuntimeError(\n f'No update context found for tag {self.tag!r}, this is a bug.'\n )\n stack = GRAPH_CONTEXT.update_context_stacks[self.tag]\n\n ctx = stack.pop()\n # clear references\n del ctx.outer_ref_outer_index\n del ctx.outer_index_inner_ref\n del ctx.outer_index_outer_ref\n del ctx.inner_ref_outer_index\n\n if not stack:\n del GRAPH_CONTEXT.update_context_stacks[self.tag]\n\n def __call__(self, f: F) -> F:\n @functools.wraps(f)\n def update_context_manager_wrapper(*args, **kwargs):\n with self:\n return f(*args, **kwargs)\n\n return update_context_manager_wrapper # type: ignore\n\n\ndef update_context(tag: tp.Hashable):\n """"""Creates an :class:`UpdateContext` context manager which can be used to handle\n more complex state updates beyond what ``nnx.update`` can handle, including\n updates to static properties and graph structure.\n\n UpdateContext exposes a ``split`` and ``merge`` API with the same\n signature as ``nnx.split`` / ``nnx.merge`` but performs some bookkeeping\n to have the necessary information in order to perfectly update the input\n objects based on the changes made inside the transform. The UpdateContext\n must call split and merge a total of 4 times, the first\n and last calls happen outside the transform and the second and third calls\n happen inside the transform as shown in the diagram below::\n\n\n idxmap\n (2) merge ─────────────────────────────► split (3)\n ▲ │\n │ inside │\n │. . . . . . . . . . . . . . . . . . │ index_mapping\n │ outside │\n │ ▼\n (1) split──────────────────────────────► merge (4)\n refmap\n\n\n The first call to split ``(1)`` creates a ``refmap`` which keeps track of the\n outer references, and the first call to merge ``(2)`` creates an ``idxmap`` which\n keeps track of the inner references. The second call to split ``(3)`` combines\n the refmap and idxmap to produce the ``index_mapping`` which indicates\n how the outer references map to the inner references. Finally, the last call to\n merge ``(4)`` uses the index_mapping and the refmap to reconstruct the\n output of the transform while reusing/updating the inner references. To avoid\n memory leaks, the idxmap is cleared after ``(3)`` and the refmap is\n cleared after ``(4)``, and both are cleared after the context manager exits.\n\n Here is a simple example showing the use of ``update_context``::\n\n >>> from flax import nnx\n ...\n >>> m1 = nnx.Dict({})\n >>> with nnx.update_context('example'):\n ... with nnx.split_context('example') as ctx:\n ... graphdef, state = ctx.split(m1)\n ... @jax.jit\n ... def f(graphdef, state):\n ... with nnx.merge_context('example', inner=True) as ctx:\n ... m2 = ctx.merge(graphdef, state)\n ... m2.a = 1\n ... m2.ref = m2 # create a reference cycle\n ... with nnx.split_context('example') as ctx:\n ... return ctx.split(m2)\n ... graphdef_out, state_out = f(graphdef, state)\n ... with nnx.merge_context('example', inner=False) as ctx:\n ... 
m3 = ctx.merge(graphdef_out, state_out)\n ...\n >>> assert m1 is m3\n >>> assert m1.a == 1\n >>> assert m1.ref is m1\n\n Note that ``update_context`` takes in a ``tag`` argument which is used\n primarily as a safety mechanism reduce the risk of accidentally using the\n wrong UpdateContext when using :func:`current_update_context` to access the\n current active context. ``update_context`` can also be used as a\n decorator that creates/activates an UpdateContext context for the\n duration of the function::\n\n >>> from flax import nnx\n ...\n >>> m1 = nnx.Dict({})\n >>> @jax.jit\n ... def f(graphdef, state):\n ... with nnx.merge_context('example', inner=True) as ctx:\n ... m2 = ctx.merge(graphdef, state)\n ... m2.a = 1 # insert static attribute\n ... m2.ref = m2 # create a reference cycle\n ... with nnx.split_context('example') as ctx:\n ... return ctx.split(m2)\n ...\n >>> @nnx.update_context('example')\n ... def g(m1):\n ... with nnx.split_context('example') as ctx:\n ... graphdef, state = ctx.split(m1)\n ... graphdef_out, state_out = f(graphdef, state)\n ... with nnx.merge_context('example', inner=False) as ctx:\n ... return ctx.merge(graphdef_out, state_out)\n ...\n >>> m3 = g(m1)\n >>> assert m1 is m3\n >>> assert m1.a == 1\n >>> assert m1.ref is m1\n\n The context can be accessed using :func:`current_update_context`.\n\n Args:\n tag: A string tag to identify the context.\n """"""\n return UpdateContextManager(tag=tag)\n\n\ndef current_update_context(tag: tp.Hashable) -> UpdateContext:\n """"""Returns the current active :class:`UpdateContext` for the given tag.""""""\n if tag not in GRAPH_CONTEXT.update_context_stacks:\n raise ValueError(f'No update context found for tag {tag!r}.')\n return GRAPH_CONTEXT.update_context_stacks[tag][-1]\n\n\n# --------------------------------------------------------\n# Functional API\n# --------------------------------------------------------\n\n\ndef _split_state(\n state: FlatState[tp.Any],\n filters: tuple[filterlib.Filter, ...],\n) -> tuple[FlatState[tp.Any], tpe.Unpack[tuple[FlatState[tp.Any], ...]]]:\n if not filters:\n return (state,) # type: ignore[bad-return-type]\n states = state.split(*filters)\n if not isinstance(states, tuple):\n return (states,) # type: ignore[bad-return-type]\n assert len(states) > 0\n return states # type: ignore[return-value]\n\n\n@tp.overload\ndef split( # type: ignore[invalid-annotation]\n graph_node: A, /\n) -> tuple[GraphDef[A], GraphState | VariableState]: ...\n@tp.overload\ndef split( # type: ignore[invalid-annotation]\n graph_node: A, first: filterlib.Filter, /\n) -> tuple[GraphDef[A], GraphState | VariableState]: ...\n@tp.overload\ndef split( # type: ignore[invalid-annotation]\n graph_node: A,\n first: filterlib.Filter,\n second: filterlib.Filter,\n /,\n *filters: filterlib.Filter,\n) -> tuple[\n GraphDef[A],\n GraphState | VariableState,\n tpe.Unpack[tuple[GraphState | VariableState, ...]],\n]: ...\ndef split( # type: ignore[invalid-annotation]\n node: A, *filters: filterlib.Filter\n) -> tuple[\n GraphDef[A],\n GraphState | VariableState,\n tpe.Unpack[tuple[GraphState | VariableState, ...]],\n]:\n """"""Split a graph node into a :class:`GraphDef` and one or more :class:`State`s. State is\n a ``Mapping`` from strings or integers to ``Variables``, Arrays or nested States. GraphDef\n contains all the static information needed to reconstruct a ``Module`` graph, it is analogous\n to JAX’s ``PyTreeDef``. 
:func:`split` is used in conjunction with :func:`merge` to switch\n seamlessly between stateful and stateless representations of the graph.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax, jax.numpy as jnp\n ...\n >>> class Foo(nnx.Module):\n ... def __init__(self, rngs):\n ... self.batch_norm = nnx.BatchNorm(2, rngs=rngs)\n ... self.linear = nnx.Linear(2, 3, rngs=rngs)\n ...\n >>> node = Foo(nnx.Rngs(0))\n >>> graphdef, params, batch_stats = nnx.split(node, nnx.Param, nnx.BatchStat)\n ...\n >>> jax.tree.map(jnp.shape, params)\n State({\n 'batch_norm': {\n 'bias': VariableState(\n type=Param,\n value=(2,)\n ),\n 'scale': VariableState(\n type=Param,\n value=(2,)\n )\n },\n 'linear': {\n 'bias': VariableState(\n type=Param,\n value=(3,)\n ),\n 'kernel': VariableState(\n type=Param,\n value=(2, 3)\n )\n }\n })\n >>> jax.tree.map(jnp.shape, batch_stats)\n State({\n 'batch_norm': {\n 'mean': VariableState(\n type=BatchStat,\n value=(2,)\n ),\n 'var': VariableState(\n type=BatchStat,\n value=(2,)\n )\n }\n })\n\n :func:`split` and :func:`merge` are primarily used to interact directly with JAX\n transformations, see\n `Functional API `__\n for more information.\n\n Arguments:\n node: graph node to split.\n *filters: some optional filters to group the state into mutually exclusive substates.\n Returns:\n ``GraphDef`` and one or more ``States`` equal to the number of filters passed. If no\n filters are passed, a single ``State`` is returned.\n """"""\n graphdef, flat_state = flatten(node)\n flat_states = _split_state(flat_state, filters)\n states = _to_nested_state(graphdef, flat_states)\n return graphdef, *states # type: ignore[return-value]\n\n\ndef _to_nested_state(\n graphdef: GraphDef[A], flat_states: tp.Iterable[tp.Any]\n) -> tuple[tp.Any, ...]:\n if type(graphdef.nodes[0]) in (VariableDef, MutableArrayDef):\n states = tuple(\n flat_state[0][1] if flat_state else EmptyState()\n for flat_state in flat_states\n )\n else:\n states = tuple(\n statelib.from_flat_state(flat_state) for flat_state in flat_states\n )\n return states\n\n\ndef _merge_to_flat_state(states: tp.Iterable[tp.Any]):\n flat_state: list[tuple[PathParts, tp.Any]] = []\n\n for state in states:\n if isinstance(state, dict | State):\n flat_state.extend(traversals.flatten_to_sequence(state))\n elif isinstance(state, FlatState):\n flat_state.extend(state)\n else:\n flat_state.append(((), state))\n\n flat_state.sort()\n return [value for _, value in flat_state]\n\n\ndef merge( # type: ignore[invalid-annotation]\n graphdef: GraphDef[A],\n state: tp.Any,\n /,\n *states: tp.Any,\n) -> A:\n """"""The inverse of :func:`flax.nnx.split`.\n\n ``nnx.merge`` takes a :class:`flax.nnx.GraphDef` and one or more :class:`flax.nnx.State`'s\n and creates a new node with the same structure as the original node.\n\n Recall: :func:`flax.nnx.split` is used to represent a :class:`flax.nnx.Module`\n by: 1) a static ``nnx.GraphDef`` that captures its Pythonic static information;\n and 2) one or more :class:`flax.nnx.Variable` ``nnx.State``'(s) that capture\n its ``jax.Array``'s in the form of JAX pytrees.\n\n ``nnx.merge`` is used in conjunction with ``nnx.split`` to switch seamlessly\n between stateful and stateless representations of the graph.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax, jax.numpy as jnp\n ...\n >>> class Foo(nnx.Module):\n ... def __init__(self, rngs):\n ... self.batch_norm = nnx.BatchNorm(2, rngs=rngs)\n ... 
self.linear = nnx.Linear(2, 3, rngs=rngs)\n ...\n >>> node = Foo(nnx.Rngs(0))\n >>> graphdef, params, batch_stats = nnx.split(node, nnx.Param, nnx.BatchStat)\n ...\n >>> new_node = nnx.merge(graphdef, params, batch_stats)\n >>> assert isinstance(new_node, Foo)\n >>> assert isinstance(new_node.batch_norm, nnx.BatchNorm)\n >>> assert isinstance(new_node.linear, nnx.Linear)\n\n ``nnx.split`` and ``nnx.merge`` are primarily used to interact directly with JAX\n transformations (refer to\n `Functional API `__\n for more information.\n\n Args:\n graphdef: A :class:`flax.nnx.GraphDef` object.\n state: A :class:`flax.nnx.State` object.\n *states: Additional :class:`flax.nnx.State` objects.\n Returns:\n The merged :class:`flax.nnx.Module`.\n """"""\n if isinstance(state, list):\n if len(states) != 0:\n raise ValueError(\n f'Only one state can be passed as a list.'\n )\n _state = state\n else:\n _state = _merge_to_flat_state((state, *states))\n node = unflatten(graphdef, _state)\n return node\n\n\ndef update(node, state: tp.Any, /, *states: tp.Any) -> None:\n """"""Update the given graph node with a new state(s) in-place.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax, jax.numpy as jnp\n\n >>> x = jnp.ones((1, 2))\n >>> y = jnp.ones((1, 3))\n >>> model = nnx.Linear(2, 3, rngs=nnx.Rngs(0))\n\n >>> def loss_fn(model, x, y):\n ... return jnp.mean((y - model(x))**2)\n >>> prev_loss = loss_fn(model, x, y)\n\n >>> grads = nnx.grad(loss_fn)(model, x, y)\n >>> new_state = jax.tree.map(lambda p, g: p - 0.1*g, nnx.state(model), grads)\n >>> nnx.update(model, new_state)\n >>> assert loss_fn(model, x, y) < prev_loss\n\n Args:\n node: A graph node to update.\n state: A :class:`State` object.\n *states: Additional :class:`State` objects.\n """"""\n if states:\n if isinstance(node, Variable):\n non_empty_states = [\n _state\n for _state in (state, *states)\n if not isinstance(_state, tp.Mapping) or _state\n ]\n if len(non_empty_states) != 1:\n all_states = (state, *states)\n raise ValueError(\n f'Expected exactly one non-empty state, got: {all_states!r}'\n )\n state = non_empty_states[0]\n else:\n state = statelib.merge_state(state, *states)\n _graph_update_dynamic(node, state)\n\n\ndef _variables_generator(node) -> tp.Iterable[tuple[PathParts, Variable]]:\n for path, value in iter_graph(node):\n if isinstance(value, Variable):\n yield path, value\n\n\n@tp.overload\ndef variables(node, /) -> State[Key, Variable]: ...\n@tp.overload\ndef variables(node, first: filterlib.Filter, /) -> State[Key, Variable]: ...\n@tp.overload\ndef variables(\n node,\n first: filterlib.Filter,\n second: filterlib.Filter,\n /,\n *filters: filterlib.Filter,\n) -> tuple[State[Key, Variable], ...]: ...\ndef variables(\n node,\n *filters: filterlib.Filter,\n) -> tp.Union[State[Key, Variable], tuple[State[Key, Variable], ...]]:\n """"""Similar to :func:`state` but returns the current :class:`Variable` objects instead\n of new :class:`VariableState` instances.\n\n Example::\n\n >>> from flax import nnx\n ...\n >>> model = nnx.Linear(2, 3, rngs=nnx.Rngs(0))\n >>> params = nnx.variables(model, nnx.Param)\n ...\n >>> assert params['kernel'] is model.kernel\n >>> assert params['bias'] is model.bias\n\n Args:\n node: A graph node object.\n *filters: One or more :class:`Variable` objects to filter by.\n Returns:\n One or more :class:`State` mappings containing the :class:`Variable` objects.\n """"""\n num_filters = len(filters)\n if num_filters == 0:\n filters = (..., ...)\n else:\n filters = (*filters, ...)\n\n variables_iterable 
= _variables_generator(node)\n flat_states = variablelib.split_flat_state(\n variables_iterable, (*filters, ...)\n )\n states = tuple(\n statelib.from_flat_state(flat_state) for flat_state in flat_states\n )\n if num_filters < 2:\n return states[0]\n return states\n\n\n@tp.overload\ndef state(node, /) -> GraphState: ...\n@tp.overload\ndef state(node, first: filterlib.Filter, /) -> GraphState: ...\n@tp.overload\ndef state(\n node,\n first: filterlib.Filter,\n second: filterlib.Filter,\n /,\n *filters: filterlib.Filter,\n) -> tuple[GraphState, ...]: ...\ndef state(\n node,\n *filters: filterlib.Filter,\n) -> tp.Union[GraphState, tuple[GraphState, ...]]:\n """"""Similar to :func:`split` but only returns the :class:`State`'s indicated by the filters.\n\n Example usage::\n\n >>> from flax import nnx\n\n >>> class Model(nnx.Module):\n ... def __init__(self, rngs):\n ... self.batch_norm = nnx.BatchNorm(2, rngs=rngs)\n ... self.linear = nnx.Linear(2, 3, rngs=rngs)\n ... def __call__(self, x):\n ... return self.linear(self.batch_norm(x))\n\n >>> model = Model(rngs=nnx.Rngs(0))\n >>> # get the learnable parameters from the batch norm and linear layer\n >>> params = nnx.state(model, nnx.Param)\n >>> # get the batch statistics from the batch norm layer\n >>> batch_stats = nnx.state(model, nnx.BatchStat)\n >>> # get them separately\n >>> params, batch_stats = nnx.state(model, nnx.Param, nnx.BatchStat)\n >>> # get them together\n >>> state = nnx.state(model)\n\n Args:\n node: A graph node object.\n *filters: One or more :class:`Variable` objects to filter by.\n Returns:\n One or more :class:`State` mappings.\n """"""\n _, flat_state = flatten(node)\n state = flat_state.to_nested_state()\n\n states: GraphState | tuple[GraphState, ...]\n if len(filters) == 0:\n states = state # type: ignore[assignment]\n elif len(filters) == 1:\n states = statelib.filter_state(state, filters[0])\n else:\n states = statelib.filter_state(state, filters[0], filters[1], *filters[2:])\n\n return states\n\n\ndef graphdef(node: tp.Any, /) -> GraphDef[tp.Any]:\n """"""Get the :class:`GraphDef` of the given graph node.\n\n Example usage::\n\n >>> from flax import nnx\n\n >>> model = nnx.Linear(2, 3, rngs=nnx.Rngs(0))\n >>> graphdef, _ = nnx.split(model)\n >>> assert graphdef == nnx.graphdef(model)\n\n Args:\n node: A graph node object.\n Returns:\n The :class:`GraphDef` of the :class:`Module` object.\n """"""\n graphdef, _ = flatten(node)\n return graphdef\n\n\n@tp.overload\ndef pop(\n node,\n filter: filterlib.Filter,\n /,\n) -> GraphState: ...\n\n\n@tp.overload\ndef pop(\n node,\n filter: filterlib.Filter,\n filter2: filterlib.Filter,\n /,\n *filters: filterlib.Filter,\n) -> tuple[GraphState, ...]: ...\n\n\ndef pop(\n node, *filters: filterlib.Filter\n) -> tp.Union[GraphState, tuple[GraphState, ...]]:\n """"""Pop one or more :class:`Variable` types from the graph node.\n\n Example usage::\n\n >>> from flax import nnx\n >>> import jax.numpy as jnp\n\n >>> class Model(nnx.Module):\n ... def __init__(self, rngs):\n ... self.linear1 = nnx.Linear(2, 3, rngs=rngs)\n ... self.linear2 = nnx.Linear(3, 4, rngs=rngs)\n ... def __call__(self, x):\n ... x = self.linear1(x)\n ... self.sow(nnx.Intermediate, 'i', x)\n ... x = self.linear2(x)\n ... 
return x\n\n >>> x = jnp.ones((1, 2))\n >>> model = Model(rngs=nnx.Rngs(0))\n >>> assert not hasattr(model, 'i')\n >>> y = model(x)\n >>> assert hasattr(model, 'i')\n\n >>> intermediates = nnx.pop(model, nnx.Intermediate)\n >>> assert intermediates['i'].value[0].shape == (1, 3)\n >>> assert not hasattr(model, 'i')\n\n Args:\n node: A graph node object.\n *filters: One or more :class:`Variable` objects to filter by.\n Returns:\n The popped :class:`State` containing the :class:`Variable`\n objects that were filtered for.\n """"""\n if len(filters) == 0:\n raise ValueError('Expected at least one filter')\n\n id_to_index: dict[int, Index] = {}\n path_parts: PathParts = ()\n predicates = tuple(filterlib.to_predicate(filter) for filter in filters)\n flat_states: tuple[dict[PathParts, LeafType], ...] = tuple(\n {} for _ in predicates\n )\n _graph_pop(\n node=node,\n id_to_index=id_to_index,\n path_parts=path_parts,\n flat_states=flat_states,\n predicates=predicates,\n )\n states = tuple(\n statelib.from_flat_state(flat_state) for flat_state in flat_states\n )\n\n if len(states) == 1:\n return states[0]\n else:\n return states\n\n\ndef clone(node: Node) -> Node:\n """"""Create a deep copy of the given graph node.\n\n Example usage::\n\n >>> from flax import nnx\n\n >>> model = nnx.Linear(2, 3, rngs=nnx.Rngs(0))\n >>> cloned_model = nnx.clone(model)\n >>> model.bias.value += 1\n >>> assert (model.bias.value != cloned_model.bias.value).all()\n\n Args:\n node: A graph node object.\n Returns:\n A deep copy of the :class:`Module` object.\n """"""\n graphdef, state = split(node)\n return merge(graphdef, state)\n\n\ndef _mutable_like(path, x):\n return (\n isinstance(x, Variable) and x.mutable\n ) or variablelib.is_mutable_array(x)\n\n\ndef freeze(tree: A, /, only: filterlib.Filter = _mutable_like) -> A:\n """"""Converts a pytree of mutable arrays to regular arrays.\n\n Example::\n >>> from flax import nnx\n >>> import jax\n >>> import jax.numpy as jnp\n ...\n >>> tree = [nnx.mutable_array(jnp.array(1.0)), jnp.array(2.0)]\n >>> assert nnx.is_mutable_array(tree[0])\n ...\n >>> frozen_tree = nnx.freeze(tree)\n >>> assert isinstance(frozen_tree[0], jax.Array)\n\n If the tree contains duplicate mutable arrays, a ValueError is raised::\n\n >>> shared_array = nnx.mutable_array(jnp.array(1.0))\n >>> tree = [shared_array, shared_array]\n >>> try:\n ... nnx.freeze(tree)\n ... except ValueError as e:\n ... 
print(e)\n Found duplicate MutableArray found at path [1] and [0] ...\n\n ``only`` is a `Filter `__\n that can be used to specify which mutable arrays to freeze::\n\n >>> tree = [nnx.mutable_array(jnp.array(1.0)), nnx.mutable_array(jnp.array(2.0))]\n >>> frozen_tree = nnx.freeze(tree, only=lambda path, x: path[0] == 0)\n ...\n >>> assert isinstance(frozen_tree[0], jax.Array)\n >>> assert isinstance(frozen_tree[1], nnx.MutableArray)\n\n Args:\n tree: A pytree potentially containing mutable arrays.\n only: A Filter to specify which mutable arrays to freeze.\n Returns:\n A pytree with the frozen arrays.\n """"""\n freeze_filter = filterlib.to_predicate(only)\n mutable_arrays: dict[int, str] = {}\n\n def check_mutable_array(path, x):\n m_array_id = id(x)\n if m_array_id in mutable_arrays:\n current_path_str = jax.tree_util.keystr(path)\n previous_path_str = mutable_arrays[m_array_id]\n raise ValueError(\n f'Found duplicate MutableArray found at path {current_path_str} '\n f'and {previous_path_str} at object {x}.'\n )\n mutable_arrays[m_array_id] = jax.tree_util.keystr(path)\n\n def _freeze_fn(jax_path, x):\n path = tuple(_key_path_to_key(part) for part in jax_path)\n if freeze_filter(path, x):\n if isinstance(x, Variable):\n check_mutable_array(jax_path, x.raw_value)\n return x.from_metadata(x[...], x.get_metadata().copy())\n elif variablelib.is_mutable_array(x):\n check_mutable_array(jax_path, x)\n return x[...]\n return x\n\n tree = jax.tree.map_with_path(\n _freeze_fn, tree, is_leaf=lambda x: isinstance(x, Variable)\n )\n return tree\n\n\ndef _array_like(path, x):\n return (\n isinstance(x, Variable) and isinstance(x.raw_value, jax.Array)\n ) or isinstance(x, jax.Array)\n\n\ndef mutable(tree: A, /, only: filterlib.Filter = _array_like) -> A:\n """"""Converts a pytree of arrays to mutable arrays.\n\n Example::\n\n >>> from flax import nnx\n >>> import jax\n >>> import jax.numpy as jnp\n ...\n >>> tree = [jnp.array(1.0), nnx.mutable_array(jnp.array(2.0))]\n >>> mutable_tree = nnx.mutable(tree)\n >>> assert nnx.is_mutable_array(mutable_tree[0])\n >>> assert nnx.is_mutable_array(mutable_tree[1])\n\n If the tree contains duplicate arrays a ValueError is raised::\n\n >>> shared_array = jnp.array(1.0)\n >>> tree = [shared_array, shared_array]\n >>> try:\n ... nnx.mutable(tree)\n ... except ValueError as e:\n ... 
print(e)\n Found duplicate Array found at path [1] and [0] ...\n\n ``only`` is a `Filter `__\n that can be used to specify which arrays to convert to mutable arrays.\n\n >>> tree = [jnp.array(1.0), jnp.array(2.0)]\n >>> mutable_tree = nnx.mutable(tree, only=lambda path, x: path[0] == 0)\n ...\n >>> assert isinstance(mutable_tree[0], nnx.MutableArray)\n >>> assert isinstance(mutable_tree[1], jax.Array)\n\n Args:\n tree: A pytree potentially containing arrays.\n only: A Filter to specify which arrays to convert to mutable arrays.\n Returns:\n A pytree with the mutable arrays.\n """"""\n mutable_filter = filterlib.to_predicate(only)\n arrays: dict[int, str] = {}\n\n def check_array(path, x):\n m_array_id = id(x)\n if m_array_id in arrays:\n current_path_str = jax.tree_util.keystr(path)\n previous_path_str = arrays[m_array_id]\n raise ValueError(\n f'Found duplicate Array found at path {current_path_str} '\n f'and {previous_path_str} at object {x}.'\n )\n arrays[m_array_id] = jax.tree_util.keystr(path)\n\n def _mutable_fn(jax_path, x):\n path = tuple(_key_path_to_key(part) for part in jax_path)\n if mutable_filter(path, x):\n if isinstance(x, Variable) and isinstance(x.raw_value, jax.Array):\n check_array(jax_path, x.raw_value)\n mutable_array = variablelib.mutable_array(x.raw_value)\n return x.from_metadata(mutable_array, x.get_metadata().copy())\n elif isinstance(x, jax.Array):\n check_array(jax_path, x)\n return variablelib.mutable_array(x)\n return x\n\n return jax.tree.map_with_path(\n _mutable_fn, tree, is_leaf=lambda x: isinstance(x, Variable)\n )\n\n\ndef pure(tree: A) -> A:\n """"""Returns a new tree with all ``Variable`` and ``VariableState`` objects replaced with inner values.\n\n This can be used to remove Variable metadata when its is not needed for tasks like\n serialization or exporting.\n\n Example::\n\n >>> from flax import nnx\n >>> import jax\n >>> import jax.numpy as jnp\n ...\n >>> model = nnx.Linear(2, 3, rngs=nnx.Rngs(0))\n >>> graphdef, state = nnx.split(model)\n >>> jax.tree.map(jnp.shape, state)\n State({\n 'bias': VariableState(\n type=Param,\n value=(3,)\n ),\n 'kernel': VariableState(\n type=Param,\n value=(2, 3)\n )\n })\n >>> pure_state = nnx.pure(state)\n >>> jax.tree.map(jnp.shape, pure_state)\n State({\n 'bias': (3,),\n 'kernel': (2, 3)\n })\n\n Args:\n tree: A pytree potentially containing ``Variable`` and ``VariableState`` objects.\n Returns:\n A new pytree with all ``Variable`` and ``VariableState`` objects replaced with their\n inner values.\n """"""\n def _pure_fn(x):\n if isinstance(x, Variable | VariableState):\n return x.raw_value\n return x\n\n return jax.tree.map(\n _pure_fn,\n tree,\n is_leaf=lambda x: isinstance(x, Variable | VariableState),\n )\n\n\ndef call(\n graphdef_state: tuple[GraphDef[A], GraphState], /\n) -> ApplyCaller[tuple[GraphDef[A], GraphState]]:\n """"""Calls a method underlying graph node defined by a (GraphDef, State) pair.\n\n ``call`` takes a ``(GraphDef, State)`` pair and creates a proxy object that can be\n used to call methods on the underlying graph node. When a method is called, the\n output is returned along with a new (GraphDef, State) pair that represents the\n updated state of the graph node. ``call`` is equivalent to :func:`merge` > ``method``\n > :func:`split`` but is more convenient to use in pure JAX functions.\n\n Example::\n\n >>> from flax import nnx\n >>> import jax\n >>> import jax.numpy as jnp\n ...\n >>> class StatefulLinear(nnx.Module):\n ... def __init__(self, din, dout, rngs):\n ... 
self.w = nnx.Param(jax.random.uniform(rngs(), (din, dout)))\n ... self.b = nnx.Param(jnp.zeros((dout,)))\n ... self.count = Variable(jnp.array(0, dtype=jnp.uint32))\n ...\n ... def increment(self):\n ... self.count += 1\n ...\n ... def __call__(self, x):\n ... self.increment()\n ... return x @ self.w + self.b\n ...\n >>> linear = StatefulLinear(3, 2, nnx.Rngs(0))\n >>> linear_state = nnx.split(linear)\n ...\n >>> @jax.jit\n ... def forward(x, linear_state):\n ... y, linear_state = nnx.call(linear_state)(x)\n ... return y, linear_state\n ...\n >>> x = jnp.ones((1, 3))\n >>> y, linear_state = forward(x, linear_state)\n >>> y, linear_state = forward(x, linear_state)\n ...\n >>> linear = nnx.merge(*linear_state)\n >>> linear.count.value\n Array(2, dtype=uint32)\n\n The proxy object returned by ``call`` supports indexing and attribute access\n to access nested methods. In the example below, the ``increment`` method indexing\n is used to call the ``increment`` method of the ``StatefulLinear`` module\n at the ``b`` key of a ``nodes`` dictionary.\n\n >>> class StatefulLinear(nnx.Module):\n ... def __init__(self, din, dout, rngs):\n ... self.w = nnx.Param(jax.random.uniform(rngs(), (din, dout)))\n ... self.b = nnx.Param(jnp.zeros((dout,)))\n ... self.count = nnx.Variable(jnp.array(0, dtype=jnp.uint32))\n ...\n ... def increment(self):\n ... self.count += 1\n ...\n ... def __call__(self, x):\n ... self.increment()\n ... return x @ self.w + self.b\n ...\n >>> rngs = nnx.Rngs(0)\n >>> nodes = dict(\n ... a=StatefulLinear(3, 2, rngs),\n ... b=StatefulLinear(2, 1, rngs),\n ... )\n ...\n >>> node_state = nnx.split(nodes)\n >>> # use attribute access\n >>> _, node_state = nnx.call(node_state)['b'].increment()\n ...\n >>> nodes = nnx.merge(*node_state)\n >>> nodes['a'].count.value\n Array(0, dtype=uint32)\n >>> nodes['b'].count.value\n Array(1, dtype=uint32)\n """"""\n\n def pure_caller(accessor: DelayedAccessor, *args, **kwargs):\n node = merge(*graphdef_state)\n method = accessor(node)\n out = method(*args, **kwargs)\n return out, split(node)\n\n return CallableProxy(pure_caller) # type: ignore\n\n\ndef iter_graph(node: tp.Any, /) -> tp.Iterator[tuple[PathParts, tp.Any]]:\n """"""Iterates over all nested nodes and leaves of the given graph node, including the current node.\n\n ``iter_graph`` creates a generator that yields path and value pairs, where\n the path is a tuple of strings or integers representing the path to the value from the\n root. Repeated nodes are visited only once. Leaves include static values.\n\n Example::\n >>> from flax import nnx\n >>> import jax.numpy as jnp\n ...\n >>> class Linear(nnx.Module):\n ... def __init__(self, din, dout, *, rngs: nnx.Rngs):\n ... self.din, self.dout = din, dout\n ... self.w = nnx.Param(jax.random.uniform(rngs.next(), (din, dout)))\n ... self.b = nnx.Param(jnp.zeros((dout,)))\n ...\n >>> module = Linear(3, 4, rngs=nnx.Rngs(0))\n >>> graph = [module, module]\n ...\n >>> for path, value in nnx.iter_graph(graph):\n ... 
print(path, type(value).__name__)\n ...\n (0, '_object__state') ObjectState\n (0, 'b') Param\n (0, 'din') int\n (0, 'dout') int\n (0, 'w') Param\n (0,) Linear\n () list\n """"""\n visited: set[int] = set()\n path_parts: PathParts = ()\n yield from _iter_graph(node, visited, path_parts)\n\n\ndef _iter_graph(\n node: tp.Any, visited: set[int], path_parts: PathParts\n) -> tp.Iterator[tuple[PathParts, tp.Any]]:\n if is_node(node):\n if id(node) in visited:\n return\n visited.add(id(node))\n node_impl = get_node_impl(node)\n if node_impl is None and not (\n isinstance(node, Variable) or variablelib.is_mutable_array(node)\n ):\n raise RuntimeError(f'Unsupported type: {type(node)}, this is a bug.')\n assert node_impl is not None\n node_dict = node_impl.node_dict(node)\n for key, value in node_dict.items():\n yield from _iter_graph(value, visited, (*path_parts, key))\n\n yield path_parts, node\n\n\n@jax.tree_util.register_static\n@dataclasses.dataclass(frozen=True, slots=True)\nclass Static(tp.Generic[A]):\n """"""An empty pytree node that treats its inner value as static.\n ``value`` must define ``__eq__`` and ``__hash__``.\n """"""\n\n value: A\n\n\n# ---------------------------------------------------------\n# Pytree\n# ---------------------------------------------------------\nclass GenericPytree: ...\n\n\nfrom jax._src.tree_util import _registry as JAX_PYTREE_REGISTRY\n\n\ndef is_pytree_node(x: tp.Any) -> bool:\n if isinstance(x, Variable):\n return False\n elif type(x) in JAX_PYTREE_REGISTRY:\n return True\n elif isinstance(x, tuple):\n return True\n else:\n return False\n\n\ndef _key_path_to_key(key: tp.Any) -> Key:\n if isinstance(key, jax.tree_util.SequenceKey):\n return key.idx\n elif isinstance(\n key, (jax.tree_util.DictKey, jax.tree_util.FlattenedIndexKey)\n ):\n if not is_key_like(key.key): # type: ignore[not-supported-yet]\n raise ValueError(\n f'Invalid key: {key.key}. 
May be due to its type not being hashable or comparable.'\n )\n return key.key\n elif isinstance(key, jax.tree_util.GetAttrKey):\n return key.name\n else:\n return str(key)\n\nclass IndexesPytreeDef(tp.NamedTuple):\n key_index: HashableMapping[Key, int]\n treedef: jax.tree_util.PyTreeDef\n\ndef _flatten_pytree(pytree: tp.Any):\n leaves, treedef = jax.tree_util.tree_flatten_with_path(\n pytree, is_leaf=lambda x: x is not pytree\n )\n nodes = [(_key_path_to_key(path[0]), value) for path, value in leaves]\n key_index = HashableMapping(\n {key: i for i, (key, _) in enumerate(nodes)}, copy=False\n )\n nodes.sort() # sort by key\n return nodes, IndexesPytreeDef(key_index, treedef)\n\n\ndef _unflatten_pytree(\n nodes: tuple[tuple[Key, tp.Any], ...], metadata: IndexesPytreeDef\n):\n # sort to original order\n sorted_nodes = sorted(nodes, key=lambda x: metadata.key_index[x[0]])\n pytree = metadata.treedef.unflatten(value for _, value in sorted_nodes)\n return pytree\n\n\nPYTREE_NODE_IMPL = PytreeNodeImpl(\n type=GenericPytree,\n flatten=_flatten_pytree,\n unflatten=_unflatten_pytree, # type: ignore\n)\n\n# common pytrees\n# list\nregister_pytree_node_type(\n list,\n flatten=lambda x: (list(enumerate(x)), None),\n unflatten=lambda nodes, _: [value for _, value in nodes], # type: ignore\n)\n# tuple\nregister_pytree_node_type(\n tuple,\n flatten=lambda x: (list(enumerate(x)), None),\n unflatten=lambda nodes, _: tuple(value for _, value in nodes), # type: ignore\n)\n# dict\nregister_pytree_node_type(\n dict,\n flatten=lambda x: (sorted(x.items()), None),\n unflatten=lambda nodes, _: {key: value for key, value in nodes}, # type: ignore\n)\n# None\nregister_pytree_node_type(\n type(None),\n flatten=lambda x: ([], None),\n unflatten=lambda _, __: None, # type: ignore\n)\n",python,tab +7361,20585111,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py",61873,0,"",python,selection_command +7362,20589574,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py",0,0,"# Copyright 2024 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport functools\nimport typing as tp\n\nfrom flax.nnx import (\n extract,\n graph,\n)\nfrom flax.typing import MISSING, Missing\n\nA = tp.TypeVar('A')\nF = tp.TypeVar('F', bound=tp.Callable[..., tp.Any])\n\n\n# -------------------------------\n# (split|merge)_inputs\n# -------------------------------\n\n\n@tp.overload\ndef split_inputs(\n *,\n ctxtag: str = 'split_merge_inputs',\n) -> tp.Callable[[F], F]: ...\n@tp.overload\ndef split_inputs(\n f: F,\n *,\n ctxtag: str = 'split_merge_inputs',\n) -> F: ...\ndef split_inputs(\n f: F | Missing = MISSING,\n *,\n ctxtag: str = 'split_merge_inputs',\n) -> F | tp.Callable[[F], F]:\n """"""Takes in a function that contains graph nodes in the inputs and outputs, and\n returns a function that replaces the graph nodes with some jax-compatible data\n structures. 
Must be used in conjunction with :func:`merge_inputs`.\n\n Args:\n f: The function to be transformed.\n ctxtag: The context tag to be used for the transformation. Defaults to\n 'split_merge_inputs'.\n\n Returns:\n The transformed function.\n\n ``split_inputs`` and ``merge_inputs`` can be used to lift functions that operate\n on jax datastructures (pytrees) to functions that operate on graph nodes. ``split_inputs``\n will take graph nodes in the inputs and outputs and replace them with jax-compatible data\n structures, usually before calling into the transformed function, while ``merge_inputs``\n will convert the jax-compatible data structures back to graph nodes, usually inside the\n transformed function. For common transforms like ``jax.jit`` and ``jax.vmap`` NNX will\n provide a version that works with graph nodes, but for other transforms you can use\n ``split_inputs`` and ``merge_inputs`` to manually lift the function.\n\n The following example demonstrates how to use ``split_inputs`` and ``merge_inputs`` to\n lift ``jax.jit`` to work over a silly function has a stateful operation that zeros out\n the kernel of a linear layer::\n\n >>> from flax import nnx\n >>> import jax.numpy as jnp\n >>> import jax\n ...\n >>> @split_inputs\n ... @jax.jit\n ... @merge_inputs\n ... def forward_and_zero(model: nnx.Linear, x: jax.Array):\n ... y = model(x)\n ... model.kernel *= 0 # zero out the kernel\n ... return y\n ...\n >>> model = nnx.Linear(2, 2, rngs=nnx.Rngs(0))\n >>> y = forward_and_zero(model, jnp.ones((1, 2)))\n >>> y.shape\n (1, 2)\n >>> assert jnp.allclose(model.kernel, 0)\n\n As shown above, not only does the lifted function work with graph nodes, but it also\n propagates the side effects of the original function. **Note**: in practice use ``nnx.jit``\n instead.\n\n Splitting and merging can also be applied to multiple functions in a pipeline. The following\n example show how to lift ``jax.lax.cond`` by using ``split_inputs`` over ``cond`` and\n ``merge_inputs`` over the branches::\n\n >>> model = nnx.Linear(2, 2, rngs=nnx.Rngs(0))\n >>> x = jnp.ones((1, 2))\n ...\n >>> true_fn = lambda m, x: m(x)\n >>> false_fn = lambda m, x: x + 1\n ...\n >>> y = split_inputs(jax.lax.cond)(\n ... False,\n ... merge_inputs(true_fn),\n ... merge_inputs(false_fn), # <== gets called\n ... model,\n ... x,\n ... )\n >>> assert jnp.allclose(y, 2)\n\n **Lifting functions with output semantics**\n\n ``merge_inputs`` internally returns a ``(inputs, output)`` tuple, where ``inputs`` is the\n tuple of the input arguments with non-graph node leaves set to ``None``, and ``output`` is\n the output of the function. This is done to propage all the state changes in the function\n to the graph nodes outside the function. If the transform function has output semantics\n like e.g. ``jax.vmap``'s ``out_axes``, you must account for this in the by configuring\n the arguments accordingly::\n\n >>> from functools import partial\n ...\n >>> model = nnx.Linear(2, 2, rngs=nnx.Rngs(0))\n ...\n >>> in_axes = (None, 0)\n >>> out_axes = (in_axes, 0) # <== internal output arrangement\n ...\n >>> @split_inputs\n ... @partial(jax.vmap, in_axes=in_axes, out_axes=out_axes)\n ... @merge_inputs\n ... def forward(model: nnx.Linear, x: jax.Array):\n ... return model(x)\n ...\n >>> x = jnp.ones((10, 2))\n >>> y = forward(model, x)\n >>> y.shape\n (10, 2)\n\n .. note::\n If the transform has a rigid output structure like ``jax.grad`` or ``jax.lax.scan``\n then ``split_inputs`` and ``merge_inputs`` will not work. 
In this case, use the\n `Functional API `__.\n """"""\n if isinstance(f, Missing):\n return functools.partial(split_inputs, ctxtag=ctxtag) # type: ignore[return-value]\n\n @graph.update_context(ctxtag)\n @functools.wraps(f)\n def split_inputs_wrapper(*args):\n pure_args = extract.to_tree(args, ctxtag=ctxtag)\n pure_args_out, pure_out = f(*pure_args)\n args_out, out = extract.from_tree(\n (pure_args_out, pure_out), ctxtag=ctxtag, is_inner=False\n )\n return out\n\n return split_inputs_wrapper # type: ignore\n\n@tp.overload\ndef merge_inputs(\n *,\n ctxtag: str = 'split_merge_inputs',\n) -> tp.Callable[[F], F]: ...\n@tp.overload\ndef merge_inputs(\n f: F,\n *,\n ctxtag: str = 'split_merge_inputs',\n) -> F: ...\ndef merge_inputs(\n f: F | Missing = MISSING,\n *,\n ctxtag: str = 'split_merge_inputs',\n) -> F | tp.Callable[[F], F]:\n """"""Takes in a function that contains jax-compatible data structures in the\n inputs and outputs, and returns a function that replaces the jax-compatible\n data structures the corresponding graph nodes. Must be used in conjunction\n with :func:`split_inputs`.\n\n Args:\n f: The function to be transformed.\n ctxtag: The context tag to be used for the transformation. Defaults to\n 'split_merge_inputs'.\n\n Returns:\n The transformed function.\n\n For more information and examples, see :func:`split_inputs`.\n """"""\n if isinstance(f, Missing):\n return functools.partial(merge_inputs, ctxtag=ctxtag) # type: ignore[return-value]\n\n @functools.wraps(f)\n def merge_inputs_wrapper(*pure_args):\n args = extract.from_tree(pure_args, ctxtag=ctxtag, is_inner=True)\n out = f(*args)\n args_out = extract.clear_non_graph_nodes(args)\n pure_args_out, pure_out = extract.to_tree((args_out, out), ctxtag=ctxtag)\n return pure_args_out, pure_out\n\n return merge_inputs_wrapper # type: ignore\n",python,tab +7363,20589574,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py",5397,0,"",python,selection_command +7364,20591432,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py",6703,0,"",python,selection_command +7365,20593183,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +7366,20593183,"/fast/home/franz.srambical/jafar/utils/nn.py",4090,0,"",python,selection_command +7367,20597835,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +7368,20597836,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22684,0,"",python,selection_command +7369,20600178,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +7370,20600179,"/fast/home/franz.srambical/jafar/utils/nn.py",10773,0,"",python,selection_command +7371,20604249,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +7372,20604250,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",37837,0,"",python,selection_command +7373,20607387,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",0,0,"",python,tab +7374,20607387,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77888,0,"",python,selection_command +7375,20611940,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12107,0,"",python,selection_command 
+7376,20646586,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12078,0,"",python,selection_command +7377,20646705,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12048,0,"",python,selection_command +7378,20646842,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12025,0,"",python,selection_command +7379,20646980,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12027,0,"",python,selection_command +7380,20647424,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12030,0,"",python,selection_command +7381,20648258,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12020,0,"",python,selection_command +7382,20648608,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",10050,0,"",python,selection_command +7383,20649867,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78566,0,"",python,selection_command +7384,20652627,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78268,0,"",python,selection_command +7385,20652864,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78248,0,"",python,selection_command +7386,20653025,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78089,0,"",python,selection_command +7387,20653184,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78015,0,"",python,selection_command +7388,20653383,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77924,0,"",python,selection_command +7389,20653710,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77807,0,"",python,selection_command +7390,20654160,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77778,0,"",python,selection_command +7391,20654731,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77783,0,"",python,selection_command +7392,20654898,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77785,0,"",python,selection_command +7393,20655321,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67527,0,"",python,selection_command +7394,20660334,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67573,0,"",python,selection_command +7395,20660585,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67596,0,"",python,selection_command +7396,20660603,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67632,0,"",python,selection_command 
+7397,20660632,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67695,0,"",python,selection_command +7398,20660665,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67700,0,"",python,selection_command +7399,20660694,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67723,0,"",python,selection_command +7400,20660730,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67754,0,"",python,selection_command +7401,20660769,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67817,0,"",python,selection_command +7402,20660796,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67892,0,"",python,selection_command +7403,20660826,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67928,0,"",python,selection_command +7404,20660859,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68019,0,"",python,selection_command +7405,20660894,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68024,0,"",python,selection_command +7406,20660925,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68050,0,"",python,selection_command +7407,20661668,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68046,18," if bias is None:",python,selection_command +7408,20661841,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68046,34," if bias is None:\n bias = mask",python,selection_command +7409,20725947,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68069,0,"",python,selection_command +7410,20729436,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",66801,0,"",python,selection_command +7411,20730381,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",66846,0,"",python,selection_command +7412,20730614,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",66889,0,"",python,selection_command +7413,20730640,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",66934,0,"",python,selection_command +7414,20730668,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",66995,0,"",python,selection_command +7415,20730703,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67040,0,"",python,selection_command +7416,20730739,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67094,0,"",python,selection_command +7417,20730769,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67138,0,"",python,selection_command 
+7418,20730800,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67190,0,"",python,selection_command +7419,20730834,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67249,0,"",python,selection_command +7420,20730868,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67325,0,"",python,selection_command +7421,20730902,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67377,0,"",python,selection_command +7422,20730934,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67381,0,"",python,selection_command +7423,20730967,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67413,0,"",python,selection_command +7424,20731002,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67414,0,"",python,selection_command +7425,20731035,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67522,0,"",python,selection_command +7426,20731069,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,0,"",python,selection_command +7427,20731242,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67569,0,"",python,selection_command +7428,20731810,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,0,"",python,selection_command +7429,20732310,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67527,0,"",python,selection_command +7430,20735179,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",71311,21,"combine_bias_and_mask",python,selection_command +7431,20735358,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",71331,0,"",python,selection_command +7432,20737178,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77785,0,"",python,selection_command +7433,20737883,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67527,0,"",python,selection_command +7434,20742101,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77785,21,"combine_bias_and_mask",python,selection_command +7435,20742427,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77805,0,"",python,selection_command +7436,20743336,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",76386,0,"",python,selection_command +7437,20744043,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",74658,0,"",python,selection_command +7438,20744562,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",73054,0,"",python,selection_command 
+7439,20745098,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72076,0,"",python,selection_command +7440,20745732,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72090,0,"",python,selection_command +7441,20745870,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72091,0,"",python,selection_command +7442,20746022,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72094,0,"",python,selection_command +7443,20746511,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72092,26,"def dot_product_attention(",python,selection_command +7444,20747542,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72092,668,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n q_seqlen: Array | None = None,\n kv_seqlen: Array | None = None,\n q_offsets: Array | None = None,\n kv_offsets: Array | None = None,\n fp8_params: FP8Params | None = None,\n *,\n scale: float = 1.0,\n mask_type: MaskType = MaskType.NO_MASK,\n seed: int = 42,\n dropout_rate: float = 0.,\n qkv_layout: str = ""BTNH"",\n sliding_window_length: int | None = None,\n use_fp8: bool = False,\n return_residual: bool = False\n):\n """"""Computes dot-product attention given query (Q), key (K), and value (V).\n",python,selection_command +7445,20747655,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72092,1047,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n q_seqlen: Array | None = None,\n kv_seqlen: Array | None = None,\n q_offsets: Array | None = None,\n kv_offsets: Array | None = None,\n fp8_params: FP8Params | None = None,\n *,\n scale: float = 1.0,\n mask_type: MaskType = MaskType.NO_MASK,\n seed: int = 42,\n dropout_rate: float = 0.,\n qkv_layout: str = ""BTNH"",\n sliding_window_length: int | None = None,\n use_fp8: bool = False,\n return_residual: bool = False\n):\n """"""Computes dot-product attention given query (Q), key (K), and value (V).\n\n This function serves as the core operation for applying attention\n mechanisms as described in the paper [https://arxiv.org/abs/1706.03762].\n Initially, it determines the attention weights by processing Q and K,\n subsequently combining the outcomes using K. 
Throughout this function, we\n utilize the following uppercase letters to represent specific parameters of\n array:\n",python,selection_command +7446,20747855,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72092,1222,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n q_seqlen: Array | None = None,\n kv_seqlen: Array | None = None,\n q_offsets: Array | None = None,\n kv_offsets: Array | None = None,\n fp8_params: FP8Params | None = None,\n *,\n scale: float = 1.0,\n mask_type: MaskType = MaskType.NO_MASK,\n seed: int = 42,\n dropout_rate: float = 0.,\n qkv_layout: str = ""BTNH"",\n sliding_window_length: int | None = None,\n use_fp8: bool = False,\n return_residual: bool = False\n):\n """"""Computes dot-product attention given query (Q), key (K), and value (V).\n\n This function serves as the core operation for applying attention\n mechanisms as described in the paper [https://arxiv.org/abs/1706.03762].\n Initially, it determines the attention weights by processing Q and K,\n subsequently combining the outcomes using K. Throughout this function, we\n utilize the following uppercase letters to represent specific parameters of\n array:\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head.\n",python,selection_command +7447,20748029,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72092,1403,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n q_seqlen: Array | None = None,\n kv_seqlen: Array | None = None,\n q_offsets: Array | None = None,\n kv_offsets: Array | None = None,\n fp8_params: FP8Params | None = None,\n *,\n scale: float = 1.0,\n mask_type: MaskType = MaskType.NO_MASK,\n seed: int = 42,\n dropout_rate: float = 0.,\n qkv_layout: str = ""BTNH"",\n sliding_window_length: int | None = None,\n use_fp8: bool = False,\n return_residual: bool = False\n):\n """"""Computes dot-product attention given query (Q), key (K), and value (V).\n\n This function serves as the core operation for applying attention\n mechanisms as described in the paper [https://arxiv.org/abs/1706.03762].\n Initially, it determines the attention weights by processing Q and K,\n subsequently combining the outcomes using K. Throughout this function, we\n utilize the following uppercase letters to represent specific parameters of\n array:\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head.\n\n The supported layouts for Q, K, V are either BT(S)NH or BNT(S)H, and they must\n adhere to the same layout. 
The output layout remains consistent with Q,\n defaulting to BT(S)NH.\n",python,selection_command +7448,20748121,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72092,4333,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n q_seqlen: Array | None = None,\n kv_seqlen: Array | None = None,\n q_offsets: Array | None = None,\n kv_offsets: Array | None = None,\n fp8_params: FP8Params | None = None,\n *,\n scale: float = 1.0,\n mask_type: MaskType = MaskType.NO_MASK,\n seed: int = 42,\n dropout_rate: float = 0.,\n qkv_layout: str = ""BTNH"",\n sliding_window_length: int | None = None,\n use_fp8: bool = False,\n return_residual: bool = False\n):\n """"""Computes dot-product attention given query (Q), key (K), and value (V).\n\n This function serves as the core operation for applying attention\n mechanisms as described in the paper [https://arxiv.org/abs/1706.03762].\n Initially, it determines the attention weights by processing Q and K,\n subsequently combining the outcomes using K. Throughout this function, we\n utilize the following uppercase letters to represent specific parameters of\n array:\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head.\n\n The supported layouts for Q, K, V are either BT(S)NH or BNT(S)H, and they must\n adhere to the same layout. The output layout remains consistent with Q,\n defaulting to BT(S)NH.\n\n Args:\n query: Queries for attention calculation with a shape of BTNH or BNTH.\n key: Keys for attention calculation with a shape of BSNH or BNSH.\n value: Values to be used in attention with a shape of BSNH or BNSH.\n bias: Bias to be added to logits with a shape of BNTS.\n mask: Mask used to filter out logits with a shape of BNTS.\n q_seqlen: Non padded sequence length of query with a shape of B.\n If q_offsets is set, q_seqlen should have shape [B,M] where M is the\n maximum number of segments per batch. For batch that has less segments\n than maximum segments, fill the padded entries with -1.\n kv_seqlen: Non padded sequence length of key and value with a shape of B.\n If kv_offsets is set, kv_seqlen should have shape [B,M] where M is the\n maximum number of segments per batch. For batch that has less segments\n than maximum segments, fill the padded entries with -1.\n q_offsets: offset of each segment packed in query with a shape of [B,M+1]\n where M is the maximum number of segments per batch. For batch that has\n less segments than maximum segments, fill the padded entries with -1.\n E.g, if 2 batches has 3 and 2 segments respectively, each segment has\n size 1, q_offsets = [[0,1,2,-1], [0,1,-1,-1]]. q_seqlen should be set\n to indicate the size of each segment.\n kv_offsets: offset of each segment packed in key with a shape of [B,M+1]\n where M is the maximum number of segments per batch. For batch that has\n less segments than maximum segments, fill the padded entries with -1.\n E.g, if 2 batches has 3 and 2 segments respectively, each segment has\n size 1, kv_offsets = [[0,1,2,-1], [0,1,-1,-1]]. 
kv_seqlen should be set\n to indicate the size of each segment.\n scale: Scale for the query.\n dropout_rate: Dropout rate.\n qkv_layout: Layout string, with supported formats being BTNH, BNTH, BSNH,\n BNSH.\n sliding_window_length: Window size to make attention only attend to each\n token's left local window (pos - sliding_window_length, pos] where `pos`\n is the index of each token. E.g., if sliding_window_length == 3 and the\n sequence is [0, 1, 2, 3, c, 4, 5], token `c` can attend to [4, 5, c].\n use_fp8: Whether to use FP8 attention mechanism.\n return_residual: Whether to return the logsumexp tensor of shape BTN\n or BNT to users. See section 3.1.1 in the FlashAttention-2 paper:\n https://arxiv.org/pdf/2307.08691 to find the definition of logsumexp.\n Returns:\n output: the same shape as the query.\n residual: the logsumexp tensor if return_residual=True. (non fp8)\n amax_s: amax of state. (fp8 only)\n amax_o: amax of output. (fp8 only)\n """"""\n # TODO(b/380898464): Check the compute capability, e.g., require GPU device,\n # in the kernel implementation (c++) code.\n cudnn_version = check_cudnn_version()\n layout = _normalize_layout(qkv_layout)\n",python,selection_command +7449,20748306,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72092,5681,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n q_seqlen: Array | None = None,\n kv_seqlen: Array | None = None,\n q_offsets: Array | None = None,\n kv_offsets: Array | None = None,\n fp8_params: FP8Params | None = None,\n *,\n scale: float = 1.0,\n mask_type: MaskType = MaskType.NO_MASK,\n seed: int = 42,\n dropout_rate: float = 0.,\n qkv_layout: str = ""BTNH"",\n sliding_window_length: int | None = None,\n use_fp8: bool = False,\n return_residual: bool = False\n):\n """"""Computes dot-product attention given query (Q), key (K), and value (V).\n\n This function serves as the core operation for applying attention\n mechanisms as described in the paper [https://arxiv.org/abs/1706.03762].\n Initially, it determines the attention weights by processing Q and K,\n subsequently combining the outcomes using K. Throughout this function, we\n utilize the following uppercase letters to represent specific parameters of\n array:\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head.\n\n The supported layouts for Q, K, V are either BT(S)NH or BNT(S)H, and they must\n adhere to the same layout. The output layout remains consistent with Q,\n defaulting to BT(S)NH.\n\n Args:\n query: Queries for attention calculation with a shape of BTNH or BNTH.\n key: Keys for attention calculation with a shape of BSNH or BNSH.\n value: Values to be used in attention with a shape of BSNH or BNSH.\n bias: Bias to be added to logits with a shape of BNTS.\n mask: Mask used to filter out logits with a shape of BNTS.\n q_seqlen: Non padded sequence length of query with a shape of B.\n If q_offsets is set, q_seqlen should have shape [B,M] where M is the\n maximum number of segments per batch. For batch that has less segments\n than maximum segments, fill the padded entries with -1.\n kv_seqlen: Non padded sequence length of key and value with a shape of B.\n If kv_offsets is set, kv_seqlen should have shape [B,M] where M is the\n maximum number of segments per batch. 
For batch that has less segments\n than maximum segments, fill the padded entries with -1.\n q_offsets: offset of each segment packed in query with a shape of [B,M+1]\n where M is the maximum number of segments per batch. For batch that has\n less segments than maximum segments, fill the padded entries with -1.\n E.g, if 2 batches has 3 and 2 segments respectively, each segment has\n size 1, q_offsets = [[0,1,2,-1], [0,1,-1,-1]]. q_seqlen should be set\n to indicate the size of each segment.\n kv_offsets: offset of each segment packed in key with a shape of [B,M+1]\n where M is the maximum number of segments per batch. For batch that has\n less segments than maximum segments, fill the padded entries with -1.\n E.g, if 2 batches has 3 and 2 segments respectively, each segment has\n size 1, kv_offsets = [[0,1,2,-1], [0,1,-1,-1]]. kv_seqlen should be set\n to indicate the size of each segment.\n scale: Scale for the query.\n dropout_rate: Dropout rate.\n qkv_layout: Layout string, with supported formats being BTNH, BNTH, BSNH,\n BNSH.\n sliding_window_length: Window size to make attention only attend to each\n token's left local window (pos - sliding_window_length, pos] where `pos`\n is the index of each token. E.g., if sliding_window_length == 3 and the\n sequence is [0, 1, 2, 3, c, 4, 5], token `c` can attend to [4, 5, c].\n use_fp8: Whether to use FP8 attention mechanism.\n return_residual: Whether to return the logsumexp tensor of shape BTN\n or BNT to users. See section 3.1.1 in the FlashAttention-2 paper:\n https://arxiv.org/pdf/2307.08691 to find the definition of logsumexp.\n Returns:\n output: the same shape as the query.\n residual: the logsumexp tensor if return_residual=True. (non fp8)\n amax_s: amax of state. (fp8 only)\n amax_o: amax of output. 
(fp8 only)\n """"""\n # TODO(b/380898464): Check the compute capability, e.g., require GPU device,\n # in the kernel implementation (c++) code.\n cudnn_version = check_cudnn_version()\n layout = _normalize_layout(qkv_layout)\n\n if use_fp8:\n if fp8_params is None:\n raise ValueError(""fp8_params should not be None."")\n if mask_type not in (MaskType.NO_MASK, MaskType.CAUSAL):\n raise ValueError(""Only NO_MASK or CAUSAL masks are supported for fp8."")\n if not all(x is None for x in [bias, mask, q_seqlen, kv_seqlen]):\n raise ValueError(\n f""Expected 'None' for bias, mask, q_seqlen, and kv_seqlen, ""\n f""but got: bias={bias}, mask={mask}, q_seqlen={q_seqlen}, kv_seqlen={kv_seqlen}""\n )\n check_fp8_params(fp8_params)\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n None, None, layout)\n output, amax_s, amax_o = _dot_product_attention_fp8(\n query, key, value, fp8_params,\n scale, mask_type == MaskType.CAUSAL, layout.value, cudnn_version\n )\n return output, amax_s, amax_o\n else:\n if has_padding(mask_type) and (q_seqlen is None or kv_seqlen is None):\n raise ValueError(""Require q_seqlen and kv_seqlen to generate padding mask"")\n if sliding_window_length is not None and sliding_window_length <= 0:\n raise ValueError(\n f""Require sliding_window_length > 0, got {sliding_window_length}"")\n if q_offsets is not None and (q_seqlen is None or kv_seqlen is None):\n raise ValueError(""Require q_seqlen and kv_seqlen to use packed layout"")\n",python,selection_command +7450,20748497,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72092,6100,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n q_seqlen: Array | None = None,\n kv_seqlen: Array | None = None,\n q_offsets: Array | None = None,\n kv_offsets: Array | None = None,\n fp8_params: FP8Params | None = None,\n *,\n scale: float = 1.0,\n mask_type: MaskType = MaskType.NO_MASK,\n seed: int = 42,\n dropout_rate: float = 0.,\n qkv_layout: str = ""BTNH"",\n sliding_window_length: int | None = None,\n use_fp8: bool = False,\n return_residual: bool = False\n):\n """"""Computes dot-product attention given query (Q), key (K), and value (V).\n\n This function serves as the core operation for applying attention\n mechanisms as described in the paper [https://arxiv.org/abs/1706.03762].\n Initially, it determines the attention weights by processing Q and K,\n subsequently combining the outcomes using K. Throughout this function, we\n utilize the following uppercase letters to represent specific parameters of\n array:\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head.\n\n The supported layouts for Q, K, V are either BT(S)NH or BNT(S)H, and they must\n adhere to the same layout. The output layout remains consistent with Q,\n defaulting to BT(S)NH.\n\n Args:\n query: Queries for attention calculation with a shape of BTNH or BNTH.\n key: Keys for attention calculation with a shape of BSNH or BNSH.\n value: Values to be used in attention with a shape of BSNH or BNSH.\n bias: Bias to be added to logits with a shape of BNTS.\n mask: Mask used to filter out logits with a shape of BNTS.\n q_seqlen: Non padded sequence length of query with a shape of B.\n If q_offsets is set, q_seqlen should have shape [B,M] where M is the\n maximum number of segments per batch. 
For batch that has less segments\n than maximum segments, fill the padded entries with -1.\n kv_seqlen: Non padded sequence length of key and value with a shape of B.\n If kv_offsets is set, kv_seqlen should have shape [B,M] where M is the\n maximum number of segments per batch. For batch that has less segments\n than maximum segments, fill the padded entries with -1.\n q_offsets: offset of each segment packed in query with a shape of [B,M+1]\n where M is the maximum number of segments per batch. For batch that has\n less segments than maximum segments, fill the padded entries with -1.\n E.g, if 2 batches has 3 and 2 segments respectively, each segment has\n size 1, q_offsets = [[0,1,2,-1], [0,1,-1,-1]]. q_seqlen should be set\n to indicate the size of each segment.\n kv_offsets: offset of each segment packed in key with a shape of [B,M+1]\n where M is the maximum number of segments per batch. For batch that has\n less segments than maximum segments, fill the padded entries with -1.\n E.g, if 2 batches has 3 and 2 segments respectively, each segment has\n size 1, kv_offsets = [[0,1,2,-1], [0,1,-1,-1]]. kv_seqlen should be set\n to indicate the size of each segment.\n scale: Scale for the query.\n dropout_rate: Dropout rate.\n qkv_layout: Layout string, with supported formats being BTNH, BNTH, BSNH,\n BNSH.\n sliding_window_length: Window size to make attention only attend to each\n token's left local window (pos - sliding_window_length, pos] where `pos`\n is the index of each token. E.g., if sliding_window_length == 3 and the\n sequence is [0, 1, 2, 3, c, 4, 5], token `c` can attend to [4, 5, c].\n use_fp8: Whether to use FP8 attention mechanism.\n return_residual: Whether to return the logsumexp tensor of shape BTN\n or BNT to users. See section 3.1.1 in the FlashAttention-2 paper:\n https://arxiv.org/pdf/2307.08691 to find the definition of logsumexp.\n Returns:\n output: the same shape as the query.\n residual: the logsumexp tensor if return_residual=True. (non fp8)\n amax_s: amax of state. (fp8 only)\n amax_o: amax of output. 
(fp8 only)\n """"""\n # TODO(b/380898464): Check the compute capability, e.g., require GPU device,\n # in the kernel implementation (c++) code.\n cudnn_version = check_cudnn_version()\n layout = _normalize_layout(qkv_layout)\n\n if use_fp8:\n if fp8_params is None:\n raise ValueError(""fp8_params should not be None."")\n if mask_type not in (MaskType.NO_MASK, MaskType.CAUSAL):\n raise ValueError(""Only NO_MASK or CAUSAL masks are supported for fp8."")\n if not all(x is None for x in [bias, mask, q_seqlen, kv_seqlen]):\n raise ValueError(\n f""Expected 'None' for bias, mask, q_seqlen, and kv_seqlen, ""\n f""but got: bias={bias}, mask={mask}, q_seqlen={q_seqlen}, kv_seqlen={kv_seqlen}""\n )\n check_fp8_params(fp8_params)\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n None, None, layout)\n output, amax_s, amax_o = _dot_product_attention_fp8(\n query, key, value, fp8_params,\n scale, mask_type == MaskType.CAUSAL, layout.value, cudnn_version\n )\n return output, amax_s, amax_o\n else:\n if has_padding(mask_type) and (q_seqlen is None or kv_seqlen is None):\n raise ValueError(""Require q_seqlen and kv_seqlen to generate padding mask"")\n if sliding_window_length is not None and sliding_window_length <= 0:\n raise ValueError(\n f""Require sliding_window_length > 0, got {sliding_window_length}"")\n if q_offsets is not None and (q_seqlen is None or kv_seqlen is None):\n raise ValueError(""Require q_seqlen and kv_seqlen to use packed layout"")\n\n bias = combine_bias_and_mask(bias, mask, query.dtype)\n # check if input shape and data type is compatiable\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n None, None, layout)\n has_bias = bias is not None\n has_dbias = has_bias and \\n should_export_dbias(bias.shape, query.shape, layout) # type: ignore[union-attr]\n variadic_args = (has_bias, has_dbias)\n",python,selection_command +7451,20748660,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72092,6409,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n q_seqlen: Array | None = None,\n kv_seqlen: Array | None = None,\n q_offsets: Array | None = None,\n kv_offsets: Array | None = None,\n fp8_params: FP8Params | None = None,\n *,\n scale: float = 1.0,\n mask_type: MaskType = MaskType.NO_MASK,\n seed: int = 42,\n dropout_rate: float = 0.,\n qkv_layout: str = ""BTNH"",\n sliding_window_length: int | None = None,\n use_fp8: bool = False,\n return_residual: bool = False\n):\n """"""Computes dot-product attention given query (Q), key (K), and value (V).\n\n This function serves as the core operation for applying attention\n mechanisms as described in the paper [https://arxiv.org/abs/1706.03762].\n Initially, it determines the attention weights by processing Q and K,\n subsequently combining the outcomes using K. Throughout this function, we\n utilize the following uppercase letters to represent specific parameters of\n array:\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head.\n\n The supported layouts for Q, K, V are either BT(S)NH or BNT(S)H, and they must\n adhere to the same layout. 
The output layout remains consistent with Q,\n defaulting to BT(S)NH.\n\n Args:\n query: Queries for attention calculation with a shape of BTNH or BNTH.\n key: Keys for attention calculation with a shape of BSNH or BNSH.\n value: Values to be used in attention with a shape of BSNH or BNSH.\n bias: Bias to be added to logits with a shape of BNTS.\n mask: Mask used to filter out logits with a shape of BNTS.\n q_seqlen: Non padded sequence length of query with a shape of B.\n If q_offsets is set, q_seqlen should have shape [B,M] where M is the\n maximum number of segments per batch. For batch that has less segments\n than maximum segments, fill the padded entries with -1.\n kv_seqlen: Non padded sequence length of key and value with a shape of B.\n If kv_offsets is set, kv_seqlen should have shape [B,M] where M is the\n maximum number of segments per batch. For batch that has less segments\n than maximum segments, fill the padded entries with -1.\n q_offsets: offset of each segment packed in query with a shape of [B,M+1]\n where M is the maximum number of segments per batch. For batch that has\n less segments than maximum segments, fill the padded entries with -1.\n E.g, if 2 batches has 3 and 2 segments respectively, each segment has\n size 1, q_offsets = [[0,1,2,-1], [0,1,-1,-1]]. q_seqlen should be set\n to indicate the size of each segment.\n kv_offsets: offset of each segment packed in key with a shape of [B,M+1]\n where M is the maximum number of segments per batch. For batch that has\n less segments than maximum segments, fill the padded entries with -1.\n E.g, if 2 batches has 3 and 2 segments respectively, each segment has\n size 1, kv_offsets = [[0,1,2,-1], [0,1,-1,-1]]. kv_seqlen should be set\n to indicate the size of each segment.\n scale: Scale for the query.\n dropout_rate: Dropout rate.\n qkv_layout: Layout string, with supported formats being BTNH, BNTH, BSNH,\n BNSH.\n sliding_window_length: Window size to make attention only attend to each\n token's left local window (pos - sliding_window_length, pos] where `pos`\n is the index of each token. E.g., if sliding_window_length == 3 and the\n sequence is [0, 1, 2, 3, c, 4, 5], token `c` can attend to [4, 5, c].\n use_fp8: Whether to use FP8 attention mechanism.\n return_residual: Whether to return the logsumexp tensor of shape BTN\n or BNT to users. See section 3.1.1 in the FlashAttention-2 paper:\n https://arxiv.org/pdf/2307.08691 to find the definition of logsumexp.\n Returns:\n output: the same shape as the query.\n residual: the logsumexp tensor if return_residual=True. (non fp8)\n amax_s: amax of state. (fp8 only)\n amax_o: amax of output. 
(fp8 only)\n """"""\n # TODO(b/380898464): Check the compute capability, e.g., require GPU device,\n # in the kernel implementation (c++) code.\n cudnn_version = check_cudnn_version()\n layout = _normalize_layout(qkv_layout)\n\n if use_fp8:\n if fp8_params is None:\n raise ValueError(""fp8_params should not be None."")\n if mask_type not in (MaskType.NO_MASK, MaskType.CAUSAL):\n raise ValueError(""Only NO_MASK or CAUSAL masks are supported for fp8."")\n if not all(x is None for x in [bias, mask, q_seqlen, kv_seqlen]):\n raise ValueError(\n f""Expected 'None' for bias, mask, q_seqlen, and kv_seqlen, ""\n f""but got: bias={bias}, mask={mask}, q_seqlen={q_seqlen}, kv_seqlen={kv_seqlen}""\n )\n check_fp8_params(fp8_params)\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n None, None, layout)\n output, amax_s, amax_o = _dot_product_attention_fp8(\n query, key, value, fp8_params,\n scale, mask_type == MaskType.CAUSAL, layout.value, cudnn_version\n )\n return output, amax_s, amax_o\n else:\n if has_padding(mask_type) and (q_seqlen is None or kv_seqlen is None):\n raise ValueError(""Require q_seqlen and kv_seqlen to generate padding mask"")\n if sliding_window_length is not None and sliding_window_length <= 0:\n raise ValueError(\n f""Require sliding_window_length > 0, got {sliding_window_length}"")\n if q_offsets is not None and (q_seqlen is None or kv_seqlen is None):\n raise ValueError(""Require q_seqlen and kv_seqlen to use packed layout"")\n\n bias = combine_bias_and_mask(bias, mask, query.dtype)\n # check if input shape and data type is compatiable\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n None, None, layout)\n has_bias = bias is not None\n has_dbias = has_bias and \\n should_export_dbias(bias.shape, query.shape, layout) # type: ignore[union-attr]\n variadic_args = (has_bias, has_dbias)\n\n _not_used = jnp.zeros(0, dtype=query.dtype)\n if bias is None:\n bias = _not_used\n if q_seqlen is None:\n q_seqlen = _not_used\n if kv_seqlen is None:\n kv_seqlen = _not_used\n if q_offsets is None:\n q_offsets = _not_used\n if kv_offsets is None:\n kv_offsets = _not_used\n",python,selection_command +7452,20748778,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",72092,6710,"def dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n q_seqlen: Array | None = None,\n kv_seqlen: Array | None = None,\n q_offsets: Array | None = None,\n kv_offsets: Array | None = None,\n fp8_params: FP8Params | None = None,\n *,\n scale: float = 1.0,\n mask_type: MaskType = MaskType.NO_MASK,\n seed: int = 42,\n dropout_rate: float = 0.,\n qkv_layout: str = ""BTNH"",\n sliding_window_length: int | None = None,\n use_fp8: bool = False,\n return_residual: bool = False\n):\n """"""Computes dot-product attention given query (Q), key (K), and value (V).\n\n This function serves as the core operation for applying attention\n mechanisms as described in the paper [https://arxiv.org/abs/1706.03762].\n Initially, it determines the attention weights by processing Q and K,\n subsequently combining the outcomes using K. 
Throughout this function, we\n utilize the following uppercase letters to represent specific parameters of\n array:\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head.\n\n The supported layouts for Q, K, V are either BT(S)NH or BNT(S)H, and they must\n adhere to the same layout. The output layout remains consistent with Q,\n defaulting to BT(S)NH.\n\n Args:\n query: Queries for attention calculation with a shape of BTNH or BNTH.\n key: Keys for attention calculation with a shape of BSNH or BNSH.\n value: Values to be used in attention with a shape of BSNH or BNSH.\n bias: Bias to be added to logits with a shape of BNTS.\n mask: Mask used to filter out logits with a shape of BNTS.\n q_seqlen: Non padded sequence length of query with a shape of B.\n If q_offsets is set, q_seqlen should have shape [B,M] where M is the\n maximum number of segments per batch. For batch that has less segments\n than maximum segments, fill the padded entries with -1.\n kv_seqlen: Non padded sequence length of key and value with a shape of B.\n If kv_offsets is set, kv_seqlen should have shape [B,M] where M is the\n maximum number of segments per batch. For batch that has less segments\n than maximum segments, fill the padded entries with -1.\n q_offsets: offset of each segment packed in query with a shape of [B,M+1]\n where M is the maximum number of segments per batch. For batch that has\n less segments than maximum segments, fill the padded entries with -1.\n E.g, if 2 batches has 3 and 2 segments respectively, each segment has\n size 1, q_offsets = [[0,1,2,-1], [0,1,-1,-1]]. q_seqlen should be set\n to indicate the size of each segment.\n kv_offsets: offset of each segment packed in key with a shape of [B,M+1]\n where M is the maximum number of segments per batch. For batch that has\n less segments than maximum segments, fill the padded entries with -1.\n E.g, if 2 batches has 3 and 2 segments respectively, each segment has\n size 1, kv_offsets = [[0,1,2,-1], [0,1,-1,-1]]. kv_seqlen should be set\n to indicate the size of each segment.\n scale: Scale for the query.\n dropout_rate: Dropout rate.\n qkv_layout: Layout string, with supported formats being BTNH, BNTH, BSNH,\n BNSH.\n sliding_window_length: Window size to make attention only attend to each\n token's left local window (pos - sliding_window_length, pos] where `pos`\n is the index of each token. E.g., if sliding_window_length == 3 and the\n sequence is [0, 1, 2, 3, c, 4, 5], token `c` can attend to [4, 5, c].\n use_fp8: Whether to use FP8 attention mechanism.\n return_residual: Whether to return the logsumexp tensor of shape BTN\n or BNT to users. See section 3.1.1 in the FlashAttention-2 paper:\n https://arxiv.org/pdf/2307.08691 to find the definition of logsumexp.\n Returns:\n output: the same shape as the query.\n residual: the logsumexp tensor if return_residual=True. (non fp8)\n amax_s: amax of state. (fp8 only)\n amax_o: amax of output. 
(fp8 only)\n """"""\n # TODO(b/380898464): Check the compute capability, e.g., require GPU device,\n # in the kernel implementation (c++) code.\n cudnn_version = check_cudnn_version()\n layout = _normalize_layout(qkv_layout)\n\n if use_fp8:\n if fp8_params is None:\n raise ValueError(""fp8_params should not be None."")\n if mask_type not in (MaskType.NO_MASK, MaskType.CAUSAL):\n raise ValueError(""Only NO_MASK or CAUSAL masks are supported for fp8."")\n if not all(x is None for x in [bias, mask, q_seqlen, kv_seqlen]):\n raise ValueError(\n f""Expected 'None' for bias, mask, q_seqlen, and kv_seqlen, ""\n f""but got: bias={bias}, mask={mask}, q_seqlen={q_seqlen}, kv_seqlen={kv_seqlen}""\n )\n check_fp8_params(fp8_params)\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n None, None, layout)\n output, amax_s, amax_o = _dot_product_attention_fp8(\n query, key, value, fp8_params,\n scale, mask_type == MaskType.CAUSAL, layout.value, cudnn_version\n )\n return output, amax_s, amax_o\n else:\n if has_padding(mask_type) and (q_seqlen is None or kv_seqlen is None):\n raise ValueError(""Require q_seqlen and kv_seqlen to generate padding mask"")\n if sliding_window_length is not None and sliding_window_length <= 0:\n raise ValueError(\n f""Require sliding_window_length > 0, got {sliding_window_length}"")\n if q_offsets is not None and (q_seqlen is None or kv_seqlen is None):\n raise ValueError(""Require q_seqlen and kv_seqlen to use packed layout"")\n\n bias = combine_bias_and_mask(bias, mask, query.dtype)\n # check if input shape and data type is compatiable\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n None, None, layout)\n has_bias = bias is not None\n has_dbias = has_bias and \\n should_export_dbias(bias.shape, query.shape, layout) # type: ignore[union-attr]\n variadic_args = (has_bias, has_dbias)\n\n _not_used = jnp.zeros(0, dtype=query.dtype)\n if bias is None:\n bias = _not_used\n if q_seqlen is None:\n q_seqlen = _not_used\n if kv_seqlen is None:\n kv_seqlen = _not_used\n if q_offsets is None:\n q_offsets = _not_used\n if kv_offsets is None:\n kv_offsets = _not_used\n\n output = _dot_product_attention(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n _not_used, _not_used, scale, seed, dropout_rate, variadic_args,\n mask_type, layout.value, sliding_window_length, cudnn_version,\n return_residual)\n return output\n",python,selection_command +7453,20752541,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78802,0,"",python,selection_command +7454,20753118,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78784,0,"",python,selection_command +7455,20753330,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78759,0,"",python,selection_command +7456,20753359,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78688,0,"",python,selection_command +7457,20753389,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78616,0,"",python,selection_command +7458,20753414,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78539,0,"",python,selection_command 
+7459,20753449,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78502,0,"",python,selection_command +7460,20753490,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78501,0,"",python,selection_command +7461,20753515,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78472,0,"",python,selection_command +7462,20753549,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78445,0,"",python,selection_command +7463,20753581,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78417,0,"",python,selection_command +7464,20753615,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78391,0,"",python,selection_command +7465,20753649,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78363,0,"",python,selection_command +7466,20753682,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78337,0,"",python,selection_command +7467,20753722,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78310,0,"",python,selection_command +7468,20753757,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78285,0,"",python,selection_command +7469,20753790,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78262,0,"",python,selection_command +7470,20753822,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78241,0,"",python,selection_command +7471,20753855,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78193,0,"",python,selection_command +7472,20753888,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78192,0,"",python,selection_command +7473,20753920,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78150,0,"",python,selection_command +7474,20753953,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78063,0,"",python,selection_command +7475,20753986,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78032,0,"",python,selection_command +7476,20754021,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",78000,0,"",python,selection_command +7477,20754053,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77974,0,"",python,selection_command +7478,20754086,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77888,0,"",python,selection_command +7479,20754119,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77832,0,"",python,selection_command 
+7480,20754153,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77774,0,"",python,selection_command +7481,20774179,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77778,0,"",python,selection_command +7482,20774475,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77783,0,"",python,selection_command +7483,20774796,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",77785,0,"",python,selection_command +7484,20775184,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67527,0,"",python,selection_command +7485,20775880,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,45,"def combine_bias_and_mask(bias, mask, dtype):",python,selection_command +7486,20776033,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,68,"def combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:",python,selection_command +7487,20776297,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,104,"def combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape",python,selection_command +7488,20776310,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,171,"def combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)",python,selection_command +7489,20776345,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,172,"def combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)\n",python,selection_command +7490,20776377,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,195,"def combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)\n\n if mask is not None:",python,selection_command +7491,20776412,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,226,"def combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)\n\n if mask is not None:\n if mask.dtype == jnp.bool:",python,selection_command +7492,20776446,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,289,"def combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)\n\n if mask is not None:\n if mask.dtype == jnp.bool:\n large_negative_number = get_large_negative_number(dtype)",python,selection_command +7493,20776480,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,364,"def 
combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)\n\n if mask is not None:\n if mask.dtype == jnp.bool:\n large_negative_number = get_large_negative_number(dtype)\n mask = jnp.where(mask, jnp.asarray(0, dtype), large_negative_number)",python,selection_command +7494,20776511,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,400,"def combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)\n\n if mask is not None:\n if mask.dtype == jnp.bool:\n large_negative_number = get_large_negative_number(dtype)\n mask = jnp.where(mask, jnp.asarray(0, dtype), large_negative_number)\n # reshape mask to have 4D shape",python,selection_command +7495,20776546,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,495,"def combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)\n\n if mask is not None:\n if mask.dtype == jnp.bool:\n large_negative_number = get_large_negative_number(dtype)\n mask = jnp.where(mask, jnp.asarray(0, dtype), large_negative_number)\n # reshape mask to have 4D shape\n mask = mask.reshape((1,) * (4 - len(mask.shape)) + mask.shape) # type: ignore[union-attr]",python,selection_command +7496,20776581,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,496,"def combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)\n\n if mask is not None:\n if mask.dtype == jnp.bool:\n large_negative_number = get_large_negative_number(dtype)\n mask = jnp.where(mask, jnp.asarray(0, dtype), large_negative_number)\n # reshape mask to have 4D shape\n mask = mask.reshape((1,) * (4 - len(mask.shape)) + mask.shape) # type: ignore[union-attr]\n",python,selection_command +7497,20776614,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,522,"def combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)\n\n if mask is not None:\n if mask.dtype == jnp.bool:\n large_negative_number = get_large_negative_number(dtype)\n mask = jnp.where(mask, jnp.asarray(0, dtype), large_negative_number)\n # reshape mask to have 4D shape\n mask = mask.reshape((1,) * (4 - len(mask.shape)) + mask.shape) # type: ignore[union-attr]\n\n # combine bias and mask",python,selection_command +7498,20776645,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,541,"def combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)\n\n if mask is not None:\n if mask.dtype == jnp.bool:\n large_negative_number = get_large_negative_number(dtype)\n mask = jnp.where(mask, jnp.asarray(0, dtype), large_negative_number)\n # reshape mask to have 4D shape\n mask = mask.reshape((1,) * (4 - len(mask.shape)) + mask.shape) # type: ignore[union-attr]\n\n # combine bias and mask\n if bias is 
None:",python,selection_command +7499,20776679,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,557,"def combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)\n\n if mask is not None:\n if mask.dtype == jnp.bool:\n large_negative_number = get_large_negative_number(dtype)\n mask = jnp.where(mask, jnp.asarray(0, dtype), large_negative_number)\n # reshape mask to have 4D shape\n mask = mask.reshape((1,) * (4 - len(mask.shape)) + mask.shape) # type: ignore[union-attr]\n\n # combine bias and mask\n if bias is None:\n bias = mask",python,selection_command +7500,20776713,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,565,"def combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)\n\n if mask is not None:\n if mask.dtype == jnp.bool:\n large_negative_number = get_large_negative_number(dtype)\n mask = jnp.where(mask, jnp.asarray(0, dtype), large_negative_number)\n # reshape mask to have 4D shape\n mask = mask.reshape((1,) * (4 - len(mask.shape)) + mask.shape) # type: ignore[union-attr]\n\n # combine bias and mask\n if bias is None:\n bias = mask\n else:",python,selection_command +7501,20776878,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,590,"def combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)\n\n if mask is not None:\n if mask.dtype == jnp.bool:\n large_negative_number = get_large_negative_number(dtype)\n mask = jnp.where(mask, jnp.asarray(0, dtype), large_negative_number)\n # reshape mask to have 4D shape\n mask = mask.reshape((1,) * (4 - len(mask.shape)) + mask.shape) # type: ignore[union-attr]\n\n # combine bias and mask\n if bias is None:\n bias = mask\n else:\n if mask is not None:",python,selection_command +7502,20777031,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,632,"def combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)\n\n if mask is not None:\n if mask.dtype == jnp.bool:\n large_negative_number = get_large_negative_number(dtype)\n mask = jnp.where(mask, jnp.asarray(0, dtype), large_negative_number)\n # reshape mask to have 4D shape\n mask = mask.reshape((1,) * (4 - len(mask.shape)) + mask.shape) # type: ignore[union-attr]\n\n # combine bias and mask\n if bias is None:\n bias = mask\n else:\n if mask is not None:\n # should be broadcast to same shape",python,selection_command +7503,20777165,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,657,"def combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)\n\n if mask is not None:\n if mask.dtype == jnp.bool:\n large_negative_number = get_large_negative_number(dtype)\n mask = jnp.where(mask, jnp.asarray(0, dtype), large_negative_number)\n # reshape mask to have 4D shape\n mask = mask.reshape((1,) * (4 - len(mask.shape)) + mask.shape) # 
type: ignore[union-attr]\n\n # combine bias and mask\n if bias is None:\n bias = mask\n else:\n if mask is not None:\n # should be broadcast to same shape\n bias = bias + mask",python,selection_command +7504,20777316,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",67523,671,"def combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)\n\n if mask is not None:\n if mask.dtype == jnp.bool:\n large_negative_number = get_large_negative_number(dtype)\n mask = jnp.where(mask, jnp.asarray(0, dtype), large_negative_number)\n # reshape mask to have 4D shape\n mask = mask.reshape((1,) * (4 - len(mask.shape)) + mask.shape) # type: ignore[union-attr]\n\n # combine bias and mask\n if bias is None:\n bias = mask\n else:\n if mask is not None:\n # should be broadcast to same shape\n bias = bias + mask\n return bias",python,selection_command +7505,20784821,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",68185,0,"",python,selection_command +7506,21068653,"utils/nn.py",0,0,"",python,tab +7507,21069841,"utils/nn.py",11145,0,"",python,selection_command +7508,21070257,"utils/nn.py",11121,0,"",python,selection_command +7509,21070483,"utils/nn.py",11120,0,"",python,selection_command +7510,21070515,"utils/nn.py",11041,0,"",python,selection_command +7511,21070548,"utils/nn.py",11031,0,"",python,selection_command +7512,21070580,"utils/nn.py",10998,0,"",python,selection_command +7513,21070614,"utils/nn.py",10955,0,"",python,selection_command +7514,21070647,"utils/nn.py",10929,0,"",python,selection_command +7515,21070681,"utils/nn.py",10903,0,"",python,selection_command +7516,21070714,"utils/nn.py",10875,0,"",python,selection_command +7517,21070748,"utils/nn.py",10851,0,"",python,selection_command +7518,21070781,"utils/nn.py",10823,0,"",python,selection_command +7519,21070814,"utils/nn.py",10773,0,"",python,selection_command +7520,21070846,"utils/nn.py",10703,0,"",python,selection_command +7521,21070881,"utils/nn.py",10702,0,"",python,selection_command +7522,21071052,"utils/nn.py",10631,0,"",python,selection_command +7523,21071190,"utils/nn.py",10630,0,"",python,selection_command +7524,21071343,"utils/nn.py",10605,0,"",python,selection_command +7525,21071460,"utils/nn.py",10559,0,"",python,selection_command +7526,21071680,"utils/nn.py",10462,0,"",python,selection_command +7527,21072305,"utils/nn.py",10388,0,"",python,selection_command +7528,21072445,"utils/nn.py",10374,0,"",python,selection_command +7529,21072638,"utils/nn.py",10288,0,"",python,selection_command +7530,21101250,"utils/nn.py",10241,0,"",python,selection_command +7531,21101594,"utils/nn.py",10202,0,"",python,selection_command +7532,21103647,"utils/nn.py",10240,0,"\n ",python,content +7533,21104619,"utils/nn.py",10253,0,"m",python,content +7534,21104620,"utils/nn.py",10254,0,"",python,selection_keyboard +7535,21104697,"utils/nn.py",10254,0,"a",python,content +7536,21104697,"utils/nn.py",10255,0,"",python,selection_keyboard +7537,21104760,"utils/nn.py",10255,0,"s",python,content +7538,21104760,"utils/nn.py",10256,0,"",python,selection_keyboard +7539,21104796,"utils/nn.py",10256,0,"k",python,content +7540,21104796,"utils/nn.py",10257,0,"",python,selection_keyboard +7541,21105042,"utils/nn.py",10257,0,"_",python,content +7542,21105042,"utils/nn.py",10258,0,"",python,selection_keyboard 
+7543,21105314,"utils/nn.py",10258,0,"4",python,content +7544,21105315,"utils/nn.py",10259,0,"",python,selection_keyboard +7545,21105534,"utils/nn.py",10259,0,"d",python,content +7546,21105534,"utils/nn.py",10260,0,"",python,selection_keyboard +7547,21106807,"utils/nn.py",10260,0," ",python,content +7548,21106808,"utils/nn.py",10261,0,"",python,selection_keyboard +7549,21106925,"utils/nn.py",10261,0,"=",python,content +7550,21106926,"utils/nn.py",10262,0,"",python,selection_keyboard +7551,21107742,"utils/nn.py",10262,0," ",python,content +7552,21107742,"utils/nn.py",10263,0,"",python,selection_keyboard +7553,21134417,"utils/nn.py",10263,0,"m",python,content +7554,21134417,"utils/nn.py",10264,0,"",python,selection_keyboard +7555,21134507,"utils/nn.py",10264,0,"a",python,content +7556,21134507,"utils/nn.py",10265,0,"",python,selection_keyboard +7557,21134560,"utils/nn.py",10265,0,"s",python,content +7558,21134560,"utils/nn.py",10266,0,"",python,selection_keyboard +7559,21134641,"utils/nn.py",10266,0,"k",python,content +7560,21134641,"utils/nn.py",10267,0,"",python,selection_keyboard +7561,21134843,"utils/nn.py",10267,0,"_",python,content +7562,21134844,"utils/nn.py",10268,0,"",python,selection_keyboard +7563,21135049,"utils/nn.py",10268,0,"4",python,content +7564,21135049,"utils/nn.py",10269,0,"",python,selection_keyboard +7565,21135270,"utils/nn.py",10269,0,"d",python,content +7566,21135270,"utils/nn.py",10270,0,"",python,selection_keyboard +7567,21136546,"utils/nn.py",10263,7,"",python,content +7568,21142664,"utils/nn.py",10263,0,"m",python,content +7569,21142664,"utils/nn.py",10264,0,"",python,selection_keyboard +7570,21142696,"utils/nn.py",10264,0,"a",python,content +7571,21142696,"utils/nn.py",10265,0,"",python,selection_keyboard +7572,21142752,"utils/nn.py",10265,0,"s",python,content +7573,21142752,"utils/nn.py",10266,0,"",python,selection_keyboard +7574,21142827,"utils/nn.py",10266,0,"k",python,content +7575,21142827,"utils/nn.py",10267,0,"",python,selection_keyboard +7576,21143082,"utils/nn.py",10267,0,"_",python,content +7577,21143082,"utils/nn.py",10268,0,"",python,selection_keyboard +7578,21144555,"utils/nn.py",10267,1,"",python,content +7579,21144696,"utils/nn.py",10266,1,"",python,content +7580,21144851,"utils/nn.py",10265,1,"",python,content +7581,21144985,"utils/nn.py",10264,1,"",python,content +7582,21145116,"utils/nn.py",10263,1,"",python,content +7583,21145234,"utils/nn.py",10263,0,"e",python,content +7584,21145235,"utils/nn.py",10264,0,"",python,selection_keyboard +7585,21145314,"utils/nn.py",10264,0,"i",python,content +7586,21145314,"utils/nn.py",10265,0,"",python,selection_keyboard +7587,21145380,"utils/nn.py",10265,0,"n",python,content +7588,21145380,"utils/nn.py",10266,0,"",python,selection_keyboard +7589,21145647,"utils/nn.py",10266,0,"o",python,content +7590,21145647,"utils/nn.py",10267,0,"",python,selection_keyboard +7591,21145731,"utils/nn.py",10267,0,"p",python,content +7592,21145732,"utils/nn.py",10268,0,"",python,selection_keyboard +7593,21145851,"utils/nn.py",10268,0,"s",python,content +7594,21145851,"utils/nn.py",10269,0,"",python,selection_keyboard +7595,21146331,"utils/nn.py",10269,0,".rearrange(mask_4d, ""b h q k -> b q k h"")",python,content +7596,21146629,"utils/nn.py",10309,0,"",python,selection_command +7597,21146802,"utils/nn.py",10308,0,"",python,selection_command +7598,21147051,"utils/nn.py",10307,0,"",python,selection_command +7599,21147082,"utils/nn.py",10305,0,"",python,selection_command 
+7600,21147115,"utils/nn.py",10303,0,"",python,selection_command +7601,21147147,"utils/nn.py",10301,0,"",python,selection_command +7602,21147182,"utils/nn.py",10298,0,"",python,selection_command +7603,21147348,"utils/nn.py",10296,0,"",python,selection_command +7604,21147501,"utils/nn.py",10294,0,"",python,selection_command +7605,21147618,"utils/nn.py",10292,0,"",python,selection_command +7606,21147782,"utils/nn.py",10290,0,"",python,selection_command +7607,21147954,"utils/nn.py",10289,0,"",python,selection_command +7608,21148168,"utils/nn.py",10290,0,"",python,selection_command +7609,21151122,"utils/nn.py",10290,1,"b",python,selection_command +7610,21151186,"utils/nn.py",10290,3,"b h",python,selection_command +7611,21151341,"utils/nn.py",10290,5,"b h q",python,selection_command +7612,21151498,"utils/nn.py",10290,7,"b h q k",python,selection_command +7613,21163100,"utils/nn.py",10290,7,"",python,content +7614,21163757,"utils/nn.py",10290,0,".",python,content +7615,21163757,"utils/nn.py",10291,0,"",python,selection_keyboard +7616,21163905,"utils/nn.py",10291,0,".",python,content +7617,21163905,"utils/nn.py",10292,0,"",python,selection_keyboard +7618,21164033,"utils/nn.py",10292,0,".",python,content +7619,21164033,"utils/nn.py",10293,0,"",python,selection_keyboard +7620,21164164,"utils/nn.py",10293,0," ",python,content +7621,21164164,"utils/nn.py",10294,0,"",python,selection_keyboard +7622,21177058,"utils/nn.py",10294,0,"t",python,content +7623,21177058,"utils/nn.py",10295,0,"",python,selection_keyboard +7624,21177431,"utils/nn.py",10295,0," ",python,content +7625,21177431,"utils/nn.py",10296,0,"",python,selection_keyboard +7626,21177564,"utils/nn.py",10296,0,"s",python,content +7627,21177565,"utils/nn.py",10297,0,"",python,selection_keyboard +7628,21177803,"utils/nn.py",10296,0,"",python,selection_command +7629,21178039,"utils/nn.py",10298,0,"",python,selection_command +7630,21178194,"utils/nn.py",10301,0,"",python,selection_command +7631,21178326,"utils/nn.py",10303,0,"",python,selection_command +7632,21178994,"utils/nn.py",10301,0,"",python,selection_command +7633,21179185,"utils/nn.py",10301,1,"b",python,selection_command +7634,21179243,"utils/nn.py",10301,3,"b q",python,selection_command +7635,21179379,"utils/nn.py",10301,5,"b q k",python,selection_command +7636,21179624,"utils/nn.py",10301,7,"b q k h",python,selection_command +7637,21179966,"utils/nn.py",10301,7,"",python,content +7638,21180229,"utils/nn.py",10301,0,".",python,content +7639,21180229,"utils/nn.py",10302,0,"",python,selection_keyboard +7640,21180346,"utils/nn.py",10302,0,".",python,content +7641,21180346,"utils/nn.py",10303,0,"",python,selection_keyboard +7642,21180511,"utils/nn.py",10303,0," ",python,content +7643,21180511,"utils/nn.py",10304,0,"",python,selection_keyboard +7644,21180985,"utils/nn.py",10303,1,"",python,content +7645,21181110,"utils/nn.py",10302,1,"",python,content +7646,21181260,"utils/nn.py",10301,1,"",python,content +7647,21182175,"utils/nn.py",10300,1,"",python,content +7648,21182334,"utils/nn.py",10300,0," ",python,content +7649,21182335,"utils/nn.py",10301,0,"",python,selection_keyboard +7650,21182718,"utils/nn.py",10301,0,".",python,content +7651,21182719,"utils/nn.py",10302,0,"",python,selection_keyboard +7652,21183094,"utils/nn.py",10302,0,".",python,content +7653,21183095,"utils/nn.py",10303,0,"",python,selection_keyboard +7654,21183285,"utils/nn.py",10303,0,".",python,content +7655,21183286,"utils/nn.py",10304,0,"",python,selection_keyboard +7656,21183486,"utils/nn.py",10304,0," 
",python,content +7657,21183486,"utils/nn.py",10305,0,"",python,selection_keyboard +7658,21207248,"utils/nn.py",10304,0,"",python,selection_command +7659,21207352,"utils/nn.py",10301,0,"",python,selection_command +7660,21207495,"utils/nn.py",10298,0,"",python,selection_command +7661,21207614,"utils/nn.py",10296,0,"",python,selection_command +7662,21207781,"utils/nn.py",10294,0,"",python,selection_command +7663,21208594,"utils/nn.py",10295,0,"",python,selection_command +7664,21208705,"utils/nn.py",10295,0,"i",python,content +7665,21208705,"utils/nn.py",10296,0,"",python,selection_keyboard +7666,21208815,"utils/nn.py",10295,0,"",python,selection_command +7667,21208985,"utils/nn.py",10296,0,"",python,selection_command +7668,21209125,"utils/nn.py",10297,0,"",python,selection_command +7669,21209258,"utils/nn.py",10298,0,"",python,selection_command +7670,21209377,"utils/nn.py",10298,0,"i",python,content +7671,21209377,"utils/nn.py",10299,0,"",python,selection_keyboard +7672,21209503,"utils/nn.py",10298,0,"",python,selection_command +7673,21209752,"utils/nn.py",10301,0,"",python,selection_command +7674,21209908,"utils/nn.py",10305,0,"",python,selection_command +7675,21210074,"utils/nn.py",10308,0,"",python,selection_command +7676,21210692,"utils/nn.py",10307,0,"",python,selection_command +7677,21211173,"utils/nn.py",10307,0,"t",python,content +7678,21211173,"utils/nn.py",10308,0,"",python,selection_keyboard +7679,21211275,"utils/nn.py",10308,0," ",python,content +7680,21211276,"utils/nn.py",10309,0,"",python,selection_keyboard +7681,21211437,"utils/nn.py",10309,0,"s",python,content +7682,21211437,"utils/nn.py",10310,0,"",python,selection_keyboard +7683,21211673,"utils/nn.py",10309,0,"",python,selection_command +7684,21212103,"utils/nn.py",10310,0,"",python,selection_command +7685,21212170,"utils/nn.py",10311,0,"",python,selection_command +7686,21212595,"utils/nn.py",10311,0,",",python,content +7687,21212596,"utils/nn.py",10312,0,"",python,selection_keyboard +7688,21212721,"utils/nn.py",10312,0," ",python,content +7689,21212721,"utils/nn.py",10313,0,"",python,selection_keyboard +7690,21214552,"utils/nn.py",10313,0,"t",python,content +7691,21214552,"utils/nn.py",10314,0,"",python,selection_keyboard +7692,21214903,"utils/nn.py",10314,0,"=",python,content +7693,21214904,"utils/nn.py",10315,0,"",python,selection_keyboard +7694,21216513,"utils/nn.py",10315,0,"target_seq_len, s=target_seq_len",python,content +7695,21216750,"utils/nn.py",10346,0,"",python,selection_command +7696,21218119,"utils/nn.py",10241,0,"",python,selection_command +7697,21220025,"utils/nn.py",10347,0,"",python,selection_command +7698,21248174,"utils/nn.py",10241,0,"",python,selection_command +7699,21249662,"TERMINAL",0,0,"[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +7700,21249724,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +7701,21250007,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +7702,21250702,"utils/nn.py",10347,0,"",python,selection_command +7703,21261421,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +7704,21269409,"TERMINAL",0,0,"2025-07-27 15:13:09.210674: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7705,21270881,"TERMINAL",0,0,"2025-07-27 15:13:10.681764: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7706,21273485,"utils/nn.py",10394,0,"",python,selection_command +7707,21273744,"utils/nn.py",10480,0,"",python,selection_command +7708,21273909,"utils/nn.py",10494,0,"",python,selection_command +7709,21274347,"TERMINAL",0,0,"2025-07-27 15:13:14.150042: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7710,21275121,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/einops/einops.py"", line 531, in reduce\r\n recipe = _prepare_transformation_recipe(pattern, reduction, axes_names=tuple(axes_lengths), ndim=len(shape))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/einops/einops.py"", line 313, in _prepare_transformation_recipe\r\n raise EinopsError(""Identifiers only on one side of expression (should be on both): {}"".format(difference))\r\neinops.EinopsError: Identifiers only on one side of expression (should be on both): {'ti', 's', 't', 'si'}\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 231, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 134, in __call__\r\n z = self.temporal_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 324, in attention_fn\r\n mask_4d = einops.rearrange(mask_4d, ""... ti si -> ... t s"", t=target_seq_len, s=target_seq_len)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/einops/einops.py"", line 600, in rearrange\r\n return reduce(tensor, pattern, reduction=""rearrange"", **axes_lengths)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/einops/einops.py"", line 542, in reduce\r\n raise EinopsError(message + ""\n {}"".format(e))\r\neinops.EinopsError: Error while processing rearrange-reduction pattern ""... ti si -> ... t s"".\r\n Input tensor shape: (921, 1, 1, 1). 
Additional info: {'t': 4, 's': 4}.\r\n Identifiers only on one side of expression (should be on both): {'ti', 's', 't', 'si'}\r\n",,terminal_output +7711,21276132,"TERMINAL",0,0,"srun: error: hai007: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +7712,21279995,"utils/nn.py",10480,0,"",python,selection_command +7713,21280697,"utils/nn.py",10396,86,"",python,content +7714,21280741,"utils/nn.py",10404,0,"",python,selection_command +7715,21280826,"utils/nn.py",10357,0,"",python,selection_command +7716,21280980,"utils/nn.py",10249,0,"",python,selection_command +7717,21281123,"utils/nn.py",10210,0,"",python,selection_command +7718,21282031,"utils/nn.py",10240,0,"\n # mask_4d.shape (921, 1, 1, 1) (B, H-broadcast, Q-broadcast, K-broadcast)",python,content +7719,21282032,"utils/nn.py",10253,0,"",python,selection_command +7720,21302202,"utils/nn.py",10339,0,"",python,selection_command +7721,21302450,"utils/nn.py",10327,107," mask_4d = einops.rearrange(mask_4d, ""... ti si -> ... t s"", t=target_seq_len, s=target_seq_len)",python,selection_command +7722,21305354,"utils/nn.py",10339,0,"",python,selection_command +7723,21305739,"utils/nn.py",10327,107," mask_4d = einops.rearrange(mask_4d, ""... ti si -> ... t s"", t=target_seq_len, s=target_seq_len)",python,selection_command +7724,21306082,"utils/nn.py",10339,0,"",python,selection_command +7725,21318001,"utils/nn.py",10327,0,"",python,selection_command +7726,21319981,"utils/nn.py",10327,0," mask_4d = jnp.broadcast_to(mask_4d, mask_4d.shape[:-2] + (target_seq_len, target_seq_len))\n",python,content +7727,21319983,"utils/nn.py",10430,108,"",python,content +7728,21324165,"utils/nn.py",10327,0,"",python,selection_command +7729,21324811,"utils/nn.py",10327,102," mask_4d = einops.rearrange(mask_4d, ""... ti si -> ... t s"", t=target_seq_len, s=target_seq_len)",python,content +7730,21326081,"utils/nn.py",10327,0," mask_4d = einops.rearrange(mask_4d, ""... -> ... t s"", t=target_seq_len, s=target_seq_len)\n",python,content +7731,21326083,"utils/nn.py",10429,108,"",python,content +7732,21336570,"utils/nn.py",10339,0,"",python,selection_command +7733,21339051,"TERMINAL",0,0,"[franz.srambical@hai007.haicore.berlin:~/jafar] $ bash experiments/sample.sh ",,terminal_output +7734,21339668,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +7735,21350892,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +7736,21369000,"TERMINAL",0,0,"2025-07-27 15:14:48.765307: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7737,21370436,"TERMINAL",0,0,"2025-07-27 15:14:50.239375: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7738,21373907,"TERMINAL",0,0,"2025-07-27 15:14:53.710363: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7739,21374680,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/einops/einops.py"", line 531, in reduce\r\n recipe = _prepare_transformation_recipe(pattern, reduction, axes_names=tuple(axes_lengths), ndim=len(shape))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/einops/einops.py"", line 313, in _prepare_transformation_recipe\r\n raise EinopsError(""Identifiers only on one side of expression (should be on both): {}"".format(difference))\r\neinops.EinopsError: Identifiers only on one side of expression (should be on both): {'s', 't'}\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 231, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 134, in __call__\r\n z = self.temporal_attention(z)\r\n File 
""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 325, in attention_fn\r\n mask_4d = einops.rearrange(mask_4d, ""... -> ... t s"", t=target_seq_len, s=target_seq_len)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/einops/einops.py"", line 600, in rearrange\r\n return reduce(tensor, pattern, reduction=""rearrange"", **axes_lengths)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/einops/einops.py"", line 542, in reduce\r\n raise EinopsError(message + ""\n {}"".format(e))\r\neinops.EinopsError: Error while processing rearrange-reduction pattern ""... -> ... t s"".\r\n Input tensor shape: (921, 1, 1, 1). Additional info: {'t': 4, 's': 4}.\r\n Identifiers only on one side of expression (should be on both): {'s', 't'}\r\n",,terminal_output +7740,21375716,"TERMINAL",0,0,"srun: error: hai007: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +7741,21469408,"utils/nn.py",10380,0,"ti si ",python,content +7742,21470209,"utils/nn.py",10253,86,"",python,content +7743,21470222,"utils/nn.py",10210,0,"",python,selection_command +7744,21470773,"utils/nn.py",10253,0,"# mask_4d.shape (921, 1, 1, 1) (B, H-broadcast, Q-broadcast, K-broadcast)\n ",python,content +7745,21471304,"utils/nn.py",10249,0,"",python,selection_command +7746,21471766,"utils/nn.py",10335,0,"",python,selection_command +7747,21472013,"utils/nn.py",10327,107," mask_4d = einops.rearrange(mask_4d, ""... ti si -> ... t s"", t=target_seq_len, s=target_seq_len)",python,selection_command +7748,21472488,"utils/nn.py",10335,0,"",python,selection_command +7749,21557939,"utils/nn.py",10339,0,"",python,selection_command +7750,21558098,"utils/nn.py",10347,0,"",python,selection_command +7751,21558274,"utils/nn.py",10349,0,"",python,selection_command +7752,21558401,"utils/nn.py",10355,0,"",python,selection_command +7753,21558644,"utils/nn.py",10356,0,"",python,selection_command +7754,21558970,"utils/nn.py",10356,9,"",python,content +7755,21559170,"utils/nn.py",10356,0,"r",python,content +7756,21559171,"utils/nn.py",10357,0,"",python,selection_keyboard +7757,21559223,"utils/nn.py",10357,0,"e",python,content +7758,21559224,"utils/nn.py",10358,0,"",python,selection_keyboard +7759,21559356,"utils/nn.py",10358,0,"p",python,content +7760,21559356,"utils/nn.py",10359,0,"",python,selection_keyboard +7761,21559423,"utils/nn.py",10359,0,"e",python,content +7762,21559424,"utils/nn.py",10360,0,"",python,selection_keyboard +7763,21559509,"utils/nn.py",10360,0,"a",python,content +7764,21559510,"utils/nn.py",10361,0,"",python,selection_keyboard +7765,21559643,"utils/nn.py",10361,0,"t",python,content +7766,21559643,"utils/nn.py",10362,0,"",python,selection_keyboard +7767,21559897,"utils/nn.py",10361,0,"",python,selection_command +7768,21564112,"utils/nn.py",10362,0,"",python,selection_command +7769,21564279,"utils/nn.py",10363,0,"",python,selection_command +7770,21564432,"utils/nn.py",10370,0,"",python,selection_command +7771,21564598,"utils/nn.py",10372,0,"",python,selection_command +7772,21564742,"utils/nn.py",10377,0,"",python,selection_command +7773,21564918,"utils/nn.py",10380,0,"",python,selection_command +7774,21565347,"utils/nn.py",10377,0,"",python,selection_command +7775,21565547,"utils/nn.py",10372,0,"",python,selection_command 
+7776,21565817,"utils/nn.py",10377,0,"",python,selection_command +7777,21566120,"utils/nn.py",10377,1,"t",python,selection_command +7778,21566213,"utils/nn.py",10377,2,"ti",python,selection_command +7779,21566617,"utils/nn.py",10377,5,"ti si",python,selection_command +7780,21566913,"utils/nn.py",10377,5,"",python,content +7781,21567188,"utils/nn.py",10377,0,"1",python,content +7782,21567188,"utils/nn.py",10378,0,"",python,selection_keyboard +7783,21567281,"utils/nn.py",10378,0," ",python,content +7784,21567281,"utils/nn.py",10379,0,"",python,selection_keyboard +7785,21567454,"utils/nn.py",10379,0,"1",python,content +7786,21567455,"utils/nn.py",10380,0,"",python,selection_keyboard +7787,21567682,"utils/nn.py",10379,0,"",python,selection_command +7788,21568297,"utils/nn.py",10381,0,"",python,selection_command +7789,21568503,"utils/nn.py",10384,0,"",python,selection_command +7790,21569019,"utils/nn.py",10428,0,"",python,selection_command +7791,21569387,"utils/nn.py",10327,0,"",python,selection_command +7792,21570751,"TERMINAL",0,0,"[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +7793,21570815,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +7794,21571115,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +7795,21582576,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +7796,21590700,"TERMINAL",0,0,"2025-07-27 15:18:30.450486: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7797,21592120,"TERMINAL",0,0,"2025-07-27 15:18:31.923398: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7798,21595596,"TERMINAL",0,0,"2025-07-27 15:18:35.397376: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7799,21596347,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/utils/nn.py(333)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +7800,21617789,"TERMINAL",0,0,"c",,terminal_output +7801,21618001,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(333)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +7802,21618573,"TERMINAL",0,0,"c",,terminal_output +7803,21618717,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(333)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +7804,21619388,"TERMINAL",0,0,"c",,terminal_output +7805,21619496,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(333)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +7806,21619888,"TERMINAL",0,0,"c",,terminal_output +7807,21620017,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(333)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +7808,21620353,"TERMINAL",0,0,"c",,terminal_output +7809,21620479,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(333)attention_fn()\r\n-> bias_4d = _pad(_rearrange(bias)) if bias is not None else None\r\n",,terminal_output +7810,21620855,"TERMINAL",0,0,"c",,terminal_output +7811,21620920,"TERMINAL",0,0,"\r\n",,terminal_output +7812,21621412,"TERMINAL",0,0,"2025-07-27 15:19:01.215093: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +7813,21623878,"TERMINAL",0,0,"(Pdb) SSIM: 0.010262742638587952\r\n",,terminal_output +7814,21624944,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +7815,21634671,"utils/nn.py",10326,0,"\n ",python,content +7816,21634766,"utils/nn.py",10339,0,"#",python,content +7817,21634766,"utils/nn.py",10340,0,"",python,selection_keyboard +7818,21634806,"utils/nn.py",10340,0," ",python,content +7819,21634806,"utils/nn.py",10341,0,"",python,selection_keyboard +7820,21635451,"utils/nn.py",10341,0,"W",python,content +7821,21635451,"utils/nn.py",10342,0,"",python,selection_keyboard +7822,21635635,"utils/nn.py",10342,0,"e",python,content +7823,21635635,"utils/nn.py",10343,0,"",python,selection_keyboard +7824,21635701,"utils/nn.py",10343,0," ",python,content +7825,21635701,"utils/nn.py",10344,0,"",python,selection_keyboard +7826,21635821,"utils/nn.py",10344,0,"n",python,content +7827,21635821,"utils/nn.py",10345,0,"",python,selection_keyboard +7828,21635885,"utils/nn.py",10345,0,"e",python,content +7829,21635885,"utils/nn.py",10346,0,"",python,selection_keyboard +7830,21636034,"utils/nn.py",10346,0,"e",python,content +7831,21636035,"utils/nn.py",10347,0,"",python,selection_keyboard +7832,21636085,"utils/nn.py",10347,0,"d",python,content +7833,21636086,"utils/nn.py",10348,0,"",python,selection_keyboard +7834,21636172,"utils/nn.py",10348,0," ",python,content +7835,21636172,"utils/nn.py",10349,0,"",python,selection_keyboard +7836,21636306,"utils/nn.py",10349,0,"t",python,content +7837,21636307,"utils/nn.py",10350,0,"",python,selection_keyboard +7838,21636369,"utils/nn.py",10350,0,"o",python,content +7839,21636370,"utils/nn.py",10351,0,"",python,selection_keyboard +7840,21636437,"utils/nn.py",10351,0," ",python,content +7841,21636437,"utils/nn.py",10352,0,"",python,selection_keyboard +7842,21636878,"utils/nn.py",10352,0,"b",python,content +7843,21636878,"utils/nn.py",10353,0,"",python,selection_keyboard +7844,21636919,"utils/nn.py",10353,0,"r",python,content +7845,21636920,"utils/nn.py",10354,0,"",python,selection_keyboard +7846,21637036,"utils/nn.py",10354,0,"o",python,content +7847,21637036,"utils/nn.py",10355,0,"",python,selection_keyboard +7848,21637087,"utils/nn.py",10355,0,"a",python,content +7849,21637087,"utils/nn.py",10356,0,"",python,selection_keyboard +7850,21637185,"utils/nn.py",10356,0,"d",python,content +7851,21637185,"utils/nn.py",10357,0,"",python,selection_keyboard +7852,21637437,"utils/nn.py",10357,0,"c",python,content +7853,21637438,"utils/nn.py",10358,0,"",python,selection_keyboard +7854,21637490,"utils/nn.py",10358,0,"a",python,content +7855,21637490,"utils/nn.py",10359,0,"",python,selection_keyboard +7856,21637555,"utils/nn.py",10359,0,"s",python,content +7857,21637555,"utils/nn.py",10360,0,"",python,selection_keyboard +7858,21637705,"utils/nn.py",10360,0,"t",python,content +7859,21637705,"utils/nn.py",10361,0,"",python,selection_keyboard +7860,21638028,"utils/nn.py",10380,0,"",python,selection_command +7861,21638028,"utils/nn.py",10382,0,"",python,selection_command +7862,21638028,"utils/nn.py",10382,0,"1 1",python,content +7863,21638101,"utils/nn.py",10385,0,"a",python,content +7864,21638891,"utils/nn.py",10385,0,"",python,selection_command +7865,21639060,"utils/nn.py",10382,4,"",python,content +7866,21639118,"utils/nn.py",10339,35,"",python,content 
+7867,21639120,"utils/nn.py",10327,0,"",python,selection_command +7868,21639814,"utils/nn.py",10339,0,"# We need to broadcast\n ",python,content +7869,21639832,"utils/nn.py",10382,0,"1 1a",python,content +7870,21640347,"utils/nn.py",10362,0,"",python,selection_command +7871,21640508,"utils/nn.py",10374,0,"",python,selection_command +7872,21640762,"utils/nn.py",10382,0,"",python,selection_command +7873,21646111,"utils/nn.py",10382,1,"1",python,selection_command +7874,21646198,"utils/nn.py",10382,2,"1 ",python,selection_command +7875,21646352,"utils/nn.py",10382,3,"1 1",python,selection_command +7876,21646673,"utils/nn.py",10382,4,"1 1a",python,selection_command +7877,21647819,"utils/nn.py",10385,0,"",python,selection_command +7878,21654637,"utils/nn.py",10385,1,"a",python,selection_command +7879,21655043,"utils/nn.py",10384,2,"1a",python,selection_command +7880,21655420,"utils/nn.py",10382,4,"1 1a",python,selection_command +7881,21656756,"utils/nn.py",10382,4,"",python,content +7882,21657519,"utils/nn.py",10362,0,"",python,selection_command +7883,21657624,"utils/nn.py",10374,0,"",python,selection_command +7884,21658973,"utils/nn.py",10339,0,"",python,selection_command +7885,21659232,"utils/nn.py",10361,0,"",python,selection_command +7886,21659889,"utils/nn.py",10360,0,"",python,selection_command +7887,21659992,"utils/nn.py",10352,0,"",python,selection_command +7888,21660159,"utils/nn.py",10349,0,"",python,selection_command +7889,21660289,"utils/nn.py",10344,0,"",python,selection_command +7890,21660624,"utils/nn.py",10341,0,"",python,selection_command +7891,21661509,"utils/nn.py",10340,0,"",python,selection_command +7892,21662121,"utils/nn.py",10339,0,"",python,selection_command +7893,21662596,"utils/nn.py",10361,0,"",python,selection_command +7894,21663164,"utils/nn.py",10360,0,"",python,selection_command +7895,21727533,"utils/nn.py",10361,0,"",python,selection_command +7896,21727709,"utils/nn.py",10361,0," ",python,content +7897,21727709,"utils/nn.py",10362,0,"",python,selection_keyboard +7898,21727842,"utils/nn.py",10362,0,"t",python,content +7899,21727843,"utils/nn.py",10363,0,"",python,selection_keyboard +7900,21728045,"utils/nn.py",10363,0,"h",python,content +7901,21728045,"utils/nn.py",10364,0,"",python,selection_keyboard +7902,21728052,"utils/nn.py",10364,0,"e",python,content +7903,21728052,"utils/nn.py",10365,0,"",python,selection_keyboard +7904,21728214,"utils/nn.py",10365,0," ",python,content +7905,21728215,"utils/nn.py",10366,0,"",python,selection_keyboard +7906,21729349,"utils/nn.py",10366,0,"t",python,content +7907,21729349,"utils/nn.py",10367,0,"",python,selection_keyboard +7908,21729530,"utils/nn.py",10367,0," ",python,content +7909,21729530,"utils/nn.py",10368,0,"",python,selection_keyboard +7910,21729630,"utils/nn.py",10368,0,"a",python,content +7911,21729630,"utils/nn.py",10369,0,"",python,selection_keyboard +7912,21729765,"utils/nn.py",10369,0,"n",python,content +7913,21729765,"utils/nn.py",10370,0,"",python,selection_keyboard +7914,21729826,"utils/nn.py",10370,0,"d",python,content +7915,21729827,"utils/nn.py",10371,0,"",python,selection_keyboard +7916,21729900,"utils/nn.py",10371,0," ",python,content +7917,21729901,"utils/nn.py",10372,0,"",python,selection_keyboard +7918,21729966,"utils/nn.py",10372,0,"s",python,content +7919,21729967,"utils/nn.py",10373,0,"",python,selection_keyboard +7920,21730207,"utils/nn.py",10373,0," ",python,content +7921,21730207,"utils/nn.py",10374,0,"",python,selection_keyboard +7922,21730658,"utils/nn.py",10374,0,"d",python,content 
+7923,21730658,"utils/nn.py",10375,0,"",python,selection_keyboard +7924,21730748,"utils/nn.py",10375,0,"i",python,content +7925,21730748,"utils/nn.py",10376,0,"",python,selection_keyboard +7926,21730801,"utils/nn.py",10376,0,"m",python,content +7927,21730802,"utils/nn.py",10377,0,"",python,selection_keyboard +7928,21730900,"utils/nn.py",10377,0,"e",python,content +7929,21730901,"utils/nn.py",10378,0,"",python,selection_keyboard +7930,21731014,"utils/nn.py",10378,0,"n",python,content +7931,21731015,"utils/nn.py",10379,0,"",python,selection_keyboard +7932,21731132,"utils/nn.py",10379,0,"s",python,content +7933,21731133,"utils/nn.py",10380,0,"",python,selection_keyboard +7934,21731220,"utils/nn.py",10380,0,"i",python,content +7935,21731220,"utils/nn.py",10381,0,"",python,selection_keyboard +7936,21731268,"utils/nn.py",10381,0,"o",python,content +7937,21731268,"utils/nn.py",10382,0,"",python,selection_keyboard +7938,21731337,"utils/nn.py",10382,0,"n",python,content +7939,21731338,"utils/nn.py",10383,0,"",python,selection_keyboard +7940,21731450,"utils/nn.py",10383,0,"s",python,content +7941,21731450,"utils/nn.py",10384,0,"",python,selection_keyboard +7942,21733193,"utils/nn.py",10383,0,"",python,selection_command +7943,21735805,"utils/nn.py",10327,0,"",python,selection_command +7944,21736388,"utils/nn.py",10384,0,"",python,selection_command +7945,21736692,"utils/nn.py",10383,0,"",python,selection_command +7946,21737010,"utils/nn.py",10374,0,"",python,selection_command +7947,21737255,"utils/nn.py",10372,0,"",python,selection_command +7948,21737268,"utils/nn.py",10368,0,"",python,selection_command +7949,21737299,"utils/nn.py",10366,0,"",python,selection_command +7950,21737327,"utils/nn.py",10362,0,"",python,selection_command +7951,21737625,"utils/nn.py",10352,0,"",python,selection_command +7952,21737878,"utils/nn.py",10349,0,"",python,selection_command +7953,21737890,"utils/nn.py",10344,0,"",python,selection_command +7954,21738019,"utils/nn.py",10341,0,"",python,selection_command +7955,21738794,"utils/nn.py",10342,0,"",python,selection_command +7956,21739024,"utils/nn.py",10347,0,"",python,selection_command +7957,21739216,"utils/nn.py",10350,0,"",python,selection_command +7958,21739381,"utils/nn.py",10360,0,"",python,selection_command +7959,21739615,"utils/nn.py",10364,0,"",python,selection_command +7960,21739875,"utils/nn.py",10366,0,"",python,selection_command +7961,21740058,"utils/nn.py",10370,0,"",python,selection_command +7962,21740299,"utils/nn.py",10372,0,"",python,selection_command +7963,21740456,"utils/nn.py",10383,0,"",python,selection_command +7964,21741111,"utils/nn.py",10374,0,"",python,selection_command +7965,21741344,"utils/nn.py",10372,0,"",python,selection_command +7966,21741377,"utils/nn.py",10368,0,"",python,selection_command +7967,21741406,"utils/nn.py",10366,0,"",python,selection_command +7968,21741439,"utils/nn.py",10362,0,"",python,selection_command +7969,21742954,"utils/nn.py",10384,0,"",python,selection_command +7970,21762391,"utils/nn.py",10384,0," ",python,content +7971,21762392,"utils/nn.py",10385,0,"",python,selection_keyboard +7972,21762645,"utils/nn.py",10385,0,"t",python,content +7973,21762645,"utils/nn.py",10386,0,"",python,selection_keyboard +7974,21762691,"utils/nn.py",10386,0,"o",python,content +7975,21762691,"utils/nn.py",10387,0,"",python,selection_keyboard +7976,21763585,"utils/nn.py",10387,0," ",python,content +7977,21763585,"utils/nn.py",10388,0,"",python,selection_keyboard +7978,21764741,"utils/nn.py",10388,0,"t",python,content 
+7979,21764741,"utils/nn.py",10389,0,"",python,selection_keyboard +7980,21764824,"utils/nn.py",10389,0,"a",python,content +7981,21764824,"utils/nn.py",10390,0,"",python,selection_keyboard +7982,21764909,"utils/nn.py",10390,0,"r",python,content +7983,21764909,"utils/nn.py",10391,0,"",python,selection_keyboard +7984,21765063,"utils/nn.py",10391,0,"g",python,content +7985,21765063,"utils/nn.py",10392,0,"",python,selection_keyboard +7986,21765125,"utils/nn.py",10392,0,"e",python,content +7987,21765126,"utils/nn.py",10393,0,"",python,selection_keyboard +7988,21765214,"utils/nn.py",10393,0,"t",python,content +7989,21765214,"utils/nn.py",10394,0,"",python,selection_keyboard +7990,21765494,"utils/nn.py",10394,0,"_",python,content +7991,21765495,"utils/nn.py",10395,0,"",python,selection_keyboard +7992,21765730,"utils/nn.py",10395,0,"s",python,content +7993,21765730,"utils/nn.py",10396,0,"",python,selection_keyboard +7994,21766015,"utils/nn.py",10396,0,"e",python,content +7995,21766015,"utils/nn.py",10397,0,"",python,selection_keyboard +7996,21766064,"utils/nn.py",10397,0,"q",python,content +7997,21766064,"utils/nn.py",10398,0,"",python,selection_keyboard +7998,21766333,"utils/nn.py",10398,0,"_",python,content +7999,21766334,"utils/nn.py",10399,0,"",python,selection_keyboard +8000,21766517,"utils/nn.py",10399,0,"l",python,content +8001,21766517,"utils/nn.py",10400,0,"",python,selection_keyboard +8002,21766563,"utils/nn.py",10400,0,"e",python,content +8003,21766563,"utils/nn.py",10401,0,"",python,selection_keyboard +8004,21766685,"utils/nn.py",10401,0,"n",python,content +8005,21766685,"utils/nn.py",10402,0,"",python,selection_keyboard +8006,21766832,"utils/nn.py",10401,0,"",python,selection_command +8007,21767046,"utils/nn.py",10327,0,"",python,selection_command +8008,21768185,"utils/nn.py",10339,0,"",python,selection_command +8009,21779648,"utils/nn.py",10402,0,"",python,selection_command +8010,21782368,"utils/nn.py",10402,0," ",python,content +8011,21782368,"utils/nn.py",10403,0,"",python,selection_keyboard +8012,21782408,"utils/nn.py",10403,0,"s",python,content +8013,21782408,"utils/nn.py",10404,0,"",python,selection_keyboard +8014,21782524,"utils/nn.py",10404,0,"i",python,content +8015,21782524,"utils/nn.py",10405,0,"",python,selection_keyboard +8016,21782607,"utils/nn.py",10405,0,"n",python,content +8017,21782608,"utils/nn.py",10406,0,"",python,selection_keyboard +8018,21782628,"utils/nn.py",10406,0,"c",python,content +8019,21782628,"utils/nn.py",10407,0,"",python,selection_keyboard +8020,21782711,"utils/nn.py",10407,0,"e",python,content +8021,21782711,"utils/nn.py",10408,0,"",python,selection_keyboard +8022,21782742,"utils/nn.py",10408,0," ",python,content +8023,21782742,"utils/nn.py",10409,0,"",python,selection_keyboard +8024,21782910,"utils/nn.py",10409,0,"c",python,content +8025,21782910,"utils/nn.py",10410,0,"",python,selection_keyboard +8026,21783014,"utils/nn.py",10410,0,"u",python,content +8027,21783014,"utils/nn.py",10411,0,"",python,selection_keyboard +8028,21783116,"utils/nn.py",10411,0,"d",python,content +8029,21783116,"utils/nn.py",10412,0,"",python,selection_keyboard +8030,21783224,"utils/nn.py",10412,0,"n",python,content +8031,21783225,"utils/nn.py",10413,0,"",python,selection_keyboard +8032,21783328,"utils/nn.py",10413,0,"n",python,content +8033,21783328,"utils/nn.py",10414,0,"",python,selection_keyboard +8034,21783426,"utils/nn.py",10414,0," ",python,content +8035,21783427,"utils/nn.py",10415,0,"",python,selection_keyboard +8036,21804326,"utils/nn.py",10415,0,"d",python,content 
+8037,21804326,"utils/nn.py",10416,0,"",python,selection_keyboard +8038,21805261,"utils/nn.py",10415,1,"",python,content +8039,21805478,"utils/nn.py",10415,0,"s",python,content +8040,21805478,"utils/nn.py",10416,0,"",python,selection_keyboard +8041,21805905,"utils/nn.py",10416,0,"i",python,content +8042,21805905,"utils/nn.py",10417,0,"",python,selection_keyboard +8043,21806191,"utils/nn.py",10416,1,"",python,content +8044,21806252,"utils/nn.py",10416,0,"t",python,content +8045,21806252,"utils/nn.py",10417,0,"",python,selection_keyboard +8046,21806328,"utils/nn.py",10417,0,"r",python,content +8047,21806328,"utils/nn.py",10418,0,"",python,selection_keyboard +8048,21806376,"utils/nn.py",10418,0,"i",python,content +8049,21806376,"utils/nn.py",10419,0,"",python,selection_keyboard +8050,21806582,"utils/nn.py",10419,0,"c",python,content +8051,21806583,"utils/nn.py",10420,0,"",python,selection_keyboard +8052,21806760,"utils/nn.py",10420,0,"t",python,content +8053,21806760,"utils/nn.py",10421,0,"",python,selection_keyboard +8054,21806826,"utils/nn.py",10421,0,"l",python,content +8055,21806826,"utils/nn.py",10422,0,"",python,selection_keyboard +8056,21806935,"utils/nn.py",10422,0,"y",python,content +8057,21806935,"utils/nn.py",10423,0,"",python,selection_keyboard +8058,21807076,"utils/nn.py",10423,0," ",python,content +8059,21807076,"utils/nn.py",10424,0,"",python,selection_keyboard +8060,21808107,"utils/nn.py",10424,0,"c",python,content +8061,21808107,"utils/nn.py",10425,0,"",python,selection_keyboard +8062,21808249,"utils/nn.py",10425,0,"h",python,content +8063,21808249,"utils/nn.py",10426,0,"",python,selection_keyboard +8064,21808414,"utils/nn.py",10426,0,"e",python,content +8065,21808415,"utils/nn.py",10427,0,"",python,selection_keyboard +8066,21808631,"utils/nn.py",10427,0,"c",python,content +8067,21808631,"utils/nn.py",10428,0,"",python,selection_keyboard +8068,21808747,"utils/nn.py",10428,0,"k",python,content +8069,21808748,"utils/nn.py",10429,0,"",python,selection_keyboard +8070,21808829,"utils/nn.py",10429,0,"s",python,content +8071,21808830,"utils/nn.py",10430,0,"",python,selection_keyboard +8072,21808930,"utils/nn.py",10430,0," ",python,content +8073,21808931,"utils/nn.py",10431,0,"",python,selection_keyboard +8074,21809086,"utils/nn.py",10431,0,"t",python,content +8075,21809086,"utils/nn.py",10432,0,"",python,selection_keyboard +8076,21809164,"utils/nn.py",10432,0,"h",python,content +8077,21809165,"utils/nn.py",10433,0,"",python,selection_keyboard +8078,21809228,"utils/nn.py",10433,0,"e",python,content +8079,21809228,"utils/nn.py",10434,0,"",python,selection_keyboard +8080,21809330,"utils/nn.py",10434,0," ",python,content +8081,21809330,"utils/nn.py",10435,0,"",python,selection_keyboard +8082,21809413,"utils/nn.py",10435,0,"s",python,content +8083,21809413,"utils/nn.py",10436,0,"",python,selection_keyboard +8084,21809496,"utils/nn.py",10436,0,"h",python,content +8085,21809496,"utils/nn.py",10437,0,"",python,selection_keyboard +8086,21809634,"utils/nn.py",10437,0,"a",python,content +8087,21809634,"utils/nn.py",10438,0,"",python,selection_keyboard +8088,21809765,"utils/nn.py",10438,0,"p",python,content +8089,21809766,"utils/nn.py",10439,0,"",python,selection_keyboard +8090,21809844,"utils/nn.py",10439,0,"e",python,content +8091,21809845,"utils/nn.py",10440,0,"",python,selection_keyboard +8092,21810047,"utils/nn.py",10439,0,"",python,selection_command +8093,21810148,"utils/nn.py",10327,0,"",python,selection_command +8094,21811548,"utils/nn.py",10339,0,"",python,selection_command 
+8095,21811770,"utils/nn.py",10341,0,"",python,selection_command +8096,21812187,"utils/nn.py",10341,0,"N",python,content +8097,21812187,"utils/nn.py",10342,0,"",python,selection_keyboard +8098,21812249,"utils/nn.py",10342,0,"O",python,content +8099,21812250,"utils/nn.py",10343,0,"",python,selection_keyboard +8100,21812281,"utils/nn.py",10343,0,"T",python,content +8101,21812281,"utils/nn.py",10344,0,"",python,selection_keyboard +8102,21812401,"utils/nn.py",10344,0,"E",python,content +8103,21812401,"utils/nn.py",10345,0,"",python,selection_keyboard +8104,21812552,"utils/nn.py",10345,0,":",python,content +8105,21812552,"utils/nn.py",10346,0,"",python,selection_keyboard +8106,21812618,"utils/nn.py",10346,0," ",python,content +8107,21812619,"utils/nn.py",10347,0,"",python,selection_keyboard +8108,21812815,"utils/nn.py",10346,0,"",python,selection_command +8109,21813033,"utils/nn.py",10327,0,"",python,selection_command +8110,22046252,"utils/nn.py",10445,0,"",python,selection_command +8111,22051814,"utils/nn.py",10446,0,"",python,selection_command +8112,22051985,"utils/nn.py",10446,0,"\n ",python,content +8113,22052836,"utils/nn.py",10459,0,"#",python,content +8114,22052837,"utils/nn.py",10460,0,"",python,selection_keyboard +8115,22052896,"utils/nn.py",10460,0," ",python,content +8116,22052896,"utils/nn.py",10461,0,"",python,selection_keyboard +8117,22054167,"utils/nn.py",10460,0,"",python,selection_command +8118,22137220,"utils/nn.py",10461,0,"",python,selection_command +8119,22137431,"utils/nn.py",10461,0,"https://github.com/jax-ml/jax/issues/28974",python,content +8120,22137431,"utils/nn.py",10503,0,"",python,selection_keyboard +8121,22137739,"utils/nn.py",10502,0,"",python,selection_command +8122,22160375,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",0,0,"",python,tab +8123,22175783,"utils/nn.py",0,0,"",python,tab +8124,22176340,"utils/nn.py",10503,0,"\n ",python,content +8125,22176870,"utils/nn.py",10516,0,"#",python,content +8126,22176870,"utils/nn.py",10517,0,"",python,selection_keyboard +8127,22176921,"utils/nn.py",10517,0," ",python,content +8128,22176921,"utils/nn.py",10518,0,"",python,selection_keyboard +8129,22177177,"utils/nn.py",10518,0,"https://github.com/jax-ml/jax/blob/08c7677393672ccb85c10f1ed0bd506905c3c994/jax/_src/cudnn/fused_attention_stablehlo.py#L1830",python,content +8130,22177177,"utils/nn.py",10643,0,"",python,selection_keyboard +8131,22177438,"utils/nn.py",10642,0,"",python,selection_command +8132,22178007,"utils/nn.py",10504,0,"",python,selection_command +8133,22180062,"utils/nn.py",10643,0,"\n ",python,content +8134,22180591,"utils/nn.py",10656,0,"#",python,content +8135,22180591,"utils/nn.py",10657,0,"",python,selection_keyboard +8136,22180637,"utils/nn.py",10657,0," ",python,content +8137,22180637,"utils/nn.py",10658,0,"",python,selection_keyboard +8138,22181380,"utils/nn.py",10657,0,"",python,selection_command +8139,22183306,"utils/nn.py",10658,0,"",python,selection_command +8140,22205990,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +8141,22208768,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",0,0,"",python,tab +8142,22211669,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",71414,0,"",python,selection_command 
+8143,22215698,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",10018,0,"",python,selection_command +8144,22218132,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",71414,0,"",python,selection_command +8145,22221758,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",71360,0,"",python,selection_command +8146,22221927,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",71304,0,"",python,selection_command +8147,22223536,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",71360,0,"",python,selection_command +8148,22223689,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",71414,0,"",python,selection_command +8149,22224116,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",10018,0,"",python,selection_command +8150,22225094,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",10030,0,"",python,selection_command +8151,22225229,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",10031,0,"",python,selection_command +8152,22225380,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",10036,0,"",python,selection_command +8153,22225631,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",10038,0,"",python,selection_command +8154,22225658,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",10041,0,"",python,selection_command +8155,22225689,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",10043,0,"",python,selection_command +8156,22225994,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",10048,0,"",python,selection_command +8157,22226160,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",10050,0,"",python,selection_command +8158,22226951,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12020,0,"",python,selection_command +8159,22239356,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12030,0,"",python,selection_command +8160,22239507,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",12067,0,"",python,selection_command +8161,22258316,"utils/nn.py",0,0,"",python,tab +8162,22259076,"utils/nn.py",10658,0,"https://github.com/jax-ml/jax/blob/08c7677393672ccb85c10f1ed0bd506905c3c994/jax/_src/cudnn/fused_attention_stablehlo.py#L337",python,content +8163,22259076,"utils/nn.py",10782,0,"",python,selection_keyboard +8164,22259322,"utils/nn.py",10781,0,"",python,selection_command +8165,22259551,"utils/nn.py",10644,0,"",python,selection_command +8166,22260325,"utils/nn.py",10781,0,"",python,selection_command +8167,22265142,"utils/nn.py",10644,0,"",python,selection_command 
+8168,22266031,"utils/nn.py",10504,0,"",python,selection_command +8169,22266226,"utils/nn.py",10447,0,"",python,selection_command +8170,22267466,"utils/nn.py",10327,0,"",python,selection_command +8171,22267821,"utils/nn.py",10445,0,"",python,selection_command +8172,22271582,"utils/nn.py",10441,0,"",python,selection_command +8173,22271889,"utils/nn.py",10441,0,"m",python,content +8174,22271890,"utils/nn.py",10442,0,"",python,selection_keyboard +8175,22271945,"utils/nn.py",10442,0,"a",python,content +8176,22271946,"utils/nn.py",10443,0,"",python,selection_keyboard +8177,22271999,"utils/nn.py",10443,0,"s",python,content +8178,22271999,"utils/nn.py",10444,0,"",python,selection_keyboard +8179,22272077,"utils/nn.py",10444,0,"k",python,content +8180,22272077,"utils/nn.py",10445,0,"",python,selection_keyboard +8181,22272161,"utils/nn.py",10445,0," ",python,content +8182,22272162,"utils/nn.py",10446,0,"",python,selection_keyboard +8183,22272279,"utils/nn.py",10445,0,"",python,selection_command +8184,22272428,"utils/nn.py",10327,0,"",python,selection_command +8185,22274613,"utils/nn.py",10339,0,"",python,selection_command +8186,22274873,"utils/nn.py",10341,0,"",python,selection_command +8187,22274896,"utils/nn.py",10345,0,"",python,selection_command +8188,22274928,"utils/nn.py",10347,0,"",python,selection_command +8189,22274961,"utils/nn.py",10350,0,"",python,selection_command +8190,22275182,"utils/nn.py",10355,0,"",python,selection_command +8191,22275302,"utils/nn.py",10358,0,"",python,selection_command +8192,22275481,"utils/nn.py",10368,0,"",python,selection_command +8193,22275618,"utils/nn.py",10372,0,"",python,selection_command +8194,22275803,"utils/nn.py",10374,0,"",python,selection_command +8195,22276100,"utils/nn.py",10372,0,"",python,selection_command +8196,22287392,"utils/nn.py",10372,1,"T",python,content +8197,22287591,"utils/nn.py",10374,0,"",python,selection_command +8198,22287710,"utils/nn.py",10378,0,"",python,selection_command +8199,22287909,"utils/nn.py",10380,0,"",python,selection_command +8200,22288250,"utils/nn.py",10378,0,"",python,selection_command +8201,22288842,"utils/nn.py",10378,1,"S",python,content +8202,22289691,"utils/nn.py",10374,0,"",python,selection_command +8203,22289879,"utils/nn.py",10372,0,"",python,selection_command +8204,22290412,"utils/nn.py",10368,0,"",python,selection_command +8205,22291781,"utils/nn.py",10368,4,"",python,content +8206,22293585,"utils/nn.py",10369,0,"",python,selection_command +8207,22295288,"utils/nn.py",10372,0,"",python,selection_command +8208,22295468,"utils/nn.py",10374,0,"",python,selection_command +8209,22295780,"utils/nn.py",10385,0,"",python,selection_command +8210,22295934,"utils/nn.py",10388,0,"",python,selection_command +8211,22296114,"utils/nn.py",10403,0,"",python,selection_command +8212,22296335,"utils/nn.py",10409,0,"",python,selection_command +8213,22297084,"utils/nn.py",10415,0,"",python,selection_command +8214,22297272,"utils/nn.py",10424,0,"",python,selection_command +8215,22297787,"utils/nn.py",10446,0,"",python,selection_command +8216,22298732,"utils/nn.py",10442,0,"",python,selection_command +8217,22298983,"utils/nn.py",10437,0,"",python,selection_command +8218,22299018,"utils/nn.py",10433,0,"",python,selection_command +8219,22299035,"utils/nn.py",10426,0,"",python,selection_command +8220,22299071,"utils/nn.py",10417,0,"",python,selection_command +8221,22299105,"utils/nn.py",10411,0,"",python,selection_command +8222,22299267,"utils/nn.py",10405,0,"",python,selection_command 
+8223,22299505,"utils/nn.py",10409,0,"",python,selection_command +8224,22299695,"utils/nn.py",10415,0,"",python,selection_command +8225,22299843,"utils/nn.py",10416,0,"",python,selection_command +8226,22300790,"utils/nn.py",10416,0," ",python,content +8227,22300790,"utils/nn.py",10417,0,"",python,selection_keyboard +8228,22301828,"utils/nn.py",10417,0,"a",python,content +8229,22301829,"utils/nn.py",10418,0,"",python,selection_keyboard +8230,22302390,"utils/nn.py",10417,1,"",python,content +8231,22303283,"utils/nn.py",10417,0,"a",python,content +8232,22303284,"utils/nn.py",10418,0,"",python,selection_keyboard +8233,22303360,"utils/nn.py",10418,0,"t",python,content +8234,22303360,"utils/nn.py",10419,0,"",python,selection_keyboard +8235,22303479,"utils/nn.py",10419,0,"t",python,content +8236,22303479,"utils/nn.py",10420,0,"",python,selection_keyboard +8237,22303567,"utils/nn.py",10420,0,"e",python,content +8238,22303568,"utils/nn.py",10421,0,"",python,selection_keyboard +8239,22303669,"utils/nn.py",10421,0,"n",python,content +8240,22303669,"utils/nn.py",10422,0,"",python,selection_keyboard +8241,22303759,"utils/nn.py",10422,0,"t",python,content +8242,22303760,"utils/nn.py",10423,0,"",python,selection_keyboard +8243,22303858,"utils/nn.py",10423,0,"i",python,content +8244,22303859,"utils/nn.py",10424,0,"",python,selection_keyboard +8245,22303889,"utils/nn.py",10424,0,"o",python,content +8246,22303889,"utils/nn.py",10425,0,"",python,selection_keyboard +8247,22303974,"utils/nn.py",10425,0,"n",python,content +8248,22303974,"utils/nn.py",10426,0,"",python,selection_keyboard +8249,22304057,"utils/nn.py",10425,0,"",python,selection_command +8250,22304959,"utils/nn.py",10457,0,"",python,selection_command +8251,22305262,"utils/nn.py",10456,0,"",python,selection_command +8252,22307044,"utils/nn.py",10327,0,"",python,selection_command +8253,22355558,"sample.py",0,0,"",python,tab +8254,22374514,"TERMINAL",0,0,"\r[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +8255,22377383,"sample.py",0,0,"",python,tab +8256,22377384,"sample.py",2805,0,"",python,selection_command +8257,22385836,"TERMINAL",0,0,"\r[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +8258,22389491,"utils/nn.py",0,0,"import math\nfrom typing import Tuple, Callable\n\nfrom flax import nnx\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\nclass PositionalEncoding(nnx.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n def __init__(self, d_model: int, max_len: int = 5000):\n self.d_model = d_model\n self.max_len = max_len\n\n pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n pe = pe.at[:, 0::2].set(jnp.sin(position * div_term))\n pe = pe.at[:, 1::2].set(jnp.cos(position * div_term))\n self.pe = nnx.Variable(pe)\n\n def __call__(self, x: jax.Array) -> jax.Array:\n x = x + self.pe[: x.shape[2]]\n return x\n\n\nclass STBlock(nnx.Module):\n def __init__(\n self,\n dim: int,\n ffn_dim: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n spatial_causal: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.dim = dim\n self.ffn_dim = ffn_dim\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = 
use_flash_attention\n self.spatial_causal = spatial_causal\n self.decode = decode\n\n self.spatial_pos_enc = PositionalEncoding(self.dim)\n self.spatial_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.spatial_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention,\n is_causal=self.spatial_causal,\n ),\n rngs=rngs,\n # decode=self.decode,\n decode=False,\n )\n\n self.temporal_pos_enc = PositionalEncoding(self.dim)\n self.temporal_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.temporal_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=self.decode,\n )\n\n self.ffn_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense1 = nnx.Linear(\n in_features=self.dim,\n out_features=self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense2 = nnx.Linear(\n in_features=self.ffn_dim,\n out_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n @nnx.remat\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = self.spatial_pos_enc(x)\n z = self.spatial_norm(z)\n # z.shape (1, 1, 921, 512)\n z = self.spatial_attention(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = self.temporal_pos_enc(x)\n z = self.temporal_norm(z)\n # z.shape (1, 921, 1, 512)\n z = self.temporal_attention(z)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = self.ffn_norm(x)\n z = self.ffn_dense1(z)\n z = jax.nn.gelu(z)\n z = self.ffn_dense2(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nnx.Module):\n def __init__(\n self,\n input_dim: int,\n model_dim: int,\n ffn_dim: int,\n out_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n spatial_causal: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.input_dim = input_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.out_dim = out_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.spatial_causal = spatial_causal\n self.decode = decode\n\n self.input_norm1 = nnx.LayerNorm(\n num_features=self.input_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_dense = nnx.Linear(\n in_features=self.input_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_norm2 = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n self.blocks: list[STBlock] = []\n for _ in range(self.num_blocks):\n self.blocks.append(\n STBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n 
dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n spatial_causal=self.spatial_causal,\n decode=self.decode,\n rngs=rngs,\n )\n )\n\n self.output_dense = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(self, x: jax.Array) -> jax.Array:\n # x.shape (1, 1, 921, 512)\n x = self.input_norm1(x)\n x = self.input_dense(x)\n x = self.input_norm2(x)\n\n for block in self.blocks:\n # x.shape (1, 1, 921, 512)\n x = block(x)\n\n x = self.output_dense(x)\n return x # (B, T, E)\n\n\ndef normalize(x: jax.Array) -> jax.Array:\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nnx.Module):\n def __init__(\n self, latent_dim: int, num_latents: int, dropout: float, rngs: nnx.Rngs\n ):\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.dropout = dropout\n\n self.codebook = nnx.Param(\n normalize(\n nnx.initializers.lecun_uniform()(\n rngs.params(), (self.num_latents, self.latent_dim)\n )\n )\n )\n self.drop = nnx.Dropout(self.dropout, rngs=rngs)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n normalized_codebook = normalize(self.codebook.value)\n distance = -jnp.matmul(x, normalized_codebook.T)\n if training:\n distance = self.drop(distance)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array) -> jax.Array:\n return self.codebook[indices]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool) -> Callable:\n """"""\n Create an attention function that uses flash attention if enabled.\n\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\n\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\n multiple of 4 and mask accordingly.\n """"""\n\n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\n # for temporal attention (using kv cache)\n # FIRST PASS: qkv.shape (1, 921, 1, 8, 64)\n # SECOND PASS: qkv.shape \n implementation = ""cudnn"" if use_flash_attention else None\n\n def _rearrange(x):\n return einops.rearrange(x, ""... l h d -> (...) 
l h d"")\n\n def _pad(x):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n original_shape = query.shape\n original_seq_len = query.shape[-3]\n\n # Pad to nearest multiple of 4\n target_seq_len = ((original_seq_len + 3) // 4) * 4\n pad_size = target_seq_len - original_seq_len\n\n query_4d = _pad(_rearrange(query))\n key_4d = _pad(_rearrange(key))\n value_4d = _pad(_rearrange(value))\n # query_4d.shape (921, 4, 8, 64)\n\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n # Handle causal mask for cached decoder self-attention (from nnx.MultiHeadAttention)\n if mask is not None:\n # mask.shape (1, 921, 1, 1, 1)\n mask_4d = _rearrange(mask)\n # mask_4d.shape (921, 1, 1, 1) (B, H-broadcast, Q-broadcast, K-broadcast)\n # NOTE: We need to broadcast T and S dimensions to target_seq_len since cudnn attention strictly checks the mask shape\n # https://github.com/jax-ml/jax/issues/28974\n # https://github.com/jax-ml/jax/blob/08c7677393672ccb85c10f1ed0bd506905c3c994/jax/_src/cudnn/fused_attention_stablehlo.py#L1830\n # https://github.com/jax-ml/jax/blob/08c7677393672ccb85c10f1ed0bd506905c3c994/jax/_src/cudnn/fused_attention_stablehlo.py#L337\n mask_4d = einops.repeat(mask_4d, ""... 1 1 -> ... t s"", t=target_seq_len, s=target_seq_len)\n mask_4d = mask_4d.astype(jnp.bool)\n else:\n # FIXME (f.srambical): Investigate whether/why this is needed\n mask_4d = attention_mask[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n if query.shape == (1, 921, 1, 8, 64):\n breakpoint()\n\n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\n\n # NOTE: jax.nn.dot_product_attention does not support dropout\n output_4d = jax.nn.dot_product_attention(\n query=query_4d,\n key=key_4d,\n value=value_4d,\n bias=bias_4d,\n mask=mask_4d,\n implementation=implementation,\n is_causal=is_causal,\n )\n return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n\n return attention_fn\n",python,tab +8259,22389491,"utils/nn.py",2133,0,"",python,selection_command +8260,22400950,"utils/nn.py",2133,9067," self.use_flash_attention, is_causal=self.spatial_causal\n ),\n rngs=rngs,\n # decode=self.decode,\n decode=False,\n )\n\n self.temporal_pos_enc = PositionalEncoding(self.dim)\n self.temporal_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.temporal_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=self.decode,\n )\n\n self.ffn_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense1 = nnx.Linear(\n in_features=self.dim,\n out_features=self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense2 = nnx.Linear(\n in_features=self.ffn_dim,\n out_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n @nnx.remat\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = self.spatial_pos_enc(x)\n z = self.spatial_norm(z)\n z = self.spatial_attention(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = 
self.temporal_pos_enc(x)\n z = self.temporal_norm(z)\n # FIXME (f.srambical): no need to pass mask if is_causal=True\n causal_mask = jnp.tri(z.shape[-2])\n z = self.temporal_attention(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = self.ffn_norm(x)\n z = self.ffn_dense1(z)\n z = jax.nn.gelu(z)\n z = self.ffn_dense2(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nnx.Module):\n def __init__(\n self,\n input_dim: int,\n model_dim: int,\n ffn_dim: int,\n out_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n spatial_causal: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.input_dim = input_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.out_dim = out_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.spatial_causal = spatial_causal\n self.decode = decode\n\n self.input_norm1 = nnx.LayerNorm(\n num_features=self.input_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_dense = nnx.Linear(\n in_features=self.input_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_norm2 = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n self.blocks: list[STBlock] = []\n for _ in range(self.num_blocks):\n self.blocks.append(\n STBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n spatial_causal=self.spatial_causal,\n decode=self.decode,\n rngs=rngs,\n )\n )\n\n self.output_dense = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(self, x: jax.Array) -> jax.Array:\n x = self.input_norm1(x)\n x = self.input_dense(x)\n x = self.input_norm2(x)\n\n for block in self.blocks:\n x = block(x)\n\n x = self.output_dense(x)\n return x # (B, T, E)\n\n\ndef normalize(x: jax.Array) -> jax.Array:\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nnx.Module):\n def __init__(\n self, latent_dim: int, num_latents: int, dropout: float, rngs: nnx.Rngs\n ):\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.dropout = dropout\n\n self.codebook = nnx.Param(\n normalize(\n nnx.initializers.lecun_uniform()(\n rngs.params(), (self.num_latents, self.latent_dim)\n )\n )\n )\n self.drop = nnx.Dropout(self.dropout, rngs=rngs)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n normalized_codebook = normalize(self.codebook.value)\n distance = -jnp.matmul(x, normalized_codebook.T)\n if training:\n distance = self.drop(distance)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array) -> jax.Array:\n return self.codebook[indices]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool) -> Callable:\n """"""\n Create 
an attention function that uses flash attention if enabled.\n\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\n\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\n multiple of 4 and mask accordingly.\n """"""\n\n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\n implementation = ""cudnn"" if use_flash_attention else None\n\n def _rearrange(x):\n return einops.rearrange(x, ""... l h d -> (...) l h d"")\n\n def _pad(x):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )\n return jnp.logical_and(attention_mask, expanded_mask)\n\n original_shape = query.shape\n original_seq_len = query.shape[-3]\n\n # Pad to nearest multiple of 4\n target_seq_len = ((original_seq_len + 3) // 4) * 4\n pad_size = target_seq_len - original_seq_len\n\n query_4d = _pad(_rearrange(query))\n key_4d = _pad(_rearrange(key))\n value_4d = _pad(_rearrange(value))\n\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n mask_4d = (\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n )\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n",python,content +8261,22401304,"utils/nn.py",0,0,"",python,tab +8262,22401305,"utils/nn.py",2133,0,"",python,selection_command +8263,22415129,"utils/nn.py",4081,0,"",python,selection_mouse +8264,22415882,"utils/nn.py",4055,35,"",python,content +8265,22415897,"utils/nn.py",4063,0,"",python,selection_command +8266,22421238,"utils/nn.py",4055,0," # z.shape (1, 921, 1, 512)\n",python,content +8267,22421253,"utils/nn.py",4063,0,"",python,selection_command +8268,22427977,"utils/nn.py",0,0,"",python,tab +8269,22427977,"utils/nn.py",4003,0,"",python,selection_command +8270,22455936,"utils/nn.py",0,0,"",python,tab +8271,22455938,"utils/nn.py",2133,0,"",python,selection_command +8272,22460117,"utils/nn.py",3854,0,"",python,selection_mouse +8273,22462176,"utils/nn.py",3826,35,"",python,content +8274,22462199,"utils/nn.py",3834,0,"",python,selection_command +8275,22462705,"utils/nn.py",4049,0,"",python,selection_mouse +8276,22463067,"utils/nn.py",4020,35,"",python,content +8277,22463083,"utils/nn.py",4028,0,"",python,selection_command +8278,22475999,"utils/nn.py",10220,0,"",python,selection_mouse +8279,22476454,"utils/nn.py",10171,86,"",python,content +8280,22476476,"utils/nn.py",10183,0,"",python,selection_command +8281,22476946,"utils/nn.py",10128,0,"",python,selection_mouse +8282,22477294,"utils/nn.py",10089,43,"",python,content +8283,22477296,"utils/nn.py",10101,0,"",python,selection_command +8284,22482640,"utils/nn.py",10796,0,"",python,selection_mouse +8285,22483419,"utils/nn.py",10759,74,"",python,content +8286,22483429,"utils/nn.py",10771,0,"",python,selection_command +8287,22484845,"utils/nn.py",9705,0,"",python,selection_mouse +8288,22485251,"utils/nn.py",9687,41,"",python,content +8289,22488862,"utils/nn.py",8961,0,"",python,selection_mouse 
+8290,22490435,"utils/nn.py",8939,50,"",python,content +8291,22490457,"utils/nn.py",8947,0,"",python,selection_command +8292,22490847,"utils/nn.py",8939,51,"",python,content +8293,22490863,"utils/nn.py",8947,0,"",python,selection_command +8294,22491339,"utils/nn.py",8939,34,"",python,content +8295,22491355,"utils/nn.py",8947,0,"",python,selection_command +8296,22494795,"utils/nn.py",10716,0,"",python,selection_mouse +8297,22495225,"utils/nn.py",10680,46,"",python,content +8298,22495245,"utils/nn.py",10692,0,"",python,selection_command +8299,22495757,"utils/nn.py",10680,25,"",python,content +8300,22504436,"utils/nn.py",2030,0,"",python,selection_mouse +8301,22505297,"utils/nn.py",10680,0," breakpoint()\n",python,content +8302,22505297,"utils/nn.py",10692,0,"",python,selection_command +8303,22505946,"utils/nn.py",10688,0,"if query.shape == (1, 921, 1, 8, 64):\n ",python,content +8304,22505953,"utils/nn.py",10716,0,"",python,selection_command +8305,22506206,"utils/nn.py",8947,0,"# SECOND PASS: qkv.shape \n ",python,content +8306,22506208,"utils/nn.py",8947,0,"",python,selection_command +8307,22507024,"utils/nn.py",8949,0,"FIRST PASS: qkv.shape (1, 921, 1, 8, 64)\n # ",python,content +8308,22507376,"utils/nn.py",8949,0,"for temporal attention (using kv cache)\n # ",python,content +8309,22507376,"utils/nn.py",8961,0,"",python,selection_command +8310,22507708,"utils/nn.py",9687,0," # query_4d.shape (921, 4, 8, 64)\n",python,content +8311,22507711,"utils/nn.py",9705,0,"",python,selection_command +8312,22508247,"utils/nn.py",10771,0,"# FIXME (f.srambical): Investigate whether/why this is needed\n ",python,content +8313,22508258,"utils/nn.py",10796,0,"",python,selection_command +8314,22508932,"utils/nn.py",10101,0,"# mask.shape (1, 921, 1, 1, 1)\n ",python,content +8315,22508941,"utils/nn.py",10128,0,"",python,selection_command +8316,22509478,"utils/nn.py",10185,0,"mask_4d.shape (921, 1, 1, 1) (B, H-broadcast, Q-broadcast, K-broadcast)\n # ",python,content +8317,22509479,"utils/nn.py",10220,0,"",python,selection_command +8318,22509910,"utils/nn.py",4028,0,"# z.shape (1, 921, 1, 512)\n ",python,content +8319,22509912,"utils/nn.py",4049,0,"",python,selection_command +8320,22510949,"utils/nn.py",3834,0,"# z.shape (1, 1, 921, 512)\n ",python,content +8321,22510964,"utils/nn.py",3854,0,"",python,selection_command +8322,22511499,"utils/nn.py",4063,35,"",python,content +8323,22511500,"utils/nn.py",4063,0,"",python,selection_command +8324,22512497,"utils/nn.py",4063,0,"# z.shape (1, 921, 1, 512)\n ",python,content +8325,22512517,"utils/nn.py",4063,0,"",python,selection_command +8326,22515741,"utils/nn.py",0,0,"",python,tab +8327,22518206,"sample.py",0,0,"",python,tab +8328,22518206,"sample.py",2805,0,"",python,selection_command +8329,22555294,"jasmine.py",0,0,"from typing import Dict\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics_causal import DynamicsCausal\nfrom models.dynamics_maskgit import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Jasmine(nnx.Module):\n """"""World model with three components: a tokenizer, a latent action model (LAM), and a dynamics model for predicting future tokens.""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: int,\n lam_ffn_dim: int,\n 
latent_action_dim: int,\n num_latent_actions: int,\n lam_patch_size: int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n dynamics_type: str,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim = lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_latent_actions = num_latent_actions\n self.lam_patch_size = lam_patch_size\n self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n # --- Dynamics ---\n self.dynamics_type = dynamics_type\n self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n self.dropout = dropout\n self.mask_limit = mask_limit\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n if self.dynamics_type == ""maskgit"":\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n elif self.dynamics_type == ""causal"":\n self.dynamics = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=rngs,\n )\n else:\n raise ValueError(f""Invalid dynamics type: {self.dynamics_type}"")\n\n def __call__(\n self, batch: Dict[str, jax.Array], training: bool = True\n ) -> Dict[str, jax.Array]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = 
self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_logits, dyna_mask = self.dynamics(outputs, training)\n outputs[""token_logits""] = dyna_logits\n if dyna_mask is not None:\n outputs[""mask""] = dyna_mask\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n H, W = batch[""videos""].shape[2:4]\n outputs[""recon""] = self.tokenizer.decode(mle_indices, (H, W))\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n def sample_maskgit(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> jax.Array:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: patches per frame\n S: sequence length\n A: action space\n D: model latent dimension\n """"""\n assert self.dynamics_type == ""maskgit""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n def maskgit_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array], step: jax.Array\n ) -> tuple[tuple[jax.Array, jax.Array, jax.Array, jax.Array], None]:\n rng, token_idxs, mask, action_tokens = carry\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n if not isinstance(self.dynamics, DynamicsMaskGIT):\n raise TypeError(""`sample_maskgit` requires `DynamicsMaskGIT`."")\n mask_token = self.dynamics.mask_token.value # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1)\n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.transformer(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(\n 
jax.nn.softmax(final_logits), sampled_token_idxs\n )\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[tuple[jax.Array, jax.Array], None]:\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)).astype(\n bool\n ) # (B, S, N)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = jax.lax.scan(\n maskgit_step_fn, init_carry_maskgit, jnp.arange(steps)\n )\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn, initial_carry, timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=(H, W),\n )\n return final_frames\n\n def sample_causal(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> jax.Array:\n """"""\n Autoregressively samples up to `seq_len` future frames using the causal transformer backend.\n - Input frames are tokenized once.\n - Future frames are generated one at a time, each conditioned on all previous frames.\n - All frames are detokenized in a single pass at the end.\n Args:\n batch: Dict with at least ""videos"" (B, T, H, W, C)\n seq_len: total number of frames to generate (including context)\n temperature: sampling temperature\n sample_argmax: if True, use argmax instead of sampling\n Returns:\n Generated video frames (B, seq_len, H, W, C)\n """"""\n # --- Encode context frames ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n\n # --- Prepare initial token sequence ---\n # Pad with zeros for future frames\n pad_shape = (B, seq_len - T, N)\n token_idxs_full = jnp.concatenate(\n [token_idxs, jnp.zeros(pad_shape, dtype=token_idxs.dtype)], axis=1\n ) # (B, seq_len, N)\n\n # --- Prepare latent actions ---\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) # (B, S-1, )\n\n def token_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], token_idx: jax.Array\n ) -> tuple[tuple[jax.Array, jax.Array, jax.Array], None]:\n rng, token_idxs_full, action_tokens = carry\n t = token_idx // N\n n = token_idx % N\n\n # For autoregressive decoding, we only need to pass the token from the previous step.\n # The model internally uses 
a KV cache to remember previous tokens.\n current_token_sequence = jax.lax.dynamic_slice(\n token_idxs_full, (0, t, 0), (B, 1, N)\n )\n\n dyna_inputs = {\n ""video_tokens"": current_token_sequence,\n ""latent_actions"": action_tokens,\n }\n # The model will output logits for all patches in the sequence (which is just one frame).\n # FIXME: do we need the model to output logits for a single frame in order to use kv caching?\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\n # We select the logits for the specific patch `n` we are currently generating.\n next_token_logits = next_token_logits[:, 0, n, :].astype(\n jnp.float32\n ) # (B, vocab_size)\n\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B,)\n else:\n rng, step_rng = jax.random.split(rng)\n next_token = jax.random.categorical(\n step_rng, next_token_logits / temperature, axis=-1\n ) # (B,)\n\n # Insert the generated token into the full sequence.\n token_idxs_full = token_idxs_full.at[:, t, n].set(next_token)\n\n new_carry = (rng, token_idxs_full, action_tokens)\n return new_carry, None\n\n # --- Autoregressive generation ---\n future_frames = seq_len - T\n total_future_tokens = future_frames * N\n start_token_idx = T * N\n step_indices = jnp.arange(start_token_idx, start_token_idx + total_future_tokens)\n\n initial_carry = (batch[""rng""], token_idxs_full, action_tokens)\n final_carry, _ = jax.lax.scan(\n token_step_fn, initial_carry, step_indices\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames = self.tokenizer.decode(final_token_idxs, video_hw=(H, W))\n return final_frames\n\n def vq_encode(self, batch: Dict[str, jax.Array], training: bool) -> jax.Array:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\n# FIXME (f.srambical): add conversion script for old checkpoints\ndef restore_components(\n optimizer: nnx.Optimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n) -> nnx.Optimizer:\n """"""Restore pre-trained Genie components""""""\n rngs = nnx.Rngs(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n dummy_tokenizer_optimizer = nnx.Optimizer(dummy_tokenizer, dummy_tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n 
dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_tokenizer_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n optimizer.model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n dummy_lam_optimizer = nnx.Optimizer(dummy_lam, dummy_tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n restored_lam_optimizer = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_lam_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_lam_optimizer.model, restored_lam_optimizer.model)\n optimizer.model.lam = dummy_lam_optimizer.model\n # Remove the LAM decoder to save memory and avoid unnecessary computation.\n del optimizer.model.lam.decoder\n lam_checkpoint_manager.close()\n\n return optimizer\n\n\ndef _create_abstract_sharded_pytree(\n pytree_template: nnx.GraphState, sharding_spec: jax.sharding.NamedSharding\n) -> jax.Array:\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab +8330,22555295,"jasmine.py",14200,0,"",python,selection_command +8331,22573545,"jasmine.py",13950,0,"",python,selection_mouse +8332,22577009,"utils/nn.py",0,0,"",python,tab +8333,22577330,"utils/nn.py",11715,0,"",python,selection_command +8334,22579146,"utils/nn.py",11691,0,"",python,selection_command +8335,22579384,"utils/nn.py",11690,0,"",python,selection_command +8336,22579413,"utils/nn.py",11611,0,"",python,selection_command +8337,22579446,"utils/nn.py",11601,0,"",python,selection_command +8338,22579480,"utils/nn.py",11568,0,"",python,selection_command +8339,22579514,"utils/nn.py",11525,0,"",python,selection_command +8340,22579547,"utils/nn.py",11499,0,"",python,selection_command +8341,22579582,"utils/nn.py",11473,0,"",python,selection_command +8342,22579616,"utils/nn.py",11445,0,"",python,selection_command +8343,22579650,"utils/nn.py",11421,0,"",python,selection_command +8344,22579682,"utils/nn.py",11393,0,"",python,selection_command +8345,22579716,"utils/nn.py",11343,0,"",python,selection_command 
+8346,22579750,"utils/nn.py",11273,0,"",python,selection_command +8347,22579783,"utils/nn.py",11272,0,"",python,selection_command +8348,22579816,"utils/nn.py",11201,0,"",python,selection_command +8349,22580058,"utils/nn.py",11200,0,"",python,selection_command +8350,22580225,"utils/nn.py",11175,0,"",python,selection_command +8351,22580872,"utils/nn.py",11129,0,"",python,selection_command +8352,22583460,"utils/nn.py",11032,0,"",python,selection_command +8353,22583708,"utils/nn.py",10958,0,"",python,selection_command +8354,22583732,"utils/nn.py",10944,0,"",python,selection_command +8355,22583767,"utils/nn.py",10897,0,"",python,selection_command +8356,22583791,"utils/nn.py",10794,0,"",python,selection_command +8357,22583824,"utils/nn.py",10655,0,"",python,selection_command +8358,22583860,"utils/nn.py",10515,0,"",python,selection_command +8359,22583892,"utils/nn.py",10458,0,"",python,selection_command +8360,22583925,"utils/nn.py",10327,0,"",python,selection_command +8361,22583959,"utils/nn.py",10241,0,"",python,selection_command +8362,22584327,"utils/nn.py",9152,0,"",python,selection_command +8363,22585642,"utils/nn.py",9118,0,"",python,selection_command +8364,22585754,"utils/nn.py",9067,0,"",python,selection_command +8365,22585913,"utils/nn.py",9017,0,"",python,selection_command +8366,22586033,"utils/nn.py",8944,0,"",python,selection_command +8367,22645372,"utils/nn.py",0,0,"",python,tab +8368,22645373,"utils/nn.py",2133,0,"",python,selection_command +8369,22679165,"utils/nn.py",9552,0,"",python,selection_mouse +8370,22710485,"TERMINAL",0,0,"\r[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +8371,22711241,"utils/nn.py",0,0,"",python,tab +8372,22712797,"utils/nn.py",7806,0,"",python,selection_command +8373,22713143,"utils/nn.py",6930,0,"",python,selection_command +8374,22713328,"utils/nn.py",5960,0,"",python,selection_command +8375,22713480,"utils/nn.py",5012,0,"",python,selection_command +8376,22713626,"utils/nn.py",4302,0,"",python,selection_command +8377,22713748,"utils/nn.py",3446,0,"",python,selection_command +8378,22714627,"utils/nn.py",4302,0,"",python,selection_command +8379,22715110,"utils/nn.py",5012,0,"",python,selection_command +8380,22716845,"utils/nn.py",5183,0,"",python,selection_command +8381,22717740,"utils/nn.py",0,0,"",python,selection_command +8382,22717846,"utils/nn.py",1201,0,"",python,selection_command +8383,22718963,"utils/nn.py",1545,0,"",python,selection_command +8384,22719949,"utils/nn.py",1554,0,"",python,selection_command +8385,22721677,"utils/nn.py",2274,0,"",python,selection_command +8386,22723253,"utils/nn.py",2273,0,"",python,selection_command +8387,22723392,"utils/nn.py",2272,0,"",python,selection_command +8388,22724328,"utils/nn.py",2272,1,"",python,content +8389,22724526,"utils/nn.py",2272,1,"",python,content +8390,22724734,"utils/nn.py",2304,0,"",python,selection_command +8391,22725812,"utils/nn.py",2292,26,"",python,content +8392,22725815,"utils/nn.py",2300,0,"",python,selection_command +8393,22725957,"utils/nn.py",2268,0,"",python,selection_command +8394,22727602,"TERMINAL",0,0,"\r[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +8395,22728633,"TERMINAL",0,0,"\r[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +8396,22729505,"TERMINAL",0,0,"[franz.srambical@hai007.haicore.berlin:~/jafar] $ bash experiments/sample.sh ",,terminal_output +8397,22729987,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +8398,22734220,"utils/nn.py",11159,0,"",python,selection_command 
+8399,22734888,"utils/nn.py",11113,0,"",python,selection_command +8400,22735152,"utils/nn.py",11101,0,"",python,selection_command +8401,22735241,"utils/nn.py",11109,0,"",python,selection_command +8402,22735886,"utils/nn.py",11109,1,"i",python,selection_command +8403,22736033,"utils/nn.py",11109,1,"i",python,selection_command +8404,22736413,"utils/nn.py",11109,0,"",python,selection_command +8405,22736510,"utils/nn.py",11155,0,"#",python,content +8406,22736510,"utils/nn.py",11109,0,"#",python,content +8407,22736510,"utils/nn.py",11110,0,"",python,selection_keyboard +8408,22736545,"utils/nn.py",11157,0," ",python,content +8409,22736545,"utils/nn.py",11110,0," ",python,content +8410,22736546,"utils/nn.py",11111,0,"",python,selection_keyboard +8411,22736763,"utils/nn.py",11110,0,"",python,selection_command +8412,22737963,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=14001.5 task 0: running\r\n",,terminal_output +8413,22738103,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=14001.5\r\nsrun: forcing job termination\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\n[2025-07-27T15:37:37.915] error: *** STEP 14001.5 ON hai007 CANCELLED AT 2025-07-27T15:37:37 DUE to SIGNAL Killed ***\r\n",,terminal_output +8414,22738260,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=14001.5\r\nsrun: job abort in progress\r\n",,terminal_output +8415,22738441,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ [franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +8416,22738511,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +8417,22739046,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +8418,22749957,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +8419,22758013,"TERMINAL",0,0,"2025-07-27 15:37:57.818218: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8420,22759486,"TERMINAL",0,0,"2025-07-27 15:37:59.289193: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8421,22762950,"TERMINAL",0,0,"2025-07-27 15:38:02.752645: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8422,22763801,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 230, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 125, in __call__\r\n z = self.spatial_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 556, in __call__\r\n raise ValueError(\r\nValueError: Autoregressive cache shape error, expected query shape (1, 1, 1, 8, 64) instead got (1, 1, 921, 8, 64).\r\n",,terminal_output +8423,22764831,"TERMINAL",0,0,"srun: error: hai007: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +8424,22785356,"slurm/jobs/mihir/horeka/overfit_batch_tiny/sample.sh",0,0,"#!/usr/bin/env bash\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\nsource .venv/bin/activate\n\n# Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}\n\n# Example: If you want to use a specific checkpoint, set it here\n# CHECKPOINT_PATH=$ws_dir/checkpoints/3299272/dynamics-tiny-overfit-big-lr-3299272_50000/\n# Or use the latest in the directory\n# CHECKPOINT_PATH=$(ls -d $CHECKPOINT_DIR/*/ | sort | tail -n 1)\nCHECKPOINT_PATH=$CHECKPOINT_DIR/genie_1751067601_200000/\n# 
CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/\n# CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/../checkpoints/3307618/genie_1751322003_15500/\n# CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3307619/genie_1751322003_200000/\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_200000/\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\npython sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --maskgit_steps=1000 \\n --num_latent_actions=6 \\n --seq_len=16 \\n --start_frame=0\n\n# python sample.py \\n # --checkpoint ""$CHECKPOINT_PATH"" \\n # --data_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/coinrun_episodes\n",shellscript,tab +8425,22788867,"utils/nn.py",0,0,"",python,tab +8426,22792213,"experiments/sample.sh",0,0,"",shellscript,tab +8427,22794009,"experiments/sample.sh",246,0,"",shellscript,selection_command +8428,22794313,"experiments/sample.sh",267,0,"",shellscript,selection_command +8429,22797730,"experiments/sample.sh",267,1,"1",shellscript,content +8430,22801411,"TERMINAL",0,0,"[franz.srambical@hai007.haicore.berlin:~/jafar] $ bash experiments/sample.sh ",,terminal_output +8431,22801787,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +8432,22809177,"utils/nn.py",0,0,"",python,tab +8433,22813204,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +8434,22821250,"TERMINAL",0,0,"2025-07-27 15:39:00.995228: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8435,22822616,"TERMINAL",0,0,"2025-07-27 15:39:02.416171: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8436,22826050,"TERMINAL",0,0,"2025-07-27 15:39:05.850280: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8437,22826884,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 230, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 125, in __call__\r\n z = self.spatial_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 556, in __call__\r\n raise ValueError(\r\nValueError: Autoregressive cache shape error, expected query shape (1, 1, 1, 8, 64) instead got (1, 0, 921, 8, 64).\r\n",,terminal_output +8438,22827806,"TERMINAL",0,0,"srun: error: hai007: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai007.haicore.berlin:~/jafar] $ ",,terminal_output +8439,22851081,"utils/nn.py",11013,0,"",python,selection_command +8440,22851323,"utils/nn.py",10939,0,"",python,selection_command +8441,22851352,"utils/nn.py",10925,0,"",python,selection_command +8442,22851381,"utils/nn.py",10878,0,"",python,selection_command +8443,22851415,"utils/nn.py",10775,0,"",python,selection_command +8444,22851453,"utils/nn.py",10636,0,"",python,selection_command +8445,22851487,"utils/nn.py",10496,0,"",python,selection_command +8446,22851520,"utils/nn.py",10439,0,"",python,selection_command +8447,22851619,"utils/nn.py",10308,0,"",python,selection_command +8448,22851775,"utils/nn.py",10222,0,"",python,selection_command +8449,22851894,"utils/nn.py",10183,0,"",python,selection_command +8450,22852026,"utils/nn.py",10140,0,"",python,selection_command +8451,22854476,"utils/nn.py",10183,0,"",python,selection_command +8452,22854727,"utils/nn.py",10173,0,"\n ",python,content +8453,22855109,"utils/nn.py",10186,0,"#",python,content 
+8454,22855109,"utils/nn.py",10187,0,"",python,selection_keyboard +8455,22855483,"utils/nn.py",10187,0," ",python,content +8456,22855483,"utils/nn.py",10188,0,"",python,selection_keyboard +8457,22855615,"utils/nn.py",10188,0,"D",python,content +8458,22855616,"utils/nn.py",10189,0,"",python,selection_keyboard +8459,22855711,"utils/nn.py",10189,0,"I",python,content +8460,22855711,"utils/nn.py",10190,0,"",python,selection_keyboard +8461,22856761,"utils/nn.py",10189,1,"",python,content +8462,22856905,"utils/nn.py",10188,1,"",python,content +8463,22857045,"utils/nn.py",10188,0,"F",python,content +8464,22857046,"utils/nn.py",10189,0,"",python,selection_keyboard +8465,22857163,"utils/nn.py",10189,0,"X",python,content +8466,22857163,"utils/nn.py",10190,0,"",python,selection_keyboard +8467,22857192,"utils/nn.py",10190,0,"I",python,content +8468,22857192,"utils/nn.py",10191,0,"",python,selection_keyboard +8469,22857377,"utils/nn.py",10191,0,"M",python,content +8470,22857378,"utils/nn.py",10192,0,"",python,selection_keyboard +8471,22857442,"utils/nn.py",10192,0,"E",python,content +8472,22857442,"utils/nn.py",10193,0,"",python,selection_keyboard +8473,22857562,"utils/nn.py",10193,0,":",python,content +8474,22857562,"utils/nn.py",10194,0,"",python,selection_keyboard +8475,22857663,"utils/nn.py",10194,0," ",python,content +8476,22857664,"utils/nn.py",10195,0,"",python,selection_keyboard +8477,22859093,"utils/nn.py",10194,1,"",python,content +8478,22859241,"utils/nn.py",10193,1,"",python,content +8479,22859333,"utils/nn.py",10193,0," ",python,content +8480,22859334,"utils/nn.py",10194,0,"",python,selection_keyboard +8481,22859463,"utils/nn.py",10194,0,"()",python,content +8482,22859464,"utils/nn.py",10195,0,"",python,selection_keyboard +8483,22859699,"utils/nn.py",10195,0,"f",python,content +8484,22859700,"utils/nn.py",10196,0,"",python,selection_keyboard +8485,22859882,"utils/nn.py",10196,0,"s",python,content +8486,22859883,"utils/nn.py",10197,0,"",python,selection_keyboard +8487,22860012,"utils/nn.py",10197,0,".",python,content +8488,22860012,"utils/nn.py",10198,0,"",python,selection_keyboard +8489,22860381,"utils/nn.py",10197,1,"",python,content +8490,22860515,"utils/nn.py",10196,1,"",python,content +8491,22860700,"utils/nn.py",10196,0,".",python,content +8492,22860701,"utils/nn.py",10197,0,"",python,selection_keyboard +8493,22860798,"utils/nn.py",10197,0,"s",python,content +8494,22860798,"utils/nn.py",10198,0,"",python,selection_keyboard +8495,22860819,"utils/nn.py",10198,0,"r",python,content +8496,22860820,"utils/nn.py",10199,0,"",python,selection_keyboard +8497,22860948,"utils/nn.py",10199,0,"a",python,content +8498,22860949,"utils/nn.py",10200,0,"",python,selection_keyboard +8499,22860977,"utils/nn.py",10200,0,"m",python,content +8500,22860978,"utils/nn.py",10201,0,"",python,selection_keyboard +8501,22861168,"utils/nn.py",10201,0,"b",python,content +8502,22861168,"utils/nn.py",10202,0,"",python,selection_keyboard +8503,22861230,"utils/nn.py",10202,0,"i",python,content +8504,22861231,"utils/nn.py",10203,0,"",python,selection_keyboard +8505,22861281,"utils/nn.py",10203,0,"c",python,content +8506,22861282,"utils/nn.py",10204,0,"",python,selection_keyboard +8507,22861330,"utils/nn.py",10204,0,"a",python,content +8508,22861330,"utils/nn.py",10205,0,"",python,selection_keyboard +8509,22861395,"utils/nn.py",10205,0,"l",python,content +8510,22861395,"utils/nn.py",10206,0,"",python,selection_keyboard +8511,22861650,"utils/nn.py",10206,1,")",python,content 
+8512,22861651,"utils/nn.py",10207,0,"",python,selection_keyboard +8513,22862275,"utils/nn.py",10207,0,":",python,content +8514,22862275,"utils/nn.py",10208,0,"",python,selection_keyboard +8515,22862457,"utils/nn.py",10208,0," ",python,content +8516,22862458,"utils/nn.py",10209,0,"",python,selection_keyboard +8517,22863308,"utils/nn.py",10209,0,"t",python,content +8518,22863308,"utils/nn.py",10210,0,"",python,selection_keyboard +8519,22863418,"utils/nn.py",10210,0,"h",python,content +8520,22863419,"utils/nn.py",10211,0,"",python,selection_keyboard +8521,22863440,"utils/nn.py",10211,0,"e",python,content +8522,22863440,"utils/nn.py",10212,0,"",python,selection_keyboard +8523,22863509,"utils/nn.py",10212,0," ",python,content +8524,22863510,"utils/nn.py",10213,0,"",python,selection_keyboard +8525,22863604,"utils/nn.py",10213,0,"r",python,content +8526,22863604,"utils/nn.py",10214,0,"",python,selection_keyboard +8527,22863716,"utils/nn.py",10214,0,"e",python,content +8528,22863717,"utils/nn.py",10215,0,"",python,selection_keyboard +8529,22863856,"utils/nn.py",10215,0,"a",python,content +8530,22863857,"utils/nn.py",10216,0,"",python,selection_keyboard +8531,22864285,"utils/nn.py",10216,0,"r",python,content +8532,22864286,"utils/nn.py",10217,0,"",python,selection_keyboard +8533,22864422,"utils/nn.py",10217,0,"r",python,content +8534,22864423,"utils/nn.py",10218,0,"",python,selection_keyboard +8535,22864500,"utils/nn.py",10218,0,"a",python,content +8536,22864501,"utils/nn.py",10219,0,"",python,selection_keyboard +8537,22864662,"utils/nn.py",10219,0,"n",python,content +8538,22864662,"utils/nn.py",10220,0,"",python,selection_keyboard +8539,22864748,"utils/nn.py",10220,0,"g",python,content +8540,22864749,"utils/nn.py",10221,0,"",python,selection_keyboard +8541,22864817,"utils/nn.py",10221,0,"e",python,content +8542,22864817,"utils/nn.py",10222,0,"",python,selection_keyboard +8543,22864906,"utils/nn.py",10222,0," ",python,content +8544,22864907,"utils/nn.py",10223,0,"",python,selection_keyboard +8545,22865312,"utils/nn.py",10223,0,"d",python,content +8546,22865313,"utils/nn.py",10224,0,"",python,selection_keyboard +8547,22865437,"utils/nn.py",10224,0,"e",python,content +8548,22865438,"utils/nn.py",10225,0,"",python,selection_keyboard +8549,22865504,"utils/nn.py",10225,0,"p",python,content +8550,22865505,"utils/nn.py",10226,0,"",python,selection_keyboard +8551,22865615,"utils/nn.py",10226,0,"e",python,content +8552,22865615,"utils/nn.py",10227,0,"",python,selection_keyboard +8553,22865733,"utils/nn.py",10227,0,"n",python,content +8554,22865734,"utils/nn.py",10228,0,"",python,selection_keyboard +8555,22865783,"utils/nn.py",10228,0,"d",python,content +8556,22865783,"utils/nn.py",10229,0,"",python,selection_keyboard +8557,22865836,"utils/nn.py",10229,0,"s",python,content +8558,22865837,"utils/nn.py",10230,0,"",python,selection_keyboard +8559,22865935,"utils/nn.py",10230,0," ",python,content +8560,22865936,"utils/nn.py",10231,0,"",python,selection_keyboard +8561,22866007,"utils/nn.py",10231,0,"o",python,content +8562,22866007,"utils/nn.py",10232,0,"",python,selection_keyboard +8563,22866083,"utils/nn.py",10232,0,"n",python,content +8564,22866083,"utils/nn.py",10233,0,"",python,selection_keyboard +8565,22866185,"utils/nn.py",10233,0," ",python,content +8566,22866185,"utils/nn.py",10234,0,"",python,selection_keyboard +8567,22866605,"utils/nn.py",10234,0,"w",python,content +8568,22866605,"utils/nn.py",10235,0,"",python,selection_keyboard +8569,22866709,"utils/nn.py",10235,0,"h",python,content 
+8570,22866709,"utils/nn.py",10236,0,"",python,selection_keyboard +8571,22866753,"utils/nn.py",10236,0,"e",python,content +8572,22866754,"utils/nn.py",10237,0,"",python,selection_keyboard +8573,22866852,"utils/nn.py",10237,0,"t",python,content +8574,22866852,"utils/nn.py",10238,0,"",python,selection_keyboard +8575,22866950,"utils/nn.py",10238,0,"h",python,content +8576,22866950,"utils/nn.py",10239,0,"",python,selection_keyboard +8577,22867003,"utils/nn.py",10239,0,"e",python,content +8578,22867003,"utils/nn.py",10240,0,"",python,selection_keyboard +8579,22867051,"utils/nn.py",10240,0,"r",python,content +8580,22867051,"utils/nn.py",10241,0,"",python,selection_keyboard +8581,22867102,"utils/nn.py",10241,0," ",python,content +8582,22867102,"utils/nn.py",10242,0,"",python,selection_keyboard +8583,22867667,"utils/nn.py",10242,0,"t",python,content +8584,22867668,"utils/nn.py",10243,0,"",python,selection_keyboard +8585,22867705,"utils/nn.py",10243,0,"h",python,content +8586,22867705,"utils/nn.py",10244,0,"",python,selection_keyboard +8587,22867754,"utils/nn.py",10244,0,"i",python,content +8588,22867755,"utils/nn.py",10245,0,"",python,selection_keyboard +8589,22867821,"utils/nn.py",10245,0,"s",python,content +8590,22867821,"utils/nn.py",10246,0,"",python,selection_keyboard +8591,22867885,"utils/nn.py",10246,0," ",python,content +8592,22867885,"utils/nn.py",10247,0,"",python,selection_keyboard +8593,22867968,"utils/nn.py",10247,0,"i",python,content +8594,22867968,"utils/nn.py",10248,0,"",python,selection_keyboard +8595,22867987,"utils/nn.py",10248,0,"s",python,content +8596,22867987,"utils/nn.py",10249,0,"",python,selection_keyboard +8597,22868053,"utils/nn.py",10249,0," ",python,content +8598,22868053,"utils/nn.py",10250,0,"",python,selection_keyboard +8599,22868222,"utils/nn.py",10250,0,"a",python,content +8600,22868222,"utils/nn.py",10251,0,"",python,selection_keyboard +8601,22868707,"utils/nn.py",10250,1,"",python,content +8602,22869622,"utils/nn.py",10250,0,"s",python,content +8603,22869622,"utils/nn.py",10251,0,"",python,selection_keyboard +8604,22870074,"utils/nn.py",10251,0,"patial or temporal attention",python,content +8605,22870606,"utils/nn.py",10278,0,"",python,selection_command +8606,22873806,"utils/nn.py",10186,0,"",python,selection_command +8607,22874039,"utils/nn.py",10185,0,"",python,selection_command +8608,22874243,"utils/nn.py",10186,0,"",python,selection_command +8609,22874459,"utils/nn.py",10188,0,"",python,selection_command +8610,22874714,"utils/nn.py",10188,5,"",python,content +8611,22874976,"utils/nn.py",10188,0,"F",python,content +8612,22874976,"utils/nn.py",10189,0,"",python,selection_keyboard +8613,22875075,"utils/nn.py",10189,0,"I",python,content +8614,22875075,"utils/nn.py",10190,0,"",python,selection_keyboard +8615,22875191,"utils/nn.py",10190,0,"X",python,content +8616,22875192,"utils/nn.py",10191,0,"",python,selection_keyboard +8617,22875293,"utils/nn.py",10191,0,"M",python,content +8618,22875293,"utils/nn.py",10192,0,"",python,selection_keyboard +8619,22875391,"utils/nn.py",10192,0,"E",python,content +8620,22875392,"utils/nn.py",10193,0,"",python,selection_keyboard +8621,22875617,"utils/nn.py",10192,0,"",python,selection_command +8622,23942244,"TERMINAL",0,0,"salloc: Job 14001 has exceeded its time limit and its allocation has been revoked.\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\n[2025-07-27T15:57:42.014] error: *** STEP 14001.interactive ON hai007 CANCELLED AT 2025-07-27T15:57:42 DUE TO TIME LIMIT ***\r\n",,terminal_output 
+8623,23971857,"TERMINAL",0,0,"srun: error: hai007: task 0: Killed\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +8624,24079080,"experiments/sample.sh",0,0,"",shellscript,tab +8625,24080158,"experiments/sample.sh",267,1,"2",shellscript,content +8626,24081228,"utils/nn.py",0,0,"",python,tab +8627,24084805,"TERMINAL",0,0,"salloc --gpus=1 --ntasks-per-node=1 --cpus-per-task=1 --mem=100G --time=01:00:00",,terminal_command +8628,24084855,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 14007\r\n",,terminal_output +8629,24084955,"TERMINAL",0,0,"salloc: Nodes hai003 are ready for job\r\n",,terminal_output +8630,24085319,"TERMINAL",0,0,"Running inside SLURM, Job ID 14007.\r\n",,terminal_output +8631,24085412,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +8632,24085945,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +8633,24086094,"TERMINAL",0,0,"salloc --gpus=1 --ntasks-per-node=1 --cpus-per-task=1 --mem=100G",,terminal_output +8634,24086579,"TERMINAL",0,0,"\rource /home/franz.srambical/jafar/.venv/bin/activate",,terminal_output +8635,24087022,"TERMINAL",0,0,"\rexit",,terminal_output +8636,24087477,"TERMINAL",0,0,"source /home/franz.srambical/jafar/.venv/bin/activate",,terminal_output +8637,24087612,"TERMINAL",0,0,"alloc --gpus=1 --ntasks-per-node=1 --cpus-per-task=1 --mem=100G",,terminal_output +8638,24088125,"TERMINAL",0,0,"\r",,terminal_output +8639,24088229,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +8640,24088398,"TERMINAL",0,0,"s': salloc --gpus=1 --ntasks-per-node=1 --cpus-per-task=1 --mem=100G",,terminal_output +8641,24088492,"TERMINAL",0,0,"\r[1@a': salloc --gpus=1 --ntasks-per-node=1 --cpus-per-tas\rm': bash experiments/sample.sh ",,terminal_output +8642,24088624,"TERMINAL",0,0,"[1@p': bash experiments/samp[1@l': bash experiments/sampl",,terminal_output +8643,24088783,"TERMINAL",0,0,"[1@e': bash experiments/sample",,terminal_output +8644,24089065,"TERMINAL",0,0,"\r[22@[franz.srambical@hai003.haicore.berlin:~/jafar] $ bash experiments/sample",,terminal_output +8645,24089344,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +8646,24102744,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +8647,24110909,"TERMINAL",0,0,"2025-07-27 16:00:30.709679: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8648,24112386,"TERMINAL",0,0,"2025-07-27 16:00:32.186852: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8649,24113551,"utils/nn.py",10279,0,"\n ",python,content +8650,24113725,"utils/nn.py",10292,0,"b",python,content +8651,24113725,"utils/nn.py",10293,0,"",python,selection_keyboard +8652,24113775,"utils/nn.py",10293,0,"r",python,content +8653,24113776,"utils/nn.py",10294,0,"",python,selection_keyboard +8654,24113827,"utils/nn.py",10294,0,"e",python,content +8655,24113827,"utils/nn.py",10295,0,"",python,selection_keyboard +8656,24113894,"utils/nn.py",10295,0,"a",python,content +8657,24113895,"utils/nn.py",10296,0,"",python,selection_keyboard +8658,24113939,"utils/nn.py",10296,0,"k",python,content +8659,24113939,"utils/nn.py",10297,0,"",python,selection_keyboard +8660,24114114,"utils/nn.py",10297,0,"p",python,content +8661,24114114,"utils/nn.py",10298,0,"",python,selection_keyboard +8662,24114164,"utils/nn.py",10298,0,"o",python,content +8663,24114164,"utils/nn.py",10299,0,"",python,selection_keyboard +8664,24114263,"utils/nn.py",10299,0,"i",python,content +8665,24114264,"utils/nn.py",10300,0,"",python,selection_keyboard +8666,24114297,"utils/nn.py",10300,0,"n",python,content +8667,24114298,"utils/nn.py",10301,0,"",python,selection_keyboard +8668,24114385,"utils/nn.py",10301,0,"t",python,content +8669,24114386,"utils/nn.py",10302,0,"",python,selection_keyboard +8670,24114676,"utils/nn.py",10302,0,"()",python,content +8671,24114676,"utils/nn.py",10303,0,"",python,selection_keyboard +8672,24114696,"utils/nn.py",10303,1,")",python,content +8673,24114696,"utils/nn.py",10304,0,"",python,selection_keyboard +8674,24114888,"utils/nn.py",10303,0,"",python,selection_command +8675,24116036,"TERMINAL",0,0,"2025-07-27 16:00:35.838567: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8676,24116704,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 230, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 125, in __call__\r\n z = self.spatial_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 556, in __call__\r\n raise ValueError(\r\nValueError: Autoregressive cache shape error, expected query shape (1, 1, 1, 8, 64) instead got (1, 1, 921, 8, 64).\r\n",,terminal_output +8677,24117768,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +8678,24119041,"utils/nn.py",10304,0,"",python,selection_mouse +8679,24119043,"utils/nn.py",10303,0,"",python,selection_command +8680,24119062,"utils/nn.py",10303,1,")",python,selection_mouse +8681,24119064,"utils/nn.py",10304,0,"",python,selection_command +8682,24119693,"utils/nn.py",10303,0,"",python,selection_command +8683,24125263,"utils/nn.py",10279,0,"\n ",python,content +8684,24125465,"utils/nn.py",10292,0,"i",python,content +8685,24125465,"utils/nn.py",10293,0,"",python,selection_keyboard +8686,24125528,"utils/nn.py",10293,0,"f",python,content +8687,24125528,"utils/nn.py",10294,0,"",python,selection_keyboard +8688,24125643,"utils/nn.py",10294,0," ",python,content +8689,24125643,"utils/nn.py",10295,0,"",python,selection_keyboard +8690,24128410,"utils/nn.py",10295,0,"q",python,content +8691,24128410,"utils/nn.py",10296,0,"",python,selection_keyboard +8692,24128459,"utils/nn.py",10296,0,"u",python,content +8693,24128460,"utils/nn.py",10297,0,"",python,selection_keyboard 
+8694,24128548,"utils/nn.py",10297,0,"e",python,content +8695,24128548,"utils/nn.py",10298,0,"",python,selection_keyboard +8696,24128629,"utils/nn.py",10298,0,"r",python,content +8697,24128629,"utils/nn.py",10299,0,"",python,selection_keyboard +8698,24128710,"utils/nn.py",10299,0,"y",python,content +8699,24128710,"utils/nn.py",10300,0,"",python,selection_keyboard +8700,24128897,"utils/nn.py",10300,0,".",python,content +8701,24128897,"utils/nn.py",10301,0,"",python,selection_keyboard +8702,24128977,"utils/nn.py",10301,0,"s",python,content +8703,24128977,"utils/nn.py",10302,0,"",python,selection_keyboard +8704,24129095,"utils/nn.py",10302,0,"h",python,content +8705,24129095,"utils/nn.py",10303,0,"",python,selection_keyboard +8706,24129145,"utils/nn.py",10303,0,"a",python,content +8707,24129145,"utils/nn.py",10304,0,"",python,selection_keyboard +8708,24129333,"utils/nn.py",10304,0,"p",python,content +8709,24129333,"utils/nn.py",10305,0,"",python,selection_keyboard +8710,24129401,"utils/nn.py",10305,0,"e",python,content +8711,24129402,"utils/nn.py",10306,0,"",python,selection_keyboard +8712,24129566,"utils/nn.py",10306,0," ",python,content +8713,24129566,"utils/nn.py",10307,0,"",python,selection_keyboard +8714,24129700,"utils/nn.py",10307,0,"=",python,content +8715,24129700,"utils/nn.py",10308,0,"",python,selection_keyboard +8716,24129986,"utils/nn.py",10308,0,"= (1, 921, 1, 8, 64):",python,content +8717,24130165,"utils/nn.py",10328,0,"",python,selection_command +8718,24130309,"utils/nn.py",10327,0,"",python,selection_command +8719,24130865,"utils/nn.py",10353,0,"",python,selection_command +8720,24131100,"utils/nn.py",10342,0,"",python,selection_command +8721,24131623,"utils/nn.py",10342,0," ",python,content +8722,24131834,"utils/nn.py",10345,0,"",python,selection_command +8723,24131947,"utils/nn.py",10295,0,"",python,selection_command +8724,24132086,"utils/nn.py",10300,0,"",python,selection_command +8725,24132234,"utils/nn.py",10301,0,"",python,selection_command +8726,24132418,"utils/nn.py",10307,0,"",python,selection_command +8727,24132569,"utils/nn.py",10310,0,"",python,selection_command +8728,24132734,"utils/nn.py",10311,0,"",python,selection_command +8729,24133919,"utils/nn.py",10311,1,"1",python,selection_command +8730,24133953,"utils/nn.py",10311,2,"1,",python,selection_command +8731,24134102,"utils/nn.py",10311,3,"1, ",python,selection_command +8732,24134532,"utils/nn.py",10311,3,"",python,content +8733,24135379,"utils/nn.py",10310,0,"",python,selection_command +8734,24135703,"utils/nn.py",10311,0,"1, ",python,content +8735,24135709,"utils/nn.py",10311,0,"",python,selection_command +8736,24136104,"utils/nn.py",10312,0,"",python,selection_command +8737,24136274,"utils/nn.py",10313,0,"",python,selection_command +8738,24136424,"utils/nn.py",10314,0,"",python,selection_command +8739,24136943,"utils/nn.py",10315,0,"",python,selection_command +8740,24137385,"utils/nn.py",10314,0,"",python,selection_command +8741,24137837,"utils/nn.py",10314,0,"1",python,content +8742,24137837,"utils/nn.py",10315,0,"",python,selection_keyboard +8743,24137944,"utils/nn.py",10315,0,",",python,content +8744,24137944,"utils/nn.py",10316,0,"",python,selection_keyboard +8745,24138052,"utils/nn.py",10316,0," ",python,content +8746,24138052,"utils/nn.py",10317,0,"",python,selection_keyboard +8747,24138175,"utils/nn.py",10316,0,"",python,selection_command +8748,24138369,"utils/nn.py",10317,0,"",python,selection_command +8749,24138456,"utils/nn.py",10320,0,"",python,selection_command 
+8750,24138809,"utils/nn.py",10322,0,"",python,selection_command +8751,24139137,"utils/nn.py",10322,1,"",python,content +8752,24139901,"utils/nn.py",10322,1,"",python,content +8753,24140070,"utils/nn.py",10322,1,"",python,content +8754,24141613,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +8755,24141684,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +8756,24141992,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +8757,24153177,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +8758,24166279,"TERMINAL",0,0,"2025-07-27 16:01:26.023132: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8759,24167703,"TERMINAL",0,0,"2025-07-27 16:01:27.502802: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8760,24171297,"TERMINAL",0,0,"2025-07-27 16:01:31.050719: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8761,24171997,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 230, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 125, in __call__\r\n z = self.spatial_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 556, in __call__\r\n raise ValueError(\r\nValueError: Autoregressive cache shape error, expected query shape (1, 1, 1, 8, 64) instead got (1, 1, 921, 8, 64).\r\n",,terminal_output +8762,24173075,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +8763,24179367,"utils/nn.py",10280,49," if query.shape == (1, 1, 921, 8, 64):",python,selection_command +8764,24179725,"utils/nn.py",10280,78," if query.shape == (1, 1, 921, 8, 64):\n breakpoint()",python,selection_command +8765,24179980,"utils/nn.py",10280,0,"",python,selection_command +8766,24180409,"utils/nn.py",10174,0,"",python,selection_command +8767,24180620,"utils/nn.py",10279,0,"\n if query.shape == (1, 1, 921, 8, 64):\n breakpoint()",python,content +8768,24180626,"utils/nn.py",10292,0,"",python,selection_command +8769,24181056,"utils/nn.py",10295,0,"",python,selection_command +8770,24181224,"utils/nn.py",10300,0,"",python,selection_command +8771,24181370,"utils/nn.py",10301,0,"",python,selection_command +8772,24181520,"utils/nn.py",10307,0,"",python,selection_command +8773,24181657,"utils/nn.py",10310,0,"",python,selection_command +8774,24181973,"utils/nn.py",10311,0,"",python,selection_command +8775,24182223,"utils/nn.py",10312,0,"",python,selection_command 
+8776,24182381,"utils/nn.py",10314,0,"",python,selection_command +8777,24182712,"utils/nn.py",10314,1,"1",python,selection_command +8778,24182834,"utils/nn.py",10314,2,"1,",python,selection_command +8779,24182994,"utils/nn.py",10314,3,"1, ",python,selection_command +8780,24183200,"utils/nn.py",10314,3,"",python,content +8781,24183763,"utils/nn.py",10316,0,"",python,selection_command +8782,24184267,"utils/nn.py",10317,0,"",python,selection_command +8783,24184436,"utils/nn.py",10318,0,"",python,selection_command +8784,24184544,"utils/nn.py",10318,0," ",python,content +8785,24184544,"utils/nn.py",10319,0,"",python,selection_keyboard +8786,24184741,"utils/nn.py",10319,0,"1",python,content +8787,24184741,"utils/nn.py",10320,0,"",python,selection_keyboard +8788,24184909,"utils/nn.py",10320,0,",",python,content +8789,24184910,"utils/nn.py",10321,0,"",python,selection_keyboard +8790,24184974,"utils/nn.py",10320,0,"",python,selection_command +8791,24186250,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +8792,24186307,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +8793,24186538,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +8794,24197885,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +8795,24203713,"utils/nn.py",7864,0,"",python,selection_command +8796,24204537,"utils/nn.py",10292,0,"",python,selection_command +8797,24205891,"TERMINAL",0,0,"2025-07-27 16:02:05.692078: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8798,24207364,"TERMINAL",0,0,"2025-07-27 16:02:07.165904: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8799,24207929,"utils/nn.py",7864,0,"",python,selection_command +8800,24208556,"utils/nn.py",6014,0,"",python,selection_command +8801,24208984,"utils/nn.py",4315,0,"",python,selection_command +8802,24210435,"utils/nn.py",2546,0,"",python,selection_command +8803,24210840,"TERMINAL",0,0,"2025-07-27 16:02:10.640063: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8804,24211588,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 230, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 125, in __call__\r\n z = self.spatial_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 556, in __call__\r\n raise ValueError(\r\nValueError: Autoregressive cache shape error, expected query shape (1, 1, 1, 8, 64) instead got (1, 1, 921, 8, 64).\r\n",,terminal_output +8805,24212651,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +8806,24213456,"utils/nn.py",11955,0,"",python,selection_command +8807,24214029,"utils/nn.py",10425,0,"",python,selection_command +8808,24214836,"utils/nn.py",10375,0,"",python,selection_command +8809,24215166,"utils/nn.py",10346,0,"",python,selection_command +8810,24215375,"utils/nn.py",10330,28," breakpoint()",python,selection_command +8811,24215507,"utils/nn.py",10280,78," if query.shape == (1, 921, 1, 8, 64):\n breakpoint()",python,selection_command +8812,24215607,"utils/nn.py",10280,79,"",python,content +8813,24215620,"utils/nn.py",10292,0,"",python,selection_command +8814,24220993,"utils/nn.py",10280,49," if query.shape == (1, 1, 921, 8, 64):",python,selection_command +8815,24221105,"utils/nn.py",10280,78," if query.shape == (1, 1, 921, 8, 64):\n breakpoint()",python,selection_command +8816,24222148,"utils/nn.py",10280,79,"",python,content +8817,24222172,"utils/nn.py",10292,0,"",python,selection_command +8818,24222490,"utils/nn.py",10186,0,"",python,selection_command 
+8819,24222636,"utils/nn.py",10143,0,"",python,selection_command +8820,24222813,"utils/nn.py",10114,0,"",python,selection_command +8821,24222977,"utils/nn.py",10143,0,"",python,selection_command +8822,24223207,"utils/nn.py",10186,0,"",python,selection_command +8823,24223394,"utils/nn.py",10143,0,"",python,selection_command +8824,24223652,"utils/nn.py",10114,0,"",python,selection_command +8825,24223742,"utils/nn.py",10021,0,"",python,selection_command +8826,24223902,"utils/nn.py",10008,0,"",python,selection_command +8827,24224799,"utils/nn.py",10008,0,"\n if query.shape == (1, 1, 921, 8, 64):\n breakpoint()",python,content +8828,24224825,"utils/nn.py",10021,0,"",python,selection_command +8829,24225813,"utils/nn.py",10009,49," if query.shape == (1, 1, 921, 8, 64):",python,selection_command +8830,24225910,"utils/nn.py",10009,78," if query.shape == (1, 1, 921, 8, 64):\n breakpoint()",python,selection_command +8831,24226048,"utils/nn.py",10059,16," ",python,content +8832,24226048,"utils/nn.py",10009,12," ",python,content +8833,24226050,"utils/nn.py",10017,0,"",python,selection_command +8834,24227369,"TERMINAL",0,0,"[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +8835,24227421,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +8836,24227626,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +8837,24238736,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +8838,24246859,"TERMINAL",0,0,"2025-07-27 16:02:46.661059: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8839,24248362,"TERMINAL",0,0,"2025-07-27 16:02:48.163550: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8840,24251894,"TERMINAL",0,0,"2025-07-27 16:02:51.695146: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8841,24252560,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 225, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 167, in _autoreg_sample\r\n generated_vid = _sampling_fn(jasmine, batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 154, in _sampling_fn\r\n return model.sample_causal(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 379, in sample_causal\r\n final_carry, _ = jax.lax.scan(\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 352, in token_step_fn\r\n next_token_logits, _ = self.dynamics(dyna_inputs, training=False)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics_causal.py"", line 75, in __call__\r\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 230, in __call__\r\n x = block(x)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 125, in __call__\r\n z = self.spatial_attention(z)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 556, in __call__\r\n raise ValueError(\r\nValueError: Autoregressive cache shape error, expected query shape (1, 1, 1, 8, 64) instead got (1, 1, 921, 8, 64).\r\n",,terminal_output +8842,24253664,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +8843,24262783,"utils/nn.py",10009,46,"",python,content +8844,24262806,"utils/nn.py",10021,0,"",python,selection_command +8845,24263161,"utils/nn.py",10017,4,"",python,content +8846,24263241,"utils/nn.py",10016,0,"",python,selection_command +8847,24264747,"TERMINAL",0,0,"\r[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +8848,24265615,"TERMINAL",0,0,"\r[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +8849,24266005,"TERMINAL",0,0,"lbash experiments/sample.sh ",,terminal_output +8850,24266265,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +8851,24267094,"utils/nn.py",10037,0,"",python,selection_command +8852,24267346,"utils/nn.py",10130,0,"",python,selection_command +8853,24267374,"utils/nn.py",10159,0,"",python,selection_command +8854,24267400,"utils/nn.py",10202,0,"",python,selection_command +8855,24267434,"utils/nn.py",10308,0,"",python,selection_command +8856,24267467,"utils/nn.py",10347,0,"",python,selection_command 
+8857,24267500,"utils/nn.py",10433,0,"",python,selection_command +8858,24267555,"utils/nn.py",10564,0,"",python,selection_command +8859,24267590,"utils/nn.py",10621,0,"",python,selection_command +8860,24267625,"utils/nn.py",10761,0,"",python,selection_command +8861,24267641,"utils/nn.py",10900,0,"",python,selection_command +8862,24267826,"utils/nn.py",11003,0,"",python,selection_command +8863,24267963,"utils/nn.py",11050,0,"",python,selection_command +8864,24268093,"utils/nn.py",11064,0,"",python,selection_command +8865,24271330,"utils/nn.py",11050,0,"",python,selection_command +8866,24271568,"utils/nn.py",11003,0,"",python,selection_command +8867,24271592,"utils/nn.py",10900,0,"",python,selection_command +8868,24271622,"utils/nn.py",10761,0,"",python,selection_command +8869,24271661,"utils/nn.py",10621,0,"",python,selection_command +8870,24271694,"utils/nn.py",10564,0,"",python,selection_command +8871,24271727,"utils/nn.py",10433,0,"",python,selection_command +8872,24271758,"utils/nn.py",10347,0,"",python,selection_command +8873,24271792,"utils/nn.py",10308,0,"",python,selection_command +8874,24271826,"utils/nn.py",10202,0,"",python,selection_command +8875,24271857,"utils/nn.py",10159,0,"",python,selection_command +8876,24271891,"utils/nn.py",10130,0,"",python,selection_command +8877,24271927,"utils/nn.py",10037,0,"",python,selection_command +8878,24271960,"utils/nn.py",10016,0,"",python,selection_command +8879,24271992,"utils/nn.py",10008,0,"",python,selection_command +8880,24272133,"utils/nn.py",9939,0,"",python,selection_command +8881,24272297,"utils/nn.py",10008,0,"",python,selection_command +8882,24272522,"utils/nn.py",10016,0,"",python,selection_command +8883,24272755,"utils/nn.py",10009,0,"",python,selection_command +8884,24276566,"utils/nn.py",8907,0,"",python,selection_command +8885,24277219,"utils/nn.py",8899,0,"",python,selection_command +8886,24277456,"utils/nn.py",8859,0,"",python,selection_command +8887,24277484,"utils/nn.py",8761,0,"",python,selection_command +8888,24277516,"utils/nn.py",8676,0,"",python,selection_command +8889,24277524,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +8890,24277550,"utils/nn.py",8675,0,"",python,selection_command +8891,24277583,"utils/nn.py",8596,0,"",python,selection_command +8892,24277618,"utils/nn.py",8500,0,"",python,selection_command +8893,24277652,"utils/nn.py",8499,0,"",python,selection_command +8894,24277686,"utils/nn.py",8428,0,"",python,selection_command +8895,24277722,"utils/nn.py",8420,0,"",python,selection_command +8896,24277757,"utils/nn.py",8332,0,"",python,selection_command +8897,24277966,"utils/nn.py",8420,0,"",python,selection_command +8898,24278152,"utils/nn.py",8332,0,"",python,selection_command +8899,24278265,"utils/nn.py",8336,0,"",python,selection_command +8900,24278424,"utils/nn.py",8362,0,"",python,selection_command +8901,24278659,"utils/nn.py",8336,0,"",python,selection_command +8902,24280224,"utils/nn.py",2105,26,"_create_flash_attention_fn",python,selection_command +8903,24280358,"utils/nn.py",2130,0,"",python,selection_command +8904,24280756,"utils/nn.py",2054,0,"",python,selection_command +8905,24281007,"utils/nn.py",2012,0,"",python,selection_command +8906,24281033,"utils/nn.py",1973,0,"",python,selection_command +8907,24281061,"utils/nn.py",1938,0,"",python,selection_command +8908,24281093,"utils/nn.py",1904,0,"",python,selection_command +8909,24281126,"utils/nn.py",1866,0,"",python,selection_command +8910,24281305,"utils/nn.py",1809,0,"",python,selection_command +8911,24281405,"utils/nn.py",1813,0,"",python,selection_command +8912,24281594,"utils/nn.py",1817,0,"",python,selection_command +8913,24281722,"utils/nn.py",1818,0,"",python,selection_command +8914,24281978,"utils/nn.py",1836,0,"",python,selection_command +8915,24282003,"utils/nn.py",1838,0,"",python,selection_command +8916,24282026,"utils/nn.py",1841,0,"",python,selection_command +8917,24282060,"utils/nn.py",1842,0,"",python,selection_command +8918,24282190,"utils/nn.py",1860,0,"",python,selection_command +8919,24282231,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/utils/nn.py(321)attention_fn()\r\n-> if mask is not None:\r\n",,terminal_output +8920,24282458,"utils/nn.py",1842,0,"",python,selection_command +8921,24282715,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +8922,24282715,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",9464,0,"",python,selection_command +8923,24283255,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",12332,0,"",python,selection_command +8924,24283573,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",14092,0,"",python,selection_command +8925,24284711,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",16150,0,"",python,selection_command +8926,24285069,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18670,0,"",python,selection_command +8927,24285543,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",20649,0,"",python,selection_command +8928,24285931,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22802,0,"",python,selection_command +8929,24286651,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22840,0,"",python,selection_command +8930,24287025,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22802,0,"",python,selection_command +8931,24287261,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22771,0,"",python,selection_command 
+8932,24287287,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22754,0,"",python,selection_command +8933,24287407,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22741,0,"",python,selection_command +8934,24287730,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22754,0,"",python,selection_command +8935,24287900,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22741,0,"",python,selection_command +8936,24288111,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22754,0,"",python,selection_command +8937,24288263,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22771,0,"",python,selection_command +8938,24288364,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22782,0,"",python,selection_command +8939,24288524,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22783,0,"",python,selection_command +8940,24288696,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22763,0,"",python,selection_command +8941,24289146,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22759,0,"",python,selection_command +8942,24289483,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",22754,0,"",python,selection_command +8943,24289906,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21797,0,"",python,selection_command +8944,24291428,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21767,0,"",python,selection_command +8945,24314750,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21551,0,"",python,selection_command +8946,24315817,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18091,0,"",python,selection_command +8947,24318843,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17942,0,"",python,selection_command +8948,24319108,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17926,0,"",python,selection_command +8949,24319439,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17011,0,"",python,selection_command +8950,24325143,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17926,0,"",python,selection_command +8951,24325311,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",17942,0,"",python,selection_command +8952,24325514,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",18091,0,"",python,selection_command +8953,24325834,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21551,0,"",python,selection_command +8954,24326549,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21767,0,"",python,selection_command +8955,24327963,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21772,0,"",python,selection_command +8956,24328108,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21774,0,"",python,selection_command +8957,24328484,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26230,0,"",python,selection_command +8958,24336158,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21774,0,"",python,selection_command +8959,24348446,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26230,0,"",python,selection_command +8960,24382473,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26249,0,"",python,selection_command +8961,24382725,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26300,0,"",python,selection_command +8962,24382741,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26319,0,"",python,selection_command 
+8963,24382779,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26345,0,"",python,selection_command +8964,24382814,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26350,0,"",python,selection_command +8965,24382840,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26358,0,"",python,selection_command +8966,24382872,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26432,0,"",python,selection_command +8967,24382906,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26468,0,"",python,selection_command +8968,24382938,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26473,0,"",python,selection_command +8969,24382973,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26484,0,"",python,selection_command +8970,24383009,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26559,0,"",python,selection_command +8971,24383044,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26565,0,"",python,selection_command +8972,24383076,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26616,0,"",python,selection_command +8973,24383111,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26637,0,"",python,selection_command +8974,24383144,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26653,0,"",python,selection_command +8975,24383178,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26667,0,"",python,selection_command +8976,24383212,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26727,0,"",python,selection_command +8977,24383244,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26805,0,"",python,selection_command +8978,24383280,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26839,0,"",python,selection_command +8979,24383483,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26872,0,"",python,selection_command +8980,24383673,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26877,0,"",python,selection_command +8981,24383826,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26879,0,"",python,selection_command +8982,24384024,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26882,0,"",python,selection_command +8983,24384207,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26883,0,"",python,selection_command +8984,24384798,".venv/lib/python3.10/site-packages/jax/_src/numpy/ufuncs.py",0,0,"# Copyright 2018 The JAX Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n""""""\nImplements ufuncs for jax.numpy.\n""""""\n\nfrom __future__ import annotations\n\nfrom collections.abc import Callable\nfrom functools import partial\nimport operator\nfrom typing import Any\n\nimport numpy as np\n\nfrom jax._src import core\nfrom jax._src import dtypes\nfrom jax._src.api import jit\nfrom jax._src.custom_derivatives import custom_jvp\nfrom jax._src.lax import lax\nfrom jax._src.lax import other as lax_other\nfrom jax._src.typing import Array, ArrayLike\nfrom jax._src.numpy 
import error as jnp_error\nfrom jax._src.numpy import reductions\nfrom jax._src.numpy.ufunc_api import ufunc\nfrom jax._src.numpy.util import (\n check_arraylike, ensure_arraylike, promote_args, promote_args_inexact,\n promote_args_numeric, promote_dtypes_inexact, promote_dtypes_numeric,\n promote_shapes, _where, check_no_float0s)\nfrom jax._src.util import set_module\n\n\nexport = set_module('jax.numpy')\n\n_lax_const = lax._const\n\n_INT_DTYPES = {\n 16: np.int16,\n 32: np.int32,\n 64: np.int64,\n}\n\ndef _constant_like(x, const):\n return np.array(const, dtype=dtypes.dtype(x))\n\ndef _replace_inf(x: ArrayLike) -> Array:\n return lax.select(isposinf(real(x)), lax._zeros(x), x)\n\ndef _to_bool(x: Array) -> Array:\n return x if x.dtype == bool else lax.ne(x, _lax_const(x, 0))\n\n\ndef unary_ufunc(func: Callable[[ArrayLike], Array]) -> ufunc:\n """"""An internal helper function for defining unary ufuncs.""""""\n func_jit = jit(func, inline=True)\n return ufunc(func_jit, name=func.__name__, nin=1, nout=1, call=func_jit)\n\n\ndef binary_ufunc(identity: Any, reduce: Callable[..., Any] | None = None,\n accumulate: Callable[..., Any] | None = None,\n at: Callable[..., Any] | None = None,\n reduceat: Callable[..., Any] | None = None) -> Callable[[Callable[[ArrayLike, ArrayLike], Array]], ufunc]:\n """"""An internal helper function for defining binary ufuncs.""""""\n def decorator(func: Callable[[ArrayLike, ArrayLike], Array]) -> ufunc:\n func_jit = jit(func, inline=True)\n return ufunc(func_jit, name=func.__name__, nin=2, nout=1, call=func_jit,\n identity=identity, reduce=reduce, accumulate=accumulate, at=at, reduceat=reduceat)\n return decorator\n\n\n@export\n@partial(jit, inline=True)\ndef fabs(x: ArrayLike, /) -> Array:\n """"""Compute the element-wise absolute values of the real-valued input.\n\n JAX implementation of :obj:`numpy.fabs`.\n\n Args:\n x: input array or scalar. Must not have a complex dtype.\n\n Returns:\n An array with same shape as ``x`` and dtype float, containing the element-wise\n absolute values.\n\n See also:\n - :func:`jax.numpy.absolute`: Computes the absolute values of the input including\n complex dtypes.\n - :func:`jax.numpy.abs`: Computes the absolute values of the input including\n complex dtypes.\n\n Examples:\n For integer inputs:\n\n >>> x = jnp.array([-5, -9, 1, 10, 15])\n >>> jnp.fabs(x)\n Array([ 5., 9., 1., 10., 15.], dtype=float32)\n\n For float type inputs:\n\n >>> x1 = jnp.array([-1.342, 5.649, 3.927])\n >>> jnp.fabs(x1)\n Array([1.342, 5.649, 3.927], dtype=float32)\n\n For boolean inputs:\n\n >>> x2 = jnp.array([True, False])\n >>> jnp.fabs(x2)\n Array([1., 0.], dtype=float32)\n """"""\n x = ensure_arraylike('fabs', x)\n if dtypes.issubdtype(dtypes.dtype(x), np.complexfloating):\n raise TypeError(""ufunc 'fabs' does not support complex dtypes"")\n return lax.abs(*promote_args_inexact('fabs', x))\n\n\n@export\n@partial(jit, inline=True)\ndef bitwise_invert(x: ArrayLike, /) -> Array:\n """"""Alias of :func:`jax.numpy.invert`.""""""\n return lax.bitwise_not(*promote_args('bitwise_invert', x))\n\n\n@export\n@partial(jit, inline=True)\ndef bitwise_not(x: ArrayLike, /) -> Array:\n """"""Alias of :func:`jax.numpy.invert`.""""""\n return lax.bitwise_not(*promote_args('bitwise_not', x))\n\n\n@export\n@partial(jit, inline=True)\ndef invert(x: ArrayLike, /) -> Array:\n """"""Compute the bitwise inversion of an input.\n\n JAX implementation of :func:`numpy.invert`. 
This function provides the\n implementation of the ``~`` operator for JAX arrays.\n\n Args:\n x: input array, must be boolean or integer typed.\n\n Returns:\n An array of the same shape and dtype as ```x``, with the bits inverted.\n\n See also:\n - :func:`jax.numpy.bitwise_invert`: Array API alias of this function.\n - :func:`jax.numpy.logical_not`: Invert after casting input to boolean.\n\n Examples:\n >>> x = jnp.arange(5, dtype='uint8')\n >>> print(x)\n [0 1 2 3 4]\n >>> print(jnp.invert(x))\n [255 254 253 252 251]\n\n This function implements the unary ``~`` operator for JAX arrays:\n\n >>> print(~x)\n [255 254 253 252 251]\n\n :func:`invert` operates bitwise on the input, and so the meaning of its\n output may be more clear by showing the bitwise representation:\n\n >>> with jnp.printoptions(formatter={'int': lambda x: format(x, '#010b')}):\n ... print(f""{x = }"")\n ... print(f""{~x = }"")\n x = Array([0b00000000, 0b00000001, 0b00000010, 0b00000011, 0b00000100], dtype=uint8)\n ~x = Array([0b11111111, 0b11111110, 0b11111101, 0b11111100, 0b11111011], dtype=uint8)\n\n For boolean inputs, :func:`invert` is equivalent to :func:`logical_not`:\n\n >>> x = jnp.array([True, False, True, True, False])\n >>> jnp.invert(x)\n Array([False, True, False, False, True], dtype=bool)\n """"""\n return lax.bitwise_not(*promote_args('invert', x))\n\n\n@unary_ufunc\ndef negative(x: ArrayLike, /) -> Array:\n """"""Return element-wise negative values of the input.\n\n JAX implementation of :obj:`numpy.negative`.\n\n Args:\n x: input array or scalar.\n\n Returns:\n An array with same shape and dtype as ``x`` containing ``-x``.\n\n See also:\n - :func:`jax.numpy.positive`: Returns element-wise positive values of the input.\n - :func:`jax.numpy.sign`: Returns element-wise indication of sign of the input.\n\n Note:\n ``jnp.negative``, when applied over ``unsigned integer``, produces the result\n of their two's complement negation, which typically results in unexpected\n large positive values due to integer underflow.\n\n Examples:\n For real-valued inputs:\n\n >>> x = jnp.array([0., -3., 7])\n >>> jnp.negative(x)\n Array([-0., 3., -7.], dtype=float32)\n\n For complex inputs:\n\n >>> x1 = jnp.array([1-2j, -3+4j, 5-6j])\n >>> jnp.negative(x1)\n Array([-1.+2.j, 3.-4.j, -5.+6.j], dtype=complex64)\n\n For unit32:\n\n >>> x2 = jnp.array([5, 0, -7]).astype(jnp.uint32)\n >>> x2\n Array([ 5, 0, 4294967289], dtype=uint32)\n >>> jnp.negative(x2)\n Array([4294967291, 0, 7], dtype=uint32)\n """"""\n return lax.neg(*promote_args('negative', x))\n\n\n@export\n@partial(jit, inline=True)\ndef positive(x: ArrayLike, /) -> Array:\n """"""Return element-wise positive values of the input.\n\n JAX implementation of :obj:`numpy.positive`.\n\n Args:\n x: input array or scalar\n\n Returns:\n An array of same shape and dtype as ``x`` containing ``+x``.\n\n Note:\n ``jnp.positive`` is equivalent to ``x.copy()`` and is defined only for the\n types that support arithmetic operations.\n\n See also:\n - :func:`jax.numpy.negative`: Returns element-wise negative values of the input.\n - :func:`jax.numpy.sign`: Returns element-wise indication of sign of the input.\n\n Examples:\n For real-valued inputs:\n\n >>> x = jnp.array([-5, 4, 7., -9.5])\n >>> jnp.positive(x)\n Array([-5. , 4. , 7. , -9.5], dtype=float32)\n >>> x.copy()\n Array([-5. , 4. , 7. 
, -9.5], dtype=float32)\n\n For complex inputs:\n\n >>> x1 = jnp.array([1-2j, -3+4j, 5-6j])\n >>> jnp.positive(x1)\n Array([ 1.-2.j, -3.+4.j, 5.-6.j], dtype=complex64)\n >>> x1.copy()\n Array([ 1.-2.j, -3.+4.j, 5.-6.j], dtype=complex64)\n\n For uint32:\n\n >>> x2 = jnp.array([6, 0, -4]).astype(jnp.uint32)\n >>> x2\n Array([ 6, 0, 4294967292], dtype=uint32)\n >>> jnp.positive(x2)\n Array([ 6, 0, 4294967292], dtype=uint32)\n """"""\n return lax.asarray(*promote_args('positive', x))\n\n\n@export\n@partial(jit, inline=True)\ndef sign(x: ArrayLike, /) -> Array:\n r""""""Return an element-wise indication of sign of the input.\n\n JAX implementation of :obj:`numpy.sign`.\n\n The sign of ``x`` for real-valued input is:\n\n .. math::\n \mathrm{sign}(x) = \begin{cases}\n 1, & x > 0\\\n 0, & x = 0\\\n -1, & x < 0\n \end{cases}\n\n For complex valued input, ``jnp.sign`` returns a unit vector representing the\n phase. For generalized case, the sign of ``x`` is given by:\n\n .. math::\n \mathrm{sign}(x) = \begin{cases}\n \frac{x}{abs(x)}, & x \ne 0\\\n 0, & x = 0\n \end{cases}\n\n Args:\n x: input array or scalar.\n\n Returns:\n An array with same shape and dtype as ``x`` containing the sign indication.\n\n See also:\n - :func:`jax.numpy.positive`: Returns element-wise positive values of the input.\n - :func:`jax.numpy.negative`: Returns element-wise negative values of the input.\n\n Examples:\n For Real-valued inputs:\n\n >>> x = jnp.array([0., -3., 7.])\n >>> jnp.sign(x)\n Array([ 0., -1., 1.], dtype=float32)\n\n For complex-inputs:\n\n >>> x1 = jnp.array([1, 3+4j, 5j])\n >>> jnp.sign(x1)\n Array([1. +0.j , 0.6+0.8j, 0. +1.j ], dtype=complex64)\n """"""\n return lax.sign(*promote_args('sign', x))\n\n\n@export\n@partial(jit, inline=True)\ndef floor(x: ArrayLike, /) -> Array:\n """"""Round input to the nearest integer downwards.\n\n JAX implementation of :obj:`numpy.floor`.\n\n Args:\n x: input array or scalar. Must not have complex dtype.\n\n Returns:\n An array with same shape and dtype as ``x`` containing the values rounded to\n the nearest integer that is less than or equal to the value itself.\n\n See also:\n - :func:`jax.numpy.fix`: Rounds the input to the nearest integer towards zero.\n - :func:`jax.numpy.trunc`: Rounds the input to the nearest integer towards\n zero.\n - :func:`jax.numpy.ceil`: Rounds the input up to the nearest integer.\n\n Examples:\n >>> key = jax.random.key(42)\n >>> x = jax.random.uniform(key, (3, 3), minval=-5, maxval=5)\n >>> with jnp.printoptions(precision=2, suppress=True):\n ... print(x)\n [[-0.11 1.8 1.16]\n [ 0.61 -0.49 0.86]\n [-4.25 2.75 1.99]]\n >>> jnp.floor(x)\n Array([[-1., 1., 1.],\n [ 0., -1., 0.],\n [-5., 2., 1.]], dtype=float32)\n """"""\n x = ensure_arraylike('floor', x)\n if dtypes.isdtype(dtypes.dtype(x), ('integral', 'bool')):\n return x\n return lax.floor(*promote_args_inexact('floor', x))\n\n\n@export\n@partial(jit, inline=True)\ndef ceil(x: ArrayLike, /) -> Array:\n """"""Round input to the nearest integer upwards.\n\n JAX implementation of :obj:`numpy.ceil`.\n\n Args:\n x: input array or scalar. 
Must not have complex dtype.\n\n Returns:\n An array with same shape and dtype as ``x`` containing the values rounded to\n the nearest integer that is greater than or equal to the value itself.\n\n See also:\n - :func:`jax.numpy.fix`: Rounds the input to the nearest integer towards zero.\n - :func:`jax.numpy.trunc`: Rounds the input to the nearest integer towards\n zero.\n - :func:`jax.numpy.floor`: Rounds the input down to the nearest integer.\n\n Examples:\n >>> key = jax.random.key(1)\n >>> x = jax.random.uniform(key, (3, 3), minval=-5, maxval=5)\n >>> with jnp.printoptions(precision=2, suppress=True):\n ... print(x)\n [[-0.61 0.34 -0.54]\n [-0.62 3.97 0.59]\n [ 4.84 3.42 -1.14]]\n >>> jnp.ceil(x)\n Array([[-0., 1., -0.],\n [-0., 4., 1.],\n [ 5., 4., -1.]], dtype=float32)\n """"""\n x = ensure_arraylike('ceil', x)\n if dtypes.isdtype(dtypes.dtype(x), ('integral', 'bool')):\n return lax.asarray(x)\n return lax.ceil(*promote_args_inexact('ceil', x))\n\n\n@export\n@partial(jit, inline=True)\ndef exp(x: ArrayLike, /) -> Array:\n """"""Calculate element-wise exponential of the input.\n\n JAX implementation of :obj:`numpy.exp`.\n\n Args:\n x: input array or scalar\n\n Returns:\n An array containing the exponential of each element in ``x``, promotes to\n inexact dtype.\n\n See also:\n - :func:`jax.numpy.log`: Calculates element-wise logarithm of the input.\n - :func:`jax.numpy.expm1`: Calculates :math:`e^x-1` of each element of the\n input.\n - :func:`jax.numpy.exp2`: Calculates base-2 exponential of each element of\n the input.\n\n Examples:\n ``jnp.exp`` follows the properties of exponential such as :math:`e^{(a+b)}\n = e^a * e^b`.\n\n >>> x1 = jnp.array([2, 4, 3, 1])\n >>> x2 = jnp.array([1, 3, 2, 3])\n >>> with jnp.printoptions(precision=2, suppress=True):\n ... print(jnp.exp(x1+x2))\n [ 20.09 1096.63 148.41 54.6 ]\n >>> with jnp.printoptions(precision=2, suppress=True):\n ... print(jnp.exp(x1)*jnp.exp(x2))\n [ 20.09 1096.63 148.41 54.6 ]\n\n This property holds for complex input also:\n\n >>> jnp.allclose(jnp.exp(3-4j), jnp.exp(3)*jnp.exp(-4j))\n Array(True, dtype=bool)\n """"""\n return lax.exp(*promote_args_inexact('exp', x))\n\n\n@export\n@partial(jit, inline=True)\ndef log(x: ArrayLike, /) -> Array:\n """"""Calculate element-wise natural logarithm of the input.\n\n JAX implementation of :obj:`numpy.log`.\n\n Args:\n x: input array or scalar.\n\n Returns:\n An array containing the logarithm of each element in ``x``, promotes to inexact\n dtype.\n\n See also:\n - :func:`jax.numpy.exp`: Calculates element-wise exponential of the input.\n - :func:`jax.numpy.log2`: Calculates base-2 logarithm of each element of input.\n - :func:`jax.numpy.log1p`: Calculates element-wise logarithm of one plus input.\n\n Examples:\n ``jnp.log`` and ``jnp.exp`` are inverse functions of each other. 
Applying\n ``jnp.log`` on the result of ``jnp.exp(x)`` yields the original input ``x``.\n\n >>> x = jnp.array([2, 3, 4, 5])\n >>> jnp.log(jnp.exp(x))\n Array([2., 3., 4., 5.], dtype=float32)\n\n Using ``jnp.log`` we can demonstrate well-known properties of logarithms, such\n as :math:`log(a*b) = log(a)+log(b)`.\n\n >>> x1 = jnp.array([2, 1, 3, 1])\n >>> x2 = jnp.array([1, 3, 2, 4])\n >>> jnp.allclose(jnp.log(x1*x2), jnp.log(x1)+jnp.log(x2))\n Array(True, dtype=bool)\n """"""\n out = lax.log(*promote_args_inexact('log', x))\n jnp_error._set_error_if_nan(out)\n return out\n\n\n@export\n@partial(jit, inline=True)\ndef expm1(x: ArrayLike, /) -> Array:\n """"""Calculate ``exp(x)-1`` of each element of the input.\n\n JAX implementation of :obj:`numpy.expm1`.\n\n Args:\n x: input array or scalar.\n\n Returns:\n An array containing ``exp(x)-1`` of each element in ``x``, promotes to inexact\n dtype.\n\n Note:\n ``jnp.expm1`` has much higher precision than the naive computation of\n ``exp(x)-1`` for small values of ``x``.\n\n See also:\n - :func:`jax.numpy.log1p`: Calculates element-wise logarithm of one plus input.\n - :func:`jax.numpy.exp`: Calculates element-wise exponential of the input.\n - :func:`jax.numpy.exp2`: Calculates base-2 exponential of each element of\n the input.\n\n Examples:\n >>> x = jnp.array([2, -4, 3, -1])\n >>> with jnp.printoptions(precision=2, suppress=True):\n ... print(jnp.expm1(x))\n [ 6.39 -0.98 19.09 -0.63]\n >>> with jnp.printoptions(precision=2, suppress=True):\n ... print(jnp.exp(x)-1)\n [ 6.39 -0.98 19.09 -0.63]\n\n For values very close to 0, ``jnp.expm1(x)`` is much more accurate than\n ``jnp.exp(x)-1``:\n\n >>> x1 = jnp.array([1e-4, 1e-6, 2e-10])\n >>> jnp.expm1(x1)\n Array([1.0000500e-04, 1.0000005e-06, 2.0000000e-10], dtype=float32)\n >>> jnp.exp(x1)-1\n Array([1.00016594e-04, 9.53674316e-07, 0.00000000e+00], dtype=float32)\n """"""\n return lax.expm1(*promote_args_inexact('expm1', x))\n\n\n@export\n@partial(jit, inline=True)\ndef log1p(x: ArrayLike, /) -> Array:\n """"""Calculates element-wise logarithm of one plus input, ``log(x+1)``.\n\n JAX implementation of :obj:`numpy.log1p`.\n\n Args:\n x: input array or scalar.\n\n Returns:\n An array containing the logarithm of one plus of each element in ``x``,\n promotes to inexact dtype.\n\n Note:\n ``jnp.log1p`` is more accurate than when using the naive computation of\n ``log(x+1)`` for small values of ``x``.\n\n See also:\n - :func:`jax.numpy.expm1`: Calculates :math:`e^x-1` of each element of the\n input.\n - :func:`jax.numpy.log2`: Calculates base-2 logarithm of each element of input.\n - :func:`jax.numpy.log`: Calculates element-wise logarithm of the input.\n\n Examples:\n >>> x = jnp.array([2, 5, 9, 4])\n >>> jnp.allclose(jnp.log1p(x), jnp.log(x+1))\n Array(True, dtype=bool)\n\n For values very close to 0, ``jnp.log1p(x)`` is more accurate than\n ``jnp.log(x+1)``:\n\n >>> x1 = jnp.array([1e-4, 1e-6, 2e-10])\n >>> jnp.expm1(jnp.log1p(x1)) # doctest: +SKIP\n Array([1.00000005e-04, 9.99999997e-07, 2.00000003e-10], dtype=float32)\n >>> jnp.expm1(jnp.log(x1+1)) # doctest: +SKIP\n Array([1.000166e-04, 9.536743e-07, 0.000000e+00], dtype=float32)\n """"""\n out = lax.log1p(*promote_args_inexact('log1p', x))\n jnp_error._set_error_if_nan(out)\n return out\n\n\n@export\n@partial(jit, inline=True)\ndef sin(x: ArrayLike, /) -> Array:\n """"""Compute a trigonometric sine of each element of input.\n\n JAX implementation of :obj:`numpy.sin`.\n\n Args:\n x: array or scalar. 
Angle in radians.\n\n Returns:\n An array containing the sine of each element in ``x``, promotes to inexact\n dtype.\n\n See also:\n - :func:`jax.numpy.cos`: Computes a trigonometric cosine of each element of\n input.\n - :func:`jax.numpy.tan`: Computes a trigonometric tangent of each element of\n input.\n - :func:`jax.numpy.arcsin` and :func:`jax.numpy.asin`: Computes the inverse of\n trigonometric sine of each element of input.\n\n Examples:\n >>> pi = jnp.pi\n >>> x = jnp.array([pi/4, pi/2, 3*pi/4, pi])\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... print(jnp.sin(x))\n [ 0.707 1. 0.707 -0. ]\n """"""\n out = lax.sin(*promote_args_inexact('sin', x))\n jnp_error._set_error_if_nan(out)\n return out\n\n\n@export\n@partial(jit, inline=True)\ndef cos(x: ArrayLike, /) -> Array:\n """"""Compute a trigonometric cosine of each element of input.\n\n JAX implementation of :obj:`numpy.cos`.\n\n Args:\n x: scalar or array. Angle in radians.\n\n Returns:\n An array containing the cosine of each element in ``x``, promotes to inexact\n dtype.\n\n See also:\n - :func:`jax.numpy.sin`: Computes a trigonometric sine of each element of input.\n - :func:`jax.numpy.tan`: Computes a trigonometric tangent of each element of\n input.\n - :func:`jax.numpy.arccos` and :func:`jax.numpy.acos`: Computes the inverse of\n trigonometric cosine of each element of input.\n\n Examples:\n >>> pi = jnp.pi\n >>> x = jnp.array([pi/4, pi/2, 3*pi/4, 5*pi/6])\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... print(jnp.cos(x))\n [ 0.707 -0. -0.707 -0.866]\n """"""\n out = lax.cos(*promote_args_inexact('cos', x))\n jnp_error._set_error_if_nan(out)\n return out\n\n\n@export\n@partial(jit, inline=True)\ndef tan(x: ArrayLike, /) -> Array:\n """"""Compute a trigonometric tangent of each element of input.\n\n JAX implementation of :obj:`numpy.tan`.\n\n Args:\n x: scalar or array. Angle in radians.\n\n Returns:\n An array containing the tangent of each element in ``x``, promotes to inexact\n dtype.\n\n See also:\n - :func:`jax.numpy.sin`: Computes a trigonometric sine of each element of input.\n - :func:`jax.numpy.cos`: Computes a trigonometric cosine of each element of\n input.\n - :func:`jax.numpy.arctan` and :func:`jax.numpy.atan`: Computes the inverse of\n trigonometric tangent of each element of input.\n\n Examples:\n >>> pi = jnp.pi\n >>> x = jnp.array([0, pi/6, pi/4, 3*pi/4, 5*pi/6])\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... print(jnp.tan(x))\n [ 0. 0.577 1. -1. 
-0.577]\n """"""\n out = lax.tan(*promote_args_inexact('tan', x))\n jnp_error._set_error_if_nan(out)\n return out\n\n\n@export\n@partial(jit, inline=True)\ndef arcsin(x: ArrayLike, /) -> Array:\n r""""""Compute element-wise inverse of trigonometric sine of input.\n\n JAX implementation of :obj:`numpy.arcsin`.\n\n Args:\n x: input array or scalar.\n\n Returns:\n An array containing the inverse trigonometric sine of each element of ``x``\n in radians in the range ``[-pi/2, pi/2]``, promoting to inexact dtype.\n\n Note:\n - ``jnp.arcsin`` returns ``nan`` when ``x`` is real-valued and not in the closed\n interval ``[-1, 1]``.\n - ``jnp.arcsin`` follows the branch cut convention of :obj:`numpy.arcsin` for\n complex inputs.\n\n See also:\n - :func:`jax.numpy.sin`: Computes a trigonometric sine of each element of input.\n - :func:`jax.numpy.arccos` and :func:`jax.numpy.acos`: Computes the inverse of\n trigonometric cosine of each element of input.\n - :func:`jax.numpy.arctan` and :func:`jax.numpy.atan`: Computes the inverse of\n trigonometric tangent of each element of input.\n\n Examples:\n >>> x = jnp.array([-2, -1, -0.5, 0, 0.5, 1, 2])\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.arcsin(x)\n Array([ nan, -1.571, -0.524, 0. , 0.524, 1.571, nan], dtype=float32)\n\n For complex-valued inputs:\n\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.arcsin(3+4j)\n Array(0.634+2.306j, dtype=complex64, weak_type=True)\n """"""\n out = lax.asin(*promote_args_inexact('arcsin', x))\n jnp_error._set_error_if_nan(out)\n return out\n\n\n@export\n@partial(jit, inline=True)\ndef arccos(x: ArrayLike, /) -> Array:\n """"""Compute element-wise inverse of trigonometric cosine of input.\n\n JAX implementation of :obj:`numpy.arccos`.\n\n Args:\n x: input array or scalar.\n\n Returns:\n An array containing the inverse trigonometric cosine of each element of ``x``\n in radians in the range ``[0, pi]``, promoting to inexact dtype.\n\n Note:\n - ``jnp.arccos`` returns ``nan`` when ``x`` is real-valued and not in the closed\n interval ``[-1, 1]``.\n - ``jnp.arccos`` follows the branch cut convention of :obj:`numpy.arccos` for\n complex inputs.\n\n See also:\n - :func:`jax.numpy.cos`: Computes a trigonometric cosine of each element of\n input.\n - :func:`jax.numpy.arcsin` and :func:`jax.numpy.asin`: Computes the inverse of\n trigonometric sine of each element of input.\n - :func:`jax.numpy.arctan` and :func:`jax.numpy.atan`: Computes the inverse of\n trigonometric tangent of each element of input.\n\n Examples:\n >>> x = jnp.array([-2, -1, -0.5, 0, 0.5, 1, 2])\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.arccos(x)\n Array([ nan, 3.142, 2.094, 1.571, 1.047, 0. , nan], dtype=float32)\n\n For complex inputs:\n\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... 
jnp.arccos(4-1j)\n Array(0.252+2.097j, dtype=complex64, weak_type=True)\n """"""\n out = lax.acos(*promote_args_inexact('arccos', x))\n jnp_error._set_error_if_nan(out)\n return out\n\n\n@export\n@partial(jit, inline=True)\ndef arctan(x: ArrayLike, /) -> Array:\n """"""Compute element-wise inverse of trigonometric tangent of input.\n\n JAX implement of :obj:`numpy.arctan`.\n\n Args:\n x: input array or scalar.\n\n Returns:\n An array containing the inverse trigonometric tangent of each element ``x``\n in radians in the range ``[-pi/2, pi/2]``, promoting to inexact dtype.\n\n Note:\n ``jnp.arctan`` follows the branch cut convention of :obj:`numpy.arctan` for\n complex inputs.\n\n See also:\n - :func:`jax.numpy.tan`: Computes a trigonometric tangent of each element of\n input.\n - :func:`jax.numpy.arcsin` and :func:`jax.numpy.asin`: Computes the inverse of\n trigonometric sine of each element of input.\n - :func:`jax.numpy.arccos` and :func:`jax.numpy.atan`: Computes the inverse of\n trigonometric cosine of each element of input.\n\n Examples:\n >>> x = jnp.array([-jnp.inf, -20, -1, 0, 1, 20, jnp.inf])\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.arctan(x)\n Array([-1.571, -1.521, -0.785, 0. , 0.785, 1.521, 1.571], dtype=float32)\n\n For complex-valued inputs:\n\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.arctan(2+7j)\n Array(1.532+0.133j, dtype=complex64, weak_type=True)\n """"""\n return lax.atan(*promote_args_inexact('arctan', x))\n\n\n@export\n@partial(jit, inline=True)\ndef sinh(x: ArrayLike, /) -> Array:\n r""""""Calculate element-wise hyperbolic sine of input.\n\n JAX implementation of :obj:`numpy.sinh`.\n\n The hyperbolic sine is defined by:\n\n .. math::\n\n sinh(x) = \frac{e^x - e^{-x}}{2}\n\n Args:\n x: input array or scalar.\n\n Returns:\n An array containing the hyperbolic sine of each element of ``x``, promoting\n to inexact dtype.\n\n Note:\n ``jnp.sinh`` is equivalent to computing ``-1j * jnp.sin(1j * x)``.\n\n See also:\n - :func:`jax.numpy.cosh`: Computes the element-wise hyperbolic cosine of the\n input.\n - :func:`jax.numpy.tanh`: Computes the element-wise hyperbolic tangent of the\n input.\n - :func:`jax.numpy.arcsinh`: Computes the element-wise inverse of hyperbolic\n sine of the input.\n\n Examples:\n >>> x = jnp.array([[-2, 3, 5],\n ... [0, -1, 4]])\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.sinh(x)\n Array([[-3.627, 10.018, 74.203],\n [ 0. , -1.175, 27.29 ]], dtype=float32)\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... -1j * jnp.sin(1j * x)\n Array([[-3.627+0.j, 10.018-0.j, 74.203-0.j],\n [ 0. -0.j, -1.175+0.j, 27.29 -0.j]], dtype=complex64, weak_type=True)\n\n For complex-valued input:\n\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.sinh(3-2j)\n Array(-4.169-9.154j, dtype=complex64, weak_type=True)\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... -1j * jnp.sin(1j * (3-2j))\n Array(-4.169-9.154j, dtype=complex64, weak_type=True)\n """"""\n return lax.sinh(*promote_args_inexact('sinh', x))\n\n\n@export\n@partial(jit, inline=True)\ndef cosh(x: ArrayLike, /) -> Array:\n r""""""Calculate element-wise hyperbolic cosine of input.\n\n JAX implementation of :obj:`numpy.cosh`.\n\n The hyperbolic cosine is defined by:\n\n .. 
math::\n\n cosh(x) = \frac{e^x + e^{-x}}{2}\n\n Args:\n x: input array or scalar.\n\n Returns:\n An array containing the hyperbolic cosine of each element of ``x``, promoting\n to inexact dtype.\n\n Note:\n ``jnp.cosh`` is equivalent to computing ``jnp.cos(1j * x)``.\n\n See also:\n - :func:`jax.numpy.sinh`: Computes the element-wise hyperbolic sine of the input.\n - :func:`jax.numpy.tanh`: Computes the element-wise hyperbolic tangent of the\n input.\n - :func:`jax.numpy.arccosh`: Computes the element-wise inverse of hyperbolic\n cosine of the input.\n\n Examples:\n >>> x = jnp.array([[3, -1, 0],\n ... [4, 7, -5]])\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.cosh(x)\n Array([[ 10.068, 1.543, 1. ],\n [ 27.308, 548.317, 74.21 ]], dtype=float32)\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.cos(1j * x)\n Array([[ 10.068+0.j, 1.543+0.j, 1. +0.j],\n [ 27.308+0.j, 548.317+0.j, 74.21 +0.j]], dtype=complex64, weak_type=True)\n\n For complex-valued input:\n\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.cosh(5+1j)\n Array(40.096+62.44j, dtype=complex64, weak_type=True)\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.cos(1j * (5+1j))\n Array(40.096+62.44j, dtype=complex64, weak_type=True)\n """"""\n return lax.cosh(*promote_args_inexact('cosh', x))\n\n\n@export\n@partial(jit, inline=True)\ndef arcsinh(x: ArrayLike, /) -> Array:\n r""""""Calculate element-wise inverse of hyperbolic sine of input.\n\n JAX implementation of :obj:`numpy.arcsinh`.\n\n The inverse of hyperbolic sine is defined by:\n\n .. math::\n\n arcsinh(x) = \ln(x + \sqrt{1 + x^2})\n\n Args:\n x: input array or scalar.\n\n Returns:\n An array of same shape as ``x`` containing the inverse of hyperbolic sine of\n each element of ``x``, promoting to inexact dtype.\n\n Note:\n - ``jnp.arcsinh`` returns ``nan`` for values outside the range ``(-inf, inf)``.\n - ``jnp.arcsinh`` follows the branch cut convention of :obj:`numpy.arcsinh`\n for complex inputs.\n\n See also:\n - :func:`jax.numpy.sinh`: Computes the element-wise hyperbolic sine of the input.\n - :func:`jax.numpy.arccosh`: Computes the element-wise inverse of hyperbolic\n cosine of the input.\n - :func:`jax.numpy.arctanh`: Computes the element-wise inverse of hyperbolic\n tangent of the input.\n\n Examples:\n >>> x = jnp.array([[-2, 3, 1],\n ... [4, 9, -5]])\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.arcsinh(x)\n Array([[-1.444, 1.818, 0.881],\n [ 2.095, 2.893, -2.312]], dtype=float32)\n\n For complex-valued inputs:\n\n >>> x1 = jnp.array([4-3j, 2j])\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.arcsinh(x1)\n Array([2.306-0.634j, 1.317+1.571j], dtype=complex64)\n """"""\n return lax.asinh(*promote_args_inexact('arcsinh', x))\n\n\n@export\n@jit\ndef arccosh(x: ArrayLike, /) -> Array:\n r""""""Calculate element-wise inverse of hyperbolic cosine of input.\n\n JAX implementation of :obj:`numpy.arccosh`.\n\n The inverse of hyperbolic cosine is defined by:\n\n .. 
math::\n\n arccosh(x) = \ln(x + \sqrt{x^2 - 1})\n\n Args:\n x: input array or scalar.\n\n Returns:\n An array of same shape as ``x`` containing the inverse of hyperbolic cosine\n of each element of ``x``, promoting to inexact dtype.\n\n Note:\n - ``jnp.arccosh`` returns ``nan`` for real-values in the range ``[-inf, 1)``.\n - ``jnp.arccosh`` follows the branch cut convention of :obj:`numpy.arccosh`\n for complex inputs.\n\n See also:\n - :func:`jax.numpy.cosh`: Computes the element-wise hyperbolic cosine of the\n input.\n - :func:`jax.numpy.arcsinh`: Computes the element-wise inverse of hyperbolic\n sine of the input.\n - :func:`jax.numpy.arctanh`: Computes the element-wise inverse of hyperbolic\n tangent of the input.\n\n Examples:\n >>> x = jnp.array([[1, 3, -4],\n ... [-5, 2, 7]])\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.arccosh(x)\n Array([[0. , 1.763, nan],\n [ nan, 1.317, 2.634]], dtype=float32)\n\n For complex-valued input:\n\n >>> x1 = jnp.array([-jnp.inf+0j, 1+2j, -5+0j])\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.arccosh(x1)\n Array([ inf+3.142j, 1.529+1.144j, 2.292+3.142j], dtype=complex64)\n """"""\n # Note: arccosh is multi-valued for complex input, and lax.acosh\n # uses a different convention than np.arccosh.\n result = lax.acosh(*promote_args_inexact(""arccosh"", x))\n jnp_error._set_error_if_nan(result)\n if dtypes.issubdtype(result.dtype, np.complexfloating):\n result = _where(real(result) < 0, lax.neg(result), result)\n return result\n\n\n@export\n@partial(jit, inline=True)\ndef tanh(x: ArrayLike, /) -> Array:\n r""""""Calculate element-wise hyperbolic tangent of input.\n\n JAX implementation of :obj:`numpy.tanh`.\n\n The hyperbolic tangent is defined by:\n\n .. math::\n\n tanh(x) = \frac{sinh(x)}{cosh(x)} = \frac{e^x - e^{-x}}{e^x + e^{-x}}\n\n Args:\n x: input array or scalar.\n\n Returns:\n An array containing the hyperbolic tangent of each element of ``x``, promoting\n to inexact dtype.\n\n Note:\n ``jnp.tanh`` is equivalent to computing ``-1j * jnp.tan(1j * x)``.\n\n See also:\n - :func:`jax.numpy.sinh`: Computes the element-wise hyperbolic sine of the input.\n - :func:`jax.numpy.cosh`: Computes the element-wise hyperbolic cosine of the\n input.\n - :func:`jax.numpy.arctanh`: Computes the element-wise inverse of hyperbolic\n tangent of the input.\n\n Examples:\n >>> x = jnp.array([[-1, 0, 1],\n ... [3, -2, 5]])\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.tanh(x)\n Array([[-0.762, 0. , 0.762],\n [ 0.995, -0.964, 1. ]], dtype=float32)\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... -1j * jnp.tan(1j * x)\n Array([[-0.762+0.j, 0. -0.j, 0.762-0.j],\n [ 0.995-0.j, -0.964+0.j, 1. -0.j]], dtype=complex64, weak_type=True)\n\n For complex-valued input:\n\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.tanh(2-5j)\n Array(1.031+0.021j, dtype=complex64, weak_type=True)\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... -1j * jnp.tan(1j * (2-5j))\n Array(1.031+0.021j, dtype=complex64, weak_type=True)\n """"""\n return lax.tanh(*promote_args_inexact('tanh', x))\n\n\n@export\n@partial(jit, inline=True)\ndef arctanh(x: ArrayLike, /) -> Array:\n r""""""Calculate element-wise inverse of hyperbolic tangent of input.\n\n JAX implementation of :obj:`numpy.arctanh`.\n\n The inverse of hyperbolic tangent is defined by:\n\n .. 
math::\n\n arctanh(x) = \frac{1}{2} [\ln(1 + x) - \ln(1 - x)]\n\n Args:\n x: input array or scalar.\n\n Returns:\n An array of same shape as ``x`` containing the inverse of hyperbolic tangent\n of each element of ``x``, promoting to inexact dtype.\n\n Note:\n - ``jnp.arctanh`` returns ``nan`` for real-values outside the range ``[-1, 1]``.\n - ``jnp.arctanh`` follows the branch cut convention of :obj:`numpy.arctanh`\n for complex inputs.\n\n See also:\n - :func:`jax.numpy.tanh`: Computes the element-wise hyperbolic tangent of the\n input.\n - :func:`jax.numpy.arcsinh`: Computes the element-wise inverse of hyperbolic\n sine of the input.\n - :func:`jax.numpy.arccosh`: Computes the element-wise inverse of hyperbolic\n cosine of the input.\n\n Examples:\n >>> x = jnp.array([-2, -1, -0.5, 0, 0.5, 1, 2])\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.arctanh(x)\n Array([ nan, -inf, -0.549, 0. , 0.549, inf, nan], dtype=float32)\n\n For complex-valued input:\n\n >>> x1 = jnp.array([-2+0j, 3+0j, 4-1j])\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.arctanh(x1)\n Array([-0.549+1.571j, 0.347+1.571j, 0.239-1.509j], dtype=complex64)\n """"""\n out = lax.atanh(*promote_args_inexact('arctanh', x))\n jnp_error._set_error_if_nan(out)\n return out\n\n\n@export\n@partial(jit, inline=True)\ndef sqrt(x: ArrayLike, /) -> Array:\n """"""Calculates element-wise non-negative square root of the input array.\n\n JAX implementation of :obj:`numpy.sqrt`.\n\n Args:\n x: input array or scalar.\n\n Returns:\n An array containing the non-negative square root of the elements of ``x``.\n\n Note:\n - For real-valued negative inputs, ``jnp.sqrt`` produces a ``nan`` output.\n - For complex-valued negative inputs, ``jnp.sqrt`` produces a ``complex`` output.\n\n See also:\n - :func:`jax.numpy.square`: Calculates the element-wise square of the input.\n - :func:`jax.numpy.power`: Calculates the element-wise base ``x1`` exponential\n of ``x2``.\n\n Examples:\n >>> x = jnp.array([-8-6j, 1j, 4])\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.sqrt(x)\n Array([1. -3.j , 0.707+0.707j, 2. +0.j ], dtype=complex64)\n >>> jnp.sqrt(-1)\n Array(nan, dtype=float32, weak_type=True)\n """"""\n out = lax.sqrt(*promote_args_inexact('sqrt', x))\n jnp_error._set_error_if_nan(out)\n return out\n\n\n@export\n@partial(jit, inline=True)\ndef cbrt(x: ArrayLike, /) -> Array:\n """"""Calculates element-wise cube root of the input array.\n\n JAX implementation of :obj:`numpy.cbrt`.\n\n Args:\n x: input array or scalar. ``complex`` dtypes are not supported.\n\n Returns:\n An array containing the cube root of the elements of ``x``.\n\n See also:\n - :func:`jax.numpy.sqrt`: Calculates the element-wise non-negative square root\n of the input.\n - :func:`jax.numpy.square`: Calculates the element-wise square of the input.\n\n Examples:\n >>> x = jnp.array([[216, 125, 64],\n ... [-27, -8, -1]])\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... 
jnp.cbrt(x)\n Array([[ 6., 5., 4.],\n [-3., -2., -1.]], dtype=float32)\n """"""\n return lax.cbrt(*promote_args_inexact('cbrt', x))\n\n\ndef _add_at(a: Array, indices: Any, b: ArrayLike) -> Array:\n """"""Implementation of jnp.add.at.""""""\n if a.dtype == bool:\n a = a.astype('int32')\n b = lax.convert_element_type(b, bool).astype('int32')\n return a.at[indices].add(b).astype(bool)\n return a.at[indices].add(b)\n\n\n@binary_ufunc(identity=0, reduce=reductions.sum, accumulate=reductions.cumsum, at=_add_at)\ndef add(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Add two arrays element-wise.\n\n JAX implementation of :obj:`numpy.add`. This is a universal function,\n and supports the additional APIs described at :class:`jax.numpy.ufunc`.\n This function provides the implementation of the ``+`` operator for\n JAX arrays.\n\n Args:\n x, y: arrays to add. Must be broadcastable to a common shape.\n\n Returns:\n Array containing the result of the element-wise addition.\n\n Examples:\n Calling ``add`` explicitly:\n\n >>> x = jnp.arange(4)\n >>> jnp.add(x, 10)\n Array([10, 11, 12, 13], dtype=int32)\n\n Calling ``add`` via the ``+`` operator:\n\n >>> x + 10\n Array([10, 11, 12, 13], dtype=int32)\n """"""\n x, y = promote_args(""add"", x, y)\n if x.dtype == bool:\n return lax.bitwise_or(x, y)\n out = lax.add(x, y)\n jnp_error._set_error_if_nan(out)\n return out\n\n\ndef _multiply_at(a: Array, indices: Any, b: ArrayLike) -> Array:\n """"""Implementation of jnp.multiply.at.""""""\n if a.dtype == bool:\n a = a.astype('int32')\n b = lax.convert_element_type(b, bool).astype('int32')\n return a.at[indices].mul(b).astype(bool)\n else:\n return a.at[indices].mul(b)\n\n\n@binary_ufunc(identity=1, reduce=reductions.prod, accumulate=reductions.cumprod, at=_multiply_at)\ndef multiply(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Multiply two arrays element-wise.\n\n JAX implementation of :obj:`numpy.multiply`. This is a universal function,\n and supports the additional APIs described at :class:`jax.numpy.ufunc`.\n This function provides the implementation of the ``*`` operator for\n JAX arrays.\n\n Args:\n x, y: arrays to multiply. Must be broadcastable to a common shape.\n\n Returns:\n Array containing the result of the element-wise multiplication.\n\n Examples:\n Calling ``multiply`` explicitly:\n\n >>> x = jnp.arange(4)\n >>> jnp.multiply(x, 10)\n Array([ 0, 10, 20, 30], dtype=int32)\n\n Calling ``multiply`` via the ``*`` operator:\n\n >>> x * 10\n Array([ 0, 10, 20, 30], dtype=int32)\n """"""\n x, y = promote_args(""multiply"", x, y)\n return lax.mul(x, y) if x.dtype != bool else lax.bitwise_and(x, y)\n\n\n@binary_ufunc(identity=-1, reduce=reductions._reduce_bitwise_and)\ndef bitwise_and(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Compute the bitwise AND operation elementwise.\n\n JAX implementation of :obj:`numpy.bitwise_and`. This is a universal function,\n and supports the additional APIs described at :class:`jax.numpy.ufunc`.\n This function provides the implementation of the ``&`` operator for\n JAX arrays.\n\n Args:\n x, y: integer or boolean arrays. 
Must be broadcastable to a common shape.\n\n Returns:\n Array containing the result of the element-wise bitwise AND.\n\n Examples:\n Calling ``bitwise_and`` explicitly:\n\n >>> x = jnp.arange(4)\n >>> jnp.bitwise_and(x, 1)\n Array([0, 1, 0, 1], dtype=int32)\n\n Calling ``bitwise_and`` via the ``&`` operator:\n\n >>> x & 1\n Array([0, 1, 0, 1], dtype=int32)\n """"""\n return lax.bitwise_and(*promote_args(""bitwise_and"", x, y))\n\n\n@binary_ufunc(identity=0, reduce=reductions._reduce_bitwise_or)\ndef bitwise_or(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Compute the bitwise OR operation elementwise.\n\n JAX implementation of :obj:`numpy.bitwise_or`. This is a universal function,\n and supports the additional APIs described at :class:`jax.numpy.ufunc`.\n This function provides the implementation of the ``|`` operator for\n JAX arrays.\n\n Args:\n x, y: integer or boolean arrays. Must be broadcastable to a common shape.\n\n Returns:\n Array containing the result of the element-wise bitwise OR.\n\n Examples:\n Calling ``bitwise_or`` explicitly:\n\n >>> x = jnp.arange(4)\n >>> jnp.bitwise_or(x, 1)\n Array([1, 1, 3, 3], dtype=int32)\n\n Calling ``bitwise_or`` via the ``|`` operator:\n\n >>> x | 1\n Array([1, 1, 3, 3], dtype=int32)\n """"""\n return lax.bitwise_or(*promote_args(""bitwise_or"", x, y))\n\n\n@binary_ufunc(identity=0, reduce=reductions._reduce_bitwise_xor)\ndef bitwise_xor(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Compute the bitwise XOR operation elementwise.\n\n JAX implementation of :obj:`numpy.bitwise_xor`. This is a universal function,\n and supports the additional APIs described at :class:`jax.numpy.ufunc`.\n This function provides the implementation of the ``^`` operator for\n JAX arrays.\n\n Args:\n x, y: integer or boolean arrays. Must be broadcastable to a common shape.\n\n Returns:\n Array containing the result of the element-wise bitwise XOR.\n\n Examples:\n Calling ``bitwise_xor`` explicitly:\n\n >>> x = jnp.arange(4)\n >>> jnp.bitwise_xor(x, 1)\n Array([1, 0, 3, 2], dtype=int32)\n\n Calling ``bitwise_xor`` via the ``^`` operator:\n\n >>> x ^ 1\n Array([1, 0, 3, 2], dtype=int32)\n """"""\n return lax.bitwise_xor(*promote_args(""bitwise_xor"", x, y))\n\n\n@export\n@partial(jit, inline=True)\ndef left_shift(x: ArrayLike, y: ArrayLike, /) -> Array:\n r""""""Shift bits of ``x`` to left by the amount specified in ``y``, element-wise.\n\n JAX implementation of :obj:`numpy.left_shift`.\n\n Args:\n x: Input array, must be integer-typed.\n y: The amount of bits to shift each element in ``x`` to the left, only accepts\n integer subtypes. ``x`` and ``y`` must either have same shape or be broadcast\n compatible.\n\n Returns:\n An array containing the left shifted elements of ``x`` by the amount specified\n in ``y``, with the same shape as the broadcasted shape of ``x`` and ``y``.\n\n Note:\n Left shifting ``x`` by ``y`` is equivalent to ``x * (2**y)`` within the\n bounds of the dtypes involved.\n\n See also:\n - :func:`jax.numpy.right_shift`: and :func:`jax.numpy.bitwise_right_shift`:\n Shifts the bits of ``x1`` to right by the amount specified in ``x2``,\n element-wise.\n - :func:`jax.numpy.bitwise_left_shift`: Alias of :func:`jax.left_shift`.\n\n Examples:\n >>> def print_binary(x):\n ... 
return [bin(int(val)) for val in x]\n\n >>> x1 = jnp.arange(5)\n >>> x1\n Array([0, 1, 2, 3, 4], dtype=int32)\n >>> print_binary(x1)\n ['0b0', '0b1', '0b10', '0b11', '0b100']\n >>> x2 = 1\n >>> result = jnp.left_shift(x1, x2)\n >>> result\n Array([0, 2, 4, 6, 8], dtype=int32)\n >>> print_binary(result)\n ['0b0', '0b10', '0b100', '0b110', '0b1000']\n\n >>> x3 = 4\n >>> print_binary([x3])\n ['0b100']\n >>> x4 = jnp.array([1, 2, 3, 4])\n >>> result1 = jnp.left_shift(x3, x4)\n >>> result1\n Array([ 8, 16, 32, 64], dtype=int32)\n >>> print_binary(result1)\n ['0b1000', '0b10000', '0b100000', '0b1000000']\n """"""\n return lax.shift_left(*promote_args_numeric(""left_shift"", x, y))\n\n\n@export\n@partial(jit, inline=True)\ndef bitwise_left_shift(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Alias of :func:`jax.numpy.left_shift`.""""""\n return lax.shift_left(*promote_args_numeric(""bitwise_left_shift"", x, y))\n\n\n@export\n@partial(jit, inline=True)\ndef equal(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Returns element-wise truth value of ``x == y``.\n\n JAX implementation of :obj:`numpy.equal`. This function provides the implementation\n of the ``==`` operator for JAX arrays.\n\n Args:\n x: input array or scalar.\n y: input array or scalar. ``x`` and ``y`` should either have same shape or be\n broadcast compatible.\n\n Returns:\n A boolean array containing ``True`` where the elements of ``x == y`` and\n ``False`` otherwise.\n\n See also:\n - :func:`jax.numpy.not_equal`: Returns element-wise truth value of ``x != y``.\n - :func:`jax.numpy.greater_equal`: Returns element-wise truth value of\n ``x >= y``.\n - :func:`jax.numpy.less_equal`: Returns element-wise truth value of ``x <= y``.\n - :func:`jax.numpy.greater`: Returns element-wise truth value of ``x > y``.\n - :func:`jax.numpy.less`: Returns element-wise truth value of ``x < y``.\n\n Examples:\n >>> jnp.equal(0., -0.)\n Array(True, dtype=bool, weak_type=True)\n >>> jnp.equal(1, 1.)\n Array(True, dtype=bool, weak_type=True)\n >>> jnp.equal(5, jnp.array(5))\n Array(True, dtype=bool, weak_type=True)\n >>> jnp.equal(2, -2)\n Array(False, dtype=bool, weak_type=True)\n >>> x = jnp.array([[1, 2, 3],\n ... [4, 5, 6],\n ... [7, 8, 9]])\n >>> y = jnp.array([1, 5, 9])\n >>> jnp.equal(x, y)\n Array([[ True, False, False],\n [False, True, False],\n [False, False, True]], dtype=bool)\n >>> x == y\n Array([[ True, False, False],\n [False, True, False],\n [False, False, True]], dtype=bool)\n """"""\n return lax.eq(*promote_args(""equal"", x, y))\n\n\n@export\n@partial(jit, inline=True)\ndef not_equal(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Returns element-wise truth value of ``x != y``.\n\n JAX implementation of :obj:`numpy.not_equal`. This function provides the\n implementation of the ``!=`` operator for JAX arrays.\n\n Args:\n x: input array or scalar.\n y: input array or scalar. 
``x`` and ``y`` should either have same shape or be\n broadcast compatible.\n\n Returns:\n A boolean array containing ``True`` where the elements of ``x != y`` and\n ``False`` otherwise.\n\n See also:\n - :func:`jax.numpy.equal`: Returns element-wise truth value of ``x == y``.\n - :func:`jax.numpy.greater_equal`: Returns element-wise truth value of\n ``x >= y``.\n - :func:`jax.numpy.less_equal`: Returns element-wise truth value of ``x <= y``.\n - :func:`jax.numpy.greater`: Returns element-wise truth value of ``x > y``.\n - :func:`jax.numpy.less`: Returns element-wise truth value of ``x < y``.\n\n Examples:\n >>> jnp.not_equal(0., -0.)\n Array(False, dtype=bool, weak_type=True)\n >>> jnp.not_equal(-2, 2)\n Array(True, dtype=bool, weak_type=True)\n >>> jnp.not_equal(1, 1.)\n Array(False, dtype=bool, weak_type=True)\n >>> jnp.not_equal(5, jnp.array(5))\n Array(False, dtype=bool, weak_type=True)\n >>> x = jnp.array([[1, 2, 3],\n ... [4, 5, 6],\n ... [7, 8, 9]])\n >>> y = jnp.array([1, 5, 9])\n >>> jnp.not_equal(x, y)\n Array([[False, True, True],\n [ True, False, True],\n [ True, True, False]], dtype=bool)\n >>> x != y\n Array([[False, True, True],\n [ True, False, True],\n [ True, True, False]], dtype=bool)\n """"""\n return lax.ne(*promote_args(""not_equal"", x, y))\n\n\ndef _subtract_at(a: Array, indices: Any, b: ArrayLike) -> Array:\n """"""Implementation of jnp.subtract.at.""""""\n return a.at[indices].subtract(b)\n\n\n@binary_ufunc(identity=None, at=_subtract_at)\ndef subtract(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Subtract two arrays element-wise.\n\n JAX implementation of :obj:`numpy.subtract`. This is a universal function,\n and supports the additional APIs described at :class:`jax.numpy.ufunc`.\n This function provides the implementation of the ``-`` operator for\n JAX arrays.\n\n Args:\n x, y: arrays to subtract. Must be broadcastable to a common shape.\n\n Returns:\n Array containing the result of the element-wise subtraction.\n\n Examples:\n Calling ``subtract`` explicitly:\n\n >>> x = jnp.arange(4)\n >>> jnp.subtract(x, 10)\n Array([-10, -9, -8, -7], dtype=int32)\n\n Calling ``subtract`` via the ``-`` operator:\n\n >>> x - 10\n Array([-10, -9, -8, -7], dtype=int32)\n """"""\n out = lax.sub(*promote_args(""subtract"", x, y))\n jnp_error._set_error_if_nan(out)\n return out\n\n\n@export\n@partial(jit, inline=True)\ndef arctan2(x1: ArrayLike, x2: ArrayLike, /) -> Array:\n r""""""Compute the arctangent of x1/x2, choosing the correct quadrant.\n\n JAX implementation of :func:`numpy.arctan2`\n\n Args:\n x1: numerator array.\n x2: denomniator array; should be broadcast-compatible with x1.\n\n Returns:\n The elementwise arctangent of x1 / x2, tracking the correct quadrant.\n\n See also:\n - :func:`jax.numpy.tan`: compute the tangent of an angle\n - :func:`jax.numpy.atan2`: the array API version of this function.\n\n Examples:\n Consider a sequence of angles in radians between 0 and :math:`2\pi`:\n\n >>> theta = jnp.linspace(-jnp.pi, jnp.pi, 9)\n >>> with jnp.printoptions(precision=2, suppress=True):\n ... print(theta)\n [-3.14 -2.36 -1.57 -0.79 0. 0.79 1.57 2.36 3.14]\n\n These angles can equivalently be represented by ``(x, y)`` coordinates\n on a unit circle:\n\n >>> x, y = jnp.cos(theta), jnp.sin(theta)\n\n To reconstruct the input angle, we might be tempted to use the identity\n :math:`\tan(\theta) = y / x`, and compute :math:`\theta = \tan^{-1}(y/x)`.\n Unfortunately, this does not recover the input angle:\n\n >>> with jnp.printoptions(precision=2, suppress=True):\n ... 
print(jnp.arctan(y / x))\n [-0. 0.79 1.57 -0.79 0. 0.79 1.57 -0.79 0. ]\n\n The problem is that :math:`y/x` contains some ambiguity: although\n :math:`(y, x) = (-1, -1)` and :math:`(y, x) = (1, 1)` represent different points in\n Cartesian space, in both cases :math:`y / x = 1`, and so the simple arctan\n approach loses information about which quadrant the angle lies in. :func:`arctan2`\n is built to address this:\n\n >>> with jnp.printoptions(precision=2, suppress=True):\n ... print(jnp.arctan2(y, x))\n [ 3.14 -2.36 -1.57 -0.79 0. 0.79 1.57 2.36 -3.14]\n\n The results match the input ``theta``, except at the endpoints where :math:`+\pi`\n and :math:`-\pi` represent indistinguishable points on the unit circle. By convention,\n :func:`arctan2` always returns values between :math:`-\pi` and :math:`+\pi` inclusive.\n """"""\n return lax.atan2(*promote_args_inexact(""arctan2"", x1, x2))\n\n\n@binary_ufunc(identity=None, reduce=reductions._reduce_min)\ndef minimum(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Return element-wise minimum of the input arrays.\n\n JAX implementation of :obj:`numpy.minimum`.\n\n Args:\n x: input array or scalar.\n y: input array or scalar. Both ``x`` and ``y`` should either have same shape\n or be broadcast compatible.\n\n Returns:\n An array containing the element-wise minimum of ``x`` and ``y``.\n\n Note:\n For each pair of elements, ``jnp.minimum`` returns:\n - smaller of the two if both elements are finite numbers.\n - ``nan`` if one element is ``nan``.\n\n See also:\n - :func:`jax.numpy.maximum`: Returns element-wise maximum of the input arrays.\n - :func:`jax.numpy.fmin`: Returns element-wise minimum of the input arrays,\n ignoring NaNs.\n - :func:`jax.numpy.amin`: Returns the minimum of array elements along a given\n axis.\n - :func:`jax.numpy.nanmin`: Returns the minimum of the array elements along\n a given axis, ignoring NaNs.\n\n Examples:\n Inputs with ``x.shape == y.shape``:\n\n >>> x = jnp.array([2, 3, 5, 1])\n >>> y = jnp.array([-3, 6, -4, 7])\n >>> jnp.minimum(x, y)\n Array([-3, 3, -4, 1], dtype=int32)\n\n Inputs having broadcast compatibility:\n\n >>> x1 = jnp.array([[1, 5, 2],\n ... [-3, 4, 7]])\n >>> y1 = jnp.array([-2, 3, 6])\n >>> jnp.minimum(x1, y1)\n Array([[-2, 3, 2],\n [-3, 3, 6]], dtype=int32)\n\n Inputs with ``nan``:\n\n >>> nan = jnp.nan\n >>> x2 = jnp.array([[2.5, nan, -2],\n ... [nan, 5, 6],\n ... [-4, 3, 7]])\n >>> y2 = jnp.array([1, nan, 5])\n >>> jnp.minimum(x2, y2)\n Array([[ 1., nan, -2.],\n [nan, nan, 5.],\n [-4., nan, 5.]], dtype=float32)\n """"""\n return lax.min(*promote_args(""minimum"", x, y))\n\n\n@binary_ufunc(identity=None, reduce=reductions._reduce_max)\ndef maximum(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Return element-wise maximum of the input arrays.\n\n JAX implementation of :obj:`numpy.maximum`.\n\n Args:\n x: input array or scalar.\n y: input array or scalar. 
Both ``x`` and ``y`` should either have same shape\n or be broadcast compatible.\n\n Returns:\n An array containing the element-wise maximum of ``x`` and ``y``.\n\n Note:\n For each pair of elements, ``jnp.maximum`` returns:\n - larger of the two if both elements are finite numbers.\n - ``nan`` if one element is ``nan``.\n\n See also:\n - :func:`jax.numpy.minimum`: Returns element-wise minimum of the input\n arrays.\n - :func:`jax.numpy.fmax`: Returns element-wise maximum of the input arrays,\n ignoring NaNs.\n - :func:`jax.numpy.amax`: Returns the maximum of array elements along a given\n axis.\n - :func:`jax.numpy.nanmax`: Returns the maximum of the array elements along\n a given axis, ignoring NaNs.\n\n Examples:\n Inputs with ``x.shape == y.shape``:\n\n >>> x = jnp.array([1, -5, 3, 2])\n >>> y = jnp.array([-2, 4, 7, -6])\n >>> jnp.maximum(x, y)\n Array([1, 4, 7, 2], dtype=int32)\n\n Inputs with broadcast compatibility:\n\n >>> x1 = jnp.array([[-2, 5, 7, 4],\n ... [1, -6, 3, 8]])\n >>> y1 = jnp.array([-5, 3, 6, 9])\n >>> jnp.maximum(x1, y1)\n Array([[-2, 5, 7, 9],\n [ 1, 3, 6, 9]], dtype=int32)\n\n Inputs having ``nan``:\n\n >>> nan = jnp.nan\n >>> x2 = jnp.array([nan, -3, 9])\n >>> y2 = jnp.array([[4, -2, nan],\n ... [-3, -5, 10]])\n >>> jnp.maximum(x2, y2)\n Array([[nan, -2., nan],\n [nan, -3., 10.]], dtype=float32)\n """"""\n return lax.max(*promote_args(""maximum"", x, y))\n\n\n@export\n@partial(jit, inline=True)\ndef float_power(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Calculate element-wise base ``x`` exponential of ``y``.\n\n JAX implementation of :obj:`numpy.float_power`.\n\n Args:\n x: scalar or array. Specifies the bases.\n y: scalar or array. Specifies the exponents. ``x`` and ``y`` should either\n have same shape or be broadcast compatible.\n\n Returns:\n An array containing the base ``x`` exponentials of ``y``, promoting to the\n inexact dtype.\n\n See also:\n - :func:`jax.numpy.exp`: Calculates element-wise exponential of the input.\n - :func:`jax.numpy.exp2`: Calculates base-2 exponential of each element of\n the input.\n\n Examples:\n Inputs with same shape:\n\n >>> x = jnp.array([3, 1, -5])\n >>> y = jnp.array([2, 4, -1])\n >>> jnp.float_power(x, y)\n Array([ 9. , 1. , -0.2], dtype=float32)\n\n Inputs with broadcast compatibility:\n\n >>> x1 = jnp.array([[2, -4, 1],\n ... [-1, 2, 3]])\n >>> y1 = jnp.array([-2, 1, 4])\n >>> jnp.float_power(x1, y1)\n Array([[ 0.25, -4. , 1. ],\n [ 1. , 2. , 81. ]], dtype=float32)\n\n ``jnp.float_power`` produces ``nan`` for negative values raised to a non-integer\n values.\n\n >>> jnp.float_power(-3, 1.7)\n Array(nan, dtype=float32, weak_type=True)\n """"""\n out = lax.pow(*promote_args_inexact(""float_power"", x, y))\n jnp_error._set_error_if_nan(out)\n return out\n\n\n@export\n@partial(jit, inline=True)\ndef nextafter(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Return element-wise next floating point value after ``x`` towards ``y``.\n\n JAX implementation of :obj:`numpy.nextafter`.\n\n Args:\n x: scalar or array. Specifies the value after which the next number is found.\n y: scalar or array. Specifies the direction towards which the next number is\n found. 
``x`` and ``y`` should either have same shape or be broadcast\n compatible.\n\n Returns:\n An array containing the next representable number of ``x`` in the direction\n of ``y``.\n\n Examples:\n >>> jnp.nextafter(2, 1) # doctest: +SKIP\n Array(1.9999999, dtype=float32, weak_type=True)\n >>> x = jnp.array([3, -2, 1])\n >>> y = jnp.array([2, -1, 2])\n >>> jnp.nextafter(x, y) # doctest: +SKIP\n Array([ 2.9999998, -1.9999999, 1.0000001], dtype=float32)\n """"""\n return lax.nextafter(*promote_args_inexact(""nextafter"", x, y))\n\n\n@export\n@partial(jit, inline=True)\ndef spacing(x: ArrayLike, /) -> Array:\n """"""Return the spacing between ``x`` and the next adjacent number.\n\n JAX implementation of :func:`numpy.spacing`.\n\n Args:\n x: real-valued array. Integer or boolean types will be cast to float.\n\n Returns:\n Array of same shape as ``x`` containing spacing between each entry of\n ``x`` and its closest adjacent value.\n\n See also:\n - :func:`jax.numpy.nextafter`: find the next representable value.\n\n Examples:\n >>> x = jnp.array([0.0, 0.25, 0.5, 0.75, 1.0], dtype='float32')\n >>> jnp.spacing(x)\n Array([1.4012985e-45, 2.9802322e-08, 5.9604645e-08, 5.9604645e-08,\n 1.1920929e-07], dtype=float32)\n\n For ``x = 1``, the spacing is equal to the ``eps`` value given by\n :class:`jax.numpy.finfo`:\n\n >>> x = jnp.float32(1)\n >>> jnp.spacing(x) == jnp.finfo(x.dtype).eps\n Array(True, dtype=bool)\n """"""\n arr, = promote_args_inexact(""spacing"", x)\n if dtypes.isdtype(arr.dtype, ""complex floating""):\n raise ValueError(""jnp.spacing is not defined for complex inputs."")\n inf = _lax_const(arr, np.inf)\n smallest_subnormal = dtypes.finfo(arr.dtype).smallest_subnormal\n\n # Numpy's behavior seems to depend on dtype\n if arr.dtype == 'float16':\n return lax.nextafter(arr, inf) - arr\n else:\n result = lax.nextafter(arr, copysign(inf, arr)) - arr\n return _where(result == 0, copysign(smallest_subnormal, arr), result)\n\n\n# Logical ops\n@binary_ufunc(identity=True, reduce=reductions._reduce_logical_and)\ndef logical_and(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Compute the logical AND operation elementwise.\n\n JAX implementation of :obj:`numpy.logical_and`. This is a universal function,\n and supports the additional APIs described at :class:`jax.numpy.ufunc`.\n\n Args:\n x, y: input arrays. Must be broadcastable to a common shape.\n\n Returns:\n Array containing the result of the element-wise logical AND.\n\n Examples:\n >>> x = jnp.arange(4)\n >>> jnp.logical_and(x, 1)\n Array([False, True, True, True], dtype=bool)\n """"""\n return lax.bitwise_and(*map(_to_bool, promote_args(""logical_and"", x, y)))\n\n\n@binary_ufunc(identity=False, reduce=reductions._reduce_logical_or)\ndef logical_or(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Compute the logical OR operation elementwise.\n\n JAX implementation of :obj:`numpy.logical_or`. This is a universal function,\n and supports the additional APIs described at :class:`jax.numpy.ufunc`.\n\n Args:\n x, y: input arrays. 
Must be broadcastable to a common shape.\n\n Returns:\n Array containing the result of the element-wise logical OR.\n\n Examples:\n >>> x = jnp.arange(4)\n >>> jnp.logical_or(x, 1)\n Array([ True, True, True, True], dtype=bool)\n """"""\n return lax.bitwise_or(*map(_to_bool, promote_args(""logical_or"", x, y)))\n\n\n@binary_ufunc(identity=False, reduce=reductions._reduce_logical_xor)\ndef logical_xor(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Compute the logical XOR operation elementwise.\n\n JAX implementation of :obj:`numpy.logical_xor`. This is a universal function,\n and supports the additional APIs described at :class:`jax.numpy.ufunc`.\n\n Args:\n x, y: input arrays. Must be broadcastable to a common shape.\n\n Returns:\n Array containing the result of the element-wise logical XOR.\n\n Examples:\n >>> x = jnp.arange(4)\n >>> jnp.logical_xor(x, 1)\n Array([ True, False, False, False], dtype=bool)\n """"""\n return lax.bitwise_xor(*map(_to_bool, promote_args(""logical_xor"", x, y)))\n\n\n@export\n@partial(jit, inline=True)\ndef logical_not(x: ArrayLike, /) -> Array:\n """"""Compute NOT bool(x) element-wise.\n\n JAX implementation of :func:`numpy.logical_not`.\n\n Args:\n x: input array of any dtype.\n\n Returns:\n A boolean array that computes NOT bool(x) element-wise\n\n See also:\n - :func:`jax.numpy.invert` or :func:`jax.numpy.bitwise_invert`: bitwise NOT operation\n\n Examples:\n Compute NOT x element-wise on a boolean array:\n\n >>> x = jnp.array([True, False, True])\n >>> jnp.logical_not(x)\n Array([False, True, False], dtype=bool)\n\n For boolean input, this is equivalent to :func:`~jax.numpy.invert`, which implements\n the unary ``~`` operator:\n\n >>> ~x\n Array([False, True, False], dtype=bool)\n\n For non-boolean input, the input of :func:`logical_not` is implicitly cast to boolean:\n\n >>> x = jnp.array([-1, 0, 1])\n >>> jnp.logical_not(x)\n Array([False, True, False], dtype=bool)\n """"""\n return lax.bitwise_not(*map(_to_bool, promote_args(""logical_not"", x)))\n\n# Comparison ops\ndef _complex_comparison(lax_op: Callable[[ArrayLike, ArrayLike], Array],\n x: Array, y: Array):\n if dtypes.issubdtype(x.dtype, np.complexfloating):\n return lax.select(lax.eq(x.real, y.real),\n lax_op(x.imag, y.imag),\n lax_op(x.real, y.real))\n return lax_op(x, y)\n\n\n@export\n@partial(jit, inline=True)\ndef greater_equal(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Return element-wise truth value of ``x >= y``.\n\n JAX implementation of :obj:`numpy.greater_equal`.\n\n Args:\n x: input array or scalar.\n y: input array or scalar. ``x`` and ``y`` must either have same shape or be\n broadcast compatible.\n\n Returns:\n An array containing boolean values. ``True`` if the elements of ``x >= y``,\n and ``False`` otherwise.\n\n See also:\n - :func:`jax.numpy.less_equal`: Returns element-wise truth value of ``x <= y``.\n - :func:`jax.numpy.greater`: Returns element-wise truth value of ``x > y``.\n - :func:`jax.numpy.less`: Returns element-wise truth value of ``x < y``.\n\n Examples:\n Scalar inputs:\n\n >>> jnp.greater_equal(4, 7)\n Array(False, dtype=bool, weak_type=True)\n\n Inputs with same shape:\n\n >>> x = jnp.array([2, 5, -1])\n >>> y = jnp.array([-6, 4, 3])\n >>> jnp.greater_equal(x, y)\n Array([ True, True, False], dtype=bool)\n\n Inputs with broadcast compatibility:\n\n >>> x1 = jnp.array([[3, -1, 4],\n ... 
[5, 9, -6]])\n >>> y1 = jnp.array([-1, 4, 2])\n >>> jnp.greater_equal(x1, y1)\n Array([[ True, False, True],\n [ True, True, False]], dtype=bool)\n """"""\n return _complex_comparison(lax.ge, *promote_args(""greater_equal"", x, y))\n\n\n@export\n@partial(jit, inline=True)\ndef greater(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Return element-wise truth value of ``x > y``.\n\n JAX implementation of :obj:`numpy.greater`.\n\n Args:\n x: input array or scalar.\n y: input array or scalar. ``x`` and ``y`` must either have same shape or be\n broadcast compatible.\n\n Returns:\n An array containing boolean values. ``True`` if the elements of ``x > y``,\n and ``False`` otherwise.\n\n See also:\n - :func:`jax.numpy.less`: Returns element-wise truth value of ``x < y``.\n - :func:`jax.numpy.greater_equal`: Returns element-wise truth value of\n ``x >= y``.\n - :func:`jax.numpy.less_equal`: Returns element-wise truth value of ``x <= y``.\n\n Examples:\n Scalar inputs:\n\n >>> jnp.greater(5, 2)\n Array(True, dtype=bool, weak_type=True)\n\n Inputs with same shape:\n\n >>> x = jnp.array([5, 9, -2])\n >>> y = jnp.array([4, -1, 6])\n >>> jnp.greater(x, y)\n Array([ True, True, False], dtype=bool)\n\n Inputs with broadcast compatibility:\n\n >>> x1 = jnp.array([[5, -6, 7],\n ... [-2, 5, 9]])\n >>> y1 = jnp.array([-4, 3, 10])\n >>> jnp.greater(x1, y1)\n Array([[ True, False, False],\n [ True, True, False]], dtype=bool)\n """"""\n return _complex_comparison(lax.gt, *promote_args(""greater"", x, y))\n\n\n@export\n@partial(jit, inline=True)\ndef less_equal(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Return element-wise truth value of ``x <= y``.\n\n JAX implementation of :obj:`numpy.less_equal`.\n\n Args:\n x: input array or scalar.\n y: input array or scalar. ``x`` and ``y`` must have either same shape or be\n broadcast compatible.\n\n Returns:\n An array containing the boolean values. ``True`` if the elements of ``x <= y``,\n and ``False`` otherwise.\n\n See also:\n - :func:`jax.numpy.greater_equal`: Returns element-wise truth value of\n ``x >= y``.\n - :func:`jax.numpy.greater`: Returns element-wise truth value of ``x > y``.\n - :func:`jax.numpy.less`: Returns element-wise truth value of ``x < y``.\n\n Examples:\n Scalar inputs:\n\n >>> jnp.less_equal(6, -2)\n Array(False, dtype=bool, weak_type=True)\n\n Inputs with same shape:\n\n >>> x = jnp.array([-4, 1, 7])\n >>> y = jnp.array([2, -3, 8])\n >>> jnp.less_equal(x, y)\n Array([ True, False, True], dtype=bool)\n\n Inputs with broadcast compatibility:\n\n >>> x1 = jnp.array([2, -5, 9])\n >>> y1 = jnp.array([[1, -6, 5],\n ... [-2, 4, -6]])\n >>> jnp.less_equal(x1, y1)\n Array([[False, False, False],\n [False, True, False]], dtype=bool)\n """"""\n return _complex_comparison(lax.le, *promote_args(""less_equal"", x, y))\n\n\n@export\n@partial(jit, inline=True)\ndef less(x: ArrayLike, y: ArrayLike, /) -> Array:\n """"""Return element-wise truth value of ``x < y``.\n\n JAX implementation of :obj:`numpy.less`.\n\n Args:\n x: input array or scalar.\n y: input array or scalar. ``x`` and ``y`` must either have same shape or be\n broadcast compatible.\n\n Returns:\n An array containing boolean values. 
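# --- Editor's illustrative sketch (not part of the recorded ufuncs.py source) ---
# _complex_comparison above orders complex values lexicographically: real
# parts are compared first and imaginary parts only break ties. A small
# demonstration through jnp.less / jnp.greater:
import jax.numpy as jnp

a = jnp.array([1 + 2j, 1 + 2j, 3 + 0j])
b = jnp.array([1 + 5j, 2 - 9j, 3 - 1j])
print(jnp.less(a, b))     # [ True  True False] -- equal real parts fall back to imag
print(jnp.greater(a, b))  # [False False  True]
# --- end sketch ---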
``True`` if the elements of ``x < y``,\n and ``False`` otherwise.\n\n See also:\n - :func:`jax.numpy.greater`: Returns element-wise truth value of ``x > y``.\n - :func:`jax.numpy.greater_equal`: Returns element-wise truth value of\n ``x >= y``.\n - :func:`jax.numpy.less_equal`: Returns element-wise truth value of ``x <= y``.\n\n Examples:\n Scalar inputs:\n\n >>> jnp.less(3, 7)\n Array(True, dtype=bool, weak_type=True)\n\n Inputs with same shape:\n\n >>> x = jnp.array([5, 9, -3])\n >>> y = jnp.array([1, 6, 4])\n >>> jnp.less(x, y)\n Array([False, False, True], dtype=bool)\n\n Inputs with broadcast compatibility:\n\n >>> x1 = jnp.array([[2, -4, 6, -8],\n ... [-1, 5, -3, 7]])\n >>> y1 = jnp.array([0, 3, -5, 9])\n >>> jnp.less(x1, y1)\n Array([[False, True, False, True],\n [ True, False, False, True]], dtype=bool)\n """"""\n return _complex_comparison(lax.lt, *promote_args(""less"", x, y))\n\n\n# Array API aliases\n@export\n@partial(jit, inline=True)\ndef acos(x: ArrayLike, /) -> Array:\n """"""Alias of :func:`jax.numpy.arccos`""""""\n return arccos(*promote_args('acos', x))\n\n\n@export\n@partial(jit, inline=True)\ndef acosh(x: ArrayLike, /) -> Array:\n """"""Alias of :func:`jax.numpy.arccosh`""""""\n return arccosh(*promote_args('acosh', x))\n\n\n@export\n@partial(jit, inline=True)\ndef asin(x: ArrayLike, /) -> Array:\n """"""Alias of :func:`jax.numpy.arcsin`""""""\n return arcsin(*promote_args('asin', x))\n\n\n@export\n@partial(jit, inline=True)\ndef asinh(x: ArrayLike, /) -> Array:\n """"""Alias of :func:`jax.numpy.arcsinh`""""""\n return arcsinh(*promote_args('asinh', x))\n\n\n@export\n@partial(jit, inline=True)\ndef atan(x: ArrayLike, /) -> Array:\n """"""Alias of :func:`jax.numpy.arctan`""""""\n return arctan(*promote_args('atan', x))\n\n\n@export\n@partial(jit, inline=True)\ndef atanh(x: ArrayLike, /) -> Array:\n """"""Alias of :func:`jax.numpy.arctanh`""""""\n return arctanh(*promote_args('atanh', x))\n\n\n@export\n@partial(jit, inline=True)\ndef atan2(x1: ArrayLike, x2: ArrayLike, /) -> Array:\n """"""Alias of :func:`jax.numpy.arctan2`""""""\n return arctan2(*promote_args('atan2', x1, x2))\n\n\n@export\n@jit\ndef bitwise_count(x: ArrayLike, /) -> Array:\n r""""""Counts the number of 1 bits in the binary representation of the absolute value\n of each element of ``x``.\n\n JAX implementation of :obj:`numpy.bitwise_count`.\n\n Args:\n x: Input array, only accepts integer subtypes\n\n Returns:\n An array-like object containing the binary 1 bit counts of the absolute value of\n each element in ``x``, with the same shape as ``x`` of dtype uint8.\n\n Examples:\n >>> x1 = jnp.array([64, 32, 31, 20])\n >>> # 64 = 0b1000000, 32 = 0b100000, 31 = 0b11111, 20 = 0b10100\n >>> jnp.bitwise_count(x1)\n Array([1, 1, 5, 2], dtype=uint8)\n\n >>> x2 = jnp.array([-16, -7, 7])\n >>> # |-16| = 0b10000, |-7| = 0b111, 7 = 0b111\n >>> jnp.bitwise_count(x2)\n Array([1, 3, 3], dtype=uint8)\n\n >>> x3 = jnp.array([[2, -7],[-9, 7]])\n >>> # 2 = 0b10, |-7| = 0b111, |-9| = 0b1001, 7 = 0b111\n >>> jnp.bitwise_count(x3)\n Array([[1, 3],\n [2, 3]], dtype=uint8)\n """"""\n x, = promote_args_numeric(""bitwise_count"", x)\n # Following numpy we take the absolute value and return uint8.\n return lax.population_count(abs(x)).astype('uint8')\n\n\n@export\n@partial(jit, inline=True)\ndef right_shift(x1: ArrayLike, x2: ArrayLike, /) -> Array:\n r""""""Right shift the bits of ``x1`` to the amount specified in ``x2``.\n\n JAX implementation of :obj:`numpy.right_shift`.\n\n Args:\n x1: Input array, only accepts unsigned integer 
subtypes\n x2: The amount of bits to shift each element in ``x1`` to the right, only accepts\n integer subtypes\n\n Returns:\n An array-like object containing the right shifted elements of ``x1`` by the\n amount specified in ``x2``, with the same shape as the broadcasted shape of\n ``x1`` and ``x2``.\n\n Note:\n If ``x1.shape != x2.shape``, they must be compatible for broadcasting to a\n shared shape, this shared shape will also be the shape of the output. Right shifting\n a scalar x1 by scalar x2 is equivalent to ``x1 // 2**x2``.\n\n Examples:\n >>> def print_binary(x):\n ... return [bin(int(val)) for val in x]\n\n >>> x1 = jnp.array([1, 2, 4, 8])\n >>> print_binary(x1)\n ['0b1', '0b10', '0b100', '0b1000']\n >>> x2 = 1\n >>> result = jnp.right_shift(x1, x2)\n >>> result\n Array([0, 1, 2, 4], dtype=int32)\n >>> print_binary(result)\n ['0b0', '0b1', '0b10', '0b100']\n\n >>> x1 = 16\n >>> print_binary([x1])\n ['0b10000']\n >>> x2 = jnp.array([1, 2, 3, 4])\n >>> result = jnp.right_shift(x1, x2)\n >>> result\n Array([8, 4, 2, 1], dtype=int32)\n >>> print_binary(result)\n ['0b1000', '0b100', '0b10', '0b1']\n """"""\n x1, x2 = promote_args_numeric(np.right_shift.__name__, x1, x2)\n lax_fn = lax.shift_right_logical if \\n np.issubdtype(x1.dtype, np.unsignedinteger) else lax.shift_right_arithmetic\n return lax_fn(x1, x2)\n\n\n@export\n@partial(jit, inline=True)\ndef bitwise_right_shift(x1: ArrayLike, x2: ArrayLike, /) -> Array:\n """"""Alias of :func:`jax.numpy.right_shift`.""""""\n return right_shift(x1, x2)\n\n\n@export\n@partial(jit, inline=True)\ndef absolute(x: ArrayLike, /) -> Array:\n r""""""Calculate the absolute value element-wise.\n\n JAX implementation of :obj:`numpy.absolute`.\n\n This is the same function as :func:`jax.numpy.abs`.\n\n Args:\n x: Input array\n\n Returns:\n An array-like object containing the absolute value of each element in ``x``,\n with the same shape as ``x``. For complex valued input, :math:`a + ib`,\n the absolute value is :math:`\sqrt{a^2+b^2}`.\n\n Examples:\n >>> x1 = jnp.array([5, -2, 0, 12])\n >>> jnp.absolute(x1)\n Array([ 5, 2, 0, 12], dtype=int32)\n\n >>> x2 = jnp.array([[ 8, -3, 1],[ 0, 9, -6]])\n >>> jnp.absolute(x2)\n Array([[8, 3, 1],\n [0, 9, 6]], dtype=int32)\n\n >>> x3 = jnp.array([8 + 15j, 3 - 4j, -5 + 0j])\n >>> jnp.absolute(x3)\n Array([17., 5., 5.], dtype=float32)\n """"""\n x = ensure_arraylike('absolute', x)\n dt = dtypes.dtype(x)\n return lax.asarray(x) if dt == np.bool_ or dtypes.issubdtype(dt, np.unsignedinteger) else lax.abs(x)\n\n\n@export\n@partial(jit, inline=True)\ndef abs(x: ArrayLike, /) -> Array:\n """"""Alias of :func:`jax.numpy.absolute`.""""""\n return absolute(x)\n\n\n@export\n@jit\ndef rint(x: ArrayLike, /) -> Array:\n """"""Rounds the elements of x to the nearest integer\n\n JAX implementation of :obj:`numpy.rint`.\n\n Args:\n x: Input array\n\n Returns:\n An array-like object containing the rounded elements of ``x``. Always promotes\n to inexact.\n\n Note:\n If an element of x is exactly half way, e.g. 
``0.5`` or ``1.5``, rint will round\n to the nearest even integer.\n\n Examples:\n >>> x1 = jnp.array([5, 4, 7])\n >>> jnp.rint(x1)\n Array([5., 4., 7.], dtype=float32)\n\n >>> x2 = jnp.array([-2.5, -1.5, -0.5, 0.5, 1.5, 2.5, 3.5, 4.5])\n >>> jnp.rint(x2)\n Array([-2., -2., -0., 0., 2., 2., 4., 4.], dtype=float32)\n\n >>> x3 = jnp.array([-2.5+3.5j, 4.5-0.5j])\n >>> jnp.rint(x3)\n Array([-2.+4.j, 4.-0.j], dtype=complex64)\n """"""\n x = ensure_arraylike('rint', x)\n dtype = dtypes.dtype(x)\n if dtype == bool or dtypes.issubdtype(dtype, np.integer):\n return lax.convert_element_type(x, dtypes.float_)\n if dtypes.issubdtype(dtype, np.complexfloating):\n return lax.complex(rint(lax.real(x)), rint(lax.imag(x)))\n return lax.round(x, lax.RoundingMethod.TO_NEAREST_EVEN)\n\n\n@export\n@jit\ndef copysign(x1: ArrayLike, x2: ArrayLike, /) -> Array:\n """"""Copies the sign of each element in ``x2`` to the corresponding element in ``x1``.\n\n JAX implementation of :obj:`numpy.copysign`.\n\n Args:\n x1: Input array\n x2: The array whose elements will be used to determine the sign, must be\n broadcast-compatible with ``x1``\n\n Returns:\n An array object containing the potentially changed elements of ``x1``, always promotes\n to inexact dtype, and has a shape of ``jnp.broadcast_shapes(x1.shape, x2.shape)``\n\n Examples:\n >>> x1 = jnp.array([5, 2, 0])\n >>> x2 = -1\n >>> jnp.copysign(x1, x2)\n Array([-5., -2., -0.], dtype=float32)\n\n >>> x1 = jnp.array([6, 8, 0])\n >>> x2 = 2\n >>> jnp.copysign(x1, x2)\n Array([6., 8., 0.], dtype=float32)\n\n >>> x1 = jnp.array([2, -3])\n >>> x2 = jnp.array([[1],[-4], [5]])\n >>> jnp.copysign(x1, x2)\n Array([[ 2., 3.],\n [-2., -3.],\n [ 2., 3.]], dtype=float32)\n """"""\n x1, x2 = promote_args_inexact(""copysign"", x1, x2)\n if dtypes.issubdtype(dtypes.dtype(x1), np.complexfloating):\n raise TypeError(""copysign does not support complex-valued inputs"")\n return _where(signbit(x2).astype(bool), -lax.abs(x1), lax.abs(x1))\n\n\n@export\n@partial(jit, inline=True)\ndef true_divide(x1: ArrayLike, x2: ArrayLike, /) -> Array:\n """"""Calculates the division of x1 by x2 element-wise\n\n JAX implementation of :func:`numpy.true_divide`.\n\n Args:\n x1: Input array, the dividend\n x2: Input array, the divisor\n\n Returns:\n An array containing the elementwise quotients, will always use\n floating point division.\n\n Examples:\n >>> x1 = jnp.array([3, 4, 5])\n >>> x2 = 2\n >>> jnp.true_divide(x1, x2)\n Array([1.5, 2. 
, 2.5], dtype=float32)\n\n >>> x1 = 24\n >>> x2 = jnp.array([3, 4, 6j])\n >>> jnp.true_divide(x1, x2)\n Array([8.+0.j, 6.+0.j, 0.-4.j], dtype=complex64)\n\n >>> x1 = jnp.array([1j, 9+5j, -4+2j])\n >>> x2 = 3j\n >>> jnp.true_divide(x1, x2)\n Array([0.33333334+0.j , 1.6666666 -3.j ,\n 0.6666667 +1.3333334j], dtype=complex64)\n\n See Also:\n :func:`jax.numpy.floor_divide` for integer division\n """"""\n x1, x2 = promote_args_inexact(""true_divide"", x1, x2)\n jnp_error._set_error_if_divide_by_zero(x2)\n out = lax.div(x1, x2)\n jnp_error._set_error_if_nan(out)\n return out\n\n\n@export\ndef divide(x1: ArrayLike, x2: ArrayLike, /) -> Array:\n """"""Alias of :func:`jax.numpy.true_divide`.""""""\n return true_divide(x1, x2)\n\n\n@export\n@jit\ndef floor_divide(x1: ArrayLike, x2: ArrayLike, /) -> Array:\n """"""Calculates the floor division of x1 by x2 element-wise\n\n JAX implementation of :obj:`numpy.floor_divide`.\n\n Args:\n x1: Input array, the dividend\n x2: Input array, the divisor\n\n Returns:\n An array-like object containing each of the quotients rounded down\n to the nearest integer towards negative infinity. This is equivalent\n to ``x1 // x2`` in Python.\n\n Note:\n ``x1 // x2`` is equivalent to ``jnp.floor_divide(x1, x2)`` for arrays ``x1``\n and ``x2``\n\n See Also:\n :func:`jax.numpy.divide` and :func:`jax.numpy.true_divide` for floating point\n division.\n\n Examples:\n >>> x1 = jnp.array([10, 20, 30])\n >>> x2 = jnp.array([3, 4, 7])\n >>> jnp.floor_divide(x1, x2)\n Array([3, 5, 4], dtype=int32)\n\n >>> x1 = jnp.array([-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5])\n >>> x2 = 3\n >>> jnp.floor_divide(x1, x2)\n Array([-2, -2, -1, -1, -1, 0, 0, 0, 1, 1, 1], dtype=int32)\n\n >>> x1 = jnp.array([6, 6, 6], dtype=jnp.int32)\n >>> x2 = jnp.array([2.0, 2.5, 3.0], dtype=jnp.float32)\n >>> jnp.floor_divide(x1, x2)\n Array([3., 2., 2.], dtype=float32)\n """"""\n x1, x2 = promote_args_numeric(""floor_divide"", x1, x2)\n jnp_error._set_error_if_divide_by_zero(x2)\n dtype = dtypes.dtype(x1)\n if dtypes.issubdtype(dtype, np.unsignedinteger):\n return lax.div(x1, x2)\n elif dtypes.issubdtype(dtype, np.integer):\n quotient = lax.div(x1, x2)\n select = logical_and(lax.sign(x1) != lax.sign(x2), lax.rem(x1, x2) != 0)\n # TODO(mattjj): investigate why subtracting a scalar was causing promotion\n return _where(select, quotient - 1, quotient)\n elif dtypes.issubdtype(dtype, np.complexfloating):\n raise TypeError(""floor_divide does not support complex-valued inputs"")\n else:\n return _float_divmod(x1, x2)[0]\n\n\n@export\n@jit\ndef divmod(x1: ArrayLike, x2: ArrayLike, /) -> tuple[Array, Array]:\n """"""Calculates the integer quotient and remainder of x1 by x2 element-wise\n\n JAX implementation of :obj:`numpy.divmod`.\n\n Args:\n x1: Input array, the dividend\n x2: Input array, the divisor\n\n Returns:\n A tuple of arrays ``(x1 // x2, x1 % x2)``.\n\n See Also:\n - :func:`jax.numpy.floor_divide`: floor division function\n - :func:`jax.numpy.remainder`: remainder function\n\n Examples:\n >>> x1 = jnp.array([10, 20, 30])\n >>> x2 = jnp.array([3, 4, 7])\n >>> jnp.divmod(x1, x2)\n (Array([3, 5, 4], dtype=int32), Array([1, 0, 2], dtype=int32))\n\n >>> x1 = jnp.array([-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5])\n >>> x2 = 3\n >>> jnp.divmod(x1, x2)\n (Array([-2, -2, -1, -1, -1, 0, 0, 0, 1, 1, 1], dtype=int32),\n Array([1, 2, 0, 1, 2, 0, 1, 2, 0, 1, 2], dtype=int32))\n\n >>> x1 = jnp.array([6, 6, 6], dtype=jnp.int32)\n >>> x2 = jnp.array([1.9, 2.5, 3.1], dtype=jnp.float32)\n >>> jnp.divmod(x1, x2)\n (Array([3., 2., 1.], 
dtype=float32),\n Array([0.30000007, 1. , 2.9 ], dtype=float32))\n """"""\n x1, x2 = promote_args_numeric(""divmod"", x1, x2)\n if dtypes.issubdtype(dtypes.dtype(x1), np.integer):\n return floor_divide(x1, x2), remainder(x1, x2)\n else:\n jnp_error._set_error_if_divide_by_zero(x2)\n return _float_divmod(x1, x2)\n\n\ndef _float_divmod(x1: ArrayLike, x2: ArrayLike) -> tuple[Array, Array]:\n # see float_divmod in floatobject.c of CPython\n mod = lax.rem(x1, x2)\n div = lax.div(lax.sub(x1, mod), x2)\n\n ind = lax.bitwise_and(mod != 0, lax.sign(x2) != lax.sign(mod))\n mod = lax.select(ind, mod + x2, mod)\n div = lax.select(ind, div - _constant_like(div, 1), div)\n\n return lax.round(div), mod\n\n\n@export\ndef power(x1: ArrayLike, x2: ArrayLike, /) -> Array:\n """"""Calculate element-wise base ``x1`` exponential of ``x2``.\n\n JAX implementation of :obj:`numpy.power`.\n\n Args:\n x1: scalar or array. Specifies the bases.\n x2: scalar or array. Specifies the exponent. ``x1`` and ``x2`` should either\n have same shape or be broadcast compatible.\n\n Returns:\n An array containing the base ``x1`` exponentials of ``x2`` with same dtype\n as input.\n\n Note:\n - When ``x2`` is a concrete integer scalar, ``jnp.power`` lowers to\n :func:`jax.lax.integer_pow`.\n - When ``x2`` is a traced scalar or an array, ``jnp.power`` lowers to\n :func:`jax.lax.pow`.\n - ``jnp.power`` raises a ``TypeError`` for integer type raised to a concrete\n negative integer power. For a non-concrete power, the operation is invalid\n and the returned value is implementation-defined.\n - ``jnp.power`` returns ``nan`` for negative value raised to the power of\n non-integer values.\n\n See also:\n - :func:`jax.lax.pow`: Computes element-wise power, :math:`x^y`.\n - :func:`jax.lax.integer_pow`: Computes element-wise power :math:`x^y`, where\n :math:`y` is a fixed integer.\n - :func:`jax.numpy.float_power`: Computes the first array raised to the power\n of second array, element-wise, by promoting to the inexact dtype.\n - :func:`jax.numpy.pow`: Computes the first array raised to the power of second\n array, element-wise.\n\n Examples:\n Inputs with scalar integers:\n\n >>> jnp.power(4, 3)\n Array(64, dtype=int32, weak_type=True)\n\n Inputs with same shape:\n\n >>> x1 = jnp.array([2, 4, 5])\n >>> x2 = jnp.array([3, 0.5, 2])\n >>> jnp.power(x1, x2)\n Array([ 8., 2., 25.], dtype=float32)\n\n Inputs with broadcast compatibility:\n\n >>> x3 = jnp.array([-2, 3, 1])\n >>> x4 = jnp.array([[4, 1, 6],\n ... [1.3, 3, 5]])\n >>> jnp.power(x3, x4)\n Array([[16., 3., 1.],\n [nan, 27., 1.]], dtype=float32)\n """"""\n check_arraylike(""power"", x1, x2)\n\n # Must do __jax_array__ conversion prior to dtype check.\n x1 = x1.__jax_array__() if hasattr(x1, ""__jax_array__"") else x1\n x2 = x2.__jax_array__() if hasattr(x2, ""__jax_array__"") else x2\n\n check_no_float0s(""power"", x1, x2)\n\n # We apply special cases, both for algorithmic and autodiff reasons:\n # 1. for *concrete* integer scalar powers (and arbitrary bases), we use\n # unrolled binary exponentiation specialized on the exponent, which is\n # more precise for e.g. x ** 2 when x is a float (algorithmic reason!);\n # 2. for integer bases and integer powers, use unrolled binary exponentiation\n # where the number of steps is determined by a max bit width of 64\n # (algorithmic reason!);\n # 3. 
for integer powers and float/complex bases, we apply the lax primitive\n # without any promotion of input types because in this case we want the\n # function to be differentiable wrt its first argument at 0;\n # 3. for other cases, perform jnp dtype promotion on the arguments then apply\n # lax.pow.\n\n # Case 1: concrete integer scalar powers:\n if core.is_concrete(x2):\n try:\n x2 = operator.index(x2) # type: ignore[arg-type]\n except TypeError:\n pass\n else:\n x1, = promote_dtypes_numeric(x1)\n return lax.integer_pow(x1, x2)\n\n # Handle cases #2 and #3 under a jit:\n out = _power(x1, x2)\n jnp_error._set_error_if_nan(out)\n return out\n\n@export\ndef pow(x1: ArrayLike, x2: ArrayLike, /) -> Array:\n """"""Alias of :func:`jax.numpy.power`""""""\n return power(x1, x2)\n\n@partial(jit, inline=True)\ndef _power(x1: ArrayLike, x2: ArrayLike) -> Array:\n x1, x2 = promote_shapes(""power"", x1, x2) # not dtypes\n\n # Case 2: bool/integer result\n x1_, x2_ = promote_args_numeric(""power"", x1, x2)\n if (dtypes.issubdtype(dtypes.dtype(x1_), np.integer) or\n dtypes.issubdtype(dtypes.dtype(x1_), np.bool_)):\n assert np.iinfo(dtypes.dtype(x1_)).bits <= 64 # _pow_int_int assumes <=64bit\n return _pow_int_int(x1_, x2_)\n\n # Case 3: float/complex base with integer power (special autodiff behavior)\n d1, d2 = dtypes.dtype(x1), dtypes.dtype(x2)\n if dtypes.issubdtype(d1, np.inexact) and dtypes.issubdtype(d2, np.integer):\n return lax.pow(x1, x2)\n\n\n # Case 4: do promotion first\n return lax.pow(x1_, x2_)\n\n# TODO(phawkins): add integer pow support to XLA.\ndef _pow_int_int(x1, x2):\n # Integer power => use binary exponentiation.\n bits = 6 # Anything more would overflow for any x1 > 1\n zero = _constant_like(x2, 0)\n one = _constant_like(x2, 1)\n # Initialize acc carefully such that pow(0, x2) is zero for x2 != 0\n acc = _where(lax.bitwise_and(lax.eq(x1, zero), lax.ne(x2, zero)), zero, one)\n for _ in range(bits):\n acc = _where(lax.bitwise_and(x2, one), lax.mul(acc, x1), acc)\n x1 = lax.mul(x1, x1)\n x2 = lax.shift_right_logical(x2, one)\n return acc\n\n\n@binary_ufunc(identity=-np.inf, reduce=reductions._logsumexp)\ndef logaddexp(x1: ArrayLike, x2: ArrayLike, /) -> Array:\n """"""Compute ``log(exp(x1) + exp(x2))`` avoiding overflow.\n\n JAX implementation of :obj:`numpy.logaddexp`\n\n Args:\n x1: input array\n x2: input array\n\n Returns:\n array containing the result.\n\n Examples:\n\n >>> x1 = jnp.array([1, 2, 3])\n >>> x2 = jnp.array([4, 5, 6])\n >>> result1 = jnp.logaddexp(x1, x2)\n >>> result2 = jnp.log(jnp.exp(x1) + jnp.exp(x2))\n >>> print(jnp.allclose(result1, result2))\n True\n """"""\n x1, x2 = promote_args_inexact(""logaddexp"", x1, x2)\n return lax_other.logaddexp(x1, x2)\n\n\n@binary_ufunc(identity=-np.inf, reduce=reductions._logsumexp2)\ndef logaddexp2(x1: ArrayLike, x2: ArrayLike, /) -> Array:\n """"""Logarithm of the sum of exponentials of inputs in base-2 avoiding overflow.\n\n JAX implementation of :obj:`numpy.logaddexp2`.\n\n Args:\n x1: input array or scalar.\n x2: input array or scalar. ``x1`` and ``x2`` should either have same shape or\n be broadcast compatible.\n\n Returns:\n An array containing the result, :math:`log_2(2^{x1}+2^{x2})`, element-wise.\n\n See also:\n - :func:`jax.numpy.logaddexp`: Computes ``log(exp(x1) + exp(x2))``, element-wise.\n - :func:`jax.numpy.log2`: Calculates the base-2 logarithm of ``x`` element-wise.\n\n Examples:\n >>> x1 = jnp.array([[3, -1, 4],\n ... 
[8, 5, -2]])\n >>> x2 = jnp.array([2, 3, -5])\n >>> result1 = jnp.logaddexp2(x1, x2)\n >>> result2 = jnp.log2(jnp.exp2(x1) + jnp.exp2(x2))\n >>> jnp.allclose(result1, result2)\n Array(True, dtype=bool)\n """"""\n x1, x2 = promote_args_inexact(""logaddexp2"", x1, x2)\n return lax_other.logaddexp2(x1, x2)\n\n\n@export\n@partial(jit, inline=True)\ndef log2(x: ArrayLike, /) -> Array:\n """"""Calculates the base-2 logarithm of ``x`` element-wise.\n\n JAX implementation of :obj:`numpy.log2`.\n\n Args:\n x: Input array\n\n Returns:\n An array containing the base-2 logarithm of each element in ``x``, promotes\n to inexact dtype.\n\n Examples:\n >>> x1 = jnp.array([0.25, 0.5, 1, 2, 4, 8])\n >>> jnp.log2(x1)\n Array([-2., -1., 0., 1., 2., 3.], dtype=float32)\n """"""\n x, = promote_args_inexact(""log2"", x)\n if dtypes.issubdtype(x.dtype, np.complexfloating):\n r = lax.log(x)\n re = lax.real(r)\n im = lax.imag(r)\n ln2 = lax.log(_constant_like(re, 2))\n return lax.complex(lax.div(re, ln2), lax.div(im, ln2))\n out = lax.div(lax.log(x), lax.log(_constant_like(x, 2)))\n jnp_error._set_error_if_nan(out)\n return out\n\n\n@export\n@partial(jit, inline=True)\ndef log10(x: ArrayLike, /) -> Array:\n """"""Calculates the base-10 logarithm of x element-wise\n\n JAX implementation of :obj:`numpy.log10`.\n\n Args:\n x: Input array\n\n Returns:\n An array containing the base-10 logarithm of each element in ``x``, promotes\n to inexact dtype.\n\n Examples:\n >>> x1 = jnp.array([0.01, 0.1, 1, 10, 100, 1000])\n >>> with jnp.printoptions(precision=2, suppress=True):\n ... print(jnp.log10(x1))\n [-2. -1. 0. 1. 2. 3.]\n """"""\n x, = promote_args_inexact(""log10"", x)\n if dtypes.issubdtype(x.dtype, np.complexfloating):\n r = lax.log(x)\n re = lax.real(r)\n im = lax.imag(r)\n ln10 = lax.log(_constant_like(re, 10))\n return lax.complex(lax.div(re, ln10), lax.div(im, ln10))\n out = lax.div(lax.log(x), lax.log(_constant_like(x, 10)))\n jnp_error._set_error_if_nan(out)\n return out\n\n\n@export\n@partial(jit, inline=True)\ndef exp2(x: ArrayLike, /) -> Array:\n """"""Calculate element-wise base-2 exponential of input.\n\n JAX implementation of :obj:`numpy.exp2`.\n\n Args:\n x: input array or scalar\n\n Returns:\n An array containing the base-2 exponential of each element in ``x``, promotes\n to inexact dtype.\n\n See also:\n - :func:`jax.numpy.log2`: Calculates base-2 logarithm of each element of input.\n - :func:`jax.numpy.exp`: Calculates exponential of each element of the input.\n - :func:`jax.numpy.expm1`: Calculates :math:`e^x-1` of each element of the\n input.\n\n Examples:\n ``jnp.exp2`` follows the properties of the exponential such as :math:`2^{a+b}\n = 2^a * 2^b`.\n\n >>> x1 = jnp.array([2, -4, 3, -1])\n >>> x2 = jnp.array([-1, 3, -2, 3])\n >>> jnp.exp2(x1+x2)\n Array([2. , 0.5, 2. , 4. ], dtype=float32)\n >>> jnp.exp2(x1)*jnp.exp2(x2)\n Array([2. , 0.5, 2. , 4. ], dtype=float32)\n """"""\n x, = promote_args_inexact(""exp2"", x)\n return lax.exp2(x)\n\n\n@export\n@jit\ndef signbit(x: ArrayLike, /) -> Array:\n """"""Return the sign bit of array elements.\n\n JAX implementation of :obj:`numpy.signbit`.\n\n Args:\n x: input array. Complex values are not supported.\n\n Returns:\n A boolean array of the same shape as ``x``, containing ``True``\n where the sign of ``x`` is negative, and ``False`` otherwise.\n\n See also:\n - :func:`jax.numpy.sign`: return the mathematical sign of array elements,\n i.e. 
``-1``, ``0``, or ``+1``.\n\n Examples:\n :func:`signbit` on boolean values is always ``False``:\n\n >>> x = jnp.array([True, False])\n >>> jnp.signbit(x)\n Array([False, False], dtype=bool)\n\n :func:`signbit` on integer values is equivalent to ``x < 0``:\n\n >>> x = jnp.array([-2, -1, 0, 1, 2])\n >>> jnp.signbit(x)\n Array([ True, True, False, False, False], dtype=bool)\n\n :func:`signbit` on floating point values returns the value of the actual\n sign bit from the float representation, including signed zero:\n\n >>> x = jnp.array([-1.5, -0.0, 0.0, 1.5])\n >>> jnp.signbit(x)\n Array([ True, True, False, False], dtype=bool)\n\n This also returns the sign bit for special values such as signed NaN\n and signed infinity:\n\n >>> x = jnp.array([jnp.nan, -jnp.nan, jnp.inf, -jnp.inf])\n >>> jnp.signbit(x)\n Array([False, True, False, True], dtype=bool)\n """"""\n x, = promote_args(""signbit"", x)\n dtype = dtypes.dtype(x)\n if dtypes.issubdtype(dtype, np.integer):\n return lax.lt(x, _constant_like(x, 0))\n elif dtypes.issubdtype(dtype, np.bool_):\n return lax.full_like(x, False, dtype=np.bool_)\n elif not dtypes.issubdtype(dtype, np.floating):\n raise ValueError(\n ""jax.numpy.signbit is not well defined for %s"" % dtype)\n\n info = dtypes.finfo(dtype)\n if info.bits not in _INT_DTYPES:\n raise NotImplementedError(\n ""jax.numpy.signbit only supports 16, 32, and 64-bit types."")\n int_type = _INT_DTYPES[info.bits]\n x = lax.bitcast_convert_type(x, int_type)\n return lax.convert_element_type(x >> (info.nexp + info.nmant), np.bool_)\n\n\ndef _normalize_float(x):\n info = dtypes.finfo(dtypes.dtype(x))\n int_type = _INT_DTYPES[info.bits]\n cond = lax.abs(x) < info.tiny\n x1 = _where(cond, x * _lax_const(x, 1 << info.nmant), x)\n x2 = _where(cond, int_type(-info.nmant), int_type(0))\n return lax.bitcast_convert_type(x1, int_type), x2\n\n\n@export\n@jit\ndef ldexp(x1: ArrayLike, x2: ArrayLike, /) -> Array:\n """"""Compute x1 * 2 ** x2\n\n JAX implementation of :func:`numpy.ldexp`.\n\n Note that XLA does not provide an ``ldexp`` operation, so this\n is implemneted in JAX via a standard multiplication and\n exponentiation.\n\n Args:\n x1: real-valued input array.\n x2: integer input array. 
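# --- Editor's illustrative sketch (not part of the recorded ufuncs.py source) ---
# signbit() above reads the literal sign bit: the float is bitcast to an
# integer of the same width and shifted right by nexp + nmant positions
# (31 for float32), which is why -0.0 and negative NaN report True.
import jax.numpy as jnp
from jax import lax

vals = jnp.array([-1.5, -0.0, 0.0, jnp.nan, -jnp.nan], dtype=jnp.float32)
bits = lax.bitcast_convert_type(vals, jnp.int32)
manual = (bits >> 31).astype(bool)   # isolate bit 31, the IEEE sign bit
print(manual)                        # [ True  True False False  True]
print(jnp.signbit(vals))             # matches the manual bit extraction
# --- end sketch ---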
Must be broadcast-compatible with ``x1``.\n\n Returns:\n ``x1 * 2 ** x2`` computed element-wise.\n\n See also:\n - :func:`jax.numpy.frexp`: decompose values into mantissa and exponent.\n\n Examples:\n >>> x1 = jnp.arange(5.0)\n >>> x2 = 10\n >>> jnp.ldexp(x1, x2)\n Array([ 0., 1024., 2048., 3072., 4096.], dtype=float32)\n\n ``ldexp`` can be used to reconstruct the input to ``frexp``:\n\n >>> x = jnp.array([2., 3., 5., 11.])\n >>> m, e = jnp.frexp(x)\n >>> m\n Array([0.5 , 0.75 , 0.625 , 0.6875], dtype=float32)\n >>> e\n Array([2, 2, 3, 4], dtype=int32)\n >>> jnp.ldexp(m, e)\n Array([ 2., 3., 5., 11.], dtype=float32)\n """"""\n x1, x2 = ensure_arraylike(""ldexp"", x1, x2)\n x1_dtype = dtypes.dtype(x1)\n x2_dtype = dtypes.dtype(x2)\n if (dtypes.issubdtype(x1_dtype, np.complexfloating)\n or dtypes.issubdtype(x2_dtype, np.inexact)):\n raise ValueError(f""ldexp not supported for input types {(x1_dtype, x2_dtype)}"")\n x1, = promote_args_inexact(""ldexp"", x1)\n x2 = lax.convert_element_type(x2, dtypes.dtype(x1))\n\n # Split off the exponent to avoid overflow for small x1 and large x2.\n m, e = frexp(x1)\n e = (e.astype(x2.dtype) + x2).astype(x1.dtype)\n\n # exponent may overflow by 1 and still have a finite result.\n m = _where(e > 0, m * 2, m)\n e = _where(e > 0, e - 1, e)\n\n x = m * (2 ** e.astype(m.dtype))\n return _where(isinf(x1) | (x1 == 0), x1, x)\n\n\n@export\n@jit\ndef frexp(x: ArrayLike, /) -> tuple[Array, Array]:\n """"""Split floating point values into mantissa and twos exponent.\n\n JAX implementation of :func:`numpy.frexp`.\n\n Args:\n x: real-valued array\n\n Returns:\n A tuple ``(mantissa, exponent)`` where ``mantissa`` is a floating point\n value between -1 and 1, and ``exponent`` is an integer such that\n ``x == mantissa * 2 ** exponent``.\n\n See also:\n - :func:`jax.numpy.ldexp`: compute the inverse of ``frexp``.\n\n Examples:\n Split values into mantissa and exponent:\n\n >>> x = jnp.array([1., 2., 3., 4., 5.])\n >>> m, e = jnp.frexp(x)\n >>> m\n Array([0.5 , 0.5 , 0.75 , 0.5 , 0.625], dtype=float32)\n >>> e\n Array([1, 2, 2, 3, 3], dtype=int32)\n\n Reconstruct the original array:\n\n >>> m * 2 ** e\n Array([1., 2., 3., 4., 5.], dtype=float32)\n """"""\n x = ensure_arraylike(""frexp"", x)\n x, = promote_dtypes_inexact(x)\n if dtypes.issubdtype(x.dtype, np.complexfloating):\n raise TypeError(""frexp does not support complex-valued inputs"")\n return _frexp(x)\n\n@custom_jvp\ndef _frexp(x):\n dtype = dtypes.dtype(x)\n info = dtypes.finfo(dtype)\n mask = (1 << info.nexp) - 1\n bias = 1 - info.minexp\n\n x1, x2 = _normalize_float(x)\n x2 += ((x1 >> info.nmant) & mask) - bias + 1\n x1 &= ~(mask << info.nmant)\n x1 |= (bias - 1) << info.nmant\n x1 = lax.bitcast_convert_type(x1, dtype)\n\n cond = isinf(x) | isnan(x) | (x == 0)\n x2 = _where(cond, lax._zeros(x2), x2)\n return _where(cond, x, x1), lax.convert_element_type(x2, np.int32)\n\n\n@_frexp.defjvp\ndef _frexp_jvp(primals, tangents):\n x, = primals\n t, = tangents\n m, e = frexp(x)\n mdot = t * exp2(-e.astype(t.dtype))\n edot = np.empty(e.shape, dtypes.float0)\n return (m, e), (mdot, edot)\n\n\n@export\n@jit\ndef remainder(x1: ArrayLike, x2: ArrayLike, /) -> Array:\n """"""Returns element-wise remainder of the division.\n\n JAX implementation of :obj:`numpy.remainder`.\n\n Args:\n x1: scalar or array. Specifies the dividend.\n x2: scalar or array. Specifies the divisor. 
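# --- Editor's illustrative sketch (not part of the recorded ufuncs.py source) ---
# ldexp() above first splits x1 with frexp() and folds x2 into the exponent,
# so a tiny mantissa is never multiplied by an overflowing 2**x2 factor.
# Round-tripping frexp/ldexp and rescaling a tiny value both stay finite:
import jax.numpy as jnp

x = jnp.array([2.0, 3.0, 5.0, 11.0], dtype=jnp.float32)
m, e = jnp.frexp(x)
assert jnp.all(jnp.ldexp(m, e) == x)  # exact reconstruction of x

tiny = jnp.finfo(jnp.float32).tiny    # smallest normal float32, ~1.18e-38
print(jnp.ldexp(tiny, 200))           # finite (~1.9e22), even though a
                                      # float32 constant 2.0**200 is already inf
# --- end sketch ---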
``x1`` and ``x2`` should either\n have same shape or be broadcast compatible.\n\n Returns:\n An array containing the remainder of element-wise division of ``x1`` by\n ``x2`` with same sign as the elements of ``x2``.\n\n Note:\n The result of ``jnp.remainder`` is equivalent to ``x1 - x2 * jnp.floor(x1 / x2)``.\n\n See also:\n - :func:`jax.numpy.mod`: Returns the element-wise remainder of the division.\n - :func:`jax.numpy.fmod`: Calculates the element-wise floating-point modulo\n operation.\n - :func:`jax.numpy.divmod`: Calculates the integer quotient and remainder of\n ``x1`` by ``x2``, element-wise.\n\n Examples:\n >>> x1 = jnp.array([[3, -1, 4],\n ... [8, 5, -2]])\n >>> x2 = jnp.array([2, 3, -5])\n >>> jnp.remainder(x1, x2)\n Array([[ 1, 2, -1],\n [ 0, 2, -2]], dtype=int32)\n >>> x1 - x2 * jnp.floor(x1 / x2)\n Array([[ 1., 2., -1.],\n [ 0., 2., -2.]], dtype=float32)\n """"""\n x1, x2 = promote_args_numeric(""remainder"", x1, x2)\n jnp_error._set_error_if_divide_by_zero(x2)\n zero = _constant_like(x1, 0)\n if dtypes.issubdtype(x2.dtype, np.integer):\n x2 = _where(x2 == 0, lax._ones(x2), x2)\n trunc_mod = lax.rem(x1, x2)\n trunc_mod_not_zero = lax.ne(trunc_mod, zero)\n do_plus = lax.bitwise_and(\n lax.ne(lax.lt(trunc_mod, zero), lax.lt(x2, zero)), trunc_mod_not_zero)\n out = lax.select(do_plus, lax.add(trunc_mod, x2), trunc_mod)\n jnp_error._set_error_if_nan(out)\n return out\n\n\n@export\ndef mod(x1: ArrayLike, x2: ArrayLike, /) -> Array:\n """"""Alias of :func:`jax.numpy.remainder`""""""\n return remainder(x1, x2)\n\n\n@export\n@jit\ndef fmod(x1: ArrayLike, x2: ArrayLike, /) -> Array:\n """"""Calculate element-wise floating-point modulo operation.\n\n JAX implementation of :obj:`numpy.fmod`.\n\n Args:\n x1: scalar or array. Specifies the dividend.\n x2: scalar or array. Specifies the divisor. ``x1`` and ``x2`` should either\n have same shape or be broadcast compatible.\n\n Returns:\n An array containing the result of the element-wise floating-point modulo\n operation of ``x1`` and ``x2`` with same sign as the elements of ``x1``.\n\n Note:\n The result of ``jnp.fmod`` is equivalent to ``x1 - x2 * jnp.fix(x1 / x2)``.\n\n See also:\n - :func:`jax.numpy.mod` and :func:`jax.numpy.remainder`: Returns the element-wise\n remainder of the division.\n - :func:`jax.numpy.divmod`: Calculates the integer quotient and remainder of\n ``x1`` by ``x2``, element-wise.\n\n Examples:\n >>> x1 = jnp.array([[3, -1, 4],\n ... 
[8, 5, -2]])\n >>> x2 = jnp.array([2, 3, -5])\n >>> jnp.fmod(x1, x2)\n Array([[ 1, -1, 4],\n [ 0, 2, -2]], dtype=int32)\n >>> x1 - x2 * jnp.fix(x1 / x2)\n Array([[ 1., -1., 4.],\n [ 0., 2., -2.]], dtype=float32)\n """"""\n x1, x2 = ensure_arraylike(""fmod"", x1, x2)\n if dtypes.issubdtype(dtypes.result_type(x1, x2), np.integer):\n x2 = _where(x2 == 0, lax._ones(x2), x2)\n out = lax.rem(*promote_args_numeric(""fmod"", x1, x2))\n jnp_error._set_error_if_nan(out)\n return out\n\n\n@export\n@partial(jit, inline=True)\ndef square(x: ArrayLike, /) -> Array:\n """"""Calculate element-wise square of the input array.\n\n JAX implementation of :obj:`numpy.square`.\n\n Args:\n x: input array or scalar.\n\n Returns:\n An array containing the square of the elements of ``x``.\n\n Note:\n ``jnp.square`` is equivalent to computing ``jnp.power(x, 2)``.\n\n See also:\n - :func:`jax.numpy.sqrt`: Calculates the element-wise non-negative square root\n of the input array.\n - :func:`jax.numpy.power`: Calculates the element-wise base ``x1`` exponential\n of ``x2``.\n - :func:`jax.lax.integer_pow`: Computes element-wise power :math:`x^y`, where\n :math:`y` is a fixed integer.\n - :func:`jax.numpy.float_power`: Computes the first array raised to the power\n of second array, element-wise, by promoting to the inexact dtype.\n\n Examples:\n >>> x = jnp.array([3, -2, 5.3, 1])\n >>> jnp.square(x)\n Array([ 9. , 4. , 28.090002, 1. ], dtype=float32)\n >>> jnp.power(x, 2)\n Array([ 9. , 4. , 28.090002, 1. ], dtype=float32)\n\n For integer inputs:\n\n >>> x1 = jnp.array([2, 4, 5, 6])\n >>> jnp.square(x1)\n Array([ 4, 16, 25, 36], dtype=int32)\n\n For complex-valued inputs:\n\n >>> x2 = jnp.array([1-3j, -1j, 2])\n >>> jnp.square(x2)\n Array([-8.-6.j, -1.+0.j, 4.+0.j], dtype=complex64)\n """"""\n x = ensure_arraylike(""square"", x)\n x, = promote_dtypes_numeric(x)\n return lax.square(x)\n\n\n@export\n@partial(jit, inline=True)\ndef deg2rad(x: ArrayLike, /) -> Array:\n r""""""Convert angles from degrees to radians.\n\n JAX implementation of :obj:`numpy.deg2rad`.\n\n The angle in degrees is converted to radians by:\n\n .. math::\n\n deg2rad(x) = x * \frac{pi}{180}\n\n Args:\n x: scalar or array. Specifies the angle in degrees.\n\n Returns:\n An array containing the angles in radians.\n\n See also:\n - :func:`jax.numpy.rad2deg` and :func:`jax.numpy.degrees`: Converts the angles\n from radians to degrees.\n - :func:`jax.numpy.radians`: Alias of ``deg2rad``.\n\n Examples:\n >>> x = jnp.array([60, 90, 120, 180])\n >>> jnp.deg2rad(x)\n Array([1.0471976, 1.5707964, 2.0943952, 3.1415927], dtype=float32)\n >>> x * jnp.pi / 180\n Array([1.0471976, 1.5707964, 2.0943952, 3.1415927], dtype=float32, weak_type=True)\n """"""\n x, = promote_args_inexact(""deg2rad"", x)\n return lax.mul(x, _lax_const(x, np.pi / 180))\n\n\n@export\n@partial(jit, inline=True)\ndef rad2deg(x: ArrayLike, /) -> Array:\n r""""""Convert angles from radians to degrees.\n\n JAX implementation of :obj:`numpy.rad2deg`.\n\n The angle in radians is converted to degrees by:\n\n .. math::\n\n rad2deg(x) = x * \frac{180}{pi}\n\n Args:\n x: scalar or array. Specifies the angle in radians.\n\n Returns:\n An array containing the angles in degrees.\n\n See also:\n - :func:`jax.numpy.deg2rad` and :func:`jax.numpy.radians`: Converts the angles\n from degrees to radians.\n - :func:`jax.numpy.degrees`: Alias of ``rad2deg``.\n\n Examples:\n >>> pi = jnp.pi\n >>> x = jnp.array([pi/4, pi/2, 2*pi/3])\n >>> jnp.rad2deg(x)\n Array([ 45. , 90. 
, 120.00001], dtype=float32)\n >>> x * 180 / pi\n Array([ 45., 90., 120.], dtype=float32)\n """"""\n x, = promote_args_inexact(""rad2deg"", x)\n return lax.mul(x, _lax_const(x, 180 / np.pi))\n\n\n@export\ndef degrees(x: ArrayLike, /) -> Array:\n """"""Alias of :func:`jax.numpy.rad2deg`""""""\n return rad2deg(x)\n\n\n@export\ndef radians(x: ArrayLike, /) -> Array:\n """"""Alias of :func:`jax.numpy.deg2rad`""""""\n return deg2rad(x)\n\n\n@export\n@partial(jit, inline=True)\ndef conjugate(x: ArrayLike, /) -> Array:\n """"""Return element-wise complex-conjugate of the input.\n\n JAX implementation of :obj:`numpy.conjugate`.\n\n Args:\n x: inpuat array or scalar.\n\n Returns:\n An array containing the complex-conjugate of ``x``.\n\n See also:\n - :func:`jax.numpy.real`: Returns the element-wise real part of the complex\n argument.\n - :func:`jax.numpy.imag`: Returns the element-wise imaginary part of the\n complex argument.\n\n Examples:\n >>> jnp.conjugate(3)\n Array(3, dtype=int32, weak_type=True)\n >>> x = jnp.array([2-1j, 3+5j, 7])\n >>> jnp.conjugate(x)\n Array([2.+1.j, 3.-5.j, 7.-0.j], dtype=complex64)\n """"""\n x = ensure_arraylike(""conjugate"", x)\n return lax.conj(x) if np.iscomplexobj(x) else lax.asarray(x)\n\n\n@export\ndef conj(x: ArrayLike, /) -> Array:\n """"""Alias of :func:`jax.numpy.conjugate`""""""\n return conjugate(x)\n\n\n@export\n@partial(jit, inline=True)\ndef imag(val: ArrayLike, /) -> Array:\n """"""Return element-wise imaginary of part of the complex argument.\n\n JAX implementation of :obj:`numpy.imag`.\n\n Args:\n val: input array or scalar.\n\n Returns:\n An array containing the imaginary part of the elements of ``val``.\n\n See also:\n - :func:`jax.numpy.conjugate` and :func:`jax.numpy.conj`: Returns the element-wise\n complex-conjugate of the input.\n - :func:`jax.numpy.real`: Returns the element-wise real part of the complex\n argument.\n\n Examples:\n >>> jnp.imag(4)\n Array(0, dtype=int32, weak_type=True)\n >>> jnp.imag(5j)\n Array(5., dtype=float32, weak_type=True)\n >>> x = jnp.array([2+3j, 5-1j, -3])\n >>> jnp.imag(x)\n Array([ 3., -1., 0.], dtype=float32)\n """"""\n val = ensure_arraylike(""imag"", val)\n return lax.imag(val) if np.iscomplexobj(val) else lax.full_like(val, 0)\n\n\n@export\n@partial(jit, inline=True)\ndef real(val: ArrayLike, /) -> Array:\n """"""Return element-wise real part of the complex argument.\n\n JAX implementation of :obj:`numpy.real`.\n\n Args:\n val: input array or scalar.\n\n Returns:\n An array containing the real part of the elements of ``val``.\n\n See also:\n - :func:`jax.numpy.conjugate` and :func:`jax.numpy.conj`: Returns the element-wise\n complex-conjugate of the input.\n - :func:`jax.numpy.imag`: Returns the element-wise imaginary part of the\n complex argument.\n\n Examples:\n >>> jnp.real(5)\n Array(5, dtype=int32, weak_type=True)\n >>> jnp.real(2j)\n Array(0., dtype=float32, weak_type=True)\n >>> x = jnp.array([3-2j, 4+7j, -2j])\n >>> jnp.real(x)\n Array([ 3., 4., -0.], dtype=float32)\n """"""\n val = ensure_arraylike(""real"", val)\n return lax.real(val) if np.iscomplexobj(val) else lax.asarray(val)\n\n\n@export\n@jit\ndef modf(x: ArrayLike, /, out=None) -> tuple[Array, Array]:\n """"""Return element-wise fractional and integral parts of the input array.\n\n JAX implementation of :obj:`numpy.modf`.\n\n Args:\n x: input array or scalar.\n out: Not used by JAX.\n\n Returns:\n An array containing the fractional and integral parts of the elements of ``x``,\n promoting dtypes inexact.\n\n See also:\n - 
:func:`jax.numpy.divmod`: Calculates the integer quotient and remainder of\n ``x1`` by ``x2`` element-wise.\n\n Examples:\n >>> jnp.modf(4.8)\n (Array(0.8000002, dtype=float32, weak_type=True), Array(4., dtype=float32, weak_type=True))\n >>> x = jnp.array([-3.4, -5.7, 0.6, 1.5, 2.3])\n >>> jnp.modf(x)\n (Array([-0.4000001 , -0.6999998 , 0.6 , 0.5 , 0.29999995], dtype=float32), Array([-3., -5., 0., 1., 2.], dtype=float32))\n """"""\n x = ensure_arraylike(""modf"", x)\n x, = promote_dtypes_inexact(x)\n if out is not None:\n raise NotImplementedError(""The 'out' argument to jnp.modf is not supported."")\n whole = _where(lax.ge(x, lax._zero(x)), floor(x), ceil(x))\n return x - whole, whole\n\n\n@export\n@partial(jit, inline=True)\ndef isfinite(x: ArrayLike, /) -> Array:\n """"""Return a boolean array indicating whether each element of input is finite.\n\n JAX implementation of :obj:`numpy.isfinite`.\n\n Args:\n x: input array or scalar.\n\n Returns:\n A boolean array of same shape as ``x`` containing ``True`` where ``x`` is\n not ``inf``, ``-inf``, or ``NaN``, and ``False`` otherwise.\n\n See also:\n - :func:`jax.numpy.isinf`: Returns a boolean array indicating whether each\n element of input is either positive or negative infinity.\n - :func:`jax.numpy.isposinf`: Returns a boolean array indicating whether each\n element of input is positive infinity.\n - :func:`jax.numpy.isneginf`: Returns a boolean array indicating whether each\n element of input is negative infinity.\n - :func:`jax.numpy.isnan`: Returns a boolean array indicating whether each\n element of input is not a number (``NaN``).\n\n Examples:\n >>> x = jnp.array([-1, 3, jnp.inf, jnp.nan])\n >>> jnp.isfinite(x)\n Array([ True, True, False, False], dtype=bool)\n >>> jnp.isfinite(3-4j)\n Array(True, dtype=bool, weak_type=True)\n """"""\n x = ensure_arraylike(""isfinite"", x)\n dtype = dtypes.dtype(x)\n if dtypes.issubdtype(dtype, np.floating):\n return lax.is_finite(x)\n elif dtypes.issubdtype(dtype, np.complexfloating):\n return lax.bitwise_and(lax.is_finite(real(x)), lax.is_finite(imag(x)))\n else:\n return lax.full_like(x, True, dtype=np.bool_)\n\n\n@export\n@jit\ndef isinf(x: ArrayLike, /) -> Array:\n """"""Return a boolean array indicating whether each element of input is infinite.\n\n JAX implementation of :obj:`numpy.isinf`.\n\n Args:\n x: input array or scalar.\n\n Returns:\n A boolean array of same shape as ``x`` containing ``True`` where ``x`` is\n ``inf`` or ``-inf``, and ``False`` otherwise.\n\n See also:\n - :func:`jax.numpy.isposinf`: Returns a boolean array indicating whether each\n element of input is positive infinity.\n - :func:`jax.numpy.isneginf`: Returns a boolean array indicating whether each\n element of input is negative infinity.\n - :func:`jax.numpy.isfinite`: Returns a boolean array indicating whether each\n element of input is finite.\n - :func:`jax.numpy.isnan`: Returns a boolean array indicating whether each\n element of input is not a number (``NaN``).\n\n Examples:\n >>> jnp.isinf(jnp.inf)\n Array(True, dtype=bool)\n >>> x = jnp.array([2+3j, -jnp.inf, 6, jnp.inf, jnp.nan])\n >>> jnp.isinf(x)\n Array([False, True, False, True, False], dtype=bool)\n """"""\n x = ensure_arraylike(""isinf"", x)\n dtype = dtypes.dtype(x)\n if dtypes.issubdtype(dtype, np.floating):\n return lax.eq(lax.abs(x), _constant_like(x, np.inf))\n elif dtypes.issubdtype(dtype, np.complexfloating):\n re = lax.real(x)\n im = lax.imag(x)\n return lax.bitwise_or(lax.eq(lax.abs(re), _constant_like(re, np.inf)),\n lax.eq(lax.abs(im), 
_constant_like(im, np.inf)))\n else:\n return lax.full_like(x, False, dtype=np.bool_)\n\n\ndef _isposneginf(infinity: float, x: Array, out) -> Array:\n if out is not None:\n raise NotImplementedError(""The 'out' argument to isneginf/isposinf is not supported."")\n dtype = dtypes.dtype(x)\n if dtypes.issubdtype(dtype, np.floating):\n return lax.eq(x, _constant_like(x, infinity))\n elif dtypes.issubdtype(dtype, np.complexfloating):\n raise ValueError(""isposinf/isneginf are not well defined for complex types"")\n else:\n return lax.full_like(x, False, dtype=np.bool_)\n\n\n@export\ndef isposinf(x, /, out=None):\n """"""\n Return boolean array indicating whether each element of input is positive infinite.\n\n JAX implementation of :obj:`numpy.isposinf`.\n\n Args:\n x: input array or scalar. ``complex`` dtype are not supported.\n\n Returns:\n A boolean array of same shape as ``x`` containing ``True`` where ``x`` is\n ``inf``, and ``False`` otherwise.\n\n See also:\n - :func:`jax.numpy.isinf`: Returns a boolean array indicating whether each\n element of input is either positive or negative infinity.\n - :func:`jax.numpy.isneginf`: Returns a boolean array indicating whether each\n element of input is negative infinity.\n - :func:`jax.numpy.isfinite`: Returns a boolean array indicating whether each\n element of input is finite.\n - :func:`jax.numpy.isnan`: Returns a boolean array indicating whether each\n element of input is not a number (``NaN``).\n\n Examples:\n >>> jnp.isposinf(5)\n Array(False, dtype=bool)\n >>> x = jnp.array([-jnp.inf, 5, jnp.inf, jnp.nan, 1])\n >>> jnp.isposinf(x)\n Array([False, False, True, False, False], dtype=bool)\n """"""\n x = ensure_arraylike(""isposinf"", x)\n return _isposneginf(np.inf, x, out)\n\n\n@export\ndef isneginf(x, /, out=None):\n """"""\n Return boolean array indicating whether each element of input is negative infinite.\n\n JAX implementation of :obj:`numpy.isneginf`.\n\n Args:\n x: input array or scalar. ``complex`` dtype are not supported.\n\n Returns:\n A boolean array of same shape as ``x`` containing ``True`` where ``x`` is\n ``-inf``, and ``False`` otherwise.\n\n See also:\n - :func:`jax.numpy.isinf`: Returns a boolean array indicating whether each\n element of input is either positive or negative infinity.\n - :func:`jax.numpy.isposinf`: Returns a boolean array indicating whether each\n element of input is positive infinity.\n - :func:`jax.numpy.isfinite`: Returns a boolean array indicating whether each\n element of input is finite.\n - :func:`jax.numpy.isnan`: Returns a boolean array indicating whether each\n element of input is not a number (``NaN``).\n\n Examples:\n >>> jnp.isneginf(jnp.inf)\n Array(False, dtype=bool)\n >>> x = jnp.array([-jnp.inf, 5, jnp.inf, jnp.nan, 1])\n >>> jnp.isneginf(x)\n Array([ True, False, False, False, False], dtype=bool)\n """"""\n x = ensure_arraylike(""isneginf"", x)\n return _isposneginf(-np.inf, x, out)\n\n\n@export\n@partial(jit, inline=True)\ndef isnan(x: ArrayLike, /) -> Array:\n """"""Returns a boolean array indicating whether each element of input is ``NaN``.\n\n JAX implementation of :obj:`numpy.isnan`.\n\n Args:\n x: input array or scalar.\n\n Returns:\n A boolean array of same shape as ``x`` containing ``True`` where ``x`` is\n not a number (i.e. 
``NaN``) and ``False`` otherwise.\n\n See also:\n - :func:`jax.numpy.isfinite`: Returns a boolean array indicating whether each\n element of input is finite.\n - :func:`jax.numpy.isinf`: Returns a boolean array indicating whether each\n element of input is either positive or negative infinity.\n - :func:`jax.numpy.isposinf`: Returns a boolean array indicating whether each\n element of input is positive infinity.\n - :func:`jax.numpy.isneginf`: Returns a boolean array indicating whether each\n element of input is negative infinity.\n\n Examples:\n >>> jnp.isnan(6)\n Array(False, dtype=bool, weak_type=True)\n >>> x = jnp.array([2, 1+4j, jnp.inf, jnp.nan])\n >>> jnp.isnan(x)\n Array([False, False, False, True], dtype=bool)\n """"""\n x = ensure_arraylike(""isnan"", x)\n return lax.ne(x, x)\n\n\n@export\n@jit\ndef heaviside(x1: ArrayLike, x2: ArrayLike, /) -> Array:\n r""""""Compute the heaviside step function.\n\n JAX implementation of :obj:`numpy.heaviside`.\n\n The heaviside step function is defined by:\n\n .. math::\n\n \mathrm{heaviside}(x1, x2) = \begin{cases}\n 0, & x1 < 0\\\n x2, & x1 = 0\\\n 1, & x1 > 0.\n \end{cases}\n\n Args:\n x1: input array or scalar. ``complex`` dtype are not supported.\n x2: scalar or array. Specifies the return values when ``x1`` is ``0``. ``complex``\n dtype are not supported. ``x1`` and ``x2`` must either have same shape or\n broadcast compatible.\n\n Returns:\n An array containing the heaviside step function of ``x1``, promoting to\n inexact dtype.\n\n Examples:\n >>> x1 = jnp.array([[-2, 0, 3],\n ... [5, -1, 0],\n ... [0, 7, -3]])\n >>> x2 = jnp.array([2, 0.5, 1])\n >>> jnp.heaviside(x1, x2)\n Array([[0. , 0.5, 1. ],\n [1. , 0. , 1. ],\n [2. , 1. , 0. ]], dtype=float32)\n >>> jnp.heaviside(x1, 0.5)\n Array([[0. , 0.5, 1. ],\n [1. , 0. , 0.5],\n [0.5, 1. , 0. ]], dtype=float32)\n >>> jnp.heaviside(-3, x2)\n Array([0., 0., 0.], dtype=float32)\n """"""\n x1, x2 = ensure_arraylike(""heaviside"", x1, x2)\n x1, x2 = promote_dtypes_inexact(x1, x2)\n zero = _lax_const(x1, 0)\n return _where(lax.lt(x1, zero), zero,\n _where(lax.gt(x1, zero), _lax_const(x1, 1), x2))\n\n\n@export\n@jit\ndef hypot(x1: ArrayLike, x2: ArrayLike, /) -> Array:\n r""""""\n Return element-wise hypotenuse for the given legs of a right angle triangle.\n\n JAX implementation of :obj:`numpy.hypot`.\n\n Args:\n x1: scalar or array. Specifies one of the legs of right angle triangle.\n ``complex`` dtype are not supported.\n x2: scalar or array. Specifies the other leg of right angle triangle.\n ``complex`` dtype are not supported. ``x1`` and ``x2`` must either have\n same shape or be broadcast compatible.\n\n Returns:\n An array containing the hypotenuse for the given given legs ``x1`` and ``x2``\n of a right angle triangle, promoting to inexact dtype.\n\n Note:\n ``jnp.hypot`` is a more numerically stable way of computing\n ``jnp.sqrt(x1 ** 2 + x2 **2)``.\n\n Examples:\n >>> jnp.hypot(3, 4)\n Array(5., dtype=float32, weak_type=True)\n >>> x1 = jnp.array([[3, -2, 5],\n ... [9, 1, -4]])\n >>> x2 = jnp.array([-5, 6, 8])\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.hypot(x1, x2)\n Array([[ 5.831, 6.325, 9.434],\n [10.296, 6.083, 8.944]], dtype=float32)\n """"""\n x1, x2 = promote_args_inexact(""hypot"", x1, x2)\n\n # TODO(micky774): Promote to ValueError when deprecation is complete\n # (began 2024-4-14).\n if dtypes.issubdtype(x1.dtype, np.complexfloating):\n raise ValueError(\n ""jnp.hypot is not well defined for complex-valued inputs. 
""\n ""Please convert to real values first, such as by using abs(x)"")\n x1, x2 = lax.abs(x1), lax.abs(x2)\n idx_inf = lax.bitwise_or(isposinf(x1), isposinf(x2))\n x1, x2 = maximum(x1, x2), minimum(x1, x2)\n x = _where(x1 == 0, x1, x1 * lax.sqrt(1 + lax.square(lax.div(x2, _where(x1 == 0, lax._ones(x1), x1)))))\n return _where(idx_inf, _lax_const(x, np.inf), x)\n\n\n@export\n@partial(jit, inline=True)\ndef reciprocal(x: ArrayLike, /) -> Array:\n """"""Calculate element-wise reciprocal of the input.\n\n JAX implementation of :obj:`numpy.reciprocal`.\n\n The reciprocal is calculated by ``1/x``.\n\n Args:\n x: input array or scalar.\n\n Returns:\n An array of same shape as ``x`` containing the reciprocal of each element of\n ``x``.\n\n Note:\n For integer inputs, ``np.reciprocal`` returns rounded integer output, while\n ``jnp.reciprocal`` promotes integer inputs to floating point.\n\n Examples:\n >>> jnp.reciprocal(2)\n Array(0.5, dtype=float32, weak_type=True)\n >>> jnp.reciprocal(0.)\n Array(inf, dtype=float32, weak_type=True)\n >>> x = jnp.array([1, 5., 4.])\n >>> jnp.reciprocal(x)\n Array([1. , 0.2 , 0.25], dtype=float32)\n """"""\n x = ensure_arraylike(""reciprocal"", x)\n x, = promote_dtypes_inexact(x)\n return lax.integer_pow(x, -1)\n\n\n@export\n@jit\ndef sinc(x: ArrayLike, /) -> Array:\n r""""""Calculate the normalized sinc function.\n\n JAX implementation of :func:`numpy.sinc`.\n\n The normalized sinc function is given by\n\n .. math::\n \mathrm{sinc}(x) = \frac{\sin({\pi x})}{\pi x}\n\n where ``sinc(0)`` returns the limit value of ``1``. The sinc function is\n smooth and infinitely differentiable.\n\n Args:\n x : input array; will be promoted to an inexact type.\n\n Returns:\n An array of the same shape as ``x`` containing the result.\n\n Examples:\n >>> x = jnp.array([-1, -0.5, 0, 0.5, 1])\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.sinc(x)\n Array([-0. , 0.637, 1. , 0.637, -0. ], dtype=float32)\n\n Compare this to the naive approach to computing the function, which is\n undefined at zero:\n\n >>> with jnp.printoptions(precision=3, suppress=True):\n ... jnp.sin(jnp.pi * x) / (jnp.pi * x)\n Array([-0. , 0.637, nan, 0.637, -0. ], dtype=float32)\n\n JAX defines a custom gradient rule for sinc to allow accurate evaluation\n of the gradient at zero even for higher-order derivatives:\n\n >>> f = jnp.sinc\n >>> for i in range(1, 6):\n ... f = jax.grad(f)\n ... 
print(f""(d/dx)^{i} f(0.0) = {f(0.0):.2f}"")\n ...\n (d/dx)^1 f(0.0) = 0.00\n (d/dx)^2 f(0.0) = -3.29\n (d/dx)^3 f(0.0) = 0.00\n (d/dx)^4 f(0.0) = 19.48\n (d/dx)^5 f(0.0) = 0.00\n """"""\n x = ensure_arraylike(""sinc"", x)\n x, = promote_dtypes_inexact(x)\n eq_zero = lax.eq(x, _lax_const(x, 0))\n pi_x = lax.mul(_lax_const(x, np.pi), x)\n safe_pi_x = _where(eq_zero, _lax_const(x, 1), pi_x)\n return _where(eq_zero, _sinc_maclaurin(0, pi_x),\n lax.div(lax.sin(safe_pi_x), safe_pi_x))\n\n\n@partial(custom_jvp, nondiff_argnums=(0,))\ndef _sinc_maclaurin(k, x):\n # compute the kth derivative of x -> sin(x)/x evaluated at zero (since we\n # compute the monomial term in the jvp rule)\n # TODO(mattjj): see https://github.com/jax-ml/jax/issues/10750\n if k % 2:\n return x * 0\n else:\n return x * 0 + _lax_const(x, (-1) ** (k // 2) / (k + 1))\n\n@_sinc_maclaurin.defjvp\ndef _sinc_maclaurin_jvp(k, primals, tangents):\n (x,), (t,) = primals, tangents\n return _sinc_maclaurin(k, x), _sinc_maclaurin(k + 1, x) * t\n",python,tab +8985,24384799,".venv/lib/python3.10/site-packages/jax/_src/numpy/ufuncs.py",58119,0,"",python,selection_command +8986,24385558,".venv/lib/python3.10/site-packages/jax/_src/numpy/ufuncs.py",58130,0,"",python,selection_command +8987,24385842,".venv/lib/python3.10/site-packages/jax/_src/numpy/ufuncs.py",58131,0,"",python,selection_command +8988,24391212,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",0,0,"",python,tab +8989,24391212,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",26883,0,"",python,selection_command +8990,24394953,".venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py",21774,0,"",python,selection_command +8991,24404721,"utils/nn.py",0,0,"",python,tab +8992,24405978,"utils/nn.py",11818,0,"",python,selection_command +8993,24406314,"utils/nn.py",10207,0,"",python,selection_command +8994,24407543,"utils/nn.py",10164,0,"",python,selection_command +8995,24407798,"utils/nn.py",10135,0,"",python,selection_command +8996,24407824,"utils/nn.py",10042,0,"",python,selection_command +8997,24407849,"utils/nn.py",10021,0,"",python,selection_command +8998,24408026,"utils/nn.py",10008,0,"",python,selection_command +8999,24408245,"utils/nn.py",10021,0,"",python,selection_command +9000,24423493,"/fast/home/franz.srambical/jafar/models/dynamics_causal.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsCausal(nnx.Module):\n """"""Causal dynamics model""""""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n\n self.transformer = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n spatial_causal=True,\n decode=self.decode,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.action_up = 
nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(\n self, batch: Dict[str, jax.Array], training: bool = True\n ) -> tuple[jax.Array, jax.Array | None]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n # FIXME: this is the culprit!\n # vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n vid_embed += act_embed\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n\n # vid_embed_padded.shape (1, 1, 921, 512)\n logits = self.transformer(vid_embed_padded)[:, :, :-1]\n\n mask = jnp.ones(vid_embed.shape[:-1])\n return logits, mask\n",python,tab +9001,24423493,"/fast/home/franz.srambical/jafar/models/dynamics_causal.py",2213,0,"",python,selection_command +9002,24427366,"/fast/home/franz.srambical/jafar/utils/nn.py",0,0,"",python,tab +9003,24427366,"/fast/home/franz.srambical/jafar/utils/nn.py",6866,0,"",python,selection_command +9004,24427455,"/fast/home/franz.srambical/jafar/utils/nn.py",2260,8478," decode=self.decode,\n )\n\n self.temporal_pos_enc = PositionalEncoding(self.dim)\n self.temporal_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.temporal_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=self.decode,\n )\n\n self.ffn_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense1 = nnx.Linear(\n in_features=self.dim,\n out_features=self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense2 = nnx.Linear(\n in_features=self.ffn_dim,\n out_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n @nnx.remat\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = self.spatial_pos_enc(x)\n z = self.spatial_norm(z)\n # z.shape (1, 1, 921, 512)\n z = self.spatial_attention(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = self.temporal_pos_enc(x)\n z = self.temporal_norm(z)\n # z.shape (1, 921, 1, 512)\n z = self.temporal_attention(z)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = self.ffn_norm(x)\n z = self.ffn_dense1(z)\n z = jax.nn.gelu(z)\n z = self.ffn_dense2(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nnx.Module):\n def __init__(\n self,\n input_dim: int,\n model_dim: int,\n ffn_dim: int,\n out_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n spatial_causal: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.input_dim = input_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.out_dim = out_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.spatial_causal = spatial_causal\n self.decode = decode\n\n self.input_norm1 = nnx.LayerNorm(\n num_features=self.input_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_dense = nnx.Linear(\n 
in_features=self.input_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_norm2 = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n self.blocks: list[STBlock] = []\n for _ in range(self.num_blocks):\n self.blocks.append(\n STBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n spatial_causal=self.spatial_causal,\n decode=self.decode,\n rngs=rngs,\n )\n )\n\n self.output_dense = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(self, x: jax.Array) -> jax.Array:\n # x.shape (1, 1, 921, 512)\n x = self.input_norm1(x)\n x = self.input_dense(x)\n x = self.input_norm2(x)\n\n for block in self.blocks:\n # x.shape (1, 1, 921, 512)\n x = block(x)\n\n x = self.output_dense(x)\n return x # (B, T, E)\n\n\ndef normalize(x: jax.Array) -> jax.Array:\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nnx.Module):\n def __init__(\n self, latent_dim: int, num_latents: int, dropout: float, rngs: nnx.Rngs\n ):\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.dropout = dropout\n\n self.codebook = nnx.Param(\n normalize(\n nnx.initializers.lecun_uniform()(\n rngs.params(), (self.num_latents, self.latent_dim)\n )\n )\n )\n self.drop = nnx.Dropout(self.dropout, rngs=rngs)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n normalized_codebook = normalize(self.codebook.value)\n distance = -jnp.matmul(x, normalized_codebook.T)\n if training:\n distance = self.drop(distance)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array) -> jax.Array:\n return self.codebook[indices]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool) -> Callable:\n """"""\n Create an attention function that uses flash attention if enabled.\n\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\n\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\n multiple of 4 and mask accordingly.\n """"""\n\n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\n # for temporal attention (using kv cache)\n # FIRST PASS: qkv.shape (1, 921, 1, 8, 64)\n # SECOND PASS: qkv.shape \n implementation = ""cudnn"" if use_flash_attention else None\n\n def _rearrange(x):\n return einops.rearrange(x, ""... l h d -> (...) 
l h d"")\n\n def _pad(x):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n original_shape = query.shape\n original_seq_len = query.shape[-3]\n\n # Pad to nearest multiple of 4\n target_seq_len = ((original_seq_len + 3) // 4) * 4\n pad_size = target_seq_len - original_seq_len\n\n query_4d = _pad(_rearrange(query))\n key_4d = _pad(_rearrange(key))\n value_4d = _pad(_rearrange(value))\n # query_4d.shape (921, 4, 8, 64)\n\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n breakpoint()\n # Handle causal mask for cached decoder self-attention (from nnx.MultiHeadAttention)\n if mask is not None:\n # mask.shape (1, 921, 1, 1, 1)\n # FIXME (f.srambical): the rearrange depends on whether this is spatial or temporal attention\n mask_4d = _rearrange(mask)\n # mask_4d.shape (921, 1, 1, 1) (B, H-broadcast, Q-broadcast, K-broadcast)\n # NOTE: We need to broadcast T and S dimensions to target_seq_len since cudnn attention strictly checks the mask shape\n # https://github.com/jax-ml/jax/issues/28974\n # https://github.com/jax-ml/jax/blob/08c7677393672ccb85c10f1ed0bd506905c3c994/jax/_src/cudnn/fused_attention_stablehlo.py#L1830\n # https://github.com/jax-ml/jax/blob/08c7677393672ccb85c10f1ed0bd506905c3c994/jax/_src/cudnn/fused_attention_stablehlo.py#L337\n mask_4d = einops.repeat(mask_4d, ""... 1 1 -> ... t s"", t=target_seq_len, s=target_seq_len)\n mask_4d = mask_4d.astype(jnp.bool)\n else:\n # FIXME (f.srambical): Investigate whether/why this is needed\n mask_4d = attention_mask[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n # if query.shape == (1, 921, 1, 8, 64):\n # breakpoint()\n",python,content +9005,24428933,"/fast/home/franz.srambical/jafar/utils/nn.py",6877,0,"",python,selection_command +9006,24433307,"/fast/home/franz.srambical/jafar/utils/nn.py",3833,0,"",python,selection_command +9007,24436355,"/fast/home/franz.srambical/jafar/utils/nn.py",4367,0,"",python,selection_mouse +9008,24436358,"/fast/home/franz.srambical/jafar/utils/nn.py",4366,0,"",python,selection_command +9009,24436699,"/fast/home/franz.srambical/jafar/utils/nn.py",11818,0,"",python,selection_command +9010,24437717,"/fast/home/franz.srambical/jafar/utils/nn.py",10207,0,"",python,selection_command +9011,24438378,"/fast/home/franz.srambical/jafar/utils/nn.py",7864,0,"",python,selection_command +9012,24440451,"TERMINAL",0,0,"q",,terminal_output +9013,24440614,"TERMINAL",0,0,"u",,terminal_output +9014,24440684,"TERMINAL",0,0,"e",,terminal_output +9015,24440733,"TERMINAL",0,0,"r",,terminal_output +9016,24440831,"TERMINAL",0,0,"y",,terminal_output +9017,24441484,"TERMINAL",0,0,".",,terminal_output +9018,24441553,"TERMINAL",0,0,"s",,terminal_output +9019,24441729,"TERMINAL",0,0,"ha",,terminal_output +9020,24441884,"TERMINAL",0,0,"p",,terminal_output +9021,24441965,"TERMINAL",0,0,"e",,terminal_output +9022,24442122,"TERMINAL",0,0,"\r\n(Pdb) (1, 2, 61, 8, 64)\r\n",,terminal_output +9023,24443751,"TERMINAL",0,0,"z",,terminal_output +9024,24443924,"TERMINAL",0,0,"z",,terminal_output +9025,24446074,"/fast/home/franz.srambical/jafar/utils/nn.py",9039,0,"",python,selection_command +9026,24482961,"/fast/home/franz.srambical/jafar/utils/nn.py",9090,0,"",python,selection_command +9027,24483145,"/fast/home/franz.srambical/jafar/utils/nn.py",9115,0,"\n ",python,content 
+9028,24483735,"/fast/home/franz.srambical/jafar/utils/nn.py",9124,0,"#",python,content +9029,24483735,"/fast/home/franz.srambical/jafar/utils/nn.py",9125,0,"",python,selection_keyboard +9030,24483852,"/fast/home/franz.srambical/jafar/utils/nn.py",9125,0," ",python,content +9031,24483853,"/fast/home/franz.srambical/jafar/utils/nn.py",9126,0,"",python,selection_keyboard +9032,24484027,"/fast/home/franz.srambical/jafar/utils/nn.py",9126,0,"f",python,content +9033,24484028,"/fast/home/franz.srambical/jafar/utils/nn.py",9127,0,"",python,selection_keyboard +9034,24484120,"/fast/home/franz.srambical/jafar/utils/nn.py",9127,0,"o",python,content +9035,24484121,"/fast/home/franz.srambical/jafar/utils/nn.py",9128,0,"",python,selection_keyboard +9036,24484211,"/fast/home/franz.srambical/jafar/utils/nn.py",9128,0,"r",python,content +9037,24484212,"/fast/home/franz.srambical/jafar/utils/nn.py",9129,0,"",python,selection_keyboard +9038,24484256,"/fast/home/franz.srambical/jafar/utils/nn.py",9129,0," ",python,content +9039,24484256,"/fast/home/franz.srambical/jafar/utils/nn.py",9130,0,"",python,selection_keyboard +9040,24484490,"/fast/home/franz.srambical/jafar/utils/nn.py",9130,0,"s",python,content +9041,24484490,"/fast/home/franz.srambical/jafar/utils/nn.py",9131,0,"",python,selection_keyboard +9042,24484588,"/fast/home/franz.srambical/jafar/utils/nn.py",9131,0,"p",python,content +9043,24484588,"/fast/home/franz.srambical/jafar/utils/nn.py",9132,0,"",python,selection_keyboard +9044,24484652,"/fast/home/franz.srambical/jafar/utils/nn.py",9132,0,"a",python,content +9045,24484653,"/fast/home/franz.srambical/jafar/utils/nn.py",9133,0,"",python,selection_keyboard +9046,24484736,"/fast/home/franz.srambical/jafar/utils/nn.py",9133,0,"t",python,content +9047,24484737,"/fast/home/franz.srambical/jafar/utils/nn.py",9134,0,"",python,selection_keyboard +9048,24484802,"/fast/home/franz.srambical/jafar/utils/nn.py",9134,0,"i",python,content +9049,24484803,"/fast/home/franz.srambical/jafar/utils/nn.py",9135,0,"",python,selection_keyboard +9050,24484909,"/fast/home/franz.srambical/jafar/utils/nn.py",9135,0,"a",python,content +9051,24484909,"/fast/home/franz.srambical/jafar/utils/nn.py",9136,0,"",python,selection_keyboard +9052,24484975,"/fast/home/franz.srambical/jafar/utils/nn.py",9136,0,"l",python,content +9053,24484976,"/fast/home/franz.srambical/jafar/utils/nn.py",9137,0,"",python,selection_keyboard +9054,24485057,"/fast/home/franz.srambical/jafar/utils/nn.py",9137,0," ",python,content +9055,24485057,"/fast/home/franz.srambical/jafar/utils/nn.py",9138,0,"",python,selection_keyboard +9056,24485142,"/fast/home/franz.srambical/jafar/utils/nn.py",9138,0,"a",python,content +9057,24485142,"/fast/home/franz.srambical/jafar/utils/nn.py",9139,0,"",python,selection_keyboard +9058,24485239,"/fast/home/franz.srambical/jafar/utils/nn.py",9139,0,"t",python,content +9059,24485240,"/fast/home/franz.srambical/jafar/utils/nn.py",9140,0,"",python,selection_keyboard +9060,24485364,"/fast/home/franz.srambical/jafar/utils/nn.py",9140,0,"t",python,content +9061,24485365,"/fast/home/franz.srambical/jafar/utils/nn.py",9141,0,"",python,selection_keyboard +9062,24485457,"/fast/home/franz.srambical/jafar/utils/nn.py",9141,0,"e",python,content +9063,24485458,"/fast/home/franz.srambical/jafar/utils/nn.py",9142,0,"",python,selection_keyboard +9064,24485583,"/fast/home/franz.srambical/jafar/utils/nn.py",9142,0,"n",python,content +9065,24485583,"/fast/home/franz.srambical/jafar/utils/nn.py",9143,0,"",python,selection_keyboard 
+9066,24485674,"/fast/home/franz.srambical/jafar/utils/nn.py",9143,0,"t",python,content +9067,24485675,"/fast/home/franz.srambical/jafar/utils/nn.py",9144,0,"",python,selection_keyboard +9068,24485773,"/fast/home/franz.srambical/jafar/utils/nn.py",9144,0,"i",python,content +9069,24485774,"/fast/home/franz.srambical/jafar/utils/nn.py",9145,0,"",python,selection_keyboard +9070,24485843,"/fast/home/franz.srambical/jafar/utils/nn.py",9145,0,"o",python,content +9071,24485843,"/fast/home/franz.srambical/jafar/utils/nn.py",9146,0,"",python,selection_keyboard +9072,24485890,"/fast/home/franz.srambical/jafar/utils/nn.py",9146,0,"n",python,content +9073,24485890,"/fast/home/franz.srambical/jafar/utils/nn.py",9147,0,"",python,selection_keyboard +9074,24486163,"/fast/home/franz.srambical/jafar/utils/nn.py",9147,0," ",python,content +9075,24486163,"/fast/home/franz.srambical/jafar/utils/nn.py",9148,0,"",python,selection_keyboard +9076,24486299,"/fast/home/franz.srambical/jafar/utils/nn.py",9148,0,"()",python,content +9077,24486300,"/fast/home/franz.srambical/jafar/utils/nn.py",9149,0,"",python,selection_keyboard +9078,24486791,"/fast/home/franz.srambical/jafar/utils/nn.py",9149,0,"u",python,content +9079,24486792,"/fast/home/franz.srambical/jafar/utils/nn.py",9150,0,"",python,selection_keyboard +9080,24486840,"/fast/home/franz.srambical/jafar/utils/nn.py",9150,0,"s",python,content +9081,24486841,"/fast/home/franz.srambical/jafar/utils/nn.py",9151,0,"",python,selection_keyboard +9082,24486943,"/fast/home/franz.srambical/jafar/utils/nn.py",9151,0,"i",python,content +9083,24486943,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,0,"",python,selection_keyboard +9084,24487022,"/fast/home/franz.srambical/jafar/utils/nn.py",9152,0,"n",python,content +9085,24487023,"/fast/home/franz.srambical/jafar/utils/nn.py",9153,0,"",python,selection_keyboard +9086,24487043,"/fast/home/franz.srambical/jafar/utils/nn.py",9153,0,"g",python,content +9087,24487043,"/fast/home/franz.srambical/jafar/utils/nn.py",9154,0,"",python,selection_keyboard +9088,24487124,"/fast/home/franz.srambical/jafar/utils/nn.py",9154,0," ",python,content +9089,24487124,"/fast/home/franz.srambical/jafar/utils/nn.py",9155,0,"",python,selection_keyboard +9090,24487261,"/fast/home/franz.srambical/jafar/utils/nn.py",9155,0,"k",python,content +9091,24487261,"/fast/home/franz.srambical/jafar/utils/nn.py",9156,0,"",python,selection_keyboard +9092,24487325,"/fast/home/franz.srambical/jafar/utils/nn.py",9156,0,"v",python,content +9093,24487325,"/fast/home/franz.srambical/jafar/utils/nn.py",9157,0,"",python,selection_keyboard +9094,24487445,"/fast/home/franz.srambical/jafar/utils/nn.py",9157,0," ",python,content +9095,24487446,"/fast/home/franz.srambical/jafar/utils/nn.py",9158,0,"",python,selection_keyboard +9096,24487563,"/fast/home/franz.srambical/jafar/utils/nn.py",9158,0,"c",python,content +9097,24487564,"/fast/home/franz.srambical/jafar/utils/nn.py",9159,0,"",python,selection_keyboard +9098,24487628,"/fast/home/franz.srambical/jafar/utils/nn.py",9159,0,"a",python,content +9099,24487629,"/fast/home/franz.srambical/jafar/utils/nn.py",9160,0,"",python,selection_keyboard +9100,24487724,"/fast/home/franz.srambical/jafar/utils/nn.py",9160,0,"c",python,content +9101,24487725,"/fast/home/franz.srambical/jafar/utils/nn.py",9161,0,"",python,selection_keyboard +9102,24487823,"/fast/home/franz.srambical/jafar/utils/nn.py",9161,0,"h",python,content +9103,24487824,"/fast/home/franz.srambical/jafar/utils/nn.py",9162,0,"",python,selection_keyboard 
+9104,24487891,"/fast/home/franz.srambical/jafar/utils/nn.py",9162,0,"e",python,content +9105,24487892,"/fast/home/franz.srambical/jafar/utils/nn.py",9163,0,"",python,selection_keyboard +9106,24488095,"/fast/home/franz.srambical/jafar/utils/nn.py",9162,0,"",python,selection_command +9107,24488400,"/fast/home/franz.srambical/jafar/utils/nn.py",9164,0,"",python,selection_command +9108,24488480,"/fast/home/franz.srambical/jafar/utils/nn.py",9164,0,")",python,content +9109,24488480,"/fast/home/franz.srambical/jafar/utils/nn.py",9165,0,"",python,selection_keyboard +9110,24488945,"/fast/home/franz.srambical/jafar/utils/nn.py",9164,1,"",python,content +9111,24489076,"/fast/home/franz.srambical/jafar/utils/nn.py",9164,0,"\n ",python,content +9112,24489824,"/fast/home/franz.srambical/jafar/utils/nn.py",9173,0,"#",python,content +9113,24489824,"/fast/home/franz.srambical/jafar/utils/nn.py",9174,0,"",python,selection_keyboard +9114,24490264,"/fast/home/franz.srambical/jafar/utils/nn.py",9173,0,"",python,selection_command +9115,24490463,"/fast/home/franz.srambical/jafar/utils/nn.py",9124,0,"",python,selection_command +9116,24490629,"/fast/home/franz.srambical/jafar/utils/nn.py",9090,0,"",python,selection_command +9117,24490775,"/fast/home/franz.srambical/jafar/utils/nn.py",9039,0,"",python,selection_command +9118,24490925,"/fast/home/franz.srambical/jafar/utils/nn.py",9090,0,"",python,selection_command +9119,24491095,"/fast/home/franz.srambical/jafar/utils/nn.py",9124,0,"",python,selection_command +9120,24491227,"/fast/home/franz.srambical/jafar/utils/nn.py",9173,0,"",python,selection_command +9121,24491570,"/fast/home/franz.srambical/jafar/utils/nn.py",9174,0,"",python,selection_command +9122,24491634,"/fast/home/franz.srambical/jafar/utils/nn.py",9174,0," ",python,content +9123,24491634,"/fast/home/franz.srambical/jafar/utils/nn.py",9175,0,"",python,selection_keyboard +9124,24491782,"/fast/home/franz.srambical/jafar/utils/nn.py",9175,0,"F",python,content +9125,24491782,"/fast/home/franz.srambical/jafar/utils/nn.py",9176,0,"",python,selection_keyboard +9126,24491924,"/fast/home/franz.srambical/jafar/utils/nn.py",9176,0,"I",python,content +9127,24491924,"/fast/home/franz.srambical/jafar/utils/nn.py",9177,0,"",python,selection_keyboard +9128,24491963,"/fast/home/franz.srambical/jafar/utils/nn.py",9177,0,"R",python,content +9129,24491963,"/fast/home/franz.srambical/jafar/utils/nn.py",9178,0,"",python,selection_keyboard +9130,24492231,"/fast/home/franz.srambical/jafar/utils/nn.py",9178,0,"S",python,content +9131,24492232,"/fast/home/franz.srambical/jafar/utils/nn.py",9179,0,"",python,selection_keyboard +9132,24492383,"/fast/home/franz.srambical/jafar/utils/nn.py",9179,0,"T",python,content +9133,24492383,"/fast/home/franz.srambical/jafar/utils/nn.py",9180,0,"",python,selection_keyboard +9134,24492409,"/fast/home/franz.srambical/jafar/utils/nn.py",9180,0," ",python,content +9135,24492410,"/fast/home/franz.srambical/jafar/utils/nn.py",9181,0,"",python,selection_keyboard +9136,24492598,"/fast/home/franz.srambical/jafar/utils/nn.py",9181,0,"P",python,content +9137,24492599,"/fast/home/franz.srambical/jafar/utils/nn.py",9182,0,"",python,selection_keyboard +9138,24492678,"/fast/home/franz.srambical/jafar/utils/nn.py",9182,0,"A",python,content +9139,24492679,"/fast/home/franz.srambical/jafar/utils/nn.py",9183,0,"",python,selection_keyboard +9140,24492700,"/fast/home/franz.srambical/jafar/utils/nn.py",9183,0,"S",python,content 
+9141,24492700,"/fast/home/franz.srambical/jafar/utils/nn.py",9184,0,"",python,selection_keyboard +9142,24492842,"/fast/home/franz.srambical/jafar/utils/nn.py",9184,0,"S",python,content +9143,24492843,"/fast/home/franz.srambical/jafar/utils/nn.py",9185,0,"",python,selection_keyboard +9144,24493081,"/fast/home/franz.srambical/jafar/utils/nn.py",9185,0,":",python,content +9145,24493081,"/fast/home/franz.srambical/jafar/utils/nn.py",9186,0,"",python,selection_keyboard +9146,24493199,"/fast/home/franz.srambical/jafar/utils/nn.py",9186,0," ",python,content +9147,24493200,"/fast/home/franz.srambical/jafar/utils/nn.py",9187,0,"",python,selection_keyboard +9148,24493515,"/fast/home/franz.srambical/jafar/utils/nn.py",9187,0,"k",python,content +9149,24493516,"/fast/home/franz.srambical/jafar/utils/nn.py",9188,0,"",python,selection_keyboard +9150,24493836,"/fast/home/franz.srambical/jafar/utils/nn.py",9188,0,"q",python,content +9151,24493836,"/fast/home/franz.srambical/jafar/utils/nn.py",9189,0,"",python,selection_keyboard +9152,24494180,"/fast/home/franz.srambical/jafar/utils/nn.py",9188,1,"",python,content +9153,24494297,"/fast/home/franz.srambical/jafar/utils/nn.py",9187,1,"",python,content +9154,24494398,"/fast/home/franz.srambical/jafar/utils/nn.py",9187,0,"q",python,content +9155,24494398,"/fast/home/franz.srambical/jafar/utils/nn.py",9188,0,"",python,selection_keyboard +9156,24494499,"/fast/home/franz.srambical/jafar/utils/nn.py",9188,0,"k",python,content +9157,24494499,"/fast/home/franz.srambical/jafar/utils/nn.py",9189,0,"",python,selection_keyboard +9158,24494597,"/fast/home/franz.srambical/jafar/utils/nn.py",9189,0,"v",python,content +9159,24494597,"/fast/home/franz.srambical/jafar/utils/nn.py",9190,0,"",python,selection_keyboard +9160,24495273,"/fast/home/franz.srambical/jafar/utils/nn.py",9190,0,".",python,content +9161,24495273,"/fast/home/franz.srambical/jafar/utils/nn.py",9191,0,"",python,selection_keyboard +9162,24495349,"/fast/home/franz.srambical/jafar/utils/nn.py",9191,0,"s",python,content +9163,24495349,"/fast/home/franz.srambical/jafar/utils/nn.py",9192,0,"",python,selection_keyboard +9164,24495465,"/fast/home/franz.srambical/jafar/utils/nn.py",9192,0,"h",python,content +9165,24495466,"/fast/home/franz.srambical/jafar/utils/nn.py",9193,0,"",python,selection_keyboard +9166,24495516,"/fast/home/franz.srambical/jafar/utils/nn.py",9193,0,"a",python,content +9167,24495516,"/fast/home/franz.srambical/jafar/utils/nn.py",9194,0,"",python,selection_keyboard +9168,24495649,"/fast/home/franz.srambical/jafar/utils/nn.py",9194,0,"p",python,content +9169,24495649,"/fast/home/franz.srambical/jafar/utils/nn.py",9195,0,"",python,selection_keyboard +9170,24495700,"/fast/home/franz.srambical/jafar/utils/nn.py",9195,0,"e",python,content +9171,24495701,"/fast/home/franz.srambical/jafar/utils/nn.py",9196,0,"",python,selection_keyboard +9172,24495833,"/fast/home/franz.srambical/jafar/utils/nn.py",9196,0," ",python,content +9173,24495834,"/fast/home/franz.srambical/jafar/utils/nn.py",9197,0,"",python,selection_keyboard +9174,24496033,"/fast/home/franz.srambical/jafar/utils/nn.py",9197,0,"()",python,content +9175,24496034,"/fast/home/franz.srambical/jafar/utils/nn.py",9198,0,"",python,selection_keyboard +9176,24497760,"/fast/home/franz.srambical/jafar/utils/nn.py",9198,0,"1",python,content +9177,24497761,"/fast/home/franz.srambical/jafar/utils/nn.py",9199,0,"",python,selection_keyboard +9178,24497833,"/fast/home/franz.srambical/jafar/utils/nn.py",9199,0,",",python,content 
+9179,24497834,"/fast/home/franz.srambical/jafar/utils/nn.py",9200,0,"",python,selection_keyboard +9180,24497933,"/fast/home/franz.srambical/jafar/utils/nn.py",9200,0," ",python,content +9181,24497933,"/fast/home/franz.srambical/jafar/utils/nn.py",9201,0,"",python,selection_keyboard +9182,24498060,"/fast/home/franz.srambical/jafar/utils/nn.py",9201,0,"2",python,content +9183,24498060,"/fast/home/franz.srambical/jafar/utils/nn.py",9202,0,"",python,selection_keyboard +9184,24498165,"/fast/home/franz.srambical/jafar/utils/nn.py",9202,0,",",python,content +9185,24498165,"/fast/home/franz.srambical/jafar/utils/nn.py",9203,0,"",python,selection_keyboard +9186,24499255,"/fast/home/franz.srambical/jafar/utils/nn.py",9203,0," ",python,content +9187,24499255,"/fast/home/franz.srambical/jafar/utils/nn.py",9204,0,"",python,selection_keyboard +9188,24500043,"/fast/home/franz.srambical/jafar/utils/nn.py",9204,0,"6",python,content +9189,24500043,"/fast/home/franz.srambical/jafar/utils/nn.py",9205,0,"",python,selection_keyboard +9190,24500184,"/fast/home/franz.srambical/jafar/utils/nn.py",9205,0,"1",python,content +9191,24500185,"/fast/home/franz.srambical/jafar/utils/nn.py",9206,0,"",python,selection_keyboard +9192,24500592,"/fast/home/franz.srambical/jafar/utils/nn.py",9206,0,",",python,content +9193,24500592,"/fast/home/franz.srambical/jafar/utils/nn.py",9207,0,"",python,selection_keyboard +9194,24500651,"/fast/home/franz.srambical/jafar/utils/nn.py",9207,0,",",python,content +9195,24500651,"/fast/home/franz.srambical/jafar/utils/nn.py",9208,0,"",python,selection_keyboard +9196,24501382,"/fast/home/franz.srambical/jafar/utils/nn.py",9207,1,"",python,content +9197,24501571,"/fast/home/franz.srambical/jafar/utils/nn.py",9207,0," ",python,content +9198,24501572,"/fast/home/franz.srambical/jafar/utils/nn.py",9208,0,"",python,selection_keyboard +9199,24501655,"/fast/home/franz.srambical/jafar/utils/nn.py",9208,0,"8",python,content +9200,24501655,"/fast/home/franz.srambical/jafar/utils/nn.py",9209,0,"",python,selection_keyboard +9201,24501812,"/fast/home/franz.srambical/jafar/utils/nn.py",9209,0," ",python,content +9202,24501813,"/fast/home/franz.srambical/jafar/utils/nn.py",9210,0,"",python,selection_keyboard +9203,24502123,"/fast/home/franz.srambical/jafar/utils/nn.py",9210,0,",",python,content +9204,24502124,"/fast/home/franz.srambical/jafar/utils/nn.py",9211,0,"",python,selection_keyboard +9205,24502480,"/fast/home/franz.srambical/jafar/utils/nn.py",9210,1,"",python,content +9206,24502602,"/fast/home/franz.srambical/jafar/utils/nn.py",9209,1,"",python,content +9207,24502777,"/fast/home/franz.srambical/jafar/utils/nn.py",9209,0,",",python,content +9208,24502778,"/fast/home/franz.srambical/jafar/utils/nn.py",9210,0,"",python,selection_keyboard +9209,24502920,"/fast/home/franz.srambical/jafar/utils/nn.py",9210,0," ",python,content +9210,24502920,"/fast/home/franz.srambical/jafar/utils/nn.py",9211,0,"",python,selection_keyboard +9211,24503240,"/fast/home/franz.srambical/jafar/utils/nn.py",9211,0,"6",python,content +9212,24503240,"/fast/home/franz.srambical/jafar/utils/nn.py",9212,0,"",python,selection_keyboard +9213,24503310,"/fast/home/franz.srambical/jafar/utils/nn.py",9212,0,"4",python,content +9214,24503311,"/fast/home/franz.srambical/jafar/utils/nn.py",9213,0,"",python,selection_keyboard +9215,24503523,"/fast/home/franz.srambical/jafar/utils/nn.py",9212,0,"",python,selection_command +9216,24503620,"/fast/home/franz.srambical/jafar/utils/nn.py",9165,0,"",python,selection_command 
+9217,24504944,"/fast/home/franz.srambical/jafar/utils/nn.py",9164,0,"\n ",python,content +9218,24505089,"/fast/home/franz.srambical/jafar/utils/nn.py",9173,0,"#",python,content +9219,24505089,"/fast/home/franz.srambical/jafar/utils/nn.py",9174,0,"",python,selection_keyboard +9220,24505160,"/fast/home/franz.srambical/jafar/utils/nn.py",9174,0," ",python,content +9221,24505160,"/fast/home/franz.srambical/jafar/utils/nn.py",9175,0,"",python,selection_keyboard +9222,24505494,"/fast/home/franz.srambical/jafar/utils/nn.py",9175,0,"I",python,content +9223,24505495,"/fast/home/franz.srambical/jafar/utils/nn.py",9176,0,"",python,selection_keyboard +9224,24505787,"/fast/home/franz.srambical/jafar/utils/nn.py",9175,1,"",python,content +9225,24505822,"/fast/home/franz.srambical/jafar/utils/nn.py",9175,0,"F",python,content +9226,24505823,"/fast/home/franz.srambical/jafar/utils/nn.py",9176,0,"",python,selection_keyboard +9227,24505956,"/fast/home/franz.srambical/jafar/utils/nn.py",9176,0,"I",python,content +9228,24505957,"/fast/home/franz.srambical/jafar/utils/nn.py",9177,0,"",python,selection_keyboard +9229,24506216,"/fast/home/franz.srambical/jafar/utils/nn.py",9177,0,"X",python,content +9230,24506216,"/fast/home/franz.srambical/jafar/utils/nn.py",9178,0,"",python,selection_keyboard +9231,24506302,"/fast/home/franz.srambical/jafar/utils/nn.py",9178,0,"M",python,content +9232,24506302,"/fast/home/franz.srambical/jafar/utils/nn.py",9179,0,"",python,selection_keyboard +9233,24506403,"/fast/home/franz.srambical/jafar/utils/nn.py",9179,0,"E",python,content +9234,24506404,"/fast/home/franz.srambical/jafar/utils/nn.py",9180,0,"",python,selection_keyboard +9235,24506471,"/fast/home/franz.srambical/jafar/utils/nn.py",9180,0,":",python,content +9236,24506472,"/fast/home/franz.srambical/jafar/utils/nn.py",9181,0,"",python,selection_keyboard +9237,24506638,"/fast/home/franz.srambical/jafar/utils/nn.py",9181,0," ",python,content +9238,24506638,"/fast/home/franz.srambical/jafar/utils/nn.py",9182,0,"",python,selection_keyboard +9239,24507517,"/fast/home/franz.srambical/jafar/utils/nn.py",9182,0,"c",python,content +9240,24507517,"/fast/home/franz.srambical/jafar/utils/nn.py",9183,0,"",python,selection_keyboard +9241,24507828,"/fast/home/franz.srambical/jafar/utils/nn.py",9182,1,"",python,content +9242,24508227,"/fast/home/franz.srambical/jafar/utils/nn.py",9182,0,"t",python,content +9243,24508227,"/fast/home/franz.srambical/jafar/utils/nn.py",9183,0,"",python,selection_keyboard +9244,24508257,"/fast/home/franz.srambical/jafar/utils/nn.py",9183,0,"h",python,content +9245,24508257,"/fast/home/franz.srambical/jafar/utils/nn.py",9184,0,"",python,selection_keyboard +9246,24508321,"/fast/home/franz.srambical/jafar/utils/nn.py",9184,0,"i",python,content +9247,24508322,"/fast/home/franz.srambical/jafar/utils/nn.py",9185,0,"",python,selection_keyboard +9248,24508462,"/fast/home/franz.srambical/jafar/utils/nn.py",9185,0,"n",python,content +9249,24508463,"/fast/home/franz.srambical/jafar/utils/nn.py",9186,0,"",python,selection_keyboard +9250,24508538,"/fast/home/franz.srambical/jafar/utils/nn.py",9186,0,"k",python,content +9251,24508538,"/fast/home/franz.srambical/jafar/utils/nn.py",9187,0,"",python,selection_keyboard +9252,24508707,"/fast/home/franz.srambical/jafar/utils/nn.py",9187,0," ",python,content +9253,24508707,"/fast/home/franz.srambical/jafar/utils/nn.py",9188,0,"",python,selection_keyboard +9254,24508878,"/fast/home/franz.srambical/jafar/utils/nn.py",9188,0,"a",python,content 
+9255,24508878,"/fast/home/franz.srambical/jafar/utils/nn.py",9189,0,"",python,selection_keyboard +9256,24508940,"/fast/home/franz.srambical/jafar/utils/nn.py",9189,0,"b",python,content +9257,24508941,"/fast/home/franz.srambical/jafar/utils/nn.py",9190,0,"",python,selection_keyboard +9258,24509041,"/fast/home/franz.srambical/jafar/utils/nn.py",9190,0,"o",python,content +9259,24509041,"/fast/home/franz.srambical/jafar/utils/nn.py",9191,0,"",python,selection_keyboard +9260,24509106,"/fast/home/franz.srambical/jafar/utils/nn.py",9191,0,"u",python,content +9261,24509106,"/fast/home/franz.srambical/jafar/utils/nn.py",9192,0,"",python,selection_keyboard +9262,24509156,"/fast/home/franz.srambical/jafar/utils/nn.py",9192,0,"t",python,content +9263,24509157,"/fast/home/franz.srambical/jafar/utils/nn.py",9193,0,"",python,selection_keyboard +9264,24509208,"/fast/home/franz.srambical/jafar/utils/nn.py",9193,0," ",python,content +9265,24509209,"/fast/home/franz.srambical/jafar/utils/nn.py",9194,0,"",python,selection_keyboard +9266,24509326,"/fast/home/franz.srambical/jafar/utils/nn.py",9194,0,"w",python,content +9267,24509326,"/fast/home/franz.srambical/jafar/utils/nn.py",9195,0,"",python,selection_keyboard +9268,24509409,"/fast/home/franz.srambical/jafar/utils/nn.py",9195,0,"h",python,content +9269,24509409,"/fast/home/franz.srambical/jafar/utils/nn.py",9196,0,"",python,selection_keyboard +9270,24509491,"/fast/home/franz.srambical/jafar/utils/nn.py",9196,0,"e",python,content +9271,24509491,"/fast/home/franz.srambical/jafar/utils/nn.py",9197,0,"",python,selection_keyboard +9272,24509555,"/fast/home/franz.srambical/jafar/utils/nn.py",9197,0,"t",python,content +9273,24509556,"/fast/home/franz.srambical/jafar/utils/nn.py",9198,0,"",python,selection_keyboard +9274,24509712,"/fast/home/franz.srambical/jafar/utils/nn.py",9198,0,"h",python,content +9275,24509712,"/fast/home/franz.srambical/jafar/utils/nn.py",9199,0,"",python,selection_keyboard +9276,24509740,"/fast/home/franz.srambical/jafar/utils/nn.py",9199,0,"e",python,content +9277,24509740,"/fast/home/franz.srambical/jafar/utils/nn.py",9200,0,"",python,selection_keyboard +9278,24509807,"/fast/home/franz.srambical/jafar/utils/nn.py",9200,0,"r",python,content +9279,24509808,"/fast/home/franz.srambical/jafar/utils/nn.py",9201,0,"",python,selection_keyboard +9280,24509895,"/fast/home/franz.srambical/jafar/utils/nn.py",9201,0," ",python,content +9281,24509895,"/fast/home/franz.srambical/jafar/utils/nn.py",9202,0,"",python,selection_keyboard +9282,24510035,"/fast/home/franz.srambical/jafar/utils/nn.py",9202,0,"t",python,content +9283,24510036,"/fast/home/franz.srambical/jafar/utils/nn.py",9203,0,"",python,selection_keyboard +9284,24510143,"/fast/home/franz.srambical/jafar/utils/nn.py",9203,0,"h",python,content +9285,24510144,"/fast/home/franz.srambical/jafar/utils/nn.py",9204,0,"",python,selection_keyboard +9286,24510180,"/fast/home/franz.srambical/jafar/utils/nn.py",9204,0,"i",python,content +9287,24510180,"/fast/home/franz.srambical/jafar/utils/nn.py",9205,0,"",python,selection_keyboard +9288,24510221,"/fast/home/franz.srambical/jafar/utils/nn.py",9205,0,"s",python,content +9289,24510222,"/fast/home/franz.srambical/jafar/utils/nn.py",9206,0,"",python,selection_keyboard +9290,24510283,"/fast/home/franz.srambical/jafar/utils/nn.py",9206,0," ",python,content +9291,24510284,"/fast/home/franz.srambical/jafar/utils/nn.py",9207,0,"",python,selection_keyboard +9292,24510450,"/fast/home/franz.srambical/jafar/utils/nn.py",9207,0,"m",python,content 
+9293,24510450,"/fast/home/franz.srambical/jafar/utils/nn.py",9208,0,"",python,selection_keyboard +9294,24510543,"/fast/home/franz.srambical/jafar/utils/nn.py",9208,0,"a",python,content +9295,24510543,"/fast/home/franz.srambical/jafar/utils/nn.py",9209,0,"",python,selection_keyboard +9296,24510644,"/fast/home/franz.srambical/jafar/utils/nn.py",9209,0,"k",python,content +9297,24510644,"/fast/home/franz.srambical/jafar/utils/nn.py",9210,0,"",python,selection_keyboard +9298,24510709,"/fast/home/franz.srambical/jafar/utils/nn.py",9210,0,"e",python,content +9299,24510710,"/fast/home/franz.srambical/jafar/utils/nn.py",9211,0,"",python,selection_keyboard +9300,24510858,"/fast/home/franz.srambical/jafar/utils/nn.py",9211,0," ",python,content +9301,24510859,"/fast/home/franz.srambical/jafar/utils/nn.py",9212,0,"",python,selection_keyboard +9302,24510964,"/fast/home/franz.srambical/jafar/utils/nn.py",9212,0,"s",python,content +9303,24510964,"/fast/home/franz.srambical/jafar/utils/nn.py",9213,0,"",python,selection_keyboard +9304,24511157,"/fast/home/franz.srambical/jafar/utils/nn.py",9212,1,"",python,content +9305,24511271,"/fast/home/franz.srambical/jafar/utils/nn.py",9211,1,"",python,content +9306,24511295,"/fast/home/franz.srambical/jafar/utils/nn.py",9211,0,"s",python,content +9307,24511295,"/fast/home/franz.srambical/jafar/utils/nn.py",9212,0,"",python,selection_keyboard +9308,24511359,"/fast/home/franz.srambical/jafar/utils/nn.py",9212,0," ",python,content +9309,24511359,"/fast/home/franz.srambical/jafar/utils/nn.py",9213,0,"",python,selection_keyboard +9310,24511478,"/fast/home/franz.srambical/jafar/utils/nn.py",9213,0,"s",python,content +9311,24511479,"/fast/home/franz.srambical/jafar/utils/nn.py",9214,0,"",python,selection_keyboard +9312,24511574,"/fast/home/franz.srambical/jafar/utils/nn.py",9214,0,"e",python,content +9313,24511575,"/fast/home/franz.srambical/jafar/utils/nn.py",9215,0,"",python,selection_keyboard +9314,24511764,"/fast/home/franz.srambical/jafar/utils/nn.py",9215,0,"n",python,content +9315,24511764,"/fast/home/franz.srambical/jafar/utils/nn.py",9216,0,"",python,selection_keyboard +9316,24511810,"/fast/home/franz.srambical/jafar/utils/nn.py",9216,0,"s",python,content +9317,24511811,"/fast/home/franz.srambical/jafar/utils/nn.py",9217,0,"",python,selection_keyboard +9318,24512098,"/fast/home/franz.srambical/jafar/utils/nn.py",9217,0,"e",python,content +9319,24512098,"/fast/home/franz.srambical/jafar/utils/nn.py",9218,0,"",python,selection_keyboard +9320,24512339,"/fast/home/franz.srambical/jafar/utils/nn.py",9217,0,"",python,selection_command +9321,24512410,"/fast/home/franz.srambical/jafar/utils/nn.py",9165,0,"",python,selection_command +9322,24875659,"/fast/home/franz.srambical/jafar/utils/nn.py",9116,0,"",python,selection_command +9323,24875757,"/fast/home/franz.srambical/jafar/utils/nn.py",9082,0,"",python,selection_command +9324,24875922,"/fast/home/franz.srambical/jafar/utils/nn.py",9115,0,"\n ",python,content +9325,24875987,"/fast/home/franz.srambical/jafar/utils/nn.py",9116,8,"",python,content +9326,24877095,"/fast/home/franz.srambical/jafar/utils/nn.py",9117,0,"",python,selection_command +9327,24877226,"/fast/home/franz.srambical/jafar/utils/nn.py",9166,0,"",python,selection_command +9328,24989819,"/fast/home/franz.srambical/jafar/utils/nn.py",9117,0,"",python,selection_command +9329,24990063,"/fast/home/franz.srambical/jafar/utils/nn.py",9116,0,"",python,selection_command +9330,24990086,"/fast/home/franz.srambical/jafar/utils/nn.py",9082,0,"",python,selection_command 
+9331,24990205,"/fast/home/franz.srambical/jafar/utils/nn.py",9031,0,"",python,selection_command +9332,24990363,"/fast/home/franz.srambical/jafar/utils/nn.py",8981,0,"",python,selection_command +9333,24990731,"/fast/home/franz.srambical/jafar/utils/nn.py",9031,0,"",python,selection_command +9334,24990830,"/fast/home/franz.srambical/jafar/utils/nn.py",9039,0,"",python,selection_command +9335,24991001,"/fast/home/franz.srambical/jafar/utils/nn.py",9041,0,"",python,selection_command +9336,24991124,"/fast/home/franz.srambical/jafar/utils/nn.py",9047,0,"",python,selection_command +9337,24991279,"/fast/home/franz.srambical/jafar/utils/nn.py",9051,0,"",python,selection_command +9338,24991430,"/fast/home/franz.srambical/jafar/utils/nn.py",9053,0,"",python,selection_command +9339,24991581,"/fast/home/franz.srambical/jafar/utils/nn.py",9056,0,"",python,selection_command +9340,24991778,"/fast/home/franz.srambical/jafar/utils/nn.py",9057,0,"",python,selection_command +9341,24991861,"/fast/home/franz.srambical/jafar/utils/nn.py",9063,0,"",python,selection_command +9342,24992012,"/fast/home/franz.srambical/jafar/utils/nn.py",9064,0,"",python,selection_command +9343,24992706,"/fast/home/franz.srambical/jafar/utils/nn.py",6944,0,"",python,selection_command +9344,24993082,"/fast/home/franz.srambical/jafar/utils/nn.py",5050,0,"",python,selection_command +9345,24993351,"/fast/home/franz.srambical/jafar/utils/nn.py",3498,0,"",python,selection_command +9346,25017566,"/fast/home/franz.srambical/jafar/utils/nn.py",5050,0,"",python,selection_command +9347,25017991,"/fast/home/franz.srambical/jafar/utils/nn.py",6944,0,"",python,selection_command +9348,25019421,"/fast/home/franz.srambical/jafar/utils/nn.py",9039,0,"",python,selection_command +9349,25035045,"/fast/home/franz.srambical/jafar/utils/nn.py",6944,0,"",python,selection_command +9350,25035195,"/fast/home/franz.srambical/jafar/utils/nn.py",5050,0,"",python,selection_command +9351,25035673,"/fast/home/franz.srambical/jafar/utils/nn.py",3498,0,"",python,selection_command +9352,25036943,"/fast/home/franz.srambical/jafar/utils/nn.py",3533,0,"",python,selection_command +9353,25037187,"/fast/home/franz.srambical/jafar/utils/nn.py",3575,0,"",python,selection_command +9354,25037210,"/fast/home/franz.srambical/jafar/utils/nn.py",3605,0,"",python,selection_command +9355,25037241,"/fast/home/franz.srambical/jafar/utils/nn.py",3624,0,"",python,selection_command +9356,25037273,"/fast/home/franz.srambical/jafar/utils/nn.py",3626,0,"",python,selection_command +9357,25037307,"/fast/home/franz.srambical/jafar/utils/nn.py",3639,0,"",python,selection_command +9358,25037341,"/fast/home/franz.srambical/jafar/utils/nn.py",3654,0,"",python,selection_command +9359,25037374,"/fast/home/franz.srambical/jafar/utils/nn.py",3705,0,"",python,selection_command +9360,25037407,"/fast/home/franz.srambical/jafar/utils/nn.py",3741,0,"",python,selection_command +9361,25037441,"/fast/home/franz.srambical/jafar/utils/nn.py",3777,0,"",python,selection_command +9362,25037474,"/fast/home/franz.srambical/jafar/utils/nn.py",3810,0,"",python,selection_command +9363,25037506,"/fast/home/franz.srambical/jafar/utils/nn.py",3845,0,"",python,selection_command +9364,25037540,"/fast/home/franz.srambical/jafar/utils/nn.py",3883,0,"",python,selection_command +9365,25037710,"/fast/home/franz.srambical/jafar/utils/nn.py",3889,0,"",python,selection_command +9366,25037851,"/fast/home/franz.srambical/jafar/utils/nn.py",3902,0,"",python,selection_command 
+9367,25038016,"/fast/home/franz.srambical/jafar/utils/nn.py",3939,0,"",python,selection_command +9368,25038191,"/fast/home/franz.srambical/jafar/utils/nn.py",3902,0,"",python,selection_command +9369,25038248,"/fast/home/franz.srambical/jafar/utils/nn.py",3889,0,"",python,selection_command +9370,25038417,"/fast/home/franz.srambical/jafar/utils/nn.py",3883,0,"",python,selection_command +9371,25038538,"/fast/home/franz.srambical/jafar/utils/nn.py",3845,0,"",python,selection_command +9372,25038715,"/fast/home/franz.srambical/jafar/utils/nn.py",3810,0,"",python,selection_command +9373,25038974,"/fast/home/franz.srambical/jafar/utils/nn.py",3845,0,"",python,selection_command +9374,25039242,"/fast/home/franz.srambical/jafar/utils/nn.py",3832,0,"\n ",python,content +9375,25039345,"/fast/home/franz.srambical/jafar/utils/nn.py",3841,0,"b",python,content +9376,25039345,"/fast/home/franz.srambical/jafar/utils/nn.py",3842,0,"",python,selection_keyboard +9377,25039480,"/fast/home/franz.srambical/jafar/utils/nn.py",3842,0,"r",python,content +9378,25039481,"/fast/home/franz.srambical/jafar/utils/nn.py",3843,0,"",python,selection_keyboard +9379,25039500,"/fast/home/franz.srambical/jafar/utils/nn.py",3843,0,"e",python,content +9380,25039500,"/fast/home/franz.srambical/jafar/utils/nn.py",3844,0,"",python,selection_keyboard +9381,25039560,"/fast/home/franz.srambical/jafar/utils/nn.py",3844,0,"a",python,content +9382,25039561,"/fast/home/franz.srambical/jafar/utils/nn.py",3845,0,"",python,selection_keyboard +9383,25039565,"/fast/home/franz.srambical/jafar/utils/nn.py",3845,0,"k",python,content +9384,25039565,"/fast/home/franz.srambical/jafar/utils/nn.py",3846,0,"",python,selection_keyboard +9385,25039765,"/fast/home/franz.srambical/jafar/utils/nn.py",3846,0,"p",python,content +9386,25039765,"/fast/home/franz.srambical/jafar/utils/nn.py",3847,0,"",python,selection_keyboard +9387,25039766,"/fast/home/franz.srambical/jafar/utils/nn.py",3847,0,"o",python,content +9388,25039766,"/fast/home/franz.srambical/jafar/utils/nn.py",3848,0,"",python,selection_keyboard +9389,25039844,"/fast/home/franz.srambical/jafar/utils/nn.py",3848,0,"i",python,content +9390,25039845,"/fast/home/franz.srambical/jafar/utils/nn.py",3849,0,"",python,selection_keyboard +9391,25039862,"/fast/home/franz.srambical/jafar/utils/nn.py",3849,0,"n",python,content +9392,25039862,"/fast/home/franz.srambical/jafar/utils/nn.py",3850,0,"",python,selection_keyboard +9393,25039929,"/fast/home/franz.srambical/jafar/utils/nn.py",3850,0,"t",python,content +9394,25039929,"/fast/home/franz.srambical/jafar/utils/nn.py",3851,0,"",python,selection_keyboard +9395,25040199,"/fast/home/franz.srambical/jafar/utils/nn.py",3851,0,"()",python,content +9396,25040199,"/fast/home/franz.srambical/jafar/utils/nn.py",3852,0,"",python,selection_keyboard +9397,25040200,"/fast/home/franz.srambical/jafar/utils/nn.py",3852,1,")",python,content +9398,25040200,"/fast/home/franz.srambical/jafar/utils/nn.py",3853,0,"",python,selection_keyboard +9399,25040417,"/fast/home/franz.srambical/jafar/utils/nn.py",3852,0,"",python,selection_command +9400,25043031,"TERMINAL",0,0,"^L^P",,terminal_output +9401,25043743,"TERMINAL",0,0,"q",,terminal_output +9402,25043831,"TERMINAL",0,0,"u",,terminal_output +9403,25043960,"TERMINAL",0,0,"i",,terminal_output +9404,25044201,"TERMINAL",0,0,"         ",,terminal_output +9405,25044664,"TERMINAL",0,0,"q",,terminal_output +9406,25044817,"TERMINAL",0,0,"uit",,terminal_output +9407,25045103,"TERMINAL",0,0,"()",,terminal_output 
+9408,25045241,"TERMINAL",0,0,"\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 193, in \r\n action_batch = jasmine.vq_encode(batch, training=False)\r\n File ""/fast/home/franz.srambical/jafar/jasmine.py"", line 391, in vq_encode\r\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\r\n File ""/fast/home/franz.srambical/jafar/models/lam.py"", line 133, in vq_encode\r\n z = self.encoder(padded_patches) # (B, T, N, E)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 230, in __call__\r\n # x.shape (1, 1, 921, 512)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py"", line 73, in resolve_kwargs_wrapper\r\n return f(*args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py"", line 2051, in update_context_manager_wrapper\r\n return f(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 153, in split_inputs_wrapper\r\n pure_args_out, pure_out = f(*pure_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py"", line 198, in merge_inputs_wrapper\r\n out = f(*args)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 125, in __call__\r\n breakpoint()\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/nn/attention.py"", line 605, in __call__\r\n x = self.attention_fn(\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 321, in attention_fn\r\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\r\n File ""/fast/home/franz.srambical/jafar/utils/nn.py"", line 321, in attention_fn\r\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 90, in trace_dispatch\r\n return self.dispatch_line(frame)\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/bdb.py"", line 115, in dispatch_line\r\n if self.quitting: raise BdbQuit\r\nbdb.BdbQuit\r\n",,terminal_output +9409,25045430,"TERMINAL",0,0,"^L",,terminal_output +9410,25045592,"TERMINAL",0,0,"^P",,terminal_output +9411,25045761,"TERMINAL",0,0,"(Pdb) ",,terminal_output +9412,25046057,"TERMINAL",0,0,"srun: error: hai003: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ [franz.srambical@hai003.haicore.berlin:~/jafar] $ bash experiments/sample.sh ",,terminal_output +9413,25046436,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +9414,25058117,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1256: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +9415,25067610,"TERMINAL",0,0,"> /fast/home/franz.srambical/jafar/utils/nn.py(126)__call__()\r\n-> z = self.spatial_attention(z)\r\n",,terminal_output +9416,25071439,"TERMINAL",0,0,"z",,terminal_output +9417,25071567,"TERMINAL",0,0,".",,terminal_output +9418,25071745,"TERMINAL",0,0,"s",,terminal_output +9419,25071934,"TERMINAL",0,0,"hap",,terminal_output +9420,25072057,"TERMINAL",0,0,"e",,terminal_output +9421,25072148,"TERMINAL",0,0,"\r\n(Pdb) (1, 2, 61, 512)\r\n",,terminal_output +9422,25078285,"TERMINAL",0,0,"u",,terminal_output +9423,25078797,"TERMINAL",0,0,"p",,terminal_output +9424,25079085,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py(198)merge_inputs_wrapper()\r\n-> out = f(*args)\r\n",,terminal_output +9425,25080817,"TERMINAL",0,0,"up",,terminal_output +9426,25081183,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/linear_util.py(396)_get_result_paths_thunk()\r\n-> ans = _fun(*args, **kwargs)\r\n",,terminal_output +9427,25083341,"TERMINAL",0,0,"u",,terminal_output +9428,25083402,"TERMINAL",0,0,"p",,terminal_output +9429,25085289,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/api_util.py(73)flatten_fun()\r\n-> ans = f(*py_args, **py_kwargs)\r\n",,terminal_output +9430,25086293,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/linear_util.py(211)call_wrapped()\r\n-> return self.f_transformed(*args, **kwargs)\r\n",,terminal_output +9431,25086951,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/partial_eval.py(2292)trace_to_jaxpr_dynamic()\r\n-> ans = fun.call_wrapped(*in_tracers)\r\n",,terminal_output +9432,25088064,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py(354)wrapper()\r\n-> return func(*args, **kwargs)\r\n",,terminal_output +9433,25088712,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/ad_checkpoint.py(425)_trace_to_jaxpr()\r\n-> jaxpr, _, consts, () = pe.trace_to_jaxpr_dynamic(flat_fun, in_avals)\r\n",,terminal_output +9434,25089716,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/ad_checkpoint.py(332)fun_remat()\r\n-> jaxpr, consts, out_tree = _trace_to_jaxpr(fun_, in_tree, tuple(in_avals), debug)\r\n",,terminal_output +9435,25090423,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py(182)reraise_with_filtered_traceback()\r\n-> return fun(*args, **kwargs)\r\n",,terminal_output +9436,25090957,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/general.py(153)split_inputs_wrapper()\r\n-> pure_args_out, pure_out = f(*pure_args)\r\n",,terminal_output +9437,25091422,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py(2051)update_context_manager_wrapper()\r\n-> return f(*args, **kwargs)\r\n",,terminal_output +9438,25092718,"TERMINAL",0,0,"\r\n(Pdb) > 
/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/graph.py(2051)update_context_manager_wrapper()\r\n-> return f(*args, **kwargs)\r\n",,terminal_output +9439,25093350,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/nnx/transforms/transforms.py(73)resolve_kwargs_wrapper()\r\n-> return f(*args)\r\n",,terminal_output +9440,25093874,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/utils/nn.py(231)__call__()\r\n-> x = block(x)\r\n",,terminal_output +9441,25103306,"TERMINAL",0,0,"\r\n(Pdb) > /fast/home/franz.srambical/jafar/models/lam.py(133)vq_encode()\r\n-> z = self.encoder(padded_patches) # (B, T, N, E)\r\n",,terminal_output +9442,25127664,"/fast/home/franz.srambical/jafar/utils/nn.py",1946,0,"",python,selection_command +9443,25129194,"/fast/home/franz.srambical/jafar/utils/nn.py",1912,0,"",python,selection_command +9444,25129447,"/fast/home/franz.srambical/jafar/utils/nn.py",1874,0,"",python,selection_command +9445,25129467,"/fast/home/franz.srambical/jafar/utils/nn.py",1817,0,"",python,selection_command +9446,25129496,"/fast/home/franz.srambical/jafar/utils/nn.py",1803,0,"",python,selection_command +9447,25129528,"/fast/home/franz.srambical/jafar/utils/nn.py",1784,0,"",python,selection_command +9448,25129561,"/fast/home/franz.srambical/jafar/utils/nn.py",1754,0,"",python,selection_command +9449,25130007,"/fast/home/franz.srambical/jafar/utils/nn.py",1784,0,"",python,selection_command +9450,25130258,"/fast/home/franz.srambical/jafar/utils/nn.py",1803,0,"",python,selection_command +9451,25130287,"/fast/home/franz.srambical/jafar/utils/nn.py",1817,0,"",python,selection_command +9452,25130312,"/fast/home/franz.srambical/jafar/utils/nn.py",1874,0,"",python,selection_command +9453,25130346,"/fast/home/franz.srambical/jafar/utils/nn.py",1912,0,"",python,selection_command +9454,25130379,"/fast/home/franz.srambical/jafar/utils/nn.py",1946,0,"",python,selection_command +9455,25130414,"/fast/home/franz.srambical/jafar/utils/nn.py",1981,0,"",python,selection_command +9456,25130460,"/fast/home/franz.srambical/jafar/utils/nn.py",2020,0,"",python,selection_command +9457,25130499,"/fast/home/franz.srambical/jafar/utils/nn.py",2062,0,"",python,selection_command +9458,25130535,"/fast/home/franz.srambical/jafar/utils/nn.py",2092,0,"",python,selection_command +9459,25130553,"/fast/home/franz.srambical/jafar/utils/nn.py",2145,0,"",python,selection_command +9460,25130584,"/fast/home/franz.srambical/jafar/utils/nn.py",2187,0,"",python,selection_command +9461,25130619,"/fast/home/franz.srambical/jafar/utils/nn.py",2234,0,"",python,selection_command +9462,25130652,"/fast/home/franz.srambical/jafar/utils/nn.py",2249,0,"",python,selection_command +9463,25130684,"/fast/home/franz.srambical/jafar/utils/nn.py",2272,0,"",python,selection_command +9464,25131943,"/fast/home/franz.srambical/jafar/utils/nn.py",2278,0,"",python,selection_command +9465,25132096,"/fast/home/franz.srambical/jafar/utils/nn.py",2279,0,"",python,selection_command +9466,25132394,"/fast/home/franz.srambical/jafar/utils/nn.py",2283,0,"",python,selection_command +9467,25132590,"/fast/home/franz.srambical/jafar/utils/nn.py",2284,0,"",python,selection_command +9468,25133599,"/fast/home/franz.srambical/jafar/utils/nn.py",1545,0,"",python,selection_command +9469,25134036,"/fast/home/franz.srambical/jafar/utils/nn.py",1544,0,"",python,selection_command +9470,25134284,"/fast/home/franz.srambical/jafar/utils/nn.py",1540,0,"",python,selection_command 
+9471,25134306,"/fast/home/franz.srambical/jafar/utils/nn.py",1517,0,"",python,selection_command +9472,25134341,"/fast/home/franz.srambical/jafar/utils/nn.py",1515,0,"",python,selection_command +9473,25134365,"/fast/home/franz.srambical/jafar/utils/nn.py",1500,0,"",python,selection_command +9474,25134398,"/fast/home/franz.srambical/jafar/utils/nn.py",1499,0,"",python,selection_command +9475,25134432,"/fast/home/franz.srambical/jafar/utils/nn.py",1495,0,"",python,selection_command +9476,25134465,"/fast/home/franz.srambical/jafar/utils/nn.py",1467,0,"",python,selection_command +9477,25134498,"/fast/home/franz.srambical/jafar/utils/nn.py",1465,0,"",python,selection_command +9478,25134530,"/fast/home/franz.srambical/jafar/utils/nn.py",1445,0,"",python,selection_command +9479,25134603,"/fast/home/franz.srambical/jafar/utils/nn.py",1444,0,"",python,selection_command +9480,25134668,"/fast/home/franz.srambical/jafar/utils/nn.py",1440,0,"",python,selection_command +9481,25134668,"/fast/home/franz.srambical/jafar/utils/nn.py",1426,0,"",python,selection_command +9482,25134669,"/fast/home/franz.srambical/jafar/utils/nn.py",1424,0,"",python,selection_command +9483,25134706,"/fast/home/franz.srambical/jafar/utils/nn.py",1418,0,"",python,selection_command +9484,25134735,"/fast/home/franz.srambical/jafar/utils/nn.py",1417,0,"",python,selection_command +9485,25134928,"/fast/home/franz.srambical/jafar/utils/nn.py",1378,0,"",python,selection_command +9486,25135184,"/fast/home/franz.srambical/jafar/utils/nn.py",1347,0,"",python,selection_command +9487,25135209,"/fast/home/franz.srambical/jafar/utils/nn.py",1312,0,"",python,selection_command +9488,25135238,"/fast/home/franz.srambical/jafar/utils/nn.py",1281,0,"",python,selection_command +9489,25135273,"/fast/home/franz.srambical/jafar/utils/nn.py",1258,0,"",python,selection_command +9490,25135304,"/fast/home/franz.srambical/jafar/utils/nn.py",1244,0,"",python,selection_command +9491,25135342,"/fast/home/franz.srambical/jafar/utils/nn.py",1227,0,"",python,selection_command +9492,25135371,"/fast/home/franz.srambical/jafar/utils/nn.py",1205,0,"",python,selection_command +9493,25135404,"/fast/home/franz.srambical/jafar/utils/nn.py",1175,0,"",python,selection_command +9494,25135439,"/fast/home/franz.srambical/jafar/utils/nn.py",1140,0,"",python,selection_command +9495,25135470,"/fast/home/franz.srambical/jafar/utils/nn.py",1114,0,"",python,selection_command +9496,25135503,"/fast/home/franz.srambical/jafar/utils/nn.py",1082,0,"",python,selection_command +9497,25135535,"/fast/home/franz.srambical/jafar/utils/nn.py",1058,0,"",python,selection_command +9498,25135572,"/fast/home/franz.srambical/jafar/utils/nn.py",1034,0,"",python,selection_command +9499,25135604,"/fast/home/franz.srambical/jafar/utils/nn.py",1012,0,"",python,selection_command +9500,25135638,"/fast/home/franz.srambical/jafar/utils/nn.py",994,0,"",python,selection_command +9501,25135671,"/fast/home/franz.srambical/jafar/utils/nn.py",980,0,"",python,selection_command +9502,25135704,"/fast/home/franz.srambical/jafar/utils/nn.py",962,0,"",python,selection_command +9503,25135883,"/fast/home/franz.srambical/jafar/utils/nn.py",935,0,"",python,selection_command +9504,25139776,"/fast/home/franz.srambical/jafar/utils/nn.py",11993,0,"",python,selection_command +9505,25140133,"/fast/home/franz.srambical/jafar/utils/nn.py",11969,0,"",python,selection_command +9506,25140385,"/fast/home/franz.srambical/jafar/utils/nn.py",11968,0,"",python,selection_command 
+9507,25140617,"/fast/home/franz.srambical/jafar/utils/nn.py",11459,0,"",python,selection_command +9508,25140968,"/fast/home/franz.srambical/jafar/utils/nn.py",9912,0,"",python,selection_command +9509,25141791,"/fast/home/franz.srambical/jafar/utils/nn.py",9945,0,"",python,selection_command +9510,25142041,"/fast/home/franz.srambical/jafar/utils/nn.py",9954,0,"",python,selection_command +9511,25142064,"/fast/home/franz.srambical/jafar/utils/nn.py",10039,0,"",python,selection_command +9512,25142094,"/fast/home/franz.srambical/jafar/utils/nn.py",10115,0,"",python,selection_command +9513,25142127,"/fast/home/franz.srambical/jafar/utils/nn.py",10183,0,"",python,selection_command +9514,25142163,"/fast/home/franz.srambical/jafar/utils/nn.py",10192,0,"",python,selection_command +9515,25142196,"/fast/home/franz.srambical/jafar/utils/nn.py",10213,0,"",python,selection_command +9516,25142231,"/fast/home/franz.srambical/jafar/utils/nn.py",10306,0,"",python,selection_command +9517,25142263,"/fast/home/franz.srambical/jafar/utils/nn.py",10335,0,"",python,selection_command +9518,25142298,"/fast/home/franz.srambical/jafar/utils/nn.py",10378,0,"",python,selection_command +9519,25143877,"/fast/home/franz.srambical/jafar/utils/nn.py",10335,0,"",python,selection_command +9520,25144208,"/fast/home/franz.srambical/jafar/utils/nn.py",7998,0,"",python,selection_command +9521,25144827,"/fast/home/franz.srambical/jafar/utils/nn.py",10339,0,"",python,selection_command +9522,25150281,"/fast/home/franz.srambical/jafar/utils/nn.py",11993,0,"",python,selection_command +9523,25151499,"/fast/home/franz.srambical/jafar/utils/nn.py",9397,0,"",python,selection_command +9524,25152729,"/fast/home/franz.srambical/jafar/utils/nn.py",8351,0,"",python,selection_keyboard +9525,25153175,"/fast/home/franz.srambical/jafar/utils/nn.py",8352,0,"",python,selection_command +9526,25153433,"/fast/home/franz.srambical/jafar/utils/nn.py",8353,0,"",python,selection_command +9527,25153453,"/fast/home/franz.srambical/jafar/utils/nn.py",8441,0,"",python,selection_command +9528,25153482,"/fast/home/franz.srambical/jafar/utils/nn.py",8449,0,"",python,selection_command +9529,25153514,"/fast/home/franz.srambical/jafar/utils/nn.py",8520,0,"",python,selection_command +9530,25153547,"/fast/home/franz.srambical/jafar/utils/nn.py",8521,0,"",python,selection_command +9531,25153580,"/fast/home/franz.srambical/jafar/utils/nn.py",8617,0,"",python,selection_command +9532,25153617,"/fast/home/franz.srambical/jafar/utils/nn.py",8696,0,"",python,selection_command +9533,25153648,"/fast/home/franz.srambical/jafar/utils/nn.py",8697,0,"",python,selection_command +9534,25153682,"/fast/home/franz.srambical/jafar/utils/nn.py",8782,0,"",python,selection_command +9535,25153720,"/fast/home/franz.srambical/jafar/utils/nn.py",8880,0,"",python,selection_command +9536,25153754,"/fast/home/franz.srambical/jafar/utils/nn.py",8920,0,"",python,selection_command +9537,25153787,"/fast/home/franz.srambical/jafar/utils/nn.py",8928,0,"",python,selection_command +9538,25153820,"/fast/home/franz.srambical/jafar/utils/nn.py",8929,0,"",python,selection_command +9539,25153853,"/fast/home/franz.srambical/jafar/utils/nn.py",9002,0,"",python,selection_command +9540,25153886,"/fast/home/franz.srambical/jafar/utils/nn.py",9052,0,"",python,selection_command +9541,25153919,"/fast/home/franz.srambical/jafar/utils/nn.py",9103,0,"",python,selection_command +9542,25153952,"/fast/home/franz.srambical/jafar/utils/nn.py",9137,0,"",python,selection_command 
+9543,25153987,"/fast/home/franz.srambical/jafar/utils/nn.py",9138,0,"",python,selection_command +9544,25154019,"/fast/home/franz.srambical/jafar/utils/nn.py",9187,0,"",python,selection_command +9545,25155097,"/fast/home/franz.srambical/jafar/utils/nn.py",9240,0,"\n # FIXME: think about whether this makes sense",python,content +9546,25155109,"/fast/home/franz.srambical/jafar/utils/nn.py",9249,0,"",python,selection_command +9547,25155238,"/fast/home/franz.srambical/jafar/utils/nn.py",9251,0,"",python,selection_command +9548,25155405,"/fast/home/franz.srambical/jafar/utils/nn.py",9256,0,"",python,selection_command +9549,25155522,"/fast/home/franz.srambical/jafar/utils/nn.py",9258,0,"",python,selection_command +9550,25155690,"/fast/home/franz.srambical/jafar/utils/nn.py",9264,0,"",python,selection_command +9551,25156195,"/fast/home/franz.srambical/jafar/utils/nn.py",9258,0,"",python,selection_command +9552,25156325,"/fast/home/franz.srambical/jafar/utils/nn.py",9258,36,"",python,content +9553,25156660,"/fast/home/franz.srambical/jafar/utils/nn.py",9258,0,"i",python,content +9554,25156660,"/fast/home/franz.srambical/jafar/utils/nn.py",9259,0,"",python,selection_keyboard +9555,25156701,"/fast/home/franz.srambical/jafar/utils/nn.py",9259,0,"s",python,content +9556,25156701,"/fast/home/franz.srambical/jafar/utils/nn.py",9260,0,"",python,selection_keyboard +9557,25156792,"/fast/home/franz.srambical/jafar/utils/nn.py",9260,0," ",python,content +9558,25156793,"/fast/home/franz.srambical/jafar/utils/nn.py",9261,0,"",python,selection_keyboard +9559,25156877,"/fast/home/franz.srambical/jafar/utils/nn.py",9261,0,"t",python,content +9560,25156877,"/fast/home/franz.srambical/jafar/utils/nn.py",9262,0,"",python,selection_keyboard +9561,25156976,"/fast/home/franz.srambical/jafar/utils/nn.py",9262,0,"h",python,content +9562,25156977,"/fast/home/franz.srambical/jafar/utils/nn.py",9263,0,"",python,selection_keyboard +9563,25157040,"/fast/home/franz.srambical/jafar/utils/nn.py",9263,0,"i",python,content +9564,25157040,"/fast/home/franz.srambical/jafar/utils/nn.py",9264,0,"",python,selection_keyboard +9565,25157081,"/fast/home/franz.srambical/jafar/utils/nn.py",9264,0,"s",python,content +9566,25157081,"/fast/home/franz.srambical/jafar/utils/nn.py",9265,0,"",python,selection_keyboard +9567,25157132,"/fast/home/franz.srambical/jafar/utils/nn.py",9265,0," ",python,content +9568,25157133,"/fast/home/franz.srambical/jafar/utils/nn.py",9266,0,"",python,selection_keyboard +9569,25157523,"/fast/home/franz.srambical/jafar/utils/nn.py",9266,0,"d",python,content +9570,25157523,"/fast/home/franz.srambical/jafar/utils/nn.py",9267,0,"",python,selection_keyboard +9571,25157633,"/fast/home/franz.srambical/jafar/utils/nn.py",9267,0,"u",python,content +9572,25157633,"/fast/home/franz.srambical/jafar/utils/nn.py",9268,0,"",python,selection_keyboard +9573,25157748,"/fast/home/franz.srambical/jafar/utils/nn.py",9268,0,"r",python,content +9574,25157749,"/fast/home/franz.srambical/jafar/utils/nn.py",9269,0,"",python,selection_keyboard +9575,25158114,"/fast/home/franz.srambical/jafar/utils/nn.py",9269,0,"i",python,content +9576,25158115,"/fast/home/franz.srambical/jafar/utils/nn.py",9270,0,"",python,selection_keyboard +9577,25158193,"/fast/home/franz.srambical/jafar/utils/nn.py",9270,0,"n",python,content +9578,25158193,"/fast/home/franz.srambical/jafar/utils/nn.py",9271,0,"",python,selection_keyboard +9579,25158241,"/fast/home/franz.srambical/jafar/utils/nn.py",9271,0,"g",python,content 
+9580,25158242,"/fast/home/franz.srambical/jafar/utils/nn.py",9272,0,"",python,selection_keyboard +9581,25158332,"/fast/home/franz.srambical/jafar/utils/nn.py",9272,0," ",python,content +9582,25158333,"/fast/home/franz.srambical/jafar/utils/nn.py",9273,0,"",python,selection_keyboard +9583,25159269,"/fast/home/franz.srambical/jafar/utils/nn.py",9273,0,"l",python,content +9584,25159270,"/fast/home/franz.srambical/jafar/utils/nn.py",9274,0,"",python,selection_keyboard +9585,25159293,"/fast/home/franz.srambical/jafar/utils/nn.py",9274,0,"a",python,content +9586,25159293,"/fast/home/franz.srambical/jafar/utils/nn.py",9275,0,"",python,selection_keyboard +9587,25159395,"/fast/home/franz.srambical/jafar/utils/nn.py",9275,0,"m",python,content +9588,25159396,"/fast/home/franz.srambical/jafar/utils/nn.py",9276,0,"",python,selection_keyboard +9589,25159555,"/fast/home/franz.srambical/jafar/utils/nn.py",9276,0,"e",python,content +9590,25159556,"/fast/home/franz.srambical/jafar/utils/nn.py",9277,0,"",python,selection_keyboard +9591,25160133,"/fast/home/franz.srambical/jafar/utils/nn.py",9276,1,"",python,content +9592,25160268,"/fast/home/franz.srambical/jafar/utils/nn.py",9276,0," ",python,content +9593,25160268,"/fast/home/franz.srambical/jafar/utils/nn.py",9277,0,"",python,selection_keyboard +9594,25160296,"/fast/home/franz.srambical/jafar/utils/nn.py",9277,0,"e",python,content +9595,25160296,"/fast/home/franz.srambical/jafar/utils/nn.py",9278,0,"",python,selection_keyboard +9596,25160378,"/fast/home/franz.srambical/jafar/utils/nn.py",9278,0,"n",python,content +9597,25160378,"/fast/home/franz.srambical/jafar/utils/nn.py",9279,0,"",python,selection_keyboard +9598,25160460,"/fast/home/franz.srambical/jafar/utils/nn.py",9279,0,"c",python,content +9599,25160460,"/fast/home/franz.srambical/jafar/utils/nn.py",9280,0,"",python,selection_keyboard +9600,25160565,"/fast/home/franz.srambical/jafar/utils/nn.py",9280,0,"o",python,content +9601,25160565,"/fast/home/franz.srambical/jafar/utils/nn.py",9281,0,"",python,selection_keyboard +9602,25160629,"/fast/home/franz.srambical/jafar/utils/nn.py",9281,0,"d",python,content +9603,25160629,"/fast/home/franz.srambical/jafar/utils/nn.py",9282,0,"",python,selection_keyboard +9604,25160833,"/fast/home/franz.srambical/jafar/utils/nn.py",9282,0,"e",python,content +9605,25160834,"/fast/home/franz.srambical/jafar/utils/nn.py",9283,0,"",python,selection_keyboard +9606,25160899,"/fast/home/franz.srambical/jafar/utils/nn.py",9283,0,"r",python,content +9607,25160900,"/fast/home/franz.srambical/jafar/utils/nn.py",9284,0,"",python,selection_keyboard +9608,25160979,"/fast/home/franz.srambical/jafar/utils/nn.py",9284,0," ",python,content +9609,25160979,"/fast/home/franz.srambical/jafar/utils/nn.py",9285,0,"",python,selection_keyboard +9610,25161128,"/fast/home/franz.srambical/jafar/utils/nn.py",9285,0,"c",python,content +9611,25161129,"/fast/home/franz.srambical/jafar/utils/nn.py",9286,0,"",python,selection_keyboard +9612,25161193,"/fast/home/franz.srambical/jafar/utils/nn.py",9286,0,"a",python,content +9613,25161194,"/fast/home/franz.srambical/jafar/utils/nn.py",9287,0,"",python,selection_keyboard +9614,25161261,"/fast/home/franz.srambical/jafar/utils/nn.py",9287,0,"l",python,content +9615,25161261,"/fast/home/franz.srambical/jafar/utils/nn.py",9288,0,"",python,selection_keyboard +9616,25161395,"/fast/home/franz.srambical/jafar/utils/nn.py",9288,0,"l",python,content +9617,25161395,"/fast/home/franz.srambical/jafar/utils/nn.py",9289,0,"",python,selection_keyboard 
+9618,25161581,"/fast/home/franz.srambical/jafar/utils/nn.py",9289,0,"?",python,content +9619,25161582,"/fast/home/franz.srambical/jafar/utils/nn.py",9290,0,"",python,selection_keyboard +9620,25162164,"/fast/home/franz.srambical/jafar/utils/nn.py",9289,0,"",python,selection_command +9621,25163990,"/fast/home/franz.srambical/jafar/utils/nn.py",9241,0,"",python,selection_command +9622,25164421,"/fast/home/franz.srambical/jafar/utils/nn.py",7105,0,"",python,selection_command +9623,25164746,"/fast/home/franz.srambical/jafar/utils/nn.py",5247,0,"",python,selection_command +9624,25165830,"/fast/home/franz.srambical/jafar/utils/nn.py",3631,0,"",python,selection_command +9625,25166593,"/fast/home/franz.srambical/jafar/utils/nn.py",5247,0,"",python,selection_command +9626,25167275,"/fast/home/franz.srambical/jafar/utils/nn.py",5205,0,"",python,selection_command +9627,25167523,"/fast/home/franz.srambical/jafar/utils/nn.py",5192,0,"",python,selection_command +9628,25167552,"/fast/home/franz.srambical/jafar/utils/nn.py",5175,0,"",python,selection_command +9629,25168141,"/fast/home/franz.srambical/jafar/utils/nn.py",5176,0,"",python,selection_command +9630,25168297,"/fast/home/franz.srambical/jafar/utils/nn.py",5183,0,"",python,selection_command +9631,25168470,"/fast/home/franz.srambical/jafar/utils/nn.py",5185,0,"",python,selection_command +9632,25169934,"/fast/home/franz.srambical/jafar/utils/nn.py",6333,0,"",python,selection_command +9633,25171302,"/fast/home/franz.srambical/jafar/utils/nn.py",5185,0,"",python,selection_command +9634,25171621,"/fast/home/franz.srambical/jafar/utils/nn.py",5176,0,"",python,selection_command +9635,25175781,"/fast/home/franz.srambical/jafar/utils/nn.py",4362,0,"",python,selection_command +9636,25178678,"/fast/home/franz.srambical/jafar/utils/nn.py",4395,0,"",python,selection_command +9637,25178799,"/fast/home/franz.srambical/jafar/utils/nn.py",4362,0,"",python,selection_command +9638,25182566,"/fast/home/franz.srambical/jafar/utils/nn.py",4395,0,"",python,selection_command +9639,25182739,"/fast/home/franz.srambical/jafar/utils/nn.py",4362,0,"",python,selection_command +9640,25192222,"models/lam.py",0,0,"",python,tab +9641,25192223,"models/lam.py",1240,13,"STTransformer",python,selection_command +9642,25219702,"models/lam.py",2428,13,"STTransformer",python,selection_command +9643,25233925,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=14007.5 task 0: running\r\n",,terminal_output +9644,25234061,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=14007.5\r\nsrun: forcing job termination\r\n(Pdb) --KeyboardInterrupt--\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\n[2025-07-27T16:19:13.863] error: *** STEP 14007.5 ON hai003 CANCELLED AT 2025-07-27T16:19:13 DUE to SIGNAL Killed ***\r\n",,terminal_output +9645,25234163,"TERMINAL",0,0,"(Pdb) ",,terminal_output +9646,25234399,"TERMINAL",0,0,"q]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ q",,terminal_output +9647,25234586,"TERMINAL",0,0,"uit",,terminal_output +9648,25235339,"TERMINAL",0,0,"()",,terminal_output +9649,25235630,"TERMINAL",0,0,"\r\n[?2004l\r[?2004h> ",,terminal_output +9650,25238827,"TERMINAL",0,0,"\r\n[?2004l\r[?2004h> ",,terminal_output +9651,25239818,"TERMINAL",0,0,"q",,terminal_output +9652,25239943,"TERMINAL",0,0,"ui",,terminal_output +9653,25240015,"TERMINAL",0,0,"t",,terminal_output +9654,25240272,"TERMINAL",0,0,"()",,terminal_output +9655,25240622,"TERMINAL",0,0,"\r\n[?2004l\rbash: syntax error 
near unexpected token `quit'\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai003.haicore.berlin:~/jafar] $ ",,terminal_output +9656,27753074,"TERMINAL",0,0,"salloc: Job 14007 has exceeded its time limit and its allocation has been revoked.\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\n[2025-07-27T17:00:12.014] error: *** STEP 14007.interactive ON hai003 CANCELLED AT 2025-07-27T17:00:12 DUE TO TIME LIMIT ***\r\n",,terminal_output +9657,27753074,"TERMINAL",0,0,"srun: error: hai003: task 0: Killed\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output diff --git a/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-3f2b1a99-0d75-466c-970c-4deff62cba851753462933379-2025_07_25-19.02.23.245/source.csv b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-3f2b1a99-0d75-466c-970c-4deff62cba851753462933379-2025_07_25-19.02.23.245/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..d0e49e46d666da4213bc9780f0c3b66a0dcdd10b --- /dev/null +++ b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-3f2b1a99-0d75-466c-970c-4deff62cba851753462933379-2025_07_25-19.02.23.245/source.csv @@ -0,0 +1,94 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,3,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport grain\n\n\nclass Genie(nnx.Module):\n """"""Genie model""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: int,\n lam_ffn_dim: int,\n latent_action_dim: int,\n num_latent_actions: int,\n lam_patch_size: int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim = lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_latent_actions = num_latent_actions\n self.lam_patch_size = lam_patch_size\n self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n # --- Dynamics ---\n self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.dropout = dropout\n self.mask_limit = mask_limit\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n 
patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: patches per frame\n S: sequence length\n A: action space\n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n # Define the inner MaskGIT loop using nnx.scan\n maskgit_step = MaskGITStep(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n 
sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def maskgit_scan_fn(module, carry, x):\n new_carry, _ = module(carry, x)\n return new_carry, None\n\n MaskGITLoop = nnx.scan(\n maskgit_scan_fn,\n in_axes=(None, nnx.Carry, 0), # (module, carry, x)\n out_axes=(nnx.Carry, None), # (new_carry, None)\n )\n\n # Define the outer autoregressive loop's body function\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)).astype(bool) # (B, S, N)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = MaskGITLoop(\n maskgit_step, init_carry_maskgit, jnp.arange(steps)\n )\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn, initial_carry, timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nnx.Module):\n def __init__(\n self,\n dynamics: DynamicsMaskGIT,\n tokenizer: TokenizerVQVAE,\n temperature: float,\n sample_argmax: bool,\n steps: int,\n ):\n self.dynamics = dynamics\n self.tokenizer = tokenizer\n self.temperature = temperature\n self.sample_argmax = sample_argmax\n self.steps = steps\n\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token.value # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1)\n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n 
mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\n\n# FIXME (f.srambical): add conversion script for old checkpoints\ndef restore_genie_components(\n optimizer: nnx.Optimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rngs = nnx.Rngs(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n dummy_tokenizer_optimizer = nnx.Optimizer(dummy_tokenizer, dummy_tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(\n abstract_sharded_tokenizer_optimizer_state\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n optimizer.model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n dummy_lam_optimizer = nnx.Optimizer(dummy_lam, dummy_tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n restored_lam_optimizer = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(\n abstract_sharded_lam_optimizer_state\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_lam_optimizer.model, restored_lam_optimizer.model)\n optimizer.model.lam = dummy_lam_optimizer.model\n # Remove the 
LAM decoder to save memory and avoid unnecessary computation.\n del optimizer.model.lam.decoder\n lam_checkpoint_manager.close()\n\n return optimizer\n\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab +2,357,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"7:02:23 PM [info] Activating crowd-code\n7:02:23 PM [info] Recording started\n7:02:23 PM [info] Initializing git provider using file system watchers...\n",Log,tab +3,587,"extension-output-pdoom-org.crowd-code-#1-crowd-code",150,0,"7:02:23 PM [info] Git repository found\n7:02:23 PM [info] Git provider initialized successfully\n7:02:23 PM [info] Initial git state: [object Object]\n",Log,content +4,2898,"genie.py",0,0,"",python,tab +5,3378,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"",Log,tab +6,5988,"genie.py",0,0,"",python,tab +7,19218,"genie.py",0,0,"",python,tab +8,19298,"genie.py",6670,0,"",python,selection_command +9,56549,"/fast/home/franz.srambical/jafar/sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\nimport optax\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nimport orbax.checkpoint as ocp\nfrom PIL import Image, ImageDraw\nimport tyro\nfrom flax import nnx\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Load Genie checkpoint ---\n rngs = nnx.Rngs(rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n 
lam_num_heads=args.lam_num_heads,\n lam_co_train=False,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n dummy_tx = optax.adamw(\n learning_rate=optax.linear_schedule(0.0001, 0.0001, 10000),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n dummy_optimizer = nnx.Optimizer(genie, dummy_tx)\n\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state),\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(dummy_optimizer, restored_optimizer_state)\n\n # --- Define sampling function ---\n # @nnx.jit\n # @jax.jit\n def _sampling_fn(model, batch):\n """"""Runs Genie.sample with pre-defined generation hyper-parameters.""""""\n return model.sample(\n batch,\n args.seq_len,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n )\n\n\n # --- Define autoregressive sampling loop ---\n def _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = genie.sample(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n return generated_vid\n\n\n # --- Get video + latent actions ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n video_batch = next(iter(dataloader))\n # Get latent actions for all videos in the batch\n batch = dict(videos=video_batch)\n action_batch = genie.vq_encode(batch, training=False) # type: ignore[arg-type]\n action_batch = jnp.asarray(action_batch).reshape(video_batch.shape[0], args.seq_len - 1, 1)\n\n # --- Sample + evaluate video ---\n vid = _autoreg_sample(rng, video_batch, action_batch)\n gt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\n recon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\n # FIXME (f.srambical): investigate why this is needed\n gt = gt.astype(jnp.float32)\n ssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\n print(f""SSIM: {ssim}"")\n\n # --- Construct video ---\n # true_videos = (video_batch * 255).astype(np.uint8)\n # pred_videos = (vid * 255).astype(np.uint8)\n # video_comparison = np.zeros((2, *vid.shape), dtype=np.uint8)\n # video_comparison[0] = true_videos[:, : args.seq_len]\n # video_comparison[1] = 
pred_videos\n # frames = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n # # --- Save video ---\n # imgs = [Image.fromarray(img) for img in frames]\n # # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n # for t, img in enumerate(imgs[1:]):\n # d = ImageDraw.Draw(img)\n # for row in range(action_batch.shape[0]):\n # action = action_batch[row, t, 0]\n # y_offset = row * video_batch.shape[2] + 2\n # d.text((2, y_offset), f""{action}"", fill=255)\n # imgs[0].save(\n # f""generation_{time.time()}.gif"",\n # save_all=True,\n # append_images=imgs[1:],\n # duration=250,\n # loop=0,\n # )\n",python,tab +10,56549,"/fast/home/franz.srambical/jafar/sample.py",5611,0,"",python,selection_command +11,58027,"/fast/home/franz.srambical/jafar/sample.py",4644,0,"",python,selection_command +12,59949,"/fast/home/franz.srambical/jafar/genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport grain\n\n\nclass Genie(nnx.Module):\n """"""Genie model""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: int,\n lam_ffn_dim: int,\n latent_action_dim: int,\n num_latent_actions: int,\n lam_patch_size: int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim = lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_latent_actions = num_latent_actions\n self.lam_patch_size = lam_patch_size\n self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n # --- Dynamics ---\n self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.dropout = dropout\n self.mask_limit = mask_limit\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n 
num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: patches per frame\n S: sequence length\n A: action space\n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n # Define the inner MaskGIT loop using nnx.scan\n maskgit_step = MaskGITStep(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def maskgit_scan_fn(module, carry, x):\n new_carry, _ = module(carry, x)\n return new_carry, None\n\n MaskGITLoop = nnx.scan(\n maskgit_scan_fn,\n in_axes=(None, nnx.Carry, 0), # (module, carry, x)\n out_axes=(nnx.Carry, None), # (new_carry, None)\n )\n\n # Define the outer autoregressive loop's body function\n def generation_step_fn(carry, step_t):\n rng, 
current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)).astype(bool) # (B, S, N)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = MaskGITLoop(\n maskgit_step, init_carry_maskgit, jnp.arange(steps)\n )\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn, initial_carry, timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nnx.Module):\n def __init__(\n self,\n dynamics: DynamicsMaskGIT,\n tokenizer: TokenizerVQVAE,\n temperature: float,\n sample_argmax: bool,\n steps: int,\n ):\n self.dynamics = dynamics\n self.tokenizer = tokenizer\n self.temperature = temperature\n self.sample_argmax = sample_argmax\n self.steps = steps\n\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token.value # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1)\n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\n\n# FIXME (f.srambical): add conversion script for old checkpoints\ndef restore_genie_components(\n optimizer: nnx.Optimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n):\n """"""Restore 
pre-trained Genie components""""""\n rngs = nnx.Rngs(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n dummy_tokenizer_optimizer = nnx.Optimizer(dummy_tokenizer, dummy_tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(\n abstract_sharded_tokenizer_optimizer_state\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n optimizer.model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n dummy_lam_optimizer = nnx.Optimizer(dummy_lam, dummy_tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n restored_lam_optimizer = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(\n abstract_sharded_lam_optimizer_state\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_lam_optimizer.model, restored_lam_optimizer.model)\n optimizer.model.lam = dummy_lam_optimizer.model\n # Remove the LAM decoder to save memory and avoid unnecessary computation.\n del optimizer.model.lam.decoder\n lam_checkpoint_manager.close()\n\n return optimizer\n\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, 
""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab +13,59950,"/fast/home/franz.srambical/jafar/genie.py",7076,0,"",python,selection_command +14,77428,"/fast/home/franz.srambical/jafar/sample.py",0,0,"",python,tab +15,79472,"genie.py",0,0,"",python,tab +16,96262,"genie.py",7213,0,"",python,selection_command +17,97679,"genie.py",7201,63," out_axes=(nnx.Carry, 0), # (new_carry, None)\n",python,content +18,113555,"/fast/home/franz.srambical/jafar/genie.py",0,0,"",python,tab +19,113557,"/fast/home/franz.srambical/jafar/genie.py",7201,0,"",python,selection_command +20,116189,"genie.py",0,0,"",python,tab +21,117566,"genie.py",7258,0,"",python,selection_mouse +22,155903,"/fast/home/franz.srambical/jafar/genie.py",0,0,"",python,tab +23,157626,"/fast/home/franz.srambical/jafar/genie.py",7201,62," out_axes=(nnx.Carry, None), # (new_carry, None)",python,content +24,159605,"genie.py",0,0,"",python,tab +25,179385,"/fast/home/franz.srambical/jafar/genie.py",0,0,"",python,tab +26,179385,"/fast/home/franz.srambical/jafar/genie.py",7979,0,"",python,selection_command +27,213168,"/fast/home/franz.srambical/jafar/genie.py",7979,117," final_carry_maskgit, _ = jax.lax.scan(\n maskgit_step_fn, init_carry_maskgit, jnp.arange(steps)",python,content +28,213169,"/fast/home/franz.srambical/jafar/genie.py",7076,199,"",python,content +29,213169,"/fast/home/franz.srambical/jafar/genie.py",6949,90," new_carry = (rng, token_idxs, new_mask, action_tokens)",python,content +30,213169,"/fast/home/franz.srambical/jafar/genie.py",6670,277," # Define the inner MaskGIT loop function\n def maskgit_step_fn(carry, step):\n rng, token_idxs, mask, action_tokens = carry\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token.value # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1)\n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)",python,content +31,223860,"/fast/home/franz.srambical/jafar/genie.py",9430,121," final_carry_maskgit, _ = MaskGITLoop(\n maskgit_step, init_carry_maskgit, jnp.arange(steps)",python,content 
+32,223860,"/fast/home/franz.srambical/jafar/genie.py",8726,0," MaskGITLoop = nnx.scan(\n maskgit_scan_fn,\n in_axes=(None, nnx.Carry, 0), # (module, carry, x)\n out_axes=(nnx.Carry, None), # (new_carry, None)\n )\n\n",python,content +33,223860,"/fast/home/franz.srambical/jafar/genie.py",8623,66," def maskgit_scan_fn(module, carry, x):\n new_carry, _ = module(carry, x)",python,content +34,223860,"/fast/home/franz.srambical/jafar/genie.py",6670,1951," # Define the inner MaskGIT loop using nnx.scan\n maskgit_step = MaskGITStep(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )",python,content +35,224039,"/fast/home/franz.srambical/jafar/genie.py",11216,94,"",python,content +36,224039,"/fast/home/franz.srambical/jafar/genie.py",9013,2202,"",python,content +37,224039,"/fast/home/franz.srambical/jafar/genie.py",7979,117," final_carry_maskgit, _ = jax.lax.scan(\n maskgit_step_fn, init_carry_maskgit, jnp.arange(steps)",python,content +38,224039,"/fast/home/franz.srambical/jafar/genie.py",7076,197," # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None",python,content +39,224039,"/fast/home/franz.srambical/jafar/genie.py",6949,125," # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token.value # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1)\n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)",python,content +40,224039,"/fast/home/franz.srambical/jafar/genie.py",6670,277," # Define the inner MaskGIT loop function\n def maskgit_step_fn(carry, step):\n rng, token_idxs, mask, action_tokens = carry\n N = token_idxs.shape[2]",python,content +41,272905,"/fast/home/franz.srambical/jafar/genie.py",6670,0,"",python,selection_command +42,380998,"sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\nimport optax\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nimport orbax.checkpoint as ocp\nfrom PIL import Image, ImageDraw\nimport tyro\nfrom flax import nnx\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: 
int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Load Genie checkpoint ---\n rngs = nnx.Rngs(rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=False,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n dummy_tx = optax.adamw(\n learning_rate=optax.linear_schedule(0.0001, 0.0001, 10000),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n dummy_optimizer = nnx.Optimizer(genie, dummy_tx)\n\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state),\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(dummy_optimizer, restored_optimizer_state)\n\n # --- Define sampling function ---\n # @nnx.jit\n # @jax.jit\n def _sampling_fn(model, batch):\n """"""Runs Genie.sample with pre-defined generation hyper-parameters.""""""\n return model.sample(\n batch,\n args.seq_len,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n )\n\n\n # --- Define autoregressive sampling loop ---\n def _autoreg_sample(rng, video_batch, action_batch):\n vid = 
video_batch[:, : args.start_frame + 1]\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = genie.sample(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n return generated_vid\n\n\n # --- Get video + latent actions ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n video_batch = next(iter(dataloader))\n # Get latent actions for all videos in the batch\n batch = dict(videos=video_batch)\n action_batch = genie.vq_encode(batch, training=False) # type: ignore[arg-type]\n action_batch = jnp.asarray(action_batch).reshape(video_batch.shape[0], args.seq_len - 1, 1)\n\n # --- Sample + evaluate video ---\n vid = _autoreg_sample(rng, video_batch, action_batch)\n gt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\n recon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\n # FIXME (f.srambical): investigate why this is needed\n gt = gt.astype(jnp.float32)\n ssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\n print(f""SSIM: {ssim}"")\n\n # --- Construct video ---\n # true_videos = (video_batch * 255).astype(np.uint8)\n # pred_videos = (vid * 255).astype(np.uint8)\n # video_comparison = np.zeros((2, *vid.shape), dtype=np.uint8)\n # video_comparison[0] = true_videos[:, : args.seq_len]\n # video_comparison[1] = pred_videos\n # frames = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n # # --- Save video ---\n # imgs = [Image.fromarray(img) for img in frames]\n # # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n # for t, img in enumerate(imgs[1:]):\n # d = ImageDraw.Draw(img)\n # for row in range(action_batch.shape[0]):\n # action = action_batch[row, t, 0]\n # y_offset = row * video_batch.shape[2] + 2\n # d.text((2, y_offset), f""{action}"", fill=255)\n # imgs[0].save(\n # f""generation_{time.time()}.gif"",\n # save_all=True,\n # append_images=imgs[1:],\n # duration=250,\n # loop=0,\n # )\n",python,tab +43,382134,"sample.py",7041,0,"",python,selection_command +44,382137,"sample.py",7023,0,"",python,selection_command +45,382152,"sample.py",6999,0,"",python,selection_command +46,382225,"sample.py",6965,0,"",python,selection_command +47,382231,"sample.py",6940,0,"",python,selection_command +48,382323,"sample.py",6897,0,"",python,selection_command +49,382340,"sample.py",6877,0,"",python,selection_command +50,382345,"sample.py",6818,0,"",python,selection_command +51,382381,"sample.py",6762,0,"",python,selection_command +52,382416,"sample.py",6715,0,"",python,selection_command +53,382427,"sample.py",6664,0,"",python,selection_command +54,382469,"sample.py",6630,0,"",python,selection_command +55,382583,"sample.py",6589,0,"",python,selection_command +56,382584,"sample.py",6489,0,"",python,selection_command +57,382658,"sample.py",6435,0,"",python,selection_command +58,382658,"sample.py",6408,0,"",python,selection_command +59,382659,"sample.py",6407,0,"",python,selection_command +60,382692,"sample.py",6323,0,"",python,selection_command +61,382707,"sample.py",6283,0,"",python,selection_command +62,382729,"sample.py",6224,0,"",python,selection_command 
+63,382778,"sample.py",6157,0,"",python,selection_command +64,382822,"sample.py",6108,0,"",python,selection_command +65,382836,"sample.py",6051,0,"",python,selection_command +66,383110,"sample.py",6021,0,"",python,selection_command +67,409372,"sample.py",0,0,"",python,tab +68,423163,"sample.py",6020,0,"",python,selection_command +69,423280,"sample.py",5993,0,"",python,selection_command +70,456144,"genie.py",0,0,"",python,tab +71,456145,"genie.py",6670,0,"",python,selection_command +72,461257,"sample.py",0,0,"",python,tab +73,464425,"sample.py",6969,0,"",python,selection_command +74,464665,"sample.py",7049,0,"",python,selection_command +75,465095,"sample.py",6228,0,"",python,selection_command +76,465860,"sample.py",5207,0,"",python,selection_command +77,466191,"sample.py",4424,0,"",python,selection_command +78,466192,"sample.py",3753,0,"",python,selection_command +79,466193,"sample.py",2998,0,"",python,selection_command +80,466194,"sample.py",2219,0,"",python,selection_command +81,466211,"sample.py",1453,0,"",python,selection_command +82,466211,"sample.py",917,0,"",python,selection_command +83,466304,"sample.py",351,0,"",python,selection_command +84,466305,"sample.py",917,0,"",python,selection_command +85,466306,"sample.py",1453,0,"",python,selection_command +86,466306,"sample.py",2219,0,"",python,selection_command +87,466542,"sample.py",2998,0,"",python,selection_command +88,468353,"/fast/home/franz.srambical/jafar/genie.py",0,0,"",python,tab +89,470581,"/fast/home/franz.srambical/jafar/genie.py",7766,0,"",python,selection_command +90,470693,"/fast/home/franz.srambical/jafar/genie.py",8930,0,"",python,selection_command +91,470894,"/fast/home/franz.srambical/jafar/genie.py",9838,0,"",python,selection_command +92,471054,"/fast/home/franz.srambical/jafar/genie.py",10437,0,"",python,selection_command +93,475752,"sample.py",0,0,"",python,tab diff --git a/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-6410c04a-5509-42a0-b7ec-8fa2503faf3a1758380010770-2025_09_20-16.53.40.475/source.csv b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-6410c04a-5509-42a0-b7ec-8fa2503faf3a1758380010770-2025_09_20-16.53.40.475/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..281f08cf8263214f0d0a9afdfd4a3c0726069020 --- /dev/null +++ b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-6410c04a-5509-42a0-b7ec-8fa2503faf3a1758380010770-2025_09_20-16.53.40.475/source.csv @@ -0,0 +1,1778 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,3,"input_pipeline/generate_atari_dataset.py",0,0,"# docs and experiment results can be found at https://docs.cleanrl.dev/rl-algorithms/rainbow/#rainbow_ataripy\nimport collections\nimport math\nimport os\nimport random\nimport time\nfrom collections import deque\nfrom dataclasses import dataclass\n\nimport gymnasium as gym\nimport numpy as np\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim as optim\nimport tyro\nfrom typing import Optional, Any\nfrom torch.utils.tensorboard.writer import SummaryWriter\n\nfrom cleanrl_utils.atari_wrappers import (\n ClipRewardEnv,\n EpisodicLifeEnv,\n FireResetEnv,\n MaxAndSkipEnv,\n NoopResetEnv,\n)\ntry:\n from utils import save_chunks # type: ignore\nexcept Exception: # pragma: no cover\n from input_pipeline.utils import save_chunks # type: ignore\nimport json\n\n\n@dataclass\nclass Args:\n exp_name: str = os.path.basename(__file__)[: -len("".py"")]\n """"""the name of this 
experiment""""""\n seed: int = 1\n """"""seed of the experiment""""""\n torch_deterministic: bool = True\n """"""if toggled, `torch.backends.cudnn.deterministic=False`""""""\n cuda: bool = True\n """"""if toggled, cuda will be enabled by default""""""\n track: bool = False\n """"""if toggled, this experiment will be tracked with Weights and Biases""""""\n wandb_project_name: str = ""cleanRL""\n """"""the wandb's project name""""""\n wandb_entity: Optional[str] = None\n """"""the entity (team) of wandb's project""""""\n capture_video: bool = False\n """"""whether to capture videos of the agent performances (check out `videos` folder)""""""\n save_model: bool = False\n """"""whether to save model into the `runs/{run_name}` folder""""""\n upload_model: bool = False\n """"""whether to upload the saved model to huggingface""""""\n hf_entity: str = """"\n """"""the user or org name of the model repository from the Hugging Face Hub""""""\n\n env_id: str = ""BreakoutNoFrameskip-v4""\n """"""the id of the environment""""""\n total_timesteps: int = 10000000\n """"""total timesteps of the experiments""""""\n learning_rate: float = 0.0000625\n """"""the learning rate of the optimizer""""""\n num_envs: int = 1\n """"""the number of parallel game environments""""""\n buffer_size: int = 1000000\n """"""the replay memory buffer size""""""\n gamma: float = 0.99\n """"""the discount factor gamma""""""\n tau: float = 1.0\n """"""the target network update rate""""""\n target_network_frequency: int = 8000\n """"""the timesteps it takes to update the target network""""""\n batch_size: int = 32\n """"""the batch size of sample from the reply memory""""""\n start_e: float = 1\n """"""the starting epsilon for exploration""""""\n end_e: float = 0.01\n """"""the ending epsilon for exploration""""""\n exploration_fraction: float = 0.10\n """"""the fraction of `total-timesteps` it takes from start-e to go end-e""""""\n learning_starts: int = 80000\n """"""timestep to start learning""""""\n train_frequency: int = 4\n """"""the frequency of training""""""\n n_step: int = 3\n """"""the number of steps to look ahead for n-step Q learning""""""\n prioritized_replay_alpha: float = 0.5\n """"""alpha parameter for prioritized replay buffer""""""\n prioritized_replay_beta: float = 0.4\n """"""beta parameter for prioritized replay buffer""""""\n prioritized_replay_eps: float = 1e-6\n """"""epsilon parameter for prioritized replay buffer""""""\n n_atoms: int = 51\n """"""the number of atoms""""""\n v_min: float = -10\n """"""the return lower bound""""""\n v_max: float = 10\n """"""the return upper bound""""""\n\n # Dataset capture\n capture_dataset: bool = True\n num_episodes_train: int = 10000\n num_episodes_val: int = 500\n num_episodes_test: int = 500\n output_dir: str = ""data/atari_episodes""\n min_episode_length: int = 1\n chunk_size: int = 160\n chunks_per_file: int = 100\n stop_on_complete: bool = True\n\n\ndef make_env(env_id, seed, idx, capture_video, run_name):\n def thunk():\n if capture_video and idx == 0:\n env = gym.make(env_id, render_mode=""rgb_array"")\n env = gym.wrappers.RecordVideo(env, f""videos/{run_name}"")\n else:\n env = gym.make(env_id)\n env = gym.wrappers.RecordEpisodeStatistics(env)\n\n env = NoopResetEnv(env, noop_max=30)\n env = MaxAndSkipEnv(env, skip=4)\n env = EpisodicLifeEnv(env)\n if ""FIRE"" in env.unwrapped.get_action_meanings():\n env = FireResetEnv(env)\n env = ClipRewardEnv(env)\n env = gym.wrappers.ResizeObservation(env, (84, 84))\n env = gym.wrappers.GrayScaleObservation(env)\n env = 
gym.wrappers.FrameStack(env, 4)\n\n env.action_space.seed(seed)\n return env\n\n return thunk\n\n\nclass NoisyLinear(nn.Module):\n def __init__(self, in_features, out_features, std_init=0.5):\n super().__init__()\n self.in_features = in_features\n self.out_features = out_features\n self.std_init = std_init\n\n self.weight_mu = nn.Parameter(torch.FloatTensor(out_features, in_features))\n self.weight_sigma = nn.Parameter(torch.FloatTensor(out_features, in_features))\n self.register_buffer(""weight_epsilon"", torch.FloatTensor(out_features, in_features))\n self.bias_mu = nn.Parameter(torch.FloatTensor(out_features))\n self.bias_sigma = nn.Parameter(torch.FloatTensor(out_features))\n self.register_buffer(""bias_epsilon"", torch.FloatTensor(out_features))\n # factorized gaussian noise\n self.reset_parameters()\n self.reset_noise()\n\n def reset_parameters(self):\n mu_range = 1 / math.sqrt(self.in_features)\n self.weight_mu.data.uniform_(-mu_range, mu_range)\n self.weight_sigma.data.fill_(self.std_init / math.sqrt(self.in_features))\n self.bias_mu.data.uniform_(-mu_range, mu_range)\n self.bias_sigma.data.fill_(self.std_init / math.sqrt(self.out_features))\n\n def reset_noise(self):\n self.weight_epsilon.normal_()\n self.bias_epsilon.normal_()\n\n def forward(self, input):\n if self.training:\n weight = self.weight_mu + self.weight_sigma * self.weight_epsilon\n bias = self.bias_mu + self.bias_sigma * self.bias_epsilon\n else:\n weight = self.weight_mu\n bias = self.bias_mu\n return F.linear(input, weight, bias)\n\n\n# ALGO LOGIC: initialize agent here:\nclass NoisyDuelingDistributionalNetwork(nn.Module):\n def __init__(self, env, n_atoms, v_min, v_max):\n super().__init__()\n self.n_atoms = n_atoms\n self.v_min = v_min\n self.v_max = v_max\n self.delta_z = (v_max - v_min) / (n_atoms - 1)\n self.n_actions = env.single_action_space.n\n self.register_buffer(""support"", torch.linspace(v_min, v_max, n_atoms))\n\n self.network = nn.Sequential(\n nn.Conv2d(4, 32, 8, stride=4),\n nn.ReLU(),\n nn.Conv2d(32, 64, 4, stride=2),\n nn.ReLU(),\n nn.Conv2d(64, 64, 3, stride=1),\n nn.ReLU(),\n nn.Flatten(),\n )\n conv_output_size = 3136\n\n self.value_head = nn.Sequential(NoisyLinear(conv_output_size, 512), nn.ReLU(), NoisyLinear(512, n_atoms))\n\n self.advantage_head = nn.Sequential(\n NoisyLinear(conv_output_size, 512), nn.ReLU(), NoisyLinear(512, n_atoms * self.n_actions)\n )\n\n def forward(self, x):\n h = self.network(x / 255.0)\n value = self.value_head(h).view(-1, 1, self.n_atoms)\n advantage = self.advantage_head(h).view(-1, self.n_actions, self.n_atoms)\n q_atoms = value + advantage - advantage.mean(dim=1, keepdim=True)\n q_dist = F.softmax(q_atoms, dim=2)\n return q_dist\n\n def reset_noise(self):\n for layer in self.value_head:\n if isinstance(layer, NoisyLinear):\n layer.reset_noise()\n for layer in self.advantage_head:\n if isinstance(layer, NoisyLinear):\n layer.reset_noise()\n\n\nPrioritizedBatch = collections.namedtuple(\n ""PrioritizedBatch"", [""observations"", ""actions"", ""rewards"", ""next_observations"", ""dones"", ""indices"", ""weights""]\n)\n\n\n# adapted from: https://github.com/openai/baselines/blob/master/baselines/common/segment_tree.py\nclass SumSegmentTree:\n def __init__(self, capacity):\n self.capacity = capacity\n self.tree_size = 2 * capacity - 1\n self.tree = np.zeros(self.tree_size, dtype=np.float32)\n\n def _propagate(self, idx):\n parent = (idx - 1) // 2\n while parent >= 0:\n self.tree[parent] = self.tree[parent * 2 + 1] + self.tree[parent * 2 + 2]\n parent = (parent - 1) 
// 2\n\n def update(self, idx, value):\n tree_idx = idx + self.capacity - 1\n self.tree[tree_idx] = value\n self._propagate(tree_idx)\n\n def total(self):\n return self.tree[0]\n\n def retrieve(self, value):\n idx = 0\n while idx * 2 + 1 < self.tree_size:\n left = idx * 2 + 1\n right = left + 1\n if value <= self.tree[left]:\n idx = left\n else:\n value -= self.tree[left]\n idx = right\n return idx - (self.capacity - 1)\n\n\n# adapted from: https://github.com/openai/baselines/blob/master/baselines/common/segment_tree.py\nclass MinSegmentTree:\n def __init__(self, capacity):\n self.capacity = capacity\n self.tree_size = 2 * capacity - 1\n self.tree = np.full(self.tree_size, float(""inf""), dtype=np.float32)\n\n def _propagate(self, idx):\n parent = (idx - 1) // 2\n while parent >= 0:\n self.tree[parent] = np.minimum(self.tree[parent * 2 + 1], self.tree[parent * 2 + 2])\n parent = (parent - 1) // 2\n\n def update(self, idx, value):\n tree_idx = idx + self.capacity - 1\n self.tree[tree_idx] = value\n self._propagate(tree_idx)\n\n def min(self):\n return self.tree[0]\n\n\nclass PrioritizedReplayBuffer:\n def __init__(self, capacity, obs_shape, device, n_step, gamma, alpha=0.6, beta=0.4, eps=1e-6):\n self.capacity = capacity\n self.device = device\n self.n_step = n_step\n self.gamma = gamma\n self.alpha = alpha\n self.beta = beta\n self.eps = eps\n\n self.buffer_obs = np.zeros((capacity,) + obs_shape, dtype=np.uint8)\n self.buffer_next_obs = np.zeros((capacity,) + obs_shape, dtype=np.uint8)\n self.buffer_actions = np.zeros(capacity, dtype=np.int64)\n self.buffer_rewards = np.zeros(capacity, dtype=np.float32)\n self.buffer_dones = np.zeros(capacity, dtype=np.bool_)\n\n self.pos = 0\n self.size = 0\n self.max_priority = 1.0\n\n self.sum_tree = SumSegmentTree(capacity)\n self.min_tree = MinSegmentTree(capacity)\n\n # For n-step returns\n self.n_step_buffer = deque(maxlen=n_step)\n\n def _get_n_step_info(self):\n reward = 0.0\n next_obs = self.n_step_buffer[-1][3]\n done = self.n_step_buffer[-1][4]\n\n for i in range(len(self.n_step_buffer)):\n reward += self.gamma**i * self.n_step_buffer[i][2]\n if self.n_step_buffer[i][4]:\n next_obs = self.n_step_buffer[i][3]\n done = True\n break\n return reward, next_obs, done\n\n def add(self, obs, action, reward, next_obs, done):\n self.n_step_buffer.append((obs, action, reward, next_obs, done))\n\n if len(self.n_step_buffer) < self.n_step:\n return\n\n reward, next_obs, done = self._get_n_step_info()\n obs = self.n_step_buffer[0][0]\n action = self.n_step_buffer[0][1]\n\n idx = self.pos\n self.buffer_obs[idx] = obs\n self.buffer_next_obs[idx] = next_obs\n self.buffer_actions[idx] = action\n self.buffer_rewards[idx] = reward\n self.buffer_dones[idx] = done\n\n priority = self.max_priority**self.alpha\n self.sum_tree.update(idx, priority)\n self.min_tree.update(idx, priority)\n\n self.pos = (self.pos + 1) % self.capacity\n self.size = min(self.size + 1, self.capacity)\n\n if done:\n self.n_step_buffer.clear()\n\n def sample(self, batch_size):\n indices = []\n p_total = self.sum_tree.total()\n segment = p_total / batch_size\n\n for i in range(batch_size):\n a = segment * i\n b = segment * (i + 1)\n upperbound = np.random.uniform(a, b)\n idx = self.sum_tree.retrieve(upperbound)\n indices.append(idx)\n\n samples = {\n ""observations"": torch.from_numpy(self.buffer_obs[indices]).to(self.device),\n ""actions"": torch.from_numpy(self.buffer_actions[indices]).to(self.device).unsqueeze(1),\n ""rewards"": 
torch.from_numpy(self.buffer_rewards[indices]).to(self.device).unsqueeze(1),\n ""next_observations"": torch.from_numpy(self.buffer_next_obs[indices]).to(self.device),\n ""dones"": torch.from_numpy(self.buffer_dones[indices]).to(self.device).unsqueeze(1),\n }\n\n probs = np.array([self.sum_tree.tree[idx + self.capacity - 1] for idx in indices])\n weights = (self.size * probs / p_total) ** -self.beta\n weights = weights / weights.max()\n samples[""weights""] = torch.from_numpy(weights).to(self.device).unsqueeze(1)\n samples[""indices""] = indices\n\n return PrioritizedBatch(**samples)\n\n def update_priorities(self, indices, priorities):\n priorities = np.abs(priorities) + self.eps\n self.max_priority = max(self.max_priority, priorities.max())\n\n for idx, priority in zip(indices, priorities):\n priority = priority**self.alpha\n self.sum_tree.update(idx, priority)\n self.min_tree.update(idx, priority)\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n assert args.num_envs == 1, ""vectorized envs are not supported at the moment""\n run_name = f""{args.env_id}__{args.exp_name}__{args.seed}__{int(time.time())}""\n if args.track:\n import wandb\n\n wandb.init(\n project=args.wandb_project_name,\n entity=args.wandb_entity,\n sync_tensorboard=True,\n config=vars(args),\n name=run_name,\n monitor_gym=True,\n save_code=True,\n )\n writer = SummaryWriter(f""runs/{run_name}"")\n writer.add_text(\n ""hyperparameters"",\n ""|param|value|\n|-|-|\n%s"" % (""\n"".join([f""|{key}|{value}|"" for key, value in vars(args).items()])),\n )\n\n # TRY NOT TO MODIFY: seeding\n random.seed(args.seed)\n np.random.seed(args.seed)\n torch.manual_seed(args.seed)\n torch.backends.cudnn.deterministic = args.torch_deterministic\n\n device = torch.device(""cuda"" if torch.cuda.is_available() and args.cuda else ""cpu"")\n\n # env setup\n envs = gym.vector.SyncVectorEnv(\n [make_env(args.env_id, args.seed + i, i, args.capture_video, run_name) for i in range(args.num_envs)]\n )\n assert isinstance(envs.single_action_space, gym.spaces.Discrete), ""only discrete action space is supported""\n\n q_network = NoisyDuelingDistributionalNetwork(envs, args.n_atoms, args.v_min, args.v_max).to(device)\n optimizer = optim.Adam(q_network.parameters(), lr=args.learning_rate, eps=1.5e-4)\n target_network = NoisyDuelingDistributionalNetwork(envs, args.n_atoms, args.v_min, args.v_max).to(device)\n target_network.load_state_dict(q_network.state_dict())\n\n rb = PrioritizedReplayBuffer(\n args.buffer_size,\n envs.single_observation_space.shape,\n device,\n args.n_step,\n args.gamma,\n args.prioritized_replay_alpha,\n args.prioritized_replay_beta,\n args.prioritized_replay_eps,\n )\n\n # dataset capture state\n split_targets = {\n ""train"": args.num_episodes_train,\n ""val"": args.num_episodes_val,\n ""test"": args.num_episodes_test,\n }\n # Determine splits to run (order: train -> val -> test)\n splits_in_order = [s for s in [""train"", ""val"", ""test""] if split_targets[s] > 0]\n\n episodes_captured_per_split: dict[str, int] = {s: 0 for s in [""train"", ""val"", ""test""]}\n file_idx_by_split: dict[str, int] = {s: 0 for s in [""train"", ""val"", ""test""]}\n episode_metadata_by_split: dict[str, list[dict]] = {s: [] for s in [""train"", ""val"", ""test""]}\n\n obs_chunks: list[np.ndarray] = []\n act_chunks: list[np.ndarray] = []\n\n current_split_idx = 0\n current_split = splits_in_order[0]\n split_dir = os.path.join(args.output_dir, current_split)\n if args.capture_dataset:\n os.makedirs(split_dir, exist_ok=True)\n\n start_time = 
time.time()\n\n # TRY NOT TO MODIFY: start the game\n obs, _ = envs.reset(seed=args.seed)\n observations_seq: list[np.ndarray] = []\n actions_seq: list[np.ndarray] = []\n for global_step in range(args.total_timesteps):\n # anneal PER beta to 1\n rb.beta = min(\n 1.0, args.prioritized_replay_beta + global_step * (1.0 - args.prioritized_replay_beta) / args.total_timesteps\n )\n\n # ALGO LOGIC: put action logic here\n with torch.no_grad():\n q_dist = q_network(torch.Tensor(obs).to(device))\n q_values = torch.sum(q_dist * q_network.support, dim=2)\n actions = torch.argmax(q_values, dim=1).cpu().numpy()\n\n # TRY NOT TO MODIFY: execute the game and log data.\n next_obs, rewards, terminations, truncations, infos = envs.step(actions)\n\n if args.capture_dataset:\n observations_seq.append(next_obs.astype(np.uint8))\n actions_seq.append(actions.astype(np.int64))\n\n if ""final_info"" in infos:\n for info in infos[""final_info""]:\n if info and ""episode"" in info:\n print(f""global_step={global_step}, episodic_return={info['episode']['r']}"")\n writer.add_scalar(""charts/episodic_return"", info[""episode""][""r""], global_step)\n writer.add_scalar(""charts/episodic_length"", info[""episode""][""l""], global_step)\n\n continue_capturing_multi = any(\n episodes_captured_per_split[s] < split_targets[s]\n for s in splits_in_order\n )\n if args.capture_dataset and continue_capturing_multi:\n current_len = len(observations_seq)\n if current_len >= args.min_episode_length:\n frames = np.concatenate(observations_seq, axis=0).astype(np.uint8)\n acts = np.concatenate(actions_seq, axis=0).astype(np.int64)\n\n episode_obs_chunks = []\n episode_act_chunks = []\n start_idx = 0\n while start_idx < current_len:\n end_idx = min(start_idx + args.chunk_size, current_len)\n if end_idx - start_idx < args.chunk_size:\n print(\n f""Warning: Inconsistent chunk_sizes. Episode has {current_len} frames, ""\n f""which is smaller than the requested chunk_size: {args.chunk_size}. 
""\n ""This might lead to performance degradation during training.""\n )\n episode_obs_chunks.append(frames[start_idx:end_idx][None, ...])\n episode_act_chunks.append(acts[start_idx:end_idx][None, ...])\n start_idx = end_idx\n\n obs_chunks_data = [\n np.concatenate(seq, axis=0).astype(np.uint8) for seq in episode_obs_chunks\n ]\n act_chunks_data = [np.concatenate(act, axis=0) for act in episode_act_chunks]\n obs_chunks.extend(obs_chunks_data)\n act_chunks.extend(act_chunks_data)\n\n # Save to the active split\n ep_metadata, obs_chunks, next_file_idx, act_chunks = save_chunks(\n obs_chunks,\n file_idx_by_split[current_split],\n args.chunks_per_file,\n split_dir,\n act_chunks,\n )\n file_idx_by_split[current_split] = next_file_idx\n episode_metadata_by_split[current_split].extend(ep_metadata)\n\n episodes_captured_per_split[current_split] += 1\n\n if episodes_captured_per_split[current_split] >= split_targets[current_split]:\n if len(obs_chunks) > 0:\n print(\n f""Warning: Dropping {len(obs_chunks)} chunks before switching split '"",{current_split},""' for consistent number of chunks per file."",\n ""Consider changing the chunk_size and chunks_per_file parameters to prevent data-loss."",\n )\n obs_chunks = []\n act_chunks = []\n if current_split_idx + 1 < len(splits_in_order):\n current_split_idx += 1\n current_split = splits_in_order[current_split_idx]\n split_dir = os.path.join(args.output_dir, current_split)\n os.makedirs(split_dir, exist_ok=True)\n else:\n print(f""Episode too short ({current_len}), skipping capture..."")\n\n observations_seq = []\n actions_seq = []\n\n # TRY NOT TO MODIFY: save data to reply buffer; handle `final_observation`\n real_next_obs = next_obs.copy()\n for idx, trunc in enumerate(truncations):\n if trunc:\n real_next_obs[idx] = infos[""final_observation""][idx]\n rb.add(obs, actions, rewards, real_next_obs, terminations)\n\n # TRY NOT TO MODIFY: CRUCIAL step easy to overlook\n obs = next_obs\n\n # ALGO LOGIC: training.\n if global_step > args.learning_starts:\n if global_step % args.train_frequency == 0:\n # reset the noise for both networks\n q_network.reset_noise()\n target_network.reset_noise()\n data = rb.sample(args.batch_size)\n\n with torch.no_grad():\n next_dist = target_network(data.next_observations) # [B, num_actions, n_atoms]\n support = target_network.support # [n_atoms]\n next_q_values = torch.sum(next_dist * support, dim=2) # [B, num_actions]\n\n # double q-learning\n next_dist_online = q_network(data.next_observations) # [B, num_actions, n_atoms]\n next_q_online = torch.sum(next_dist_online * support, dim=2) # [B, num_actions]\n best_actions = torch.argmax(next_q_online, dim=1) # [B]\n next_pmfs = next_dist[torch.arange(args.batch_size), best_actions] # [B, n_atoms]\n\n # compute the n-step Bellman update.\n gamma_n = args.gamma**args.n_step\n next_atoms = data.rewards + gamma_n * support * (1 - data.dones.float())\n tz = next_atoms.clamp(q_network.v_min, q_network.v_max)\n\n # projection\n delta_z = q_network.delta_z\n b = (tz - q_network.v_min) / delta_z # shape: [B, n_atoms]\n l = b.floor().clamp(0, args.n_atoms - 1)\n u = b.ceil().clamp(0, args.n_atoms - 1)\n\n # (l == u).float() handles the case where bj is exactly an integer\n # example bj = 1, then the upper ceiling should be uj= 2, and lj= 1\n d_m_l = (u.float() + (l == b).float() - b) * next_pmfs # [B, n_atoms]\n d_m_u = (b - l) * next_pmfs # [B, n_atoms]\n\n target_pmfs = torch.zeros_like(next_pmfs)\n for i in range(target_pmfs.size(0)):\n target_pmfs[i].index_add_(0, l[i].long(), 
d_m_l[i])\n target_pmfs[i].index_add_(0, u[i].long(), d_m_u[i])\n\n dist = q_network(data.observations) # [B, num_actions, n_atoms]\n pred_dist = dist.gather(1, data.actions.unsqueeze(-1).expand(-1, -1, args.n_atoms)).squeeze(1)\n log_pred = torch.log(pred_dist.clamp(min=1e-5, max=1 - 1e-5))\n\n loss_per_sample = -(target_pmfs * log_pred).sum(dim=1)\n loss = (loss_per_sample * data.weights.squeeze()).mean()\n\n # update priorities\n new_priorities = loss_per_sample.detach().cpu().numpy()\n rb.update_priorities(data.indices, new_priorities)\n\n if global_step % 100 == 0:\n writer.add_scalar(""losses/td_loss"", loss.item(), global_step)\n q_values = (pred_dist * q_network.support).sum(dim=1) # [B]\n writer.add_scalar(""losses/q_values"", q_values.mean().item(), global_step)\n sps = int(global_step / (time.time() - start_time))\n print(""SPS:"", sps)\n writer.add_scalar(""charts/SPS"", sps, global_step)\n writer.add_scalar(""charts/beta"", rb.beta, global_step)\n\n # optimize the model\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n\n # update target network\n if global_step % args.target_network_frequency == 0:\n for target_param, param in zip(target_network.parameters(), q_network.parameters()):\n target_param.data.copy_(args.tau * param.data + (1.0 - args.tau) * target_param.data)\n\n # optional early stop on dataset completion\n if args.capture_dataset and args.stop_on_complete:\n all_done = all(\n episodes_captured_per_split[s] >= split_targets[s]\n for s in splits_in_order\n ) and len(splits_in_order) > 0\n if all_done:\n break\n\n envs.close()\n writer.close()\n\n # write metadata for dataset\n if args.capture_dataset:\n if len(obs_chunks) > 0:\n print(\n f""Warning: Dropping {len(obs_chunks)} chunks for consistent number of chunks per file."",\n ""Consider changing the chunk_size and chunks_per_file parameters to prevent data-loss."",\n )\n\n os.makedirs(args.output_dir, exist_ok=True)\n metadata_path = os.path.join(args.output_dir, ""metadata.json"")\n if os.path.exists(metadata_path):\n try:\n with open(metadata_path, ""r"") as f:\n metadata = json.load(f)\n except Exception:\n metadata = {}\n else:\n metadata = {}\n\n metadata.setdefault(""env"", args.env_id)\n metadata.setdefault(""num_actions"", int(envs.single_action_space.n))\n for split in [""train"", ""val"", ""test""]:\n metadata.setdefault(f""num_episodes_{split}"", 0)\n metadata.setdefault(f""avg_episode_len_{split}"", 0.0)\n metadata.setdefault(f""episode_metadata_{split}"", [])\n\n for split_key in splits_in_order:\n ep_meta_list = episode_metadata_by_split[split_key]\n if ep_meta_list:\n metadata[f""episode_metadata_{split_key}""].extend(ep_meta_list)\n metadata[f""num_episodes_{split_key}""] = len(metadata[f""episode_metadata_{split_key}""])\n metadata[f""avg_episode_len_{split_key}""] = float(\n np.mean([ep[""avg_seq_len""] for ep in metadata[f""episode_metadata_{split_key}""]])\n )\n\n with open(metadata_path, ""w"") as f:\n json.dump(metadata, f)\n""""""\nGenerates a dataset of random-action Atari episodes.\nEpisodes are saved individually as memory-mapped files for efficient loading.\nReplicates the behavior of generate_coinrun_dataset.py but for Atari.\n""""""\n\nfrom dataclasses import dataclass\n\nimport gymnasium as gym\nimport numpy as np\nimport tyro\nimport json\nimport os\nfrom cleanrl_utils.atari_wrappers import (\n ClipRewardEnv,\n EpisodicLifeEnv,\n FireResetEnv,\n MaxAndSkipEnv,\n NoopResetEnv,\n)\nfrom utils import save_chunks # type: ignore\n\n\n""""""\nOld dataset-only generator 
removed in favor of integrated Rainbow + capture mode.\n""""""\n",python,tab +2,252,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"4:53:40 PM [info] Activating crowd-code\n4:53:40 PM [info] Recording started\n4:53:40 PM [info] Initializing git provider using file system watchers...\n",Log,tab +3,363,"extension-output-pdoom-org.crowd-code-#1-crowd-code",150,0,"4:53:40 PM [info] Git repository found\n4:53:40 PM [info] Git provider initialized successfully\n4:53:40 PM [info] Initial git state: [object Object]\n",Log,content +4,3614,"TERMINAL",0,0,"",,terminal_command +5,5914,"input_pipeline/generate_coinrun_dataset.py",0,0,"""""""\nGenerates a dataset of random-action CoinRun episodes.\nEpisodes are saved individually as memory-mapped files for efficient loading.\n""""""\n\nfrom dataclasses import dataclass\n\nfrom gym3 import types_np\nimport numpy as np\nfrom procgen import ProcgenGym3Env\nimport tyro\nimport json\nimport os\nfrom utils import save_chunks\n\n\n@dataclass\nclass Args:\n num_episodes_train: int = 10000\n num_episodes_val: int = 500\n num_episodes_test: int = 500\n output_dir: str = ""data/coinrun_episodes""\n min_episode_length: int = 1000\n max_episode_length: int = 1000\n chunk_size: int = 160\n chunks_per_file: int = 100\n seed: int = 0\n\n\nargs = tyro.cli(Args)\nassert (\n args.max_episode_length >= args.min_episode_length\n), ""Maximum episode length must be greater than or equal to minimum episode length.""\n\nif args.min_episode_length < args.chunk_size:\n print(\n ""Warning: Minimum episode length is smaller than chunk size. Note that episodes shorter than the chunk size will be discarded.""\n )\n\n\n# --- Generate episodes ---\ndef generate_episodes(num_episodes, split):\n episode_idx = 0\n episode_metadata = []\n obs_chunks = []\n act_chunks = []\n file_idx = 0\n output_dir_split = os.path.join(args.output_dir, split)\n while episode_idx < num_episodes:\n seed = np.random.randint(0, 10000)\n env = ProcgenGym3Env(num=1, env_name=""coinrun"", start_level=seed)\n\n observations_seq = []\n actions_seq = []\n episode_obs_chunks = []\n episode_act_chunks = []\n\n # --- Run episode ---\n step_t = 0\n for step_t in range(args.max_episode_length):\n action = types_np.sample(env.ac_space, bshape=(env.num,))\n env.act(action)\n _, obs, first = env.observe()\n observations_seq.append(obs[""rgb""])\n actions_seq.append(action)\n if len(observations_seq) == args.chunk_size:\n episode_obs_chunks.append(observations_seq)\n episode_act_chunks.append(actions_seq)\n observations_seq = []\n actions_seq = []\n if first:\n break\n\n # --- Save episode ---\n if step_t + 1 >= args.min_episode_length:\n if observations_seq:\n if len(observations_seq) < args.chunk_size:\n print(\n f""Warning: Inconsistent chunk_sizes. Episode has {len(observations_seq)} frames, ""\n f""which is smaller than the requested chunk_size: {args.chunk_size}. 
""\n ""This might lead to performance degradation during training.""\n )\n episode_obs_chunks.append(observations_seq)\n episode_act_chunks.append(actions_seq)\n\n obs_chunks_data = [\n np.concatenate(seq, axis=0).astype(np.uint8)\n for seq in episode_obs_chunks\n ]\n act_chunks_data = [\n np.concatenate(act, axis=0) for act in episode_act_chunks\n ]\n obs_chunks.extend(obs_chunks_data)\n act_chunks.extend(act_chunks_data)\n\n ep_metadata, obs_chunks, file_idx, act_chunks = save_chunks(\n obs_chunks, file_idx, args.chunks_per_file, output_dir_split, act_chunks\n )\n episode_metadata.extend(ep_metadata)\n\n print(f""Episode {episode_idx} completed, length: {step_t + 1}."")\n episode_idx += 1\n else:\n print(f""Episode too short ({step_t + 1}), resampling..."")\n\n if len(obs_chunks) > 0:\n print(\n f""Warning: Dropping {len(obs_chunks)} chunks for consistent number of chunks per file."",\n ""Consider changing the chunk_size and chunks_per_file parameters to prevent data-loss."",\n )\n\n print(f""Done generating {split} split"")\n return episode_metadata\n\n\ndef get_action_space():\n env = ProcgenGym3Env(num=1, env_name=""coinrun"", start_level=0)\n return env.ac_space.eltype.n\n\n\ndef main():\n # Set random seed and create dataset directories\n np.random.seed(args.seed)\n # --- Generate episodes ---\n train_episode_metadata = generate_episodes(args.num_episodes_train, ""train"")\n val_episode_metadata = generate_episodes(args.num_episodes_val, ""val"")\n test_episode_metadata = generate_episodes(args.num_episodes_test, ""test"")\n\n # --- Save metadata ---\n metadata = {\n ""env"": ""coinrun"",\n ""num_actions"": get_action_space(),\n ""num_episodes_train"": args.num_episodes_train,\n ""num_episodes_val"": args.num_episodes_val,\n ""num_episodes_test"": args.num_episodes_test,\n ""avg_episode_len_train"": np.mean(\n [ep[""avg_seq_len""] for ep in train_episode_metadata]\n ),\n ""avg_episode_len_val"": np.mean(\n [ep[""avg_seq_len""] for ep in val_episode_metadata]\n ),\n ""avg_episode_len_test"": np.mean(\n [ep[""avg_seq_len""] for ep in test_episode_metadata]\n ),\n ""episode_metadata_train"": train_episode_metadata,\n ""episode_metadata_val"": val_episode_metadata,\n ""episode_metadata_test"": test_episode_metadata,\n }\n with open(os.path.join(args.output_dir, ""metadata.json""), ""w"") as f:\n json.dump(metadata, f)\n\n print(f""Done generating dataset."")\n\n\nif __name__ == ""__main__"":\n main()\n",python,tab +6,8758,"input_pipeline/generate_atari_dataset.py",0,0,"",python,tab +7,10505,"TERMINAL",0,0,"",,terminal_command +8,38856,"input_pipeline/generate_coinrun_dataset.py",0,0,"",python,tab +9,39949,"input_pipeline/generate_coinrun_dataset.py",1639,0,"",python,selection_keyboard +10,40267,"input_pipeline/generate_coinrun_dataset.py",3040,0,"",python,selection_keyboard +11,42226,"input_pipeline/generate_coinrun_dataset.py",3041,0,"\n ",python,content +12,46908,"input_pipeline/generate_coinrun_dataset.py",3042,12,"",python,content +13,50829,"input_pipeline/generate_coinrun_dataset.py",3028,0,"",python,selection_command +14,67357,"input_pipeline/generate_coinrun_dataset.py",3042,0,"",python,selection_command +15,67601,"input_pipeline/generate_coinrun_dataset.py",3043,0,"",python,selection_command +16,67632,"input_pipeline/generate_coinrun_dataset.py",3090,0,"",python,selection_command +17,67893,"input_pipeline/generate_coinrun_dataset.py",3137,0,"",python,selection_command +18,68138,"input_pipeline/generate_coinrun_dataset.py",3138,0,"",python,selection_command 
+19,68173,"input_pipeline/generate_coinrun_dataset.py",3211,0,"",python,selection_command +20,68202,"input_pipeline/generate_coinrun_dataset.py",3300,0,"",python,selection_command +21,68771,"input_pipeline/generate_coinrun_dataset.py",3211,0,"",python,selection_command +22,69012,"input_pipeline/generate_coinrun_dataset.py",3138,0,"",python,selection_command +23,69044,"input_pipeline/generate_coinrun_dataset.py",3137,0,"",python,selection_command +24,69083,"input_pipeline/generate_coinrun_dataset.py",3090,0,"",python,selection_command +25,69118,"input_pipeline/generate_coinrun_dataset.py",3043,0,"",python,selection_command +26,69145,"input_pipeline/generate_coinrun_dataset.py",3042,0,"",python,selection_command +27,69188,"input_pipeline/generate_coinrun_dataset.py",3028,0,"",python,selection_command +28,69218,"input_pipeline/generate_coinrun_dataset.py",2954,0,"",python,selection_command +29,69247,"input_pipeline/generate_coinrun_dataset.py",2922,0,"",python,selection_command +30,69282,"input_pipeline/generate_coinrun_dataset.py",2908,0,"",python,selection_command +31,69312,"input_pipeline/generate_coinrun_dataset.py",2862,0,"",python,selection_command +32,69384,"input_pipeline/generate_coinrun_dataset.py",2801,0,"",python,selection_command +33,69562,"input_pipeline/generate_coinrun_dataset.py",2769,0,"",python,selection_command +34,69796,"input_pipeline/generate_coinrun_dataset.py",2781,0,"",python,selection_command +35,69953,"input_pipeline/generate_coinrun_dataset.py",2797,0,"",python,selection_command +36,70702,"input_pipeline/generate_coinrun_dataset.py",2781,0,"",python,selection_command +37,71638,"input_pipeline/generate_coinrun_dataset.py",3073,0,"",python,selection_command +38,74660,"input_pipeline/generate_coinrun_dataset.py",2781,0,"",python,selection_command +39,79204,"input_pipeline/generate_coinrun_dataset.py",2813,0,"",python,selection_command +40,80606,"input_pipeline/generate_coinrun_dataset.py",2832,0,"",python,selection_command +41,80858,"input_pipeline/generate_coinrun_dataset.py",2882,0,"",python,selection_command +42,82541,"input_pipeline/generate_coinrun_dataset.py",2886,0,"",python,selection_command +43,82665,"input_pipeline/generate_coinrun_dataset.py",2889,0,"",python,selection_command +44,93762,"input_pipeline/generate_coinrun_dataset.py",1467,0,"",python,selection_command +45,96394,"input_pipeline/generate_coinrun_dataset.py",1927,0,"",python,selection_command +46,98374,"input_pipeline/generate_atari_dataset.py",0,0,"",python,tab +47,100357,"input_pipeline/generate_atari_dataset.py",19471,0,"",python,selection_command +48,100604,"input_pipeline/generate_atari_dataset.py",19473,0,"",python,selection_command +49,100627,"input_pipeline/generate_atari_dataset.py",19520,0,"",python,selection_command +50,100659,"input_pipeline/generate_atari_dataset.py",19614,0,"",python,selection_command +51,104474,"input_pipeline/generate_atari_dataset.py",18485,0,"",python,selection_command +52,105277,"input_pipeline/generate_atari_dataset.py",19263,0,"",python,selection_command +53,113270,"input_pipeline/generate_atari_dataset.py",19281,0,"",python,selection_command +54,113556,"input_pipeline/generate_atari_dataset.py",19282,0,"",python,selection_command +55,113556,"input_pipeline/generate_atari_dataset.py",19288,0,"",python,selection_command +56,113592,"input_pipeline/generate_atari_dataset.py",19289,0,"",python,selection_command +57,113622,"input_pipeline/generate_atari_dataset.py",19295,0,"",python,selection_command 
+58,114128,"input_pipeline/generate_atari_dataset.py",19289,0,"",python,selection_command +59,114223,"input_pipeline/generate_atari_dataset.py",19288,0,"",python,selection_command +60,115462,"/fast/home/franz.srambical/.cursor-server/extensions/anysphere.cursorpyright-1.0.9/dist/typeshed-fallback/stdlib/builtins.pyi",0,0,"""""""\nBuilt-in functions, types, exceptions, and other objects.\n\nThis module provides direct access to all 'built-in'\nidentifiers of Python; for example, builtins.len is\nthe full name for the built-in function len().\n\nThis module is not normally accessed explicitly by most\napplications, but can be useful in modules that provide\nobjects with the same name as a built-in value, but in\nwhich the built-in of that name is also needed.\n""""""\n\nimport _ast\nimport _sitebuiltins\nimport _typeshed\nimport sys\nimport types\nfrom _collections_abc import dict_items, dict_keys, dict_values\nfrom _typeshed import (\n AnnotationForm,\n ConvertibleToFloat,\n ConvertibleToInt,\n FileDescriptorOrPath,\n OpenBinaryMode,\n OpenBinaryModeReading,\n OpenBinaryModeUpdating,\n OpenBinaryModeWriting,\n OpenTextMode,\n ReadableBuffer,\n SupportsAdd,\n SupportsAiter,\n SupportsAnext,\n SupportsDivMod,\n SupportsFlush,\n SupportsIter,\n SupportsKeysAndGetItem,\n SupportsLenAndGetItem,\n SupportsNext,\n SupportsRAdd,\n SupportsRDivMod,\n SupportsRichComparison,\n SupportsRichComparisonT,\n SupportsWrite,\n)\nfrom collections.abc import Awaitable, Callable, Iterable, Iterator, MutableSet, Reversible, Set as AbstractSet, Sized\nfrom io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper\nfrom os import PathLike\nfrom types import CellType, CodeType, GenericAlias, TracebackType\n\n# mypy crashes if any of {ByteString, Sequence, MutableSequence, Mapping, MutableMapping}\n# are imported from collections.abc in builtins.pyi\nfrom typing import ( # noqa: Y022,UP035\n IO,\n Any,\n BinaryIO,\n ClassVar,\n Generic,\n Mapping,\n MutableMapping,\n MutableSequence,\n Protocol,\n Sequence,\n SupportsAbs,\n SupportsBytes,\n SupportsComplex,\n SupportsFloat,\n SupportsIndex,\n TypeVar,\n final,\n overload,\n type_check_only,\n)\n\n# we can't import `Literal` from typing or mypy crashes: see #11247\nfrom typing_extensions import ( # noqa: Y023\n Concatenate,\n Literal,\n LiteralString,\n ParamSpec,\n Self,\n TypeAlias,\n TypeGuard,\n TypeIs,\n TypeVarTuple,\n deprecated,\n)\n\nif sys.version_info >= (3, 14):\n from _typeshed import AnnotateFunc\n\n_T = TypeVar(""_T"")\n_I = TypeVar(""_I"", default=int)\n_T_co = TypeVar(""_T_co"", covariant=True)\n_T_contra = TypeVar(""_T_contra"", contravariant=True)\n_R_co = TypeVar(""_R_co"", covariant=True)\n_KT = TypeVar(""_KT"")\n_VT = TypeVar(""_VT"")\n_S = TypeVar(""_S"")\n_T1 = TypeVar(""_T1"")\n_T2 = TypeVar(""_T2"")\n_T3 = TypeVar(""_T3"")\n_T4 = TypeVar(""_T4"")\n_T5 = TypeVar(""_T5"")\n_SupportsNextT_co = TypeVar(""_SupportsNextT_co"", bound=SupportsNext[Any], covariant=True)\n_SupportsAnextT_co = TypeVar(""_SupportsAnextT_co"", bound=SupportsAnext[Any], covariant=True)\n_AwaitableT = TypeVar(""_AwaitableT"", bound=Awaitable[Any])\n_AwaitableT_co = TypeVar(""_AwaitableT_co"", bound=Awaitable[Any], covariant=True)\n_P = ParamSpec(""_P"")\n\n# Type variables for slice\n_StartT_co = TypeVar(""_StartT_co"", covariant=True, default=Any) # slice -> slice[Any, Any, Any]\n_StopT_co = TypeVar(""_StopT_co"", covariant=True, default=_StartT_co) # slice[A] -> slice[A, A, A]\n# NOTE: step could differ from start and stop, (e.g. 
datetime/timedelta)l\n# the default (start|stop) is chosen to cater to the most common case of int/index slices.\n# FIXME: https://github.com/python/typing/issues/213 (replace step=start|stop with step=start&stop)\n_StepT_co = TypeVar(""_StepT_co"", covariant=True, default=_StartT_co | _StopT_co) # slice[A,B] -> slice[A, B, A|B]\n\nclass object:\n """"""\n The base class of the class hierarchy.\n\n When called, it accepts no arguments and returns a new featureless\n instance that has no instance attributes and cannot be given any.\n """"""\n __doc__: str | None\n __dict__: dict[str, Any]\n __module__: str\n __annotations__: dict[str, Any]\n @property\n def __class__(self) -> type[Self]:\n """"""the object's class""""""\n ...\n @__class__.setter\n def __class__(self, type: type[Self], /) -> None:\n """"""the object's class""""""\n ...\n def __init__(self) -> None:\n """"""Initialize self. See help(type(self)) for accurate signature.""""""\n ...\n def __new__(cls) -> Self:\n """"""Create and return a new object. See help(type) for accurate signature.""""""\n ...\n # N.B. `object.__setattr__` and `object.__delattr__` are heavily special-cased by type checkers.\n # Overriding them in subclasses has different semantics, even if the override has an identical signature.\n def __setattr__(self, name: str, value: Any, /) -> None:\n """"""Implement setattr(self, name, value).""""""\n ...\n def __delattr__(self, name: str, /) -> None:\n """"""Implement delattr(self, name).""""""\n ...\n def __eq__(self, value: object, /) -> bool:\n """"""Return self==value.""""""\n ...\n def __ne__(self, value: object, /) -> bool:\n """"""Return self!=value.""""""\n ...\n def __str__(self) -> str:\n """"""Return str(self).""""""\n ...\n def __repr__(self) -> str:\n """"""Return repr(self).""""""\n ...\n def __hash__(self) -> int:\n """"""Return hash(self).""""""\n ...\n def __format__(self, format_spec: str, /) -> str:\n """"""\n Default object formatter.\n\n Return str(self) if format_spec is empty. Raise TypeError otherwise.\n """"""\n ...\n def __getattribute__(self, name: str, /) -> Any:\n """"""Return getattr(self, name).""""""\n ...\n def __sizeof__(self) -> int:\n """"""Size of object in memory, in bytes.""""""\n ...\n # return type of pickle methods is rather hard to express in the current type system\n # see #6661 and https://docs.python.org/3/library/pickle.html#object.__reduce__\n def __reduce__(self) -> str | tuple[Any, ...]:\n """"""Helper for pickle.""""""\n ...\n def __reduce_ex__(self, protocol: SupportsIndex, /) -> str | tuple[Any, ...]:\n """"""Helper for pickle.""""""\n ...\n if sys.version_info >= (3, 11):\n def __getstate__(self) -> object:\n """"""Helper for pickle.""""""\n ...\n\n def __dir__(self) -> Iterable[str]:\n """"""Default dir() implementation.""""""\n ...\n def __init_subclass__(cls) -> None:\n """"""\n This method is called when a class is subclassed.\n\n The default implementation does nothing. It may be\n overridden to extend subclasses.\n """"""\n ...\n @classmethod\n def __subclasshook__(cls, subclass: type, /) -> bool:\n """"""\n Abstract classes can override this to customize issubclass().\n\n This is invoked early on by abc.ABCMeta.__subclasscheck__().\n It should return True, False or NotImplemented. If it returns\n NotImplemented, the normal algorithm is used. 
Otherwise, it\n overrides the normal algorithm (and the outcome is cached).\n """"""\n ...\n\nclass staticmethod(Generic[_P, _R_co]):\n """"""\n staticmethod(function) -> method\n\n Convert a function to be a static method.\n\n A static method does not receive an implicit first argument.\n To declare a static method, use this idiom:\n\n class C:\n @staticmethod\n def f(arg1, arg2, argN):\n ...\n\n It can be called either on the class (e.g. C.f()) or on an instance\n (e.g. C().f()). Both the class and the instance are ignored, and\n neither is passed implicitly as the first argument to the method.\n\n Static methods in Python are similar to those found in Java or C++.\n For a more advanced concept, see the classmethod builtin.\n """"""\n @property\n def __func__(self) -> Callable[_P, _R_co]: ...\n @property\n def __isabstractmethod__(self) -> bool: ...\n def __init__(self, f: Callable[_P, _R_co], /) -> None: ...\n @overload\n def __get__(self, instance: None, owner: type, /) -> Callable[_P, _R_co]:\n """"""Return an attribute of instance, which is of type owner.""""""\n ...\n @overload\n def __get__(self, instance: _T, owner: type[_T] | None = None, /) -> Callable[_P, _R_co]:\n """"""Return an attribute of instance, which is of type owner.""""""\n ...\n if sys.version_info >= (3, 10):\n __name__: str\n __qualname__: str\n @property\n def __wrapped__(self) -> Callable[_P, _R_co]: ...\n def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R_co:\n """"""Call self as a function.""""""\n ...\n if sys.version_info >= (3, 14):\n def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...\n __annotate__: AnnotateFunc | None\n\nclass classmethod(Generic[_T, _P, _R_co]):\n """"""\n classmethod(function) -> method\n\n Convert a function to be a class method.\n\n A class method receives the class as implicit first argument,\n just like an instance method receives the instance.\n To declare a class method, use this idiom:\n\n class C:\n @classmethod\n def f(cls, arg1, arg2, argN):\n ...\n\n It can be called either on the class (e.g. C.f()) or on an instance\n (e.g. C().f()). The instance is ignored except for its class.\n If a class method is called for a derived class, the derived class\n object is passed as the implied first argument.\n\n Class methods are different than C++ or Java static methods.\n If you want those, see the staticmethod builtin.\n """"""\n @property\n def __func__(self) -> Callable[Concatenate[type[_T], _P], _R_co]: ...\n @property\n def __isabstractmethod__(self) -> bool: ...\n def __init__(self, f: Callable[Concatenate[type[_T], _P], _R_co], /) -> None: ...\n @overload\n def __get__(self, instance: _T, owner: type[_T] | None = None, /) -> Callable[_P, _R_co]:\n """"""Return an attribute of instance, which is of type owner.""""""\n ...\n @overload\n def __get__(self, instance: None, owner: type[_T], /) -> Callable[_P, _R_co]:\n """"""Return an attribute of instance, which is of type owner.""""""\n ...\n if sys.version_info >= (3, 10):\n __name__: str\n __qualname__: str\n @property\n def __wrapped__(self) -> Callable[Concatenate[type[_T], _P], _R_co]: ...\n if sys.version_info >= (3, 14):\n def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...\n __annotate__: AnnotateFunc | None\n\nclass type:\n """"""\n type(object) -> the object's type\n type(name, bases, dict, **kwds) -> a new type\n """"""\n # object.__base__ is None. 
Otherwise, it would be a type.\n @property\n def __base__(self) -> type | None: ...\n __bases__: tuple[type, ...]\n @property\n def __basicsize__(self) -> int: ...\n @property\n def __dict__(self) -> types.MappingProxyType[str, Any]: ... # type: ignore[override]\n @property\n def __dictoffset__(self) -> int: ...\n @property\n def __flags__(self) -> int: ...\n @property\n def __itemsize__(self) -> int: ...\n __module__: str\n @property\n def __mro__(self) -> tuple[type, ...]: ...\n __name__: str\n __qualname__: str\n @property\n def __text_signature__(self) -> str | None: ...\n @property\n def __weakrefoffset__(self) -> int: ...\n @overload\n def __init__(self, o: object, /) -> None: ...\n @overload\n def __init__(self, name: str, bases: tuple[type, ...], dict: dict[str, Any], /, **kwds: Any) -> None: ...\n @overload\n def __new__(cls, o: object, /) -> type: ...\n @overload\n def __new__(\n cls: type[_typeshed.Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], /, **kwds: Any\n ) -> _typeshed.Self: ...\n def __call__(self, *args: Any, **kwds: Any) -> Any:\n """"""Call self as a function.""""""\n ...\n def __subclasses__(self: _typeshed.Self) -> list[_typeshed.Self]:\n """"""Return a list of immediate subclasses.""""""\n ...\n # Note: the documentation doesn't specify what the return type is, the standard\n # implementation seems to be returning a list.\n def mro(self) -> list[type]:\n """"""Return a type's method resolution order.""""""\n ...\n def __instancecheck__(self, instance: Any, /) -> bool:\n """"""Check if an object is an instance.""""""\n ...\n def __subclasscheck__(self, subclass: type, /) -> bool:\n """"""Check if a class is a subclass.""""""\n ...\n @classmethod\n def __prepare__(metacls, name: str, bases: tuple[type, ...], /, **kwds: Any) -> MutableMapping[str, object]:\n """"""\n __prepare__() -> dict\n used to create the namespace for the class statement\n """"""\n ...\n if sys.version_info >= (3, 10):\n def __or__(self, value: Any, /) -> types.UnionType:\n """"""Return self|value.""""""\n ...\n def __ror__(self, value: Any, /) -> types.UnionType:\n """"""Return value|self.""""""\n ...\n if sys.version_info >= (3, 12):\n __type_params__: tuple[TypeVar | ParamSpec | TypeVarTuple, ...]\n __annotations__: dict[str, AnnotationForm]\n if sys.version_info >= (3, 14):\n __annotate__: AnnotateFunc | None\n\nclass super:\n """"""\n super() -> same as super(__class__, )\n super(type) -> unbound super object\n super(type, obj) -> bound super object; requires isinstance(obj, type)\n super(type, type2) -> bound super object; requires issubclass(type2, type)\n Typical use to call a cooperative superclass method:\n class C(B):\n def meth(self, arg):\n super().meth(arg)\n This works for class methods too:\n class C(B):\n @classmethod\n def cmeth(cls, arg):\n super().cmeth(arg)\n """"""\n @overload\n def __init__(self, t: Any, obj: Any, /) -> None: ...\n @overload\n def __init__(self, t: Any, /) -> None: ...\n @overload\n def __init__(self) -> None: ...\n\n_PositiveInteger: TypeAlias = Literal[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25]\n_NegativeInteger: TypeAlias = Literal[-1, -2, -3, -4, -5, -6, -7, -8, -9, -10, -11, -12, -13, -14, -15, -16, -17, -18, -19, -20]\n_LiteralInteger = _PositiveInteger | _NegativeInteger | Literal[0] # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed\n\nclass int:\n """"""\n int([x]) -> integer\n int(x, base=10) -> integer\n\n Convert a number or string to an integer, or return 0 if no 
arguments\n are given. If x is a number, return x.__int__(). For floating point\n numbers, this truncates towards zero.\n\n If x is not a number or if base is given, then x must be a string,\n bytes, or bytearray instance representing an integer literal in the\n given base. The literal can be preceded by '+' or '-' and be surrounded\n by whitespace. The base defaults to 10. Valid bases are 0 and 2-36.\n Base 0 means to interpret the base from the string as an integer literal.\n >>> int('0b100', base=0)\n 4\n """"""\n @overload\n def __new__(cls, x: ConvertibleToInt = ..., /) -> Self: ...\n @overload\n def __new__(cls, x: str | bytes | bytearray, /, base: SupportsIndex) -> Self: ...\n def as_integer_ratio(self) -> tuple[int, Literal[1]]:\n """"""\n Return a pair of integers, whose ratio is equal to the original int.\n\n The ratio is in lowest terms and has a positive denominator.\n\n >>> (10).as_integer_ratio()\n (10, 1)\n >>> (-10).as_integer_ratio()\n (-10, 1)\n >>> (0).as_integer_ratio()\n (0, 1)\n """"""\n ...\n @property\n def real(self) -> int:\n """"""the real part of a complex number""""""\n ...\n @property\n def imag(self) -> Literal[0]:\n """"""the imaginary part of a complex number""""""\n ...\n @property\n def numerator(self) -> int:\n """"""the numerator of a rational number in lowest terms""""""\n ...\n @property\n def denominator(self) -> Literal[1]:\n """"""the denominator of a rational number in lowest terms""""""\n ...\n def conjugate(self) -> int:\n """"""Returns self, the complex conjugate of any int.""""""\n ...\n def bit_length(self) -> int:\n """"""\n Number of bits necessary to represent self in binary.\n\n >>> bin(37)\n '0b100101'\n >>> (37).bit_length()\n 6\n """"""\n ...\n if sys.version_info >= (3, 10):\n def bit_count(self) -> int:\n """"""\n Number of ones in the binary representation of the absolute value of self.\n\n Also known as the population count.\n\n >>> bin(13)\n '0b1101'\n >>> (13).bit_count()\n 3\n """"""\n ...\n\n if sys.version_info >= (3, 11):\n def to_bytes(\n self, length: SupportsIndex = 1, byteorder: Literal[""little"", ""big""] = ""big"", *, signed: bool = False\n ) -> bytes:\n """"""\n Return an array of bytes representing an integer.\n\n length\n Length of bytes object to use. An OverflowError is raised if the\n integer is not representable with the given number of bytes. Default\n is length 1.\n byteorder\n The byte order used to represent the integer. If byteorder is 'big',\n the most significant byte is at the beginning of the byte array. If\n byteorder is 'little', the most significant byte is at the end of the\n byte array. To request the native byte order of the host system, use\n `sys.byteorder' as the byte order value. Default is to use 'big'.\n signed\n Determines whether two's complement is used to represent the integer.\n If signed is False and a negative integer is given, an OverflowError\n is raised.\n """"""\n ...\n @classmethod\n def from_bytes(\n cls,\n bytes: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer,\n byteorder: Literal[""little"", ""big""] = ""big"",\n *,\n signed: bool = False,\n ) -> Self:\n """"""\n Return the integer represented by the given array of bytes.\n\n bytes\n Holds the array of bytes to convert. The argument must either\n support the buffer protocol or be an iterable object producing bytes.\n Bytes and bytearray are examples of built-in objects that support the\n buffer protocol.\n byteorder\n The byte order used to represent the integer. 
If byteorder is 'big',\n the most significant byte is at the beginning of the byte array. If\n byteorder is 'little', the most significant byte is at the end of the\n byte array. To request the native byte order of the host system, use\n `sys.byteorder' as the byte order value. Default is to use 'big'.\n signed\n Indicates whether two's complement is used to represent the integer.\n """"""\n ...\n else:\n def to_bytes(self, length: SupportsIndex, byteorder: Literal[""little"", ""big""], *, signed: bool = False) -> bytes: ...\n @classmethod\n def from_bytes(\n cls,\n bytes: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer,\n byteorder: Literal[""little"", ""big""],\n *,\n signed: bool = False,\n ) -> Self: ...\n\n if sys.version_info >= (3, 12):\n def is_integer(self) -> Literal[True]:\n """"""Returns True. Exists for duck type compatibility with float.is_integer.""""""\n ...\n\n def __add__(self, value: int, /) -> int:\n """"""Return self+value.""""""\n ...\n def __sub__(self, value: int, /) -> int:\n """"""Return self-value.""""""\n ...\n def __mul__(self, value: int, /) -> int:\n """"""Return self*value.""""""\n ...\n def __floordiv__(self, value: int, /) -> int:\n """"""Return self//value.""""""\n ...\n def __truediv__(self, value: int, /) -> float:\n """"""Return self/value.""""""\n ...\n def __mod__(self, value: int, /) -> int:\n """"""Return self%value.""""""\n ...\n def __divmod__(self, value: int, /) -> tuple[int, int]:\n """"""Return divmod(self, value).""""""\n ...\n def __radd__(self, value: int, /) -> int:\n """"""Return value+self.""""""\n ...\n def __rsub__(self, value: int, /) -> int:\n """"""Return value-self.""""""\n ...\n def __rmul__(self, value: int, /) -> int:\n """"""Return value*self.""""""\n ...\n def __rfloordiv__(self, value: int, /) -> int:\n """"""Return value//self.""""""\n ...\n def __rtruediv__(self, value: int, /) -> float:\n """"""Return value/self.""""""\n ...\n def __rmod__(self, value: int, /) -> int:\n """"""Return value%self.""""""\n ...\n def __rdivmod__(self, value: int, /) -> tuple[int, int]:\n """"""Return divmod(value, self).""""""\n ...\n @overload\n def __pow__(self, x: Literal[0], /) -> Literal[1]:\n """"""Return pow(self, value, mod).""""""\n ...\n @overload\n def __pow__(self, value: Literal[0], mod: None, /) -> Literal[1]:\n """"""Return pow(self, value, mod).""""""\n ...\n @overload\n def __pow__(self, value: _PositiveInteger, mod: None = None, /) -> int:\n """"""Return pow(self, value, mod).""""""\n ...\n @overload\n def __pow__(self, value: _NegativeInteger, mod: None = None, /) -> float:\n """"""Return pow(self, value, mod).""""""\n ...\n # positive __value -> int; negative __value -> float\n # return type must be Any as `int | float` causes too many false-positive errors\n @overload\n def __pow__(self, value: int, mod: None = None, /) -> Any:\n """"""Return pow(self, value, mod).""""""\n ...\n @overload\n def __pow__(self, value: int, mod: int, /) -> int:\n """"""Return pow(self, value, mod).""""""\n ...\n def __rpow__(self, value: int, mod: int | None = None, /) -> Any:\n """"""Return pow(value, self, mod).""""""\n ...\n def __and__(self, value: int, /) -> int:\n """"""Return self&value.""""""\n ...\n def __or__(self, value: int, /) -> int:\n """"""Return self|value.""""""\n ...\n def __xor__(self, value: int, /) -> int:\n """"""Return self^value.""""""\n ...\n def __lshift__(self, value: int, /) -> int:\n """"""Return self<<value.""""""\n ...\n def __rshift__(self, value: int, /) -> int:\n """"""Return self>>value.""""""\n ...\n def __rand__(self, value: int, /) -> int:\n """"""Return 
value&self.""""""\n ...\n def __ror__(self, value: int, /) -> int:\n """"""Return value|self.""""""\n ...\n def __rxor__(self, value: int, /) -> int:\n """"""Return value^self.""""""\n ...\n def __rlshift__(self, value: int, /) -> int:\n """"""Return value<<self.""""""\n ...\n def __rrshift__(self, value: int, /) -> int:\n """"""Return value>>self.""""""\n ...\n def __neg__(self) -> int:\n """"""-self""""""\n ...\n def __pos__(self) -> int:\n """"""+self""""""\n ...\n def __invert__(self) -> int:\n """"""~self""""""\n ...\n def __trunc__(self) -> int:\n """"""Truncating an Integral returns itself.""""""\n ...\n def __ceil__(self) -> int:\n """"""Ceiling of an Integral returns itself.""""""\n ...\n def __floor__(self) -> int:\n """"""Flooring an Integral returns itself.""""""\n ...\n if sys.version_info >= (3, 14):\n def __round__(self, ndigits: SupportsIndex | None = None, /) -> int: ...\n else:\n def __round__(self, ndigits: SupportsIndex = ..., /) -> int:\n """"""\n Rounding an Integral returns itself.\n\n Rounding with an ndigits argument also returns an integer.\n """"""\n ...\n\n def __getnewargs__(self) -> tuple[int]: ...\n def __eq__(self, value: object, /) -> bool:\n """"""Return self==value.""""""\n ...\n def __ne__(self, value: object, /) -> bool:\n """"""Return self!=value.""""""\n ...\n def __lt__(self, value: int, /) -> bool:\n """"""Return self<value.""""""\n ...\n def __le__(self, value: int, /) -> bool:\n """"""Return self<=value.""""""\n ...\n def __gt__(self, value: int, /) -> bool:\n """"""Return self>value.""""""\n ...\n def __ge__(self, value: int, /) -> bool:\n """"""Return self>=value.""""""\n ...\n def __float__(self) -> float:\n """"""float(self)""""""\n ...\n def __int__(self) -> int:\n """"""int(self)""""""\n ...\n def __abs__(self) -> int:\n """"""abs(self)""""""\n ...\n def __hash__(self) -> int:\n """"""Return hash(self).""""""\n ...\n def __bool__(self) -> bool:\n """"""True if self else False""""""\n ...\n def __index__(self) -> int:\n """"""Return self converted to an integer, if self is suitable for use as an index into a list.""""""\n ...\n\nclass float:\n """"""Convert a string or number to a floating point number, if possible.""""""\n def __new__(cls, x: ConvertibleToFloat = ..., /) -> Self: ...\n def as_integer_ratio(self) -> tuple[int, int]:\n """"""\n Return a pair of integers, whose ratio is exactly equal to the original float.\n\n The ratio is in lowest terms and has a positive denominator. 
Raise\n OverflowError on infinities and a ValueError on NaNs.\n\n >>> (10.0).as_integer_ratio()\n (10, 1)\n >>> (0.0).as_integer_ratio()\n (0, 1)\n >>> (-.25).as_integer_ratio()\n (-1, 4)\n """"""\n ...\n def hex(self) -> str:\n """"""\n Return a hexadecimal representation of a floating-point number.\n\n >>> (-0.1).hex()\n '-0x1.999999999999ap-4'\n >>> 3.14159.hex()\n '0x1.921f9f01b866ep+1'\n """"""\n ...\n def is_integer(self) -> bool:\n """"""Return True if the float is an integer.""""""\n ...\n @classmethod\n def fromhex(cls, string: str, /) -> Self:\n """"""\n Create a floating-point number from a hexadecimal string.\n\n >>> float.fromhex('0x1.ffffp10')\n 2047.984375\n >>> float.fromhex('-0x1p-1074')\n -5e-324\n """"""\n ...\n @property\n def real(self) -> float:\n """"""the real part of a complex number""""""\n ...\n @property\n def imag(self) -> float:\n """"""the imaginary part of a complex number""""""\n ...\n def conjugate(self) -> float:\n """"""Return self, the complex conjugate of any float.""""""\n ...\n def __add__(self, value: float, /) -> float:\n """"""Return self+value.""""""\n ...\n def __sub__(self, value: float, /) -> float:\n """"""Return self-value.""""""\n ...\n def __mul__(self, value: float, /) -> float:\n """"""Return self*value.""""""\n ...\n def __floordiv__(self, value: float, /) -> float:\n """"""Return self//value.""""""\n ...\n def __truediv__(self, value: float, /) -> float:\n """"""Return self/value.""""""\n ...\n def __mod__(self, value: float, /) -> float:\n """"""Return self%value.""""""\n ...\n def __divmod__(self, value: float, /) -> tuple[float, float]:\n """"""Return divmod(self, value).""""""\n ...\n @overload\n def __pow__(self, value: int, mod: None = None, /) -> float:\n """"""Return pow(self, value, mod).""""""\n ...\n # positive __value -> float; negative __value -> complex\n # return type must be Any as `float | complex` causes too many false-positive errors\n @overload\n def __pow__(self, value: float, mod: None = None, /) -> Any:\n """"""Return pow(self, value, mod).""""""\n ...\n def __radd__(self, value: float, /) -> float:\n """"""Return value+self.""""""\n ...\n def __rsub__(self, value: float, /) -> float:\n """"""Return value-self.""""""\n ...\n def __rmul__(self, value: float, /) -> float:\n """"""Return value*self.""""""\n ...\n def __rfloordiv__(self, value: float, /) -> float:\n """"""Return value//self.""""""\n ...\n def __rtruediv__(self, value: float, /) -> float:\n """"""Return value/self.""""""\n ...\n def __rmod__(self, value: float, /) -> float:\n """"""Return value%self.""""""\n ...\n def __rdivmod__(self, value: float, /) -> tuple[float, float]:\n """"""Return divmod(value, self).""""""\n ...\n @overload\n def __rpow__(self, value: _PositiveInteger, mod: None = None, /) -> float:\n """"""Return pow(value, self, mod).""""""\n ...\n @overload\n def __rpow__(self, value: _NegativeInteger, mod: None = None, /) -> complex:\n """"""Return pow(value, self, mod).""""""\n ...\n # Returning `complex` for the general case gives too many false-positive errors.\n @overload\n def __rpow__(self, value: float, mod: None = None, /) -> Any:\n """"""Return pow(value, self, mod).""""""\n ...\n def __getnewargs__(self) -> tuple[float]: ...\n def __trunc__(self) -> int:\n """"""Return the Integral closest to x between 0 and x.""""""\n ...\n def __ceil__(self) -> int:\n """"""Return the ceiling as an Integral.""""""\n ...\n def __floor__(self) -> int:\n """"""Return the floor as an Integral.""""""\n ...\n @overload\n def __round__(self, 
ndigits: None = None, /) -> int:\n """"""\n Return the Integral closest to x, rounding half toward even.\n\n When an argument is passed, work like built-in round(x, ndigits).\n """"""\n ...\n @overload\n def __round__(self, ndigits: SupportsIndex, /) -> float:\n """"""\n Return the Integral closest to x, rounding half toward even.\n\n When an argument is passed, work like built-in round(x, ndigits).\n """"""\n ...\n def __eq__(self, value: object, /) -> bool:\n """"""Return self==value.""""""\n ...\n def __ne__(self, value: object, /) -> bool:\n """"""Return self!=value.""""""\n ...\n def __lt__(self, value: float, /) -> bool:\n """"""Return self<value.""""""\n ...\n def __le__(self, value: float, /) -> bool:\n """"""Return self<=value.""""""\n ...\n def __gt__(self, value: float, /) -> bool:\n """"""Return self>value.""""""\n ...\n def __ge__(self, value: float, /) -> bool:\n """"""Return self>=value.""""""\n ...\n def __neg__(self) -> float:\n """"""-self""""""\n ...\n def __pos__(self) -> float:\n """"""+self""""""\n ...\n def __int__(self) -> int:\n """"""int(self)""""""\n ...\n def __float__(self) -> float:\n """"""float(self)""""""\n ...\n def __abs__(self) -> float:\n """"""abs(self)""""""\n ...\n def __hash__(self) -> int:\n """"""Return hash(self).""""""\n ...\n def __bool__(self) -> bool:\n """"""True if self else False""""""\n ...\n if sys.version_info >= (3, 14):\n @classmethod\n def from_number(cls, number: float | SupportsIndex | SupportsFloat, /) -> Self: ...\n\nclass complex:\n """"""\n Create a complex number from a real part and an optional imaginary part.\n\n This is equivalent to (real + imag*1j) where imag defaults to 0.\n """"""\n # Python doesn't currently accept SupportsComplex for the second argument\n @overload\n def __new__(\n cls,\n real: complex | SupportsComplex | SupportsFloat | SupportsIndex = ...,\n imag: complex | SupportsFloat | SupportsIndex = ...,\n ) -> Self: ...\n @overload\n def __new__(cls, real: str | SupportsComplex | SupportsFloat | SupportsIndex | complex) -> Self: ...\n @property\n def real(self) -> float:\n """"""the real part of a complex number""""""\n ...\n @property\n def imag(self) -> float:\n """"""the imaginary part of a complex number""""""\n ...\n def conjugate(self) -> complex:\n """"""Return the complex conjugate of its argument. 
(3-4j).conjugate() == 3+4j.""""""\n ...\n def __add__(self, value: complex, /) -> complex:\n """"""Return self+value.""""""\n ...\n def __sub__(self, value: complex, /) -> complex:\n """"""Return self-value.""""""\n ...\n def __mul__(self, value: complex, /) -> complex:\n """"""Return self*value.""""""\n ...\n def __pow__(self, value: complex, mod: None = None, /) -> complex:\n """"""Return pow(self, value, mod).""""""\n ...\n def __truediv__(self, value: complex, /) -> complex:\n """"""Return self/value.""""""\n ...\n def __radd__(self, value: complex, /) -> complex:\n """"""Return value+self.""""""\n ...\n def __rsub__(self, value: complex, /) -> complex:\n """"""Return value-self.""""""\n ...\n def __rmul__(self, value: complex, /) -> complex:\n """"""Return value*self.""""""\n ...\n def __rpow__(self, value: complex, mod: None = None, /) -> complex:\n """"""Return pow(value, self, mod).""""""\n ...\n def __rtruediv__(self, value: complex, /) -> complex:\n """"""Return value/self.""""""\n ...\n def __eq__(self, value: object, /) -> bool:\n """"""Return self==value.""""""\n ...\n def __ne__(self, value: object, /) -> bool:\n """"""Return self!=value.""""""\n ...\n def __neg__(self) -> complex:\n """"""-self""""""\n ...\n def __pos__(self) -> complex:\n """"""+self""""""\n ...\n def __abs__(self) -> float:\n """"""abs(self)""""""\n ...\n def __hash__(self) -> int:\n """"""Return hash(self).""""""\n ...\n def __bool__(self) -> bool:\n """"""True if self else False""""""\n ...\n if sys.version_info >= (3, 11):\n def __complex__(self) -> complex:\n """"""Convert this value to exact type complex.""""""\n ...\n if sys.version_info >= (3, 14):\n @classmethod\n def from_number(cls, number: complex | SupportsComplex | SupportsFloat | SupportsIndex, /) -> Self: ...\n\nclass _FormatMapMapping(Protocol):\n def __getitem__(self, key: str, /) -> Any: ...\n\nclass _TranslateTable(Protocol):\n def __getitem__(self, key: int, /) -> str | int | None: ...\n\nclass str(Sequence[str]):\n """"""\n str(object='') -> str\n str(bytes_or_buffer[, encoding[, errors]]) -> str\n\n Create a new string object from the given object. If encoding or\n errors is specified, then the object must expose a data buffer\n that will be decoded using the given encoding and error handler.\n Otherwise, returns the result of object.__str__() (if defined)\n or repr(object).\n encoding defaults to sys.getdefaultencoding().\n errors defaults to 'strict'.\n """"""\n @overload\n def __new__(cls, object: object = ...) -> Self: ...\n @overload\n def __new__(cls, object: ReadableBuffer, encoding: str = ..., errors: str = ...) 
-> Self: ...\n @overload\n def capitalize(self: LiteralString) -> LiteralString:\n """"""\n Return a capitalized version of the string.\n\n More specifically, make the first character have upper case and the rest lower\n case.\n """"""\n ...\n @overload\n def capitalize(self) -> str:\n """"""\n Return a capitalized version of the string.\n\n More specifically, make the first character have upper case and the rest lower\n case.\n """"""\n ...\n @overload\n def casefold(self: LiteralString) -> LiteralString:\n """"""Return a version of the string suitable for caseless comparisons.""""""\n ...\n @overload\n def casefold(self) -> str:\n """"""Return a version of the string suitable for caseless comparisons.""""""\n ...\n @overload\n def center(self: LiteralString, width: SupportsIndex, fillchar: LiteralString = "" "", /) -> LiteralString:\n """"""\n Return a centered string of length width.\n\n Padding is done using the specified fill character (default is a space).\n """"""\n ...\n @overload\n def center(self, width: SupportsIndex, fillchar: str = "" "", /) -> str:\n """"""\n Return a centered string of length width.\n\n Padding is done using the specified fill character (default is a space).\n """"""\n ...\n def count(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int:\n """"""\n S.count(sub[, start[, end]]) -> int\n\n Return the number of non-overlapping occurrences of substring sub in\n string S[start:end]. Optional arguments start and end are\n interpreted as in slice notation.\n """"""\n ...\n def encode(self, encoding: str = ""utf-8"", errors: str = ""strict"") -> bytes:\n """"""\n Encode the string using the codec registered for encoding.\n\n encoding\n The encoding in which to encode the string.\n errors\n The error handling scheme to use for encoding errors.\n The default is 'strict' meaning that encoding errors raise a\n UnicodeEncodeError. Other possible values are 'ignore', 'replace' and\n 'xmlcharrefreplace' as well as any other name registered with\n codecs.register_error that can handle UnicodeEncodeErrors.\n """"""\n ...\n def endswith(\n self, suffix: str | tuple[str, ...], start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /\n ) -> bool:\n """"""\n S.endswith(suffix[, start[, end]]) -> bool\n\n Return True if S ends with the specified suffix, False otherwise.\n With optional start, test S beginning at that position.\n With optional end, stop comparing S at that position.\n suffix can also be a tuple of strings to try.\n """"""\n ...\n @overload\n def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString:\n """"""\n Return a copy where all tab characters are expanded using spaces.\n\n If tabsize is not given, a tab size of 8 characters is assumed.\n """"""\n ...\n @overload\n def expandtabs(self, tabsize: SupportsIndex = 8) -> str:\n """"""\n Return a copy where all tab characters are expanded using spaces.\n\n If tabsize is not given, a tab size of 8 characters is assumed.\n """"""\n ...\n def find(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int:\n """"""\n S.find(sub[, start[, end]]) -> int\n\n Return the lowest index in S where substring sub is found,\n such that sub is contained within S[start:end]. 
Optional\n arguments start and end are interpreted as in slice notation.\n\n Return -1 on failure.\n """"""\n ...\n @overload\n def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString:\n """"""\n S.format(*args, **kwargs) -> str\n\n Return a formatted version of S, using substitutions from args and kwargs.\n The substitutions are identified by braces ('{' and '}').\n """"""\n ...\n @overload\n def format(self, *args: object, **kwargs: object) -> str:\n """"""\n S.format(*args, **kwargs) -> str\n\n Return a formatted version of S, using substitutions from args and kwargs.\n The substitutions are identified by braces ('{' and '}').\n """"""\n ...\n def format_map(self, mapping: _FormatMapMapping, /) -> str:\n """"""\n S.format_map(mapping) -> str\n\n Return a formatted version of S, using substitutions from mapping.\n The substitutions are identified by braces ('{' and '}').\n """"""\n ...\n def index(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int:\n """"""\n S.index(sub[, start[, end]]) -> int\n\n Return the lowest index in S where substring sub is found,\n such that sub is contained within S[start:end]. Optional\n arguments start and end are interpreted as in slice notation.\n\n Raises ValueError when the substring is not found.\n """"""\n ...\n def isalnum(self) -> bool:\n """"""\n Return True if the string is an alpha-numeric string, False otherwise.\n\n A string is alpha-numeric if all characters in the string are alpha-numeric and\n there is at least one character in the string.\n """"""\n ...\n def isalpha(self) -> bool:\n """"""\n Return True if the string is an alphabetic string, False otherwise.\n\n A string is alphabetic if all characters in the string are alphabetic and there\n is at least one character in the string.\n """"""\n ...\n def isascii(self) -> bool:\n """"""\n Return True if all characters in the string are ASCII, False otherwise.\n\n ASCII characters have code points in the range U+0000-U+007F.\n Empty string is ASCII too.\n """"""\n ...\n def isdecimal(self) -> bool:\n """"""\n Return True if the string is a decimal string, False otherwise.\n\n A string is a decimal string if all characters in the string are decimal and\n there is at least one character in the string.\n """"""\n ...\n def isdigit(self) -> bool:\n """"""\n Return True if the string is a digit string, False otherwise.\n\n A string is a digit string if all characters in the string are digits and there\n is at least one character in the string.\n """"""\n ...\n def isidentifier(self) -> bool:\n """"""\n Return True if the string is a valid Python identifier, False otherwise.\n\n Call keyword.iskeyword(s) to test whether string s is a reserved identifier,\n such as ""def"" or ""class"".\n """"""\n ...\n def islower(self) -> bool:\n """"""\n Return True if the string is a lowercase string, False otherwise.\n\n A string is lowercase if all cased characters in the string are lowercase and\n there is at least one cased character in the string.\n """"""\n ...\n def isnumeric(self) -> bool:\n """"""\n Return True if the string is a numeric string, False otherwise.\n\n A string is numeric if all characters in the string are numeric and there is at\n least one character in the string.\n """"""\n ...\n def isprintable(self) -> bool:\n """"""\n Return True if the string is printable, False otherwise.\n\n A string is printable if all of its characters are considered printable in\n repr() or if it is empty.\n """"""\n ...\n def 
isspace(self) -> bool:\n """"""\n Return True if the string is a whitespace string, False otherwise.\n\n A string is whitespace if all characters in the string are whitespace and there\n is at least one character in the string.\n """"""\n ...\n def istitle(self) -> bool:\n """"""\n Return True if the string is a title-cased string, False otherwise.\n\n In a title-cased string, upper- and title-case characters may only\n follow uncased characters and lowercase characters only cased ones.\n """"""\n ...\n def isupper(self) -> bool:\n """"""\n Return True if the string is an uppercase string, False otherwise.\n\n A string is uppercase if all cased characters in the string are uppercase and\n there is at least one cased character in the string.\n """"""\n ...\n @overload\n def join(self: LiteralString, iterable: Iterable[LiteralString], /) -> LiteralString:\n """"""\n Concatenate any number of strings.\n\n The string whose method is called is inserted in between each given string.\n The result is returned as a new string.\n\n Example: '.'.join(['ab', 'pq', 'rs']) -> 'ab.pq.rs'\n """"""\n ...\n @overload\n def join(self, iterable: Iterable[str], /) -> str:\n """"""\n Concatenate any number of strings.\n\n The string whose method is called is inserted in between each given string.\n The result is returned as a new string.\n\n Example: '.'.join(['ab', 'pq', 'rs']) -> 'ab.pq.rs'\n """"""\n ...\n @overload\n def ljust(self: LiteralString, width: SupportsIndex, fillchar: LiteralString = "" "", /) -> LiteralString:\n """"""\n Return a left-justified string of length width.\n\n Padding is done using the specified fill character (default is a space).\n """"""\n ...\n @overload\n def ljust(self, width: SupportsIndex, fillchar: str = "" "", /) -> str:\n """"""\n Return a left-justified string of length width.\n\n Padding is done using the specified fill character (default is a space).\n """"""\n ...\n @overload\n def lower(self: LiteralString) -> LiteralString:\n """"""Return a copy of the string converted to lowercase.""""""\n ...\n @overload\n def lower(self) -> str:\n """"""Return a copy of the string converted to lowercase.""""""\n ...\n @overload\n def lstrip(self: LiteralString, chars: LiteralString | None = None, /) -> LiteralString:\n """"""\n Return a copy of the string with leading whitespace removed.\n\n If chars is given and not None, remove characters in chars instead.\n """"""\n ...\n @overload\n def lstrip(self, chars: str | None = None, /) -> str:\n """"""\n Return a copy of the string with leading whitespace removed.\n\n If chars is given and not None, remove characters in chars instead.\n """"""\n ...\n @overload\n def partition(self: LiteralString, sep: LiteralString, /) -> tuple[LiteralString, LiteralString, LiteralString]:\n """"""\n Partition the string into three parts using the given separator.\n\n This will search for the separator in the string. If the separator is found,\n returns a 3-tuple containing the part before the separator, the separator\n itself, and the part after it.\n\n If the separator is not found, returns a 3-tuple containing the original string\n and two empty strings.\n """"""\n ...\n @overload\n def partition(self, sep: str, /) -> tuple[str, str, str]:\n """"""\n Partition the string into three parts using the given separator.\n\n This will search for the separator in the string. 
If the separator is found,\n returns a 3-tuple containing the part before the separator, the separator\n itself, and the part after it.\n\n If the separator is not found, returns a 3-tuple containing the original string\n and two empty strings.\n """"""\n ...\n if sys.version_info >= (3, 13):\n @overload\n def replace(\n self: LiteralString, old: LiteralString, new: LiteralString, /, count: SupportsIndex = -1\n ) -> LiteralString: ...\n @overload\n def replace(self, old: str, new: str, /, count: SupportsIndex = -1) -> str: ... # type: ignore[misc]\n else:\n @overload\n def replace(\n self: LiteralString, old: LiteralString, new: LiteralString, count: SupportsIndex = -1, /\n ) -> LiteralString:\n """"""\n Return a copy with all occurrences of substring old replaced by new.\n\n count\n Maximum number of occurrences to replace.\n -1 (the default value) means replace all occurrences.\n\n If the optional argument count is given, only the first count occurrences are\n replaced.\n """"""\n ...\n @overload\n def replace(self, old: str, new: str, count: SupportsIndex = -1, /) -> str:\n """"""\n Return a copy with all occurrences of substring old replaced by new.\n\n count\n Maximum number of occurrences to replace.\n -1 (the default value) means replace all occurrences.\n\n If the optional argument count is given, only the first count occurrences are\n replaced.\n """"""\n ...\n\n @overload\n def removeprefix(self: LiteralString, prefix: LiteralString, /) -> LiteralString:\n """"""\n Return a str with the given prefix string removed if present.\n\n If the string starts with the prefix string, return string[len(prefix):].\n Otherwise, return a copy of the original string.\n """"""\n ...\n @overload\n def removeprefix(self, prefix: str, /) -> str:\n """"""\n Return a str with the given prefix string removed if present.\n\n If the string starts with the prefix string, return string[len(prefix):].\n Otherwise, return a copy of the original string.\n """"""\n ...\n @overload\n def removesuffix(self: LiteralString, suffix: LiteralString, /) -> LiteralString:\n """"""\n Return a str with the given suffix string removed if present.\n\n If the string ends with the suffix string and that suffix is not empty,\n return string[:-len(suffix)]. Otherwise, return a copy of the original\n string.\n """"""\n ...\n @overload\n def removesuffix(self, suffix: str, /) -> str:\n """"""\n Return a str with the given suffix string removed if present.\n\n If the string ends with the suffix string and that suffix is not empty,\n return string[:-len(suffix)]. Otherwise, return a copy of the original\n string.\n """"""\n ...\n def rfind(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int:\n """"""\n S.rfind(sub[, start[, end]]) -> int\n\n Return the highest index in S where substring sub is found,\n such that sub is contained within S[start:end]. Optional\n arguments start and end are interpreted as in slice notation.\n\n Return -1 on failure.\n """"""\n ...\n def rindex(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int:\n """"""\n S.rindex(sub[, start[, end]]) -> int\n\n Return the highest index in S where substring sub is found,\n such that sub is contained within S[start:end]. 
Optional\n arguments start and end are interpreted as in slice notation.\n\n Raises ValueError when the substring is not found.\n """"""\n ...\n @overload\n def rjust(self: LiteralString, width: SupportsIndex, fillchar: LiteralString = "" "", /) -> LiteralString:\n """"""\n Return a right-justified string of length width.\n\n Padding is done using the specified fill character (default is a space).\n """"""\n ...\n @overload\n def rjust(self, width: SupportsIndex, fillchar: str = "" "", /) -> str:\n """"""\n Return a right-justified string of length width.\n\n Padding is done using the specified fill character (default is a space).\n """"""\n ...\n @overload\n def rpartition(self: LiteralString, sep: LiteralString, /) -> tuple[LiteralString, LiteralString, LiteralString]:\n """"""\n Partition the string into three parts using the given separator.\n\n This will search for the separator in the string, starting at the end. If\n the separator is found, returns a 3-tuple containing the part before the\n separator, the separator itself, and the part after it.\n\n If the separator is not found, returns a 3-tuple containing two empty strings\n and the original string.\n """"""\n ...\n @overload\n def rpartition(self, sep: str, /) -> tuple[str, str, str]:\n """"""\n Partition the string into three parts using the given separator.\n\n This will search for the separator in the string, starting at the end. If\n the separator is found, returns a 3-tuple containing the part before the\n separator, the separator itself, and the part after it.\n\n If the separator is not found, returns a 3-tuple containing two empty strings\n and the original string.\n """"""\n ...\n @overload\n def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]:\n r""""""\n Return a list of the substrings in the string, using sep as the separator string.\n\n sep\n The separator used to split the string.\n\n When set to None (the default value), will split on any whitespace\n character (including \n \r \t \f and spaces) and will discard\n empty strings from the result.\n maxsplit\n Maximum number of splits.\n -1 (the default value) means no limit.\n\n Splitting starts at the end of the string and works to the front.\n """"""\n ...\n @overload\n def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]:\n r""""""\n Return a list of the substrings in the string, using sep as the separator string.\n\n sep\n The separator used to split the string.\n\n When set to None (the default value), will split on any whitespace\n character (including \n \r \t \f and spaces) and will discard\n empty strings from the result.\n maxsplit\n Maximum number of splits.\n -1 (the default value) means no limit.\n\n Splitting starts at the end of the string and works to the front.\n """"""\n ...\n @overload\n def rstrip(self: LiteralString, chars: LiteralString | None = None, /) -> LiteralString:\n """"""\n Return a copy of the string with trailing whitespace removed.\n\n If chars is given and not None, remove characters in chars instead.\n """"""\n ...\n @overload\n def rstrip(self, chars: str | None = None, /) -> str:\n """"""\n Return a copy of the string with trailing whitespace removed.\n\n If chars is given and not None, remove characters in chars instead.\n """"""\n ...\n @overload\n def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]:\n r""""""\n Return a list of the substrings in the string, using 
sep as the separator string.\n\n sep\n The separator used to split the string.\n\n When set to None (the default value), will split on any whitespace\n character (including \n \r \t \f and spaces) and will discard\n empty strings from the result.\n maxsplit\n Maximum number of splits.\n -1 (the default value) means no limit.\n\n Splitting starts at the front of the string and works to the end.\n\n Note, str.split() is mainly useful for data that has been intentionally\n delimited. With natural text that includes punctuation, consider using\n the regular expression module.\n """"""\n ...\n @overload\n def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]:\n r""""""\n Return a list of the substrings in the string, using sep as the separator string.\n\n sep\n The separator used to split the string.\n\n When set to None (the default value), will split on any whitespace\n character (including \n \r \t \f and spaces) and will discard\n empty strings from the result.\n maxsplit\n Maximum number of splits.\n -1 (the default value) means no limit.\n\n Splitting starts at the front of the string and works to the end.\n\n Note, str.split() is mainly useful for data that has been intentionally\n delimited. With natural text that includes punctuation, consider using\n the regular expression module.\n """"""\n ...\n @overload\n def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]:\n """"""\n Return a list of the lines in the string, breaking at line boundaries.\n\n Line breaks are not included in the resulting list unless keepends is given and\n true.\n """"""\n ...\n @overload\n def splitlines(self, keepends: bool = False) -> list[str]:\n """"""\n Return a list of the lines in the string, breaking at line boundaries.\n\n Line breaks are not included in the resulting list unless keepends is given and\n true.\n """"""\n ...\n def startswith(\n self, prefix: str | tuple[str, ...], start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /\n ) -> bool:\n """"""\n S.startswith(prefix[, start[, end]]) -> bool\n\n Return True if S starts with the specified prefix, False otherwise.\n With optional start, test S beginning at that position.\n With optional end, stop comparing S at that position.\n prefix can also be a tuple of strings to try.\n """"""\n ...\n @overload\n def strip(self: LiteralString, chars: LiteralString | None = None, /) -> LiteralString:\n """"""\n Return a copy of the string with leading and trailing whitespace removed.\n\n If chars is given and not None, remove characters in chars instead.\n """"""\n ...\n @overload\n def strip(self, chars: str | None = None, /) -> str:\n """"""\n Return a copy of the string with leading and trailing whitespace removed.\n\n If chars is given and not None, remove characters in chars instead.\n """"""\n ...\n @overload\n def swapcase(self: LiteralString) -> LiteralString:\n """"""Convert uppercase characters to lowercase and lowercase characters to uppercase.""""""\n ...\n @overload\n def swapcase(self) -> str:\n """"""Convert uppercase characters to lowercase and lowercase characters to uppercase.""""""\n ...\n @overload\n def title(self: LiteralString) -> LiteralString:\n """"""\n Return a version of the string where each word is titlecased.\n\n More specifically, words start with uppercased characters and all remaining\n cased characters have lower case.\n """"""\n ...\n @overload\n def title(self) -> str:\n """"""\n Return a version of the string where each word is titlecased.\n\n More 
specifically, words start with uppercased characters and all remaining\n cased characters have lower case.\n """"""\n ...\n def translate(self, table: _TranslateTable, /) -> str:\n """"""\n Replace each character in the string using the given translation table.\n\n table\n Translation table, which must be a mapping of Unicode ordinals to\n Unicode ordinals, strings, or None.\n\n The table must implement lookup/indexing via __getitem__, for instance a\n dictionary or list. If this operation raises LookupError, the character is\n left untouched. Characters mapped to None are deleted.\n """"""\n ...\n @overload\n def upper(self: LiteralString) -> LiteralString:\n """"""Return a copy of the string converted to uppercase.""""""\n ...\n @overload\n def upper(self) -> str:\n """"""Return a copy of the string converted to uppercase.""""""\n ...\n @overload\n def zfill(self: LiteralString, width: SupportsIndex, /) -> LiteralString:\n """"""\n Pad a numeric string with zeros on the left, to fill a field of the given width.\n\n The string is never truncated.\n """"""\n ...\n @overload\n def zfill(self, width: SupportsIndex, /) -> str:\n """"""\n Pad a numeric string with zeros on the left, to fill a field of the given width.\n\n The string is never truncated.\n """"""\n ...\n @staticmethod\n @overload\n def maketrans(x: dict[int, _T] | dict[str, _T] | dict[str | int, _T], /) -> dict[int, _T]:\n """"""\n Return a translation table usable for str.translate().\n\n If there is only one argument, it must be a dictionary mapping Unicode\n ordinals (integers) or characters to Unicode ordinals, strings or None.\n Character keys will be then converted to ordinals.\n If there are two arguments, they must be strings of equal length, and\n in the resulting dictionary, each character in x will be mapped to the\n character at the same position in y. If there is a third argument, it\n must be a string, whose characters will be mapped to None in the result.\n """"""\n ...\n @staticmethod\n @overload\n def maketrans(x: str, y: str, /) -> dict[int, int]:\n """"""\n Return a translation table usable for str.translate().\n\n If there is only one argument, it must be a dictionary mapping Unicode\n ordinals (integers) or characters to Unicode ordinals, strings or None.\n Character keys will be then converted to ordinals.\n If there are two arguments, they must be strings of equal length, and\n in the resulting dictionary, each character in x will be mapped to the\n character at the same position in y. If there is a third argument, it\n must be a string, whose characters will be mapped to None in the result.\n """"""\n ...\n @staticmethod\n @overload\n def maketrans(x: str, y: str, z: str, /) -> dict[int, int | None]:\n """"""\n Return a translation table usable for str.translate().\n\n If there is only one argument, it must be a dictionary mapping Unicode\n ordinals (integers) or characters to Unicode ordinals, strings or None.\n Character keys will be then converted to ordinals.\n If there are two arguments, they must be strings of equal length, and\n in the resulting dictionary, each character in x will be mapped to the\n character at the same position in y. 
If there is a third argument, it\n must be a string, whose characters will be mapped to None in the result.\n """"""\n ...\n @overload\n def __add__(self: LiteralString, value: LiteralString, /) -> LiteralString:\n """"""Return self+value.""""""\n ...\n @overload\n def __add__(self, value: str, /) -> str:\n """"""Return self+value.""""""\n ...\n # Incompatible with Sequence.__contains__\n def __contains__(self, key: str, /) -> bool:\n """"""Return bool(key in self).""""""\n ...\n def __eq__(self, value: object, /) -> bool:\n """"""Return self==value.""""""\n ...\n def __ge__(self, value: str, /) -> bool:\n """"""Return self>=value.""""""\n ...\n @overload\n def __getitem__(self: LiteralString, key: SupportsIndex | slice, /) -> LiteralString:\n """"""Return self[key].""""""\n ...\n @overload\n def __getitem__(self, key: SupportsIndex | slice, /) -> str:\n """"""Return self[key].""""""\n ...\n def __gt__(self, value: str, /) -> bool:\n """"""Return self>value.""""""\n ...\n def __hash__(self) -> int:\n """"""Return hash(self).""""""\n ...\n @overload\n def __iter__(self: LiteralString) -> Iterator[LiteralString]:\n """"""Implement iter(self).""""""\n ...\n @overload\n def __iter__(self) -> Iterator[str]:\n """"""Implement iter(self).""""""\n ...\n def __le__(self, value: str, /) -> bool:\n """"""Return self<=value.""""""\n ...\n def __len__(self) -> int:\n """"""Return len(self).""""""\n ...\n def __lt__(self, value: str, /) -> bool:\n """"""Return self<value.""""""\n ...\n @overload\n def __mod__(self: LiteralString, value: LiteralString | tuple[LiteralString, ...], /) -> LiteralString:\n """"""Return self%value.""""""\n ...\n @overload\n def __mod__(self, value: Any, /) -> str:\n """"""Return self%value.""""""\n ...\n @overload\n def __mul__(self: LiteralString, value: SupportsIndex, /) -> LiteralString:\n """"""Return self*value.""""""\n ...\n @overload\n def __mul__(self, value: SupportsIndex, /) -> str:\n """"""Return self*value.""""""\n ...\n def __ne__(self, value: object, /) -> bool:\n """"""Return self!=value.""""""\n ...\n @overload\n def __rmul__(self: LiteralString, value: SupportsIndex, /) -> LiteralString:\n """"""Return value*self.""""""\n ...\n @overload\n def __rmul__(self, value: SupportsIndex, /) -> str:\n """"""Return value*self.""""""\n ...\n def __getnewargs__(self) -> tuple[str]: ...\n\nclass bytes(Sequence[int]):\n """"""\n bytes(iterable_of_ints) -> bytes\n bytes(string, encoding[, errors]) -> bytes\n bytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer\n bytes(int) -> bytes object of size given by the parameter initialized with null bytes\n bytes() -> empty bytes object\n\n Construct an immutable array of bytes from:\n - an iterable yielding integers in range(256)\n - a text string encoded using the specified encoding\n - any object implementing the buffer API.\n - an integer\n """"""\n @overload\n def __new__(cls, o: Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer, /) -> Self: ...\n @overload\n def __new__(cls, string: str, /, encoding: str, errors: str = ...) 
-> Self: ...\n @overload\n def __new__(cls) -> Self: ...\n def capitalize(self) -> bytes:\n """"""\n B.capitalize() -> copy of B\n\n Return a copy of B with only its first character capitalized (ASCII)\n and the rest lower-cased.\n """"""\n ...\n def center(self, width: SupportsIndex, fillchar: bytes = b"" "", /) -> bytes:\n """"""\n Return a centered string of length width.\n\n Padding is done using the specified fill character.\n """"""\n ...\n def count(\n self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /\n ) -> int:\n """"""\n B.count(sub[, start[, end]]) -> int\n\n Return the number of non-overlapping occurrences of subsection sub in\n bytes B[start:end]. Optional arguments start and end are interpreted\n as in slice notation.\n """"""\n ...\n def decode(self, encoding: str = ""utf-8"", errors: str = ""strict"") -> str:\n """"""\n Decode the bytes using the codec registered for encoding.\n\n encoding\n The encoding with which to decode the bytes.\n errors\n The error handling scheme to use for the handling of decoding errors.\n The default is 'strict' meaning that decoding errors raise a\n UnicodeDecodeError. Other possible values are 'ignore' and 'replace'\n as well as any other name registered with codecs.register_error that\n can handle UnicodeDecodeErrors.\n """"""\n ...\n def endswith(\n self,\n suffix: ReadableBuffer | tuple[ReadableBuffer, ...],\n start: SupportsIndex | None = ...,\n end: SupportsIndex | None = ...,\n /,\n ) -> bool:\n """"""\n B.endswith(suffix[, start[, end]]) -> bool\n\n Return True if B ends with the specified suffix, False otherwise.\n With optional start, test B beginning at that position.\n With optional end, stop comparing B at that position.\n suffix can also be a tuple of bytes to try.\n """"""\n ...\n def expandtabs(self, tabsize: SupportsIndex = 8) -> bytes:\n """"""\n Return a copy where all tab characters are expanded using spaces.\n\n If tabsize is not given, a tab size of 8 characters is assumed.\n """"""\n ...\n def find(\n self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /\n ) -> int:\n """"""\n B.find(sub[, start[, end]]) -> int\n\n Return the lowest index in B where subsection sub is found,\n such that sub is contained within B[start,end]. Optional\n arguments start and end are interpreted as in slice notation.\n\n Return -1 on failure.\n """"""\n ...\n def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = ...) -> str:\n r""""""\n Create a string of hexadecimal numbers from a bytes object.\n\n sep\n An optional single character or byte to separate hex bytes.\n bytes_per_sep\n How many bytes between separators. Positive values count from the\n right, negative values count from the left.\n\n Example:\n >>> value = b'\xb9\x01\xef'\n >>> value.hex()\n 'b901ef'\n >>> value.hex(':')\n 'b9:01:ef'\n >>> value.hex(':', 2)\n 'b9:01ef'\n >>> value.hex(':', -2)\n 'b901:ef'\n """"""\n ...\n def index(\n self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /\n ) -> int:\n """"""\n B.index(sub[, start[, end]]) -> int\n\n Return the lowest index in B where subsection sub is found,\n such that sub is contained within B[start,end]. 
Optional\n arguments start and end are interpreted as in slice notation.\n\n Raises ValueError when the subsection is not found.\n """"""\n ...\n def isalnum(self) -> bool:\n """"""\n B.isalnum() -> bool\n\n Return True if all characters in B are alphanumeric\n and there is at least one character in B, False otherwise.\n """"""\n ...\n def isalpha(self) -> bool:\n """"""\n B.isalpha() -> bool\n\n Return True if all characters in B are alphabetic\n and there is at least one character in B, False otherwise.\n """"""\n ...\n def isascii(self) -> bool:\n """"""\n B.isascii() -> bool\n\n Return True if B is empty or all characters in B are ASCII,\n False otherwise.\n """"""\n ...\n def isdigit(self) -> bool:\n """"""\n B.isdigit() -> bool\n\n Return True if all characters in B are digits\n and there is at least one character in B, False otherwise.\n """"""\n ...\n def islower(self) -> bool:\n """"""\n B.islower() -> bool\n\n Return True if all cased characters in B are lowercase and there is\n at least one cased character in B, False otherwise.\n """"""\n ...\n def isspace(self) -> bool:\n """"""\n B.isspace() -> bool\n\n Return True if all characters in B are whitespace\n and there is at least one character in B, False otherwise.\n """"""\n ...\n def istitle(self) -> bool:\n """"""\n B.istitle() -> bool\n\n Return True if B is a titlecased string and there is at least one\n character in B, i.e. uppercase characters may only follow uncased\n characters and lowercase characters only cased ones. Return False\n otherwise.\n """"""\n ...\n def isupper(self) -> bool:\n """"""\n B.isupper() -> bool\n\n Return True if all cased characters in B are uppercase and there is\n at least one cased character in B, False otherwise.\n """"""\n ...\n def join(self, iterable_of_bytes: Iterable[ReadableBuffer], /) -> bytes:\n """"""\n Concatenate any number of bytes objects.\n\n The bytes whose method is called is inserted in between each pair.\n\n The result is returned as a new bytes object.\n\n Example: b'.'.join([b'ab', b'pq', b'rs']) -> b'ab.pq.rs'.\n """"""\n ...\n def ljust(self, width: SupportsIndex, fillchar: bytes | bytearray = b"" "", /) -> bytes:\n """"""\n Return a left-justified string of length width.\n\n Padding is done using the specified fill character.\n """"""\n ...\n def lower(self) -> bytes:\n """"""\n B.lower() -> copy of B\n\n Return a copy of B with all ASCII characters converted to lowercase.\n """"""\n ...\n def lstrip(self, bytes: ReadableBuffer | None = None, /) -> bytes:\n """"""\n Strip leading bytes contained in the argument.\n\n If the argument is omitted or None, strip leading ASCII whitespace.\n """"""\n ...\n def partition(self, sep: ReadableBuffer, /) -> tuple[bytes, bytes, bytes]:\n """"""\n Partition the bytes into three parts using the given separator.\n\n This will search for the separator sep in the bytes. 
If the separator is found,\n returns a 3-tuple containing the part before the separator, the separator\n itself, and the part after it.\n\n If the separator is not found, returns a 3-tuple containing the original bytes\n object and two empty bytes objects.\n """"""\n ...\n def replace(self, old: ReadableBuffer, new: ReadableBuffer, count: SupportsIndex = -1, /) -> bytes:\n """"""\n Return a copy with all occurrences of substring old replaced by new.\n\n count\n Maximum number of occurrences to replace.\n -1 (the default value) means replace all occurrences.\n\n If the optional argument count is given, only the first count occurrences are\n replaced.\n """"""\n ...\n def removeprefix(self, prefix: ReadableBuffer, /) -> bytes:\n """"""\n Return a bytes object with the given prefix string removed if present.\n\n If the bytes starts with the prefix string, return bytes[len(prefix):].\n Otherwise, return a copy of the original bytes.\n """"""\n ...\n def removesuffix(self, suffix: ReadableBuffer, /) -> bytes:\n """"""\n Return a bytes object with the given suffix string removed if present.\n\n If the bytes ends with the suffix string and that suffix is not empty,\n return bytes[:-len(prefix)]. Otherwise, return a copy of the original\n bytes.\n """"""\n ...\n def rfind(\n self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /\n ) -> int:\n """"""\n B.rfind(sub[, start[, end]]) -> int\n\n Return the highest index in B where subsection sub is found,\n such that sub is contained within B[start,end]. Optional\n arguments start and end are interpreted as in slice notation.\n\n Return -1 on failure.\n """"""\n ...\n def rindex(\n self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /\n ) -> int:\n """"""\n B.rindex(sub[, start[, end]]) -> int\n\n Return the highest index in B where subsection sub is found,\n such that sub is contained within B[start,end]. Optional\n arguments start and end are interpreted as in slice notation.\n\n Raise ValueError when the subsection is not found.\n """"""\n ...\n def rjust(self, width: SupportsIndex, fillchar: bytes | bytearray = b"" "", /) -> bytes:\n """"""\n Return a right-justified string of length width.\n\n Padding is done using the specified fill character.\n """"""\n ...\n def rpartition(self, sep: ReadableBuffer, /) -> tuple[bytes, bytes, bytes]:\n """"""\n Partition the bytes into three parts using the given separator.\n\n This will search for the separator sep in the bytes, starting at the end. 
If\n the separator is found, returns a 3-tuple containing the part before the\n separator, the separator itself, and the part after it.\n\n If the separator is not found, returns a 3-tuple containing two empty bytes\n objects and the original bytes object.\n """"""\n ...\n def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]:\n """"""\n Return a list of the sections in the bytes, using sep as the delimiter.\n\n sep\n The delimiter according which to split the bytes.\n None (the default value) means split on ASCII whitespace characters\n (space, tab, return, newline, formfeed, vertical tab).\n maxsplit\n Maximum number of splits to do.\n -1 (the default value) means no limit.\n\n Splitting is done starting at the end of the bytes and working to the front.\n """"""\n ...\n def rstrip(self, bytes: ReadableBuffer | None = None, /) -> bytes:\n """"""\n Strip trailing bytes contained in the argument.\n\n If the argument is omitted or None, strip trailing ASCII whitespace.\n """"""\n ...\n def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]:\n """"""\n Return a list of the sections in the bytes, using sep as the delimiter.\n\n sep\n The delimiter according which to split the bytes.\n None (the default value) means split on ASCII whitespace characters\n (space, tab, return, newline, formfeed, vertical tab).\n maxsplit\n Maximum number of splits to do.\n -1 (the default value) means no limit.\n """"""\n ...\n def splitlines(self, keepends: bool = False) -> list[bytes]:\n """"""\n Return a list of the lines in the bytes, breaking at line boundaries.\n\n Line breaks are not included in the resulting list unless keepends is given and\n true.\n """"""\n ...\n def startswith(\n self,\n prefix: ReadableBuffer | tuple[ReadableBuffer, ...],\n start: SupportsIndex | None = ...,\n end: SupportsIndex | None = ...,\n /,\n ) -> bool:\n """"""\n B.startswith(prefix[, start[, end]]) -> bool\n\n Return True if B starts with the specified prefix, False otherwise.\n With optional start, test B beginning at that position.\n With optional end, stop comparing B at that position.\n prefix can also be a tuple of bytes to try.\n """"""\n ...\n def strip(self, bytes: ReadableBuffer | None = None, /) -> bytes:\n """"""\n Strip leading and trailing bytes contained in the argument.\n\n If the argument is omitted or None, strip leading and trailing ASCII whitespace.\n """"""\n ...\n def swapcase(self) -> bytes:\n """"""\n B.swapcase() -> copy of B\n\n Return a copy of B with uppercase ASCII characters converted\n to lowercase ASCII and vice versa.\n """"""\n ...\n def title(self) -> bytes:\n """"""\n B.title() -> copy of B\n\n Return a titlecased version of B, i.e. 
ASCII words start with uppercase\n characters, all remaining cased characters have lowercase.\n """"""\n ...\n def translate(self, table: ReadableBuffer | None, /, delete: bytes = b"""") -> bytes:\n """"""\n Return a copy with each character mapped by the given translation table.\n\n table\n Translation table, which must be a bytes object of length 256.\n\n All characters occurring in the optional argument delete are removed.\n The remaining characters are mapped through the given translation table.\n """"""\n ...\n def upper(self) -> bytes:\n """"""\n B.upper() -> copy of B\n\n Return a copy of B with all ASCII characters converted to uppercase.\n """"""\n ...\n def zfill(self, width: SupportsIndex, /) -> bytes:\n """"""\n Pad a numeric string with zeros on the left, to fill a field of the given width.\n\n The original string is never truncated.\n """"""\n ...\n @classmethod\n def fromhex(cls, string: str, /) -> Self:\n r""""""\n Create a bytes object from a string of hexadecimal numbers.\n\n Spaces between two numbers are accepted.\n Example: bytes.fromhex('B9 01EF') -> b'\\xb9\\x01\\xef'.\n """"""\n ...\n @staticmethod\n def maketrans(frm: ReadableBuffer, to: ReadableBuffer, /) -> bytes:\n """"""\n Return a translation table useable for the bytes or bytearray translate method.\n\n The returned table will be one where each byte in frm is mapped to the byte at\n the same position in to.\n\n The bytes objects frm and to must be of the same length.\n """"""\n ...\n def __len__(self) -> int:\n """"""Return len(self).""""""\n ...\n def __iter__(self) -> Iterator[int]:\n """"""Implement iter(self).""""""\n ...\n def __hash__(self) -> int:\n """"""Return hash(self).""""""\n ...\n @overload\n def __getitem__(self, key: SupportsIndex, /) -> int:\n """"""Return self[key].""""""\n ...\n @overload\n def __getitem__(self, key: slice, /) -> bytes:\n """"""Return self[key].""""""\n ...\n def __add__(self, value: ReadableBuffer, /) -> bytes:\n """"""Return self+value.""""""\n ...\n def __mul__(self, value: SupportsIndex, /) -> bytes:\n """"""Return self*value.""""""\n ...\n def __rmul__(self, value: SupportsIndex, /) -> bytes:\n """"""Return value*self.""""""\n ...\n def __mod__(self, value: Any, /) -> bytes:\n """"""Return self%value.""""""\n ...\n # Incompatible with Sequence.__contains__\n def __contains__(self, key: SupportsIndex | ReadableBuffer, /) -> bool:\n """"""Return bool(key in self).""""""\n ...\n def __eq__(self, value: object, /) -> bool:\n """"""Return self==value.""""""\n ...\n def __ne__(self, value: object, /) -> bool:\n """"""Return self!=value.""""""\n ...\n def __lt__(self, value: bytes, /) -> bool:\n """"""Return self<value.""""""\n ...\n def __le__(self, value: bytes, /) -> bool:\n """"""Return self<=value.""""""\n ...\n def __gt__(self, value: bytes, /) -> bool:\n """"""Return self>value.""""""\n ...\n def __ge__(self, value: bytes, /) -> bool:\n """"""Return self>=value.""""""\n ...\n def __getnewargs__(self) -> tuple[bytes]: ...\n if sys.version_info >= (3, 11):\n def __bytes__(self) -> bytes:\n """"""Convert this value to exact type bytes.""""""\n ...\n\n def __buffer__(self, flags: int, /) -> memoryview:\n """"""Return a buffer object that exposes the underlying memory of the object.""""""\n ...\n\nclass bytearray(MutableSequence[int]):\n """"""\n bytearray(iterable_of_ints) -> bytearray\n bytearray(string, encoding[, errors]) -> bytearray\n bytearray(bytes_or_buffer) -> mutable copy of bytes_or_buffer\n bytearray(int) -> bytes array of size given by the parameter initialized with null bytes\n bytearray() -> empty bytes array\n\n 
Construct a mutable bytearray object from:\n - an iterable yielding integers in range(256)\n - a text string encoded using the specified encoding\n - a bytes or a buffer object\n - any object implementing the buffer API.\n - an integer\n """"""\n @overload\n def __init__(self) -> None: ...\n @overload\n def __init__(self, ints: Iterable[SupportsIndex] | SupportsIndex | ReadableBuffer, /) -> None: ...\n @overload\n def __init__(self, string: str, /, encoding: str, errors: str = ...) -> None: ...\n def append(self, item: SupportsIndex, /) -> None:\n """"""\n Append a single item to the end of the bytearray.\n\n item\n The item to be appended.\n """"""\n ...\n def capitalize(self) -> bytearray:\n """"""\n B.capitalize() -> copy of B\n\n Return a copy of B with only its first character capitalized (ASCII)\n and the rest lower-cased.\n """"""\n ...\n def center(self, width: SupportsIndex, fillchar: bytes = b"" "", /) -> bytearray:\n """"""\n Return a centered string of length width.\n\n Padding is done using the specified fill character.\n """"""\n ...\n def count(\n self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /\n ) -> int:\n """"""\n B.count(sub[, start[, end]]) -> int\n\n Return the number of non-overlapping occurrences of subsection sub in\n bytes B[start:end]. Optional arguments start and end are interpreted\n as in slice notation.\n """"""\n ...\n def copy(self) -> bytearray:\n """"""Return a copy of B.""""""\n ...\n def decode(self, encoding: str = ""utf-8"", errors: str = ""strict"") -> str:\n """"""\n Decode the bytearray using the codec registered for encoding.\n\n encoding\n The encoding with which to decode the bytearray.\n errors\n The error handling scheme to use for the handling of decoding errors.\n The default is 'strict' meaning that decoding errors raise a\n UnicodeDecodeError. Other possible values are 'ignore' and 'replace'\n as well as any other name registered with codecs.register_error that\n can handle UnicodeDecodeErrors.\n """"""\n ...\n def endswith(\n self,\n suffix: ReadableBuffer | tuple[ReadableBuffer, ...],\n start: SupportsIndex | None = ...,\n end: SupportsIndex | None = ...,\n /,\n ) -> bool:\n """"""\n B.endswith(suffix[, start[, end]]) -> bool\n\n Return True if B ends with the specified suffix, False otherwise.\n With optional start, test B beginning at that position.\n With optional end, stop comparing B at that position.\n suffix can also be a tuple of bytes to try.\n """"""\n ...\n def expandtabs(self, tabsize: SupportsIndex = 8) -> bytearray:\n """"""\n Return a copy where all tab characters are expanded using spaces.\n\n If tabsize is not given, a tab size of 8 characters is assumed.\n """"""\n ...\n def extend(self, iterable_of_ints: Iterable[SupportsIndex], /) -> None:\n """"""\n Append all the items from the iterator or sequence to the end of the bytearray.\n\n iterable_of_ints\n The iterable of items to append.\n """"""\n ...\n def find(\n self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /\n ) -> int:\n """"""\n B.find(sub[, start[, end]]) -> int\n\n Return the lowest index in B where subsection sub is found,\n such that sub is contained within B[start,end]. Optional\n arguments start and end are interpreted as in slice notation.\n\n Return -1 on failure.\n """"""\n ...\n def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = ...) 
-> str:\n """"""\n Create a string of hexadecimal numbers from a bytearray object.\n\n sep\n An optional single character or byte to separate hex bytes.\n bytes_per_sep\n How many bytes between separators. Positive values count from the\n right, negative values count from the left.\n\n Example:\n >>> value = bytearray([0xb9, 0x01, 0xef])\n >>> value.hex()\n 'b901ef'\n >>> value.hex(':')\n 'b9:01:ef'\n >>> value.hex(':', 2)\n 'b9:01ef'\n >>> value.hex(':', -2)\n 'b901:ef'\n """"""\n ...\n def index(\n self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /\n ) -> int:\n """"""\n B.index(sub[, start[, end]]) -> int\n\n Return the lowest index in B where subsection sub is found,\n such that sub is contained within B[start,end]. Optional\n arguments start and end are interpreted as in slice notation.\n\n Raises ValueError when the subsection is not found.\n """"""\n ...\n def insert(self, index: SupportsIndex, item: SupportsIndex, /) -> None:\n """"""\n Insert a single item into the bytearray before the given index.\n\n index\n The index where the value is to be inserted.\n item\n The item to be inserted.\n """"""\n ...\n def isalnum(self) -> bool:\n """"""\n B.isalnum() -> bool\n\n Return True if all characters in B are alphanumeric\n and there is at least one character in B, False otherwise.\n """"""\n ...\n def isalpha(self) -> bool:\n """"""\n B.isalpha() -> bool\n\n Return True if all characters in B are alphabetic\n and there is at least one character in B, False otherwise.\n """"""\n ...\n def isascii(self) -> bool:\n """"""\n B.isascii() -> bool\n\n Return True if B is empty or all characters in B are ASCII,\n False otherwise.\n """"""\n ...\n def isdigit(self) -> bool:\n """"""\n B.isdigit() -> bool\n\n Return True if all characters in B are digits\n and there is at least one character in B, False otherwise.\n """"""\n ...\n def islower(self) -> bool:\n """"""\n B.islower() -> bool\n\n Return True if all cased characters in B are lowercase and there is\n at least one cased character in B, False otherwise.\n """"""\n ...\n def isspace(self) -> bool:\n """"""\n B.isspace() -> bool\n\n Return True if all characters in B are whitespace\n and there is at least one character in B, False otherwise.\n """"""\n ...\n def istitle(self) -> bool:\n """"""\n B.istitle() -> bool\n\n Return True if B is a titlecased string and there is at least one\n character in B, i.e. uppercase characters may only follow uncased\n characters and lowercase characters only cased ones. 
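[Editorial aside.] Unlike bytes, bytearray is mutable, which is why the stub adds append/extend/insert/pop alongside the copying methods; a short sketch of that difference and of the hex() grouping documented above:

buf = bytearray(b"hello")
buf.append(0x21)           # append a single integer item
buf.extend(b"!!")          # extend with any buffer / iterable of ints
assert buf == bytearray(b"hello!!!")

buf.insert(0, ord(">"))    # insert before the given index
assert buf.decode("utf-8") == ">hello!!!"

# hex() with a separator and a group size, counted from the right
assert bytearray([0xB9, 0x01, 0xEF]).hex(":", 2) == "b9:01ef"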
Return False\n otherwise.\n """"""\n ...\n def isupper(self) -> bool:\n """"""\n B.isupper() -> bool\n\n Return True if all cased characters in B are uppercase and there is\n at least one cased character in B, False otherwise.\n """"""\n ...\n def join(self, iterable_of_bytes: Iterable[ReadableBuffer], /) -> bytearray:\n """"""\n Concatenate any number of bytes/bytearray objects.\n\n The bytearray whose method is called is inserted in between each pair.\n\n The result is returned as a new bytearray object.\n """"""\n ...\n def ljust(self, width: SupportsIndex, fillchar: bytes | bytearray = b"" "", /) -> bytearray:\n """"""\n Return a left-justified string of length width.\n\n Padding is done using the specified fill character.\n """"""\n ...\n def lower(self) -> bytearray:\n """"""\n B.lower() -> copy of B\n\n Return a copy of B with all ASCII characters converted to lowercase.\n """"""\n ...\n def lstrip(self, bytes: ReadableBuffer | None = None, /) -> bytearray:\n """"""\n Strip leading bytes contained in the argument.\n\n If the argument is omitted or None, strip leading ASCII whitespace.\n """"""\n ...\n def partition(self, sep: ReadableBuffer, /) -> tuple[bytearray, bytearray, bytearray]:\n """"""\n Partition the bytearray into three parts using the given separator.\n\n This will search for the separator sep in the bytearray. If the separator is\n found, returns a 3-tuple containing the part before the separator, the\n separator itself, and the part after it as new bytearray objects.\n\n If the separator is not found, returns a 3-tuple containing the copy of the\n original bytearray object and two empty bytearray objects.\n """"""\n ...\n def pop(self, index: int = -1, /) -> int:\n """"""\n Remove and return a single item from B.\n\n index\n The index from where to remove the item.\n -1 (the default value) means remove the last item.\n\n If no index argument is given, will pop the last item.\n """"""\n ...\n def remove(self, value: int, /) -> None:\n """"""\n Remove the first occurrence of a value in the bytearray.\n\n value\n The value to remove.\n """"""\n ...\n def removeprefix(self, prefix: ReadableBuffer, /) -> bytearray:\n """"""\n Return a bytearray with the given prefix string removed if present.\n\n If the bytearray starts with the prefix string, return\n bytearray[len(prefix):]. Otherwise, return a copy of the original\n bytearray.\n """"""\n ...\n def removesuffix(self, suffix: ReadableBuffer, /) -> bytearray:\n """"""\n Return a bytearray with the given suffix string removed if present.\n\n If the bytearray ends with the suffix string and that suffix is not\n empty, return bytearray[:-len(suffix)]. Otherwise, return a copy of\n the original bytearray.\n """"""\n ...\n def replace(self, old: ReadableBuffer, new: ReadableBuffer, count: SupportsIndex = -1, /) -> bytearray:\n """"""\n Return a copy with all occurrences of substring old replaced by new.\n\n count\n Maximum number of occurrences to replace.\n -1 (the default value) means replace all occurrences.\n\n If the optional argument count is given, only the first count occurrences are\n replaced.\n """"""\n ...\n def rfind(\n self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /\n ) -> int:\n """"""\n B.rfind(sub[, start[, end]]) -> int\n\n Return the highest index in B where subsection sub is found,\n such that sub is contained within B[start,end]. 
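[Editorial aside.] removeprefix/removesuffix (Python 3.9+), partition and replace as documented above all return new bytearray objects and leave the original untouched, e.g.:

ba = bytearray(b"report-2024.csv")

assert ba.removeprefix(b"report-") == bytearray(b"2024.csv")
assert ba.removesuffix(b".csv") == bytearray(b"report-2024")
assert ba == bytearray(b"report-2024.csv")      # original unchanged

# partition always returns a 3-tuple, even when the separator is missing
head, sep, tail = ba.partition(b"-")
assert (head, sep, tail) == (bytearray(b"report"), bytearray(b"-"), bytearray(b"2024.csv"))

# replace with count=1 only touches the first occurrence
assert bytearray(b"a.b.c").replace(b".", b"/", 1) == bytearray(b"a/b.c")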
Optional\n arguments start and end are interpreted as in slice notation.\n\n Return -1 on failure.\n """"""\n ...\n def rindex(\n self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /\n ) -> int:\n """"""\n B.rindex(sub[, start[, end]]) -> int\n\n Return the highest index in B where subsection sub is found,\n such that sub is contained within B[start,end]. Optional\n arguments start and end are interpreted as in slice notation.\n\n Raise ValueError when the subsection is not found.\n """"""\n ...\n def rjust(self, width: SupportsIndex, fillchar: bytes | bytearray = b"" "", /) -> bytearray:\n """"""\n Return a right-justified string of length width.\n\n Padding is done using the specified fill character.\n """"""\n ...\n def rpartition(self, sep: ReadableBuffer, /) -> tuple[bytearray, bytearray, bytearray]:\n """"""\n Partition the bytearray into three parts using the given separator.\n\n This will search for the separator sep in the bytearray, starting at the end.\n If the separator is found, returns a 3-tuple containing the part before the\n separator, the separator itself, and the part after it as new bytearray\n objects.\n\n If the separator is not found, returns a 3-tuple containing two empty bytearray\n objects and the copy of the original bytearray object.\n """"""\n ...\n def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]:\n """"""\n Return a list of the sections in the bytearray, using sep as the delimiter.\n\n sep\n The delimiter according which to split the bytearray.\n None (the default value) means split on ASCII whitespace characters\n (space, tab, return, newline, formfeed, vertical tab).\n maxsplit\n Maximum number of splits to do.\n -1 (the default value) means no limit.\n\n Splitting is done starting at the end of the bytearray and working to the front.\n """"""\n ...\n def rstrip(self, bytes: ReadableBuffer | None = None, /) -> bytearray:\n """"""\n Strip trailing bytes contained in the argument.\n\n If the argument is omitted or None, strip trailing ASCII whitespace.\n """"""\n ...\n def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]:\n """"""\n Return a list of the sections in the bytearray, using sep as the delimiter.\n\n sep\n The delimiter according which to split the bytearray.\n None (the default value) means split on ASCII whitespace characters\n (space, tab, return, newline, formfeed, vertical tab).\n maxsplit\n Maximum number of splits to do.\n -1 (the default value) means no limit.\n """"""\n ...\n def splitlines(self, keepends: bool = False) -> list[bytearray]:\n """"""\n Return a list of the lines in the bytearray, breaking at line boundaries.\n\n Line breaks are not included in the resulting list unless keepends is given and\n true.\n """"""\n ...\n def startswith(\n self,\n prefix: ReadableBuffer | tuple[ReadableBuffer, ...],\n start: SupportsIndex | None = ...,\n end: SupportsIndex | None = ...,\n /,\n ) -> bool:\n """"""\n B.startswith(prefix[, start[, end]]) -> bool\n\n Return True if B starts with the specified prefix, False otherwise.\n With optional start, test B beginning at that position.\n With optional end, stop comparing B at that position.\n prefix can also be a tuple of bytes to try.\n """"""\n ...\n def strip(self, bytes: ReadableBuffer | None = None, /) -> bytearray:\n """"""\n Strip leading and trailing bytes contained in the argument.\n\n If the argument is omitted or None, strip 
leading and trailing ASCII whitespace.\n """"""\n ...\n def swapcase(self) -> bytearray:\n """"""\n B.swapcase() -> copy of B\n\n Return a copy of B with uppercase ASCII characters converted\n to lowercase ASCII and vice versa.\n """"""\n ...\n def title(self) -> bytearray:\n """"""\n B.title() -> copy of B\n\n Return a titlecased version of B, i.e. ASCII words start with uppercase\n characters, all remaining cased characters have lowercase.\n """"""\n ...\n def translate(self, table: ReadableBuffer | None, /, delete: bytes = b"""") -> bytearray:\n """"""\n Return a copy with each character mapped by the given translation table.\n\n table\n Translation table, which must be a bytes object of length 256.\n\n All characters occurring in the optional argument delete are removed.\n The remaining characters are mapped through the given translation table.\n """"""\n ...\n def upper(self) -> bytearray:\n """"""\n B.upper() -> copy of B\n\n Return a copy of B with all ASCII characters converted to uppercase.\n """"""\n ...\n def zfill(self, width: SupportsIndex, /) -> bytearray:\n """"""\n Pad a numeric string with zeros on the left, to fill a field of the given width.\n\n The original string is never truncated.\n """"""\n ...\n @classmethod\n def fromhex(cls, string: str, /) -> Self:\n r""""""\n Create a bytearray object from a string of hexadecimal numbers.\n\n Spaces between two numbers are accepted.\n Example: bytearray.fromhex('B9 01EF') -> bytearray(b'\\xb9\\x01\\xef')\n """"""\n ...\n @staticmethod\n def maketrans(frm: ReadableBuffer, to: ReadableBuffer, /) -> bytes:\n """"""\n Return a translation table useable for the bytes or bytearray translate method.\n\n The returned table will be one where each byte in frm is mapped to the byte at\n the same position in to.\n\n The bytes objects frm and to must be of the same length.\n """"""\n ...\n def __len__(self) -> int:\n """"""Return len(self).""""""\n ...\n def __iter__(self) -> Iterator[int]:\n """"""Implement iter(self).""""""\n ...\n __hash__: ClassVar[None] # type: ignore[assignment]\n @overload\n def __getitem__(self, key: SupportsIndex, /) -> int:\n """"""Return self[key].""""""\n ...\n @overload\n def __getitem__(self, key: slice, /) -> bytearray:\n """"""Return self[key].""""""\n ...\n @overload\n def __setitem__(self, key: SupportsIndex, value: SupportsIndex, /) -> None:\n """"""Set self[key] to value.""""""\n ...\n @overload\n def __setitem__(self, key: slice, value: Iterable[SupportsIndex] | bytes, /) -> None:\n """"""Set self[key] to value.""""""\n ...\n def __delitem__(self, key: SupportsIndex | slice, /) -> None:\n """"""Delete self[key].""""""\n ...\n def __add__(self, value: ReadableBuffer, /) -> bytearray:\n """"""Return self+value.""""""\n ...\n # The superclass wants us to accept Iterable[int], but that fails at runtime.\n def __iadd__(self, value: ReadableBuffer, /) -> Self:\n """"""Implement self+=value.""""""\n ...\n def __mul__(self, value: SupportsIndex, /) -> bytearray:\n """"""Return self*value.""""""\n ...\n def __rmul__(self, value: SupportsIndex, /) -> bytearray:\n """"""Return value*self.""""""\n ...\n def __imul__(self, value: SupportsIndex, /) -> Self:\n """"""Implement self*=value.""""""\n ...\n def __mod__(self, value: Any, /) -> bytes:\n """"""Return self%value.""""""\n ...\n # Incompatible with Sequence.__contains__\n def __contains__(self, key: SupportsIndex | ReadableBuffer, /) -> bool:\n """"""Return bool(key in self).""""""\n ...\n def __eq__(self, value: object, /) -> bool:\n """"""Return 
self==value.""""""\n ...\n def __ne__(self, value: object, /) -> bool:\n """"""Return self!=value.""""""\n ...\n def __lt__(self, value: ReadableBuffer, /) -> bool:\n """"""Return self bool:\n """"""Return self<=value.""""""\n ...\n def __gt__(self, value: ReadableBuffer, /) -> bool:\n """"""Return self>value.""""""\n ...\n def __ge__(self, value: ReadableBuffer, /) -> bool:\n """"""Return self>=value.""""""\n ...\n def __alloc__(self) -> int:\n """"""\n B.__alloc__() -> int\n\n Return the number of bytes actually allocated.\n """"""\n ...\n def __buffer__(self, flags: int, /) -> memoryview:\n """"""Return a buffer object that exposes the underlying memory of the object.""""""\n ...\n def __release_buffer__(self, buffer: memoryview, /) -> None:\n """"""Release the buffer object that exposes the underlying memory of the object.""""""\n ...\n if sys.version_info >= (3, 14):\n def resize(self, size: int, /) -> None: ...\n\n_IntegerFormats: TypeAlias = Literal[\n ""b"", ""B"", ""@b"", ""@B"", ""h"", ""H"", ""@h"", ""@H"", ""i"", ""I"", ""@i"", ""@I"", ""l"", ""L"", ""@l"", ""@L"", ""q"", ""Q"", ""@q"", ""@Q"", ""P"", ""@P""\n]\n\n@final\nclass memoryview(Sequence[_I]):\n """"""Create a new memoryview object which references the given object.""""""\n @property\n def format(self) -> str:\n """"""\n A string containing the format (in struct module style)\n for each element in the view.\n """"""\n ...\n @property\n def itemsize(self) -> int:\n """"""The size in bytes of each element of the memoryview.""""""\n ...\n @property\n def shape(self) -> tuple[int, ...] | None:\n """"""\n A tuple of ndim integers giving the shape of the memory\n as an N-dimensional array.\n """"""\n ...\n @property\n def strides(self) -> tuple[int, ...] | None:\n """"""\n A tuple of ndim integers giving the size in bytes to access\n each element for each dimension of the array.\n """"""\n ...\n @property\n def suboffsets(self) -> tuple[int, ...] | None:\n """"""A tuple of integers used internally for PIL-style arrays.""""""\n ...\n @property\n def readonly(self) -> bool:\n """"""A bool indicating whether the memory is read only.""""""\n ...\n @property\n def ndim(self) -> int:\n """"""\n An integer indicating how many dimensions of a multi-dimensional\n array the memory represents.\n """"""\n ...\n @property\n def obj(self) -> ReadableBuffer:\n """"""The underlying object of the memoryview.""""""\n ...\n @property\n def c_contiguous(self) -> bool:\n """"""A bool indicating whether the memory is C contiguous.""""""\n ...\n @property\n def f_contiguous(self) -> bool:\n """"""A bool indicating whether the memory is Fortran contiguous.""""""\n ...\n @property\n def contiguous(self) -> bool:\n """"""A bool indicating whether the memory is contiguous.""""""\n ...\n @property\n def nbytes(self) -> int:\n """"""\n The amount of space in bytes that the array would use in\n a contiguous representation.\n """"""\n ...\n def __new__(cls, obj: ReadableBuffer) -> Self: ...\n def __enter__(self) -> Self: ...\n def __exit__(\n self,\n exc_type: type[BaseException] | None, # noqa: PYI036 # This is the module declaring BaseException\n exc_val: BaseException | None,\n exc_tb: TracebackType | None,\n /,\n ) -> None: ...\n @overload\n def cast(self, format: Literal[""c"", ""@c""], shape: list[int] | tuple[int, ...] = ...) -> memoryview[bytes]:\n """"""Cast a memoryview to a new format or shape.""""""\n ...\n @overload\n def cast(self, format: Literal[""f"", ""@f"", ""d"", ""@d""], shape: list[int] | tuple[int, ...] = ...) 
-> memoryview[float]:\n """"""Cast a memoryview to a new format or shape.""""""\n ...\n @overload\n def cast(self, format: Literal[""?""], shape: list[int] | tuple[int, ...] = ...) -> memoryview[bool]:\n """"""Cast a memoryview to a new format or shape.""""""\n ...\n @overload\n def cast(self, format: _IntegerFormats, shape: list[int] | tuple[int, ...] = ...) -> memoryview:\n """"""Cast a memoryview to a new format or shape.""""""\n ...\n @overload\n def __getitem__(self, key: SupportsIndex | tuple[SupportsIndex, ...], /) -> _I:\n """"""Return self[key].""""""\n ...\n @overload\n def __getitem__(self, key: slice, /) -> memoryview[_I]:\n """"""Return self[key].""""""\n ...\n def __contains__(self, x: object, /) -> bool: ...\n def __iter__(self) -> Iterator[_I]:\n """"""Implement iter(self).""""""\n ...\n def __len__(self) -> int:\n """"""Return len(self).""""""\n ...\n def __eq__(self, value: object, /) -> bool:\n """"""Return self==value.""""""\n ...\n def __hash__(self) -> int:\n """"""Return hash(self).""""""\n ...\n @overload\n def __setitem__(self, key: slice, value: ReadableBuffer, /) -> None:\n """"""Set self[key] to value.""""""\n ...\n @overload\n def __setitem__(self, key: SupportsIndex | tuple[SupportsIndex, ...], value: _I, /) -> None:\n """"""Set self[key] to value.""""""\n ...\n if sys.version_info >= (3, 10):\n def tobytes(self, order: Literal[""C"", ""F"", ""A""] | None = ""C"") -> bytes:\n """"""\n Return the data in the buffer as a byte string.\n\n Order can be {'C', 'F', 'A'}. When order is 'C' or 'F', the data of the\n original array is converted to C or Fortran order. For contiguous views,\n 'A' returns an exact copy of the physical memory. In particular, in-memory\n Fortran order is preserved. For non-contiguous views, the data is converted\n to C first. order=None is the same as order='C'.\n """"""\n ...\n else:\n def tobytes(self, order: Literal[""C"", ""F"", ""A""] | None = None) -> bytes: ...\n\n def tolist(self) -> list[int]:\n """"""Return the data in the buffer as a list of elements.""""""\n ...\n def toreadonly(self) -> memoryview:\n """"""Return a readonly version of the memoryview.""""""\n ...\n def release(self) -> None:\n """"""Release the underlying buffer exposed by the memoryview object.""""""\n ...\n def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = ...) -> str:\n r""""""\n Return the data in the buffer as a str of hexadecimal numbers.\n\n sep\n An optional single character or byte to separate hex bytes.\n bytes_per_sep\n How many bytes between separators. 
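[Editorial aside.] The memoryview stub above (cast, itemsize, tolist, hex, release) describes a zero-copy view over an existing buffer; a small sketch, using struct only to build sample bytes:

import struct

raw = bytearray(struct.pack("4i", 1, 2, 3, 4))   # four native-endian C ints
view = memoryview(raw)

# cast() reinterprets the same memory without copying
ints = view.cast("i")
assert ints.tolist() == [1, 2, 3, 4]
assert ints.itemsize == struct.calcsize("i")

# writing through one view mutates the buffer the other view sees
view[0:ints.itemsize] = struct.pack("i", 99)
assert ints[0] == 99

# hex() with a separator, as documented above
assert memoryview(b"\xb9\x01\xef").hex(":") == "b9:01:ef"

view.release()   # explicitly release this view's buffer export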
Positive values count from the\n right, negative values count from the left.\n\n Example:\n >>> value = memoryview(b'\xb9\x01\xef')\n >>> value.hex()\n 'b901ef'\n >>> value.hex(':')\n 'b9:01:ef'\n >>> value.hex(':', 2)\n 'b9:01ef'\n >>> value.hex(':', -2)\n 'b901:ef'\n """"""\n ...\n def __buffer__(self, flags: int, /) -> memoryview:\n """"""Return a buffer object that exposes the underlying memory of the object.""""""\n ...\n def __release_buffer__(self, buffer: memoryview, /) -> None:\n """"""Release the buffer object that exposes the underlying memory of the object.""""""\n ...\n\n # These are inherited from the Sequence ABC, but don't actually exist on memoryview.\n # See https://github.com/python/cpython/issues/125420\n index: ClassVar[None] # type: ignore[assignment]\n count: ClassVar[None] # type: ignore[assignment]\n if sys.version_info >= (3, 14):\n def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...\n\n@final\nclass bool(int):\n """"""\n bool(x) -> bool\n\n Returns True when the argument x is true, False otherwise.\n The builtins True and False are the only two instances of the class bool.\n The class bool is a subclass of the class int, and cannot be subclassed.\n """"""\n def __new__(cls, o: object = ..., /) -> Self: ...\n # The following overloads could be represented more elegantly with a TypeVar(""_B"", bool, int),\n # however mypy has a bug regarding TypeVar constraints (https://github.com/python/mypy/issues/11880).\n @overload\n def __and__(self, value: bool, /) -> bool:\n """"""Return self&value.""""""\n ...\n @overload\n def __and__(self, value: int, /) -> int:\n """"""Return self&value.""""""\n ...\n @overload\n def __or__(self, value: bool, /) -> bool:\n """"""Return self|value.""""""\n ...\n @overload\n def __or__(self, value: int, /) -> int:\n """"""Return self|value.""""""\n ...\n @overload\n def __xor__(self, value: bool, /) -> bool:\n """"""Return self^value.""""""\n ...\n @overload\n def __xor__(self, value: int, /) -> int:\n """"""Return self^value.""""""\n ...\n @overload\n def __rand__(self, value: bool, /) -> bool:\n """"""Return value&self.""""""\n ...\n @overload\n def __rand__(self, value: int, /) -> int:\n """"""Return value&self.""""""\n ...\n @overload\n def __ror__(self, value: bool, /) -> bool:\n """"""Return value|self.""""""\n ...\n @overload\n def __ror__(self, value: int, /) -> int:\n """"""Return value|self.""""""\n ...\n @overload\n def __rxor__(self, value: bool, /) -> bool:\n """"""Return value^self.""""""\n ...\n @overload\n def __rxor__(self, value: int, /) -> int:\n """"""Return value^self.""""""\n ...\n def __getnewargs__(self) -> tuple[int]: ...\n @deprecated(""Will throw an error in Python 3.16. Use `not` for logical negation of bools instead."")\n def __invert__(self) -> int:\n """"""~self""""""\n ...\n\n@final\nclass slice(Generic[_StartT_co, _StopT_co, _StepT_co]):\n """"""\n slice(stop)\n slice(start, stop[, step])\n\n Create a slice object. This is used for extended slicing (e.g. 
a[0:10:2]).\n """"""\n @property\n def start(self) -> _StartT_co: ...\n @property\n def step(self) -> _StepT_co: ...\n @property\n def stop(self) -> _StopT_co: ...\n # Note: __new__ overloads map `None` to `Any`, since users expect slice(x, None)\n # to be compatible with slice(None, x).\n # generic slice --------------------------------------------------------------------\n @overload\n def __new__(cls, start: None, stop: None = None, step: None = None, /) -> slice[Any, Any, Any]: ...\n # unary overloads ------------------------------------------------------------------\n @overload\n def __new__(cls, stop: _T2, /) -> slice[Any, _T2, Any]: ...\n # binary overloads -----------------------------------------------------------------\n @overload\n def __new__(cls, start: _T1, stop: None, step: None = None, /) -> slice[_T1, Any, Any]: ...\n @overload\n def __new__(cls, start: None, stop: _T2, step: None = None, /) -> slice[Any, _T2, Any]: ...\n @overload\n def __new__(cls, start: _T1, stop: _T2, step: None = None, /) -> slice[_T1, _T2, Any]: ...\n # ternary overloads ----------------------------------------------------------------\n @overload\n def __new__(cls, start: None, stop: None, step: _T3, /) -> slice[Any, Any, _T3]: ...\n @overload\n def __new__(cls, start: _T1, stop: None, step: _T3, /) -> slice[_T1, Any, _T3]: ...\n @overload\n def __new__(cls, start: None, stop: _T2, step: _T3, /) -> slice[Any, _T2, _T3]: ...\n @overload\n def __new__(cls, start: _T1, stop: _T2, step: _T3, /) -> slice[_T1, _T2, _T3]: ...\n def __eq__(self, value: object, /) -> bool:\n """"""Return self==value.""""""\n ...\n if sys.version_info >= (3, 12):\n def __hash__(self) -> int:\n """"""Return hash(self).""""""\n ...\n else:\n __hash__: ClassVar[None] # type: ignore[assignment]\n\n def indices(self, len: SupportsIndex, /) -> tuple[int, int, int]:\n """"""\n S.indices(len) -> (start, stop, stride)\n\n Assuming a sequence of length len, calculate the start and stop\n indices, and the stride length of the extended slice described by\n S. 
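[Editorial aside.] slice.indices, documented just below, normalises a slice against a concrete sequence length (clipping out-of-bounds values), which is handy when re-implementing __getitem__; for example:

s = slice(-3, None, None)          # the slice object behind seq[-3:]

start, stop, step = s.indices(5)   # clip against a length of 5
assert (start, stop, step) == (2, 5, 1)

data = list("abcde")
assert data[s] == [data[i] for i in range(start, stop, step)] == ["c", "d", "e"]

# a slice larger than the sequence is clipped rather than raising
assert slice(0, 100).indices(3) == (0, 3, 1)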
Out of bounds indices are clipped in a manner consistent with the\n handling of normal slices.\n """"""\n ...\n\nclass tuple(Sequence[_T_co]):\n """"""\n Built-in immutable sequence.\n\n If no argument is given, the constructor returns an empty tuple.\n If iterable is specified the tuple is initialized from iterable's items.\n\n If the argument is a tuple, the return value is the same object.\n """"""\n def __new__(cls, iterable: Iterable[_T_co] = ..., /) -> Self: ...\n def __len__(self) -> int:\n """"""Return len(self).""""""\n ...\n def __contains__(self, key: object, /) -> bool:\n """"""Return bool(key in self).""""""\n ...\n @overload\n def __getitem__(self, key: SupportsIndex, /) -> _T_co:\n """"""Return self[key].""""""\n ...\n @overload\n def __getitem__(self, key: slice, /) -> tuple[_T_co, ...]:\n """"""Return self[key].""""""\n ...\n def __iter__(self) -> Iterator[_T_co]:\n """"""Implement iter(self).""""""\n ...\n def __lt__(self, value: tuple[_T_co, ...], /) -> bool:\n """"""Return self bool:\n """"""Return self<=value.""""""\n ...\n def __gt__(self, value: tuple[_T_co, ...], /) -> bool:\n """"""Return self>value.""""""\n ...\n def __ge__(self, value: tuple[_T_co, ...], /) -> bool:\n """"""Return self>=value.""""""\n ...\n def __eq__(self, value: object, /) -> bool:\n """"""Return self==value.""""""\n ...\n def __hash__(self) -> int:\n """"""Return hash(self).""""""\n ...\n @overload\n def __add__(self, value: tuple[_T_co, ...], /) -> tuple[_T_co, ...]:\n """"""Return self+value.""""""\n ...\n @overload\n def __add__(self, value: tuple[_T, ...], /) -> tuple[_T_co | _T, ...]:\n """"""Return self+value.""""""\n ...\n def __mul__(self, value: SupportsIndex, /) -> tuple[_T_co, ...]:\n """"""Return self*value.""""""\n ...\n def __rmul__(self, value: SupportsIndex, /) -> tuple[_T_co, ...]:\n """"""Return value*self.""""""\n ...\n def count(self, value: Any, /) -> int:\n """"""Return number of occurrences of value.""""""\n ...\n def index(self, value: Any, start: SupportsIndex = 0, stop: SupportsIndex = sys.maxsize, /) -> int:\n """"""\n Return first index of value.\n\n Raises ValueError if the value is not present.\n """"""\n ...\n def __class_getitem__(cls, item: Any, /) -> GenericAlias:\n """"""See PEP 585""""""\n ...\n\n# Doesn't exist at runtime, but deleting this breaks mypy and pyright. See:\n# https://github.com/python/typeshed/issues/7580\n# https://github.com/python/mypy/issues/8240\n# Obsolete, use types.FunctionType instead.\n@final\n@type_check_only\nclass function:\n # Make sure this class definition stays roughly in line with `types.FunctionType`\n @property\n def __closure__(self) -> tuple[CellType, ...] | None: ...\n __code__: CodeType\n __defaults__: tuple[Any, ...] | None\n __dict__: dict[str, Any]\n @property\n def __globals__(self) -> dict[str, Any]: ...\n __name__: str\n __qualname__: str\n __annotations__: dict[str, AnnotationForm]\n if sys.version_info >= (3, 14):\n __annotate__: AnnotateFunc | None\n __kwdefaults__: dict[str, Any] | None\n if sys.version_info >= (3, 10):\n @property\n def __builtins__(self) -> dict[str, Any]: ...\n if sys.version_info >= (3, 12):\n __type_params__: tuple[TypeVar | ParamSpec | TypeVarTuple, ...]\n\n __module__: str\n if sys.version_info >= (3, 13):\n def __new__(\n cls,\n code: CodeType,\n globals: dict[str, Any],\n name: str | None = None,\n argdefs: tuple[object, ...] | None = None,\n closure: tuple[CellType, ...] 
| None = None,\n kwdefaults: dict[str, object] | None = None,\n ) -> Self: ...\n else:\n def __new__(\n cls,\n code: CodeType,\n globals: dict[str, Any],\n name: str | None = None,\n argdefs: tuple[object, ...] | None = None,\n closure: tuple[CellType, ...] | None = None,\n ) -> Self: ...\n\n # mypy uses `builtins.function.__get__` to represent methods, properties, and getset_descriptors so we type the return as Any.\n def __get__(self, instance: object, owner: type | None = None, /) -> Any: ...\n\nclass list(MutableSequence[_T]):\n """"""\n Built-in mutable sequence.\n\n If no argument is given, the constructor creates a new empty list.\n The argument must be an iterable if specified.\n """"""\n @overload\n def __init__(self) -> None: ...\n @overload\n def __init__(self, iterable: Iterable[_T], /) -> None: ...\n def copy(self) -> list[_T]:\n """"""Return a shallow copy of the list.""""""\n ...\n def append(self, object: _T, /) -> None:\n """"""Append object to the end of the list.""""""\n ...\n def extend(self, iterable: Iterable[_T], /) -> None:\n """"""Extend list by appending elements from the iterable.""""""\n ...\n def pop(self, index: SupportsIndex = -1, /) -> _T:\n """"""\n Remove and return item at index (default last).\n\n Raises IndexError if list is empty or index is out of range.\n """"""\n ...\n # Signature of `list.index` should be kept in line with `collections.UserList.index()`\n # and multiprocessing.managers.ListProxy.index()\n def index(self, value: _T, start: SupportsIndex = 0, stop: SupportsIndex = sys.maxsize, /) -> int:\n """"""\n Return first index of value.\n\n Raises ValueError if the value is not present.\n """"""\n ...\n def count(self, value: _T, /) -> int:\n """"""Return number of occurrences of value.""""""\n ...\n def insert(self, index: SupportsIndex, object: _T, /) -> None:\n """"""Insert object before index.""""""\n ...\n def remove(self, value: _T, /) -> None:\n """"""\n Remove first occurrence of value.\n\n Raises ValueError if the value is not present.\n """"""\n ...\n # Signature of `list.sort` should be kept inline with `collections.UserList.sort()`\n # and multiprocessing.managers.ListProxy.sort()\n #\n # Use list[SupportsRichComparisonT] for the first overload rather than [SupportsRichComparison]\n # to work around invariance\n @overload\n def sort(self: list[SupportsRichComparisonT], *, key: None = None, reverse: bool = False) -> None:\n """"""\n Sort the list in ascending order and return None.\n\n The sort is in-place (i.e. the list itself is modified) and stable (i.e. the\n order of two equal elements is maintained).\n\n If a key function is given, apply it once to each list item and sort them,\n ascending or descending, according to their function values.\n\n The reverse flag can be set to sort in descending order.\n """"""\n ...\n @overload\n def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> None:\n """"""\n Sort the list in ascending order and return None.\n\n The sort is in-place (i.e. the list itself is modified) and stable (i.e. 
the\n order of two equal elements is maintained).\n\n If a key function is given, apply it once to each list item and sort them,\n ascending or descending, according to their function values.\n\n The reverse flag can be set to sort in descending order.\n """"""\n ...\n def __len__(self) -> int:\n """"""Return len(self).""""""\n ...\n def __iter__(self) -> Iterator[_T]:\n """"""Implement iter(self).""""""\n ...\n __hash__: ClassVar[None] # type: ignore[assignment]\n @overload\n def __getitem__(self, i: SupportsIndex, /) -> _T:\n """"""Return self[index].""""""\n ...\n @overload\n def __getitem__(self, s: slice, /) -> list[_T]:\n """"""Return self[index].""""""\n ...\n @overload\n def __setitem__(self, key: SupportsIndex, value: _T, /) -> None:\n """"""Set self[key] to value.""""""\n ...\n @overload\n def __setitem__(self, key: slice, value: Iterable[_T], /) -> None:\n """"""Set self[key] to value.""""""\n ...\n def __delitem__(self, key: SupportsIndex | slice, /) -> None:\n """"""Delete self[key].""""""\n ...\n # Overloading looks unnecessary, but is needed to work around complex mypy problems\n @overload\n def __add__(self, value: list[_T], /) -> list[_T]:\n """"""Return self+value.""""""\n ...\n @overload\n def __add__(self, value: list[_S], /) -> list[_S | _T]:\n """"""Return self+value.""""""\n ...\n def __iadd__(self, value: Iterable[_T], /) -> Self:\n """"""Implement self+=value.""""""\n ...\n def __mul__(self, value: SupportsIndex, /) -> list[_T]:\n """"""Return self*value.""""""\n ...\n def __rmul__(self, value: SupportsIndex, /) -> list[_T]:\n """"""Return value*self.""""""\n ...\n def __imul__(self, value: SupportsIndex, /) -> Self:\n """"""Implement self*=value.""""""\n ...\n def __contains__(self, key: object, /) -> bool:\n """"""Return bool(key in self).""""""\n ...\n def __reversed__(self) -> Iterator[_T]:\n """"""Return a reverse iterator over the list.""""""\n ...\n def __gt__(self, value: list[_T], /) -> bool:\n """"""Return self>value.""""""\n ...\n def __ge__(self, value: list[_T], /) -> bool:\n """"""Return self>=value.""""""\n ...\n def __lt__(self, value: list[_T], /) -> bool:\n """"""Return self bool:\n """"""Return self<=value.""""""\n ...\n def __eq__(self, value: object, /) -> bool:\n """"""Return self==value.""""""\n ...\n def __class_getitem__(cls, item: Any, /) -> GenericAlias:\n """"""See PEP 585""""""\n ...\n\nclass dict(MutableMapping[_KT, _VT]):\n """"""\n dict() -> new empty dictionary\n dict(mapping) -> new dictionary initialized from a mapping object's\n (key, value) pairs\n dict(iterable) -> new dictionary initialized as if via:\n d = {}\n for k, v in iterable:\n d[k] = v\n dict(**kwargs) -> new dictionary initialized with the name=value pairs\n in the keyword argument list. For example: dict(one=1, two=2)\n """"""\n # __init__ should be kept roughly in line with `collections.UserDict.__init__`, which has similar semantics\n # Also multiprocessing.managers.SyncManager.dict()\n @overload\n def __init__(self) -> None: ...\n @overload\n def __init__(self: dict[str, _VT], **kwargs: _VT) -> None: ... 
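[Editorial aside.] The two sort overloads above correspond to sorting by natural order versus by a key function; both are in-place and stable:

words = ["pear", "Apple", "fig"]

# key is applied once per element; the list itself is modified
words.sort(key=str.lower)
assert words == ["Apple", "fig", "pear"]

# reverse=True flips the order without changing the key logic
words.sort(key=len, reverse=True)
assert words == ["Apple", "pear", "fig"]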
# pyright: ignore[reportInvalidTypeVarUse] #11780\n @overload\n def __init__(self, map: SupportsKeysAndGetItem[_KT, _VT], /) -> None: ...\n @overload\n def __init__(\n self: dict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780\n map: SupportsKeysAndGetItem[str, _VT],\n /,\n **kwargs: _VT,\n ) -> None: ...\n @overload\n def __init__(self, iterable: Iterable[tuple[_KT, _VT]], /) -> None: ...\n @overload\n def __init__(\n self: dict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780\n iterable: Iterable[tuple[str, _VT]],\n /,\n **kwargs: _VT,\n ) -> None: ...\n # Next two overloads are for dict(string.split(sep) for string in iterable)\n # Cannot be Iterable[Sequence[_T]] or otherwise dict([""foo"", ""bar"", ""baz""]) is not an error\n @overload\n def __init__(self: dict[str, str], iterable: Iterable[list[str]], /) -> None: ...\n @overload\n def __init__(self: dict[bytes, bytes], iterable: Iterable[list[bytes]], /) -> None: ...\n def __new__(cls, *args: Any, **kwargs: Any) -> Self: ...\n def copy(self) -> dict[_KT, _VT]:\n """"""D.copy() -> a shallow copy of D""""""\n ...\n def keys(self) -> dict_keys[_KT, _VT]:\n """"""D.keys() -> a set-like object providing a view on D's keys""""""\n ...\n def values(self) -> dict_values[_KT, _VT]:\n """"""D.values() -> an object providing a view on D's values""""""\n ...\n def items(self) -> dict_items[_KT, _VT]:\n """"""D.items() -> a set-like object providing a view on D's items""""""\n ...\n # Signature of `dict.fromkeys` should be kept identical to\n # `fromkeys` methods of `OrderedDict`/`ChainMap`/`UserDict` in `collections`\n # TODO: the true signature of `dict.fromkeys` is not expressible in the current type system.\n # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963.\n @classmethod\n @overload\n def fromkeys(cls, iterable: Iterable[_T], value: None = None, /) -> dict[_T, Any | None]:\n """"""Create a new dictionary with keys from iterable and values set to value.""""""\n ...\n @classmethod\n @overload\n def fromkeys(cls, iterable: Iterable[_T], value: _S, /) -> dict[_T, _S]:\n """"""Create a new dictionary with keys from iterable and values set to value.""""""\n ...\n # Positional-only in dict, but not in MutableMapping\n @overload # type: ignore[override]\n def get(self, key: _KT, default: None = None, /) -> _VT | None:\n """"""Return the value for key if key is in the dictionary, else default.""""""\n ...\n @overload\n def get(self, key: _KT, default: _VT, /) -> _VT:\n """"""Return the value for key if key is in the dictionary, else default.""""""\n ...\n @overload\n def get(self, key: _KT, default: _T, /) -> _VT | _T:\n """"""Return the value for key if key is in the dictionary, else default.""""""\n ...\n @overload\n def pop(self, key: _KT, /) -> _VT:\n """"""\n D.pop(k[,d]) -> v, remove specified key and return the corresponding value.\n\n If the key is not found, return the default if given; otherwise,\n raise a KeyError.\n """"""\n ...\n @overload\n def pop(self, key: _KT, default: _VT, /) -> _VT:\n """"""\n D.pop(k[,d]) -> v, remove specified key and return the corresponding value.\n\n If the key is not found, return the default if given; otherwise,\n raise a KeyError.\n """"""\n ...\n @overload\n def pop(self, key: _KT, default: _T, /) -> _VT | _T:\n """"""\n D.pop(k[,d]) -> v, remove specified key and return the corresponding value.\n\n If the key is not found, return the default if given; otherwise,\n raise a KeyError.\n """"""\n ...\n def __len__(self) -> int:\n """"""Return 
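[Editorial aside.] The dict stubs above distinguish get (never raises) from pop (raises without a default), and fromkeys assigns the same value object to every key:

counts = dict.fromkeys(["a", "b", "c"], 0)     # every key gets the same value
assert counts == {"a": 0, "b": 0, "c": 0}

assert counts.get("z") is None                 # get() falls back to a default
assert counts.get("z", -1) == -1
assert counts.pop("a") == 0                    # pop() removes and returns
assert counts.pop("a", -1) == -1               # ...or returns the default

# keys()/values()/items() are live views over the dict
keys = counts.keys()
counts["d"] = 4
assert "d" in keys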
len(self).""""""\n ...\n def __getitem__(self, key: _KT, /) -> _VT:\n """"""Return self[key].""""""\n ...\n def __setitem__(self, key: _KT, value: _VT, /) -> None:\n """"""Set self[key] to value.""""""\n ...\n def __delitem__(self, key: _KT, /) -> None:\n """"""Delete self[key].""""""\n ...\n def __iter__(self) -> Iterator[_KT]:\n """"""Implement iter(self).""""""\n ...\n def __eq__(self, value: object, /) -> bool:\n """"""Return self==value.""""""\n ...\n def __reversed__(self) -> Iterator[_KT]:\n """"""Return a reverse iterator over the dict keys.""""""\n ...\n __hash__: ClassVar[None] # type: ignore[assignment]\n def __class_getitem__(cls, item: Any, /) -> GenericAlias:\n """"""See PEP 585""""""\n ...\n @overload\n def __or__(self, value: dict[_KT, _VT], /) -> dict[_KT, _VT]:\n """"""Return self|value.""""""\n ...\n @overload\n def __or__(self, value: dict[_T1, _T2], /) -> dict[_KT | _T1, _VT | _T2]:\n """"""Return self|value.""""""\n ...\n @overload\n def __ror__(self, value: dict[_KT, _VT], /) -> dict[_KT, _VT]:\n """"""Return value|self.""""""\n ...\n @overload\n def __ror__(self, value: dict[_T1, _T2], /) -> dict[_KT | _T1, _VT | _T2]:\n """"""Return value|self.""""""\n ...\n # dict.__ior__ should be kept roughly in line with MutableMapping.update()\n @overload # type: ignore[misc]\n def __ior__(self, value: SupportsKeysAndGetItem[_KT, _VT], /) -> Self:\n """"""Return self|=value.""""""\n ...\n @overload\n def __ior__(self, value: Iterable[tuple[_KT, _VT]], /) -> Self:\n """"""Return self|=value.""""""\n ...\n\nclass set(MutableSet[_T]):\n """"""\n set() -> new empty set object\n set(iterable) -> new set object\n\n Build an unordered collection of unique elements.\n """"""\n @overload\n def __init__(self) -> None: ...\n @overload\n def __init__(self, iterable: Iterable[_T], /) -> None: ...\n def add(self, element: _T, /) -> None:\n """"""\n Add an element to a set.\n\n This has no effect if the element is already present.\n """"""\n ...\n def copy(self) -> set[_T]:\n """"""Return a shallow copy of a set.""""""\n ...\n def difference(self, *s: Iterable[Any]) -> set[_T]:\n """"""\n Return the difference of two or more sets as a new set.\n\n (i.e. all elements that are in this set but not the others.)\n """"""\n ...\n def difference_update(self, *s: Iterable[Any]) -> None:\n """"""Remove all elements of another set from this set.""""""\n ...\n def discard(self, element: _T, /) -> None:\n """"""\n Remove an element from a set if it is a member.\n\n Unlike set.remove(), the discard() method does not raise\n an exception when an element is missing from the set.\n """"""\n ...\n def intersection(self, *s: Iterable[Any]) -> set[_T]:\n """"""\n Return the intersection of two sets as a new set.\n\n (i.e. 
all elements that are in both sets.)\n """"""\n ...\n def intersection_update(self, *s: Iterable[Any]) -> None:\n """"""Update a set with the intersection of itself and another.""""""\n ...\n def isdisjoint(self, s: Iterable[Any], /) -> bool:\n """"""Return True if two sets have a null intersection.""""""\n ...\n def issubset(self, s: Iterable[Any], /) -> bool:\n """"""Test whether every element in the set is in other.""""""\n ...\n def issuperset(self, s: Iterable[Any], /) -> bool:\n """"""Test whether every element in other is in the set.""""""\n ...\n def remove(self, element: _T, /) -> None:\n """"""\n Remove an element from a set; it must be a member.\n\n If the element is not a member, raise a KeyError.\n """"""\n ...\n def symmetric_difference(self, s: Iterable[_T], /) -> set[_T]:\n """"""\n Return the symmetric difference of two sets as a new set.\n\n (i.e. all elements that are in exactly one of the sets.)\n """"""\n ...\n def symmetric_difference_update(self, s: Iterable[_T], /) -> None:\n """"""Update a set with the symmetric difference of itself and another.""""""\n ...\n def union(self, *s: Iterable[_S]) -> set[_T | _S]:\n """"""\n Return the union of sets as a new set.\n\n (i.e. all elements that are in either set.)\n """"""\n ...\n def update(self, *s: Iterable[_T]) -> None:\n """"""Update a set with the union of itself and others.""""""\n ...\n def __len__(self) -> int:\n """"""Return len(self).""""""\n ...\n def __contains__(self, o: object, /) -> bool:\n """"""x.__contains__(y) <==> y in x.""""""\n ...\n def __iter__(self) -> Iterator[_T]:\n """"""Implement iter(self).""""""\n ...\n def __and__(self, value: AbstractSet[object], /) -> set[_T]:\n """"""Return self&value.""""""\n ...\n def __iand__(self, value: AbstractSet[object], /) -> Self:\n """"""Return self&=value.""""""\n ...\n def __or__(self, value: AbstractSet[_S], /) -> set[_T | _S]:\n """"""Return self|value.""""""\n ...\n def __ior__(self, value: AbstractSet[_T], /) -> Self:\n """"""Return self|=value.""""""\n ...\n def __sub__(self, value: AbstractSet[_T | None], /) -> set[_T]:\n """"""Return self-value.""""""\n ...\n def __isub__(self, value: AbstractSet[object], /) -> Self:\n """"""Return self-=value.""""""\n ...\n def __xor__(self, value: AbstractSet[_S], /) -> set[_T | _S]:\n """"""Return self^value.""""""\n ...\n def __ixor__(self, value: AbstractSet[_T], /) -> Self:\n """"""Return self^=value.""""""\n ...\n def __le__(self, value: AbstractSet[object], /) -> bool:\n """"""Return self<=value.""""""\n ...\n def __lt__(self, value: AbstractSet[object], /) -> bool:\n """"""Return self bool:\n """"""Return self>=value.""""""\n ...\n def __gt__(self, value: AbstractSet[object], /) -> bool:\n """"""Return self>value.""""""\n ...\n def __eq__(self, value: object, /) -> bool:\n """"""Return self==value.""""""\n ...\n __hash__: ClassVar[None] # type: ignore[assignment]\n def __class_getitem__(cls, item: Any, /) -> GenericAlias:\n """"""See PEP 585""""""\n ...\n\nclass frozenset(AbstractSet[_T_co]):\n """"""\n frozenset() -> empty frozenset object\n frozenset(iterable) -> frozenset object\n\n Build an immutable unordered collection of unique elements.\n """"""\n @overload\n def __new__(cls) -> Self: ...\n @overload\n def __new__(cls, iterable: Iterable[_T_co], /) -> Self: ...\n def copy(self) -> frozenset[_T_co]:\n """"""Return a shallow copy of a set.""""""\n ...\n def difference(self, *s: Iterable[object]) -> frozenset[_T_co]:\n """"""\n Return the difference of two or more sets as a new set.\n\n (i.e. 
all elements that are in this set but not the others.)\n """"""\n ...\n def intersection(self, *s: Iterable[object]) -> frozenset[_T_co]:\n """"""\n Return the intersection of two sets as a new set.\n\n (i.e. all elements that are in both sets.)\n """"""\n ...\n def isdisjoint(self, s: Iterable[_T_co], /) -> bool:\n """"""Return True if two sets have a null intersection.""""""\n ...\n def issubset(self, s: Iterable[object], /) -> bool:\n """"""Test whether every element in the set is in other.""""""\n ...\n def issuperset(self, s: Iterable[object], /) -> bool:\n """"""Test whether every element in other is in the set.""""""\n ...\n def symmetric_difference(self, s: Iterable[_T_co], /) -> frozenset[_T_co]:\n """"""\n Return the symmetric difference of two sets as a new set.\n\n (i.e. all elements that are in exactly one of the sets.)\n """"""\n ...\n def union(self, *s: Iterable[_S]) -> frozenset[_T_co | _S]:\n """"""\n Return the union of sets as a new set.\n\n (i.e. all elements that are in either set.)\n """"""\n ...\n def __len__(self) -> int:\n """"""Return len(self).""""""\n ...\n def __contains__(self, o: object, /) -> bool:\n """"""x.__contains__(y) <==> y in x.""""""\n ...\n def __iter__(self) -> Iterator[_T_co]:\n """"""Implement iter(self).""""""\n ...\n def __and__(self, value: AbstractSet[_T_co], /) -> frozenset[_T_co]:\n """"""Return self&value.""""""\n ...\n def __or__(self, value: AbstractSet[_S], /) -> frozenset[_T_co | _S]:\n """"""Return self|value.""""""\n ...\n def __sub__(self, value: AbstractSet[_T_co], /) -> frozenset[_T_co]:\n """"""Return self-value.""""""\n ...\n def __xor__(self, value: AbstractSet[_S], /) -> frozenset[_T_co | _S]:\n """"""Return self^value.""""""\n ...\n def __le__(self, value: AbstractSet[object], /) -> bool:\n """"""Return self<=value.""""""\n ...\n def __lt__(self, value: AbstractSet[object], /) -> bool:\n """"""Return self bool:\n """"""Return self>=value.""""""\n ...\n def __gt__(self, value: AbstractSet[object], /) -> bool:\n """"""Return self>value.""""""\n ...\n def __eq__(self, value: object, /) -> bool:\n """"""Return self==value.""""""\n ...\n def __hash__(self) -> int:\n """"""Return hash(self).""""""\n ...\n def __class_getitem__(cls, item: Any, /) -> GenericAlias:\n """"""See PEP 585""""""\n ...\n\nclass enumerate(Generic[_T]):\n """"""\n Return an enumerate object.\n\n iterable\n an object supporting iteration\n\n The enumerate object yields pairs containing a count (from start, which\n defaults to zero) and a value yielded by the iterable argument.\n\n enumerate is useful for obtaining an indexed list:\n (0, seq[0]), (1, seq[1]), (2, seq[2]), ...\n """"""\n def __new__(cls, iterable: Iterable[_T], start: int = 0) -> Self: ...\n def __iter__(self) -> Self:\n """"""Implement iter(self).""""""\n ...\n def __next__(self) -> tuple[int, _T]:\n """"""Implement next(self).""""""\n ...\n def __class_getitem__(cls, item: Any, /) -> GenericAlias:\n """"""See PEP 585""""""\n ...\n\n@final\nclass range(Sequence[int]):\n """"""\n range(stop) -> range object\n range(start, stop[, step]) -> range object\n\n Return an object that produces a sequence of integers from start (inclusive)\n to stop (exclusive) by step. range(i, j) produces i, i+1, i+2, ..., j-1.\n start defaults to 0, and stop is omitted! 
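[Editorial aside.] The set/frozenset operators documented above mirror the named methods, except that the methods accept arbitrary iterables while the operators require sets; frozenset additionally keeps __hash__, so it can serve as a dict key:

a, b = {1, 2, 3}, {3, 4}

assert a | b == {1, 2, 3, 4}       # union
assert a & b == {3}                # intersection
assert a - b == {1, 2}             # difference
assert a ^ b == {1, 2, 4}          # symmetric difference
assert a.union([4, 5]) == {1, 2, 3, 4, 5}   # method form takes any iterable

# frozenset is immutable and hashable
key = frozenset({1, 2})
assert {key: "pair"}[frozenset([2, 1])] == "pair"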
range(4) produces 0, 1, 2, 3.\n These are exactly the valid indices for a list of 4 elements.\n When step is given, it specifies the increment (or decrement).\n """"""\n @property\n def start(self) -> int: ...\n @property\n def stop(self) -> int: ...\n @property\n def step(self) -> int: ...\n @overload\n def __new__(cls, stop: SupportsIndex, /) -> Self: ...\n @overload\n def __new__(cls, start: SupportsIndex, stop: SupportsIndex, step: SupportsIndex = ..., /) -> Self: ...\n def count(self, value: int, /) -> int:\n """"""rangeobject.count(value) -> integer -- return number of occurrences of value""""""\n ...\n def index(self, value: int, /) -> int:\n """"""\n rangeobject.index(value) -> integer -- return index of value.\n Raise ValueError if the value is not present.\n """"""\n ...\n def __len__(self) -> int:\n """"""Return len(self).""""""\n ...\n def __eq__(self, value: object, /) -> bool:\n """"""Return self==value.""""""\n ...\n def __hash__(self) -> int:\n """"""Return hash(self).""""""\n ...\n def __contains__(self, key: object, /) -> bool:\n """"""Return bool(key in self).""""""\n ...\n def __iter__(self) -> Iterator[int]:\n """"""Implement iter(self).""""""\n ...\n @overload\n def __getitem__(self, key: SupportsIndex, /) -> int:\n """"""Return self[key].""""""\n ...\n @overload\n def __getitem__(self, key: slice, /) -> range:\n """"""Return self[key].""""""\n ...\n def __reversed__(self) -> Iterator[int]:\n """"""Return a reverse iterator.""""""\n ...\n\nclass property:\n """"""\n Property attribute.\n\n fget\n function to be used for getting an attribute value\n fset\n function to be used for setting an attribute value\n fdel\n function to be used for del'ing an attribute\n doc\n docstring\n\n Typical use is to define a managed attribute x:\n\n class C(object):\n def getx(self): return self._x\n def setx(self, value): self._x = value\n def delx(self): del self._x\n x = property(getx, setx, delx, ""I'm the 'x' property."")\n\n Decorators make defining new properties or modifying existing ones easy:\n\n class C(object):\n @property\n def x(self):\n ""I am the 'x' property.""\n return self._x\n @x.setter\n def x(self, value):\n self._x = value\n @x.deleter\n def x(self):\n del self._x\n """"""\n fget: Callable[[Any], Any] | None\n fset: Callable[[Any, Any], None] | None\n fdel: Callable[[Any], None] | None\n __isabstractmethod__: bool\n if sys.version_info >= (3, 13):\n __name__: str\n\n def __init__(\n self,\n fget: Callable[[Any], Any] | None = ...,\n fset: Callable[[Any, Any], None] | None = ...,\n fdel: Callable[[Any], None] | None = ...,\n doc: str | None = ...,\n ) -> None: ...\n def getter(self, fget: Callable[[Any], Any], /) -> property:\n """"""Descriptor to obtain a copy of the property with a different getter.""""""\n ...\n def setter(self, fset: Callable[[Any, Any], None], /) -> property:\n """"""Descriptor to obtain a copy of the property with a different setter.""""""\n ...\n def deleter(self, fdel: Callable[[Any], None], /) -> property:\n """"""Descriptor to obtain a copy of the property with a different deleter.""""""\n ...\n @overload\n def __get__(self, instance: None, owner: type, /) -> Self:\n """"""Return an attribute of instance, which is of type owner.""""""\n ...\n @overload\n def __get__(self, instance: Any, owner: type | None = None, /) -> Any:\n """"""Return an attribute of instance, which is of type owner.""""""\n ...\n def __set__(self, instance: Any, value: Any, /) -> None:\n """"""Set an attribute of instance to value.""""""\n ...\n def __delete__(self, 
instance: Any, /) -> None:\n """"""Delete an attribute of instance.""""""\n ...\n\n@final\nclass _NotImplementedType(Any):\n __call__: None\n\nNotImplemented: _NotImplementedType\n\ndef abs(x: SupportsAbs[_T], /) -> _T:\n """"""Return the absolute value of the argument.""""""\n ...\ndef all(iterable: Iterable[object], /) -> bool:\n """"""\n Return True if bool(x) is True for all values x in the iterable.\n\n If the iterable is empty, return True.\n """"""\n ...\ndef any(iterable: Iterable[object], /) -> bool:\n """"""\n Return True if bool(x) is True for any x in the iterable.\n\n If the iterable is empty, return False.\n """"""\n ...\ndef ascii(obj: object, /) -> str:\n r""""""\n Return an ASCII-only representation of an object.\n\n As repr(), return a string containing a printable representation of an\n object, but escape the non-ASCII characters in the string returned by\n repr() using \\x, \\u or \\U escapes. This generates a string similar\n to that returned by repr() in Python 2.\n """"""\n ...\ndef bin(number: int | SupportsIndex, /) -> str:\n """"""\n Return the binary representation of an integer.\n\n >>> bin(2796202)\n '0b1010101010101010101010'\n """"""\n ...\ndef breakpoint(*args: Any, **kws: Any) -> None:\n """"""\n breakpoint(*args, **kws)\n\n Call sys.breakpointhook(*args, **kws). sys.breakpointhook() must accept\n whatever arguments are passed.\n\n By default, this drops you into the pdb debugger.\n """"""\n ...\ndef callable(obj: object, /) -> TypeIs[Callable[..., object]]:\n """"""\n Return whether the object is callable (i.e., some kind of function).\n\n Note that classes are callable, as are instances of classes with a\n __call__() method.\n """"""\n ...\ndef chr(i: int | SupportsIndex, /) -> str:\n """"""Return a Unicode string of one character with ordinal i; 0 <= i <= 0x10ffff.""""""\n ...\n\nif sys.version_info >= (3, 10):\n def aiter(async_iterable: SupportsAiter[_SupportsAnextT_co], /) -> _SupportsAnextT_co:\n """"""Return an AsyncIterator for an AsyncIterable object.""""""\n ...\n\n class _SupportsSynchronousAnext(Protocol[_AwaitableT_co]):\n def __anext__(self) -> _AwaitableT_co: ...\n\n @overload\n # `anext` is not, in fact, an async function. When default is not provided\n # `anext` is just a passthrough for `obj.__anext__`\n # See discussion in #7491 and pure-Python implementation of `anext` at https://github.com/python/cpython/blob/ea786a882b9ed4261eafabad6011bc7ef3b5bf94/Lib/test/test_asyncgen.py#L52-L80\n def anext(i: _SupportsSynchronousAnext[_AwaitableT], /) -> _AwaitableT:\n """"""\n async anext(aiterator[, default])\n\n Return the next item from the async iterator. If default is given and the async\n iterator is exhausted, it is returned instead of raising StopAsyncIteration.\n """"""\n ...\n @overload\n async def anext(i: SupportsAnext[_T], default: _VT, /) -> _T | _VT:\n """"""\n async anext(aiterator[, default])\n\n Return the next item from the async iterator. If default is given and the async\n iterator is exhausted, it is returned instead of raising StopAsyncIteration.\n """"""\n ...\n\n# compile() returns a CodeType, unless the flags argument includes PyCF_ONLY_AST (=1024),\n# in which case it returns ast.AST. We have overloads for flag 0 (the default) and for\n# explicitly passing PyCF_ONLY_AST. 
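[Editorial aside.] The compile overloads above key their return type on the flags argument: with the default flags a code object comes back, with PyCF_ONLY_AST an AST node does. A quick check:

import ast

src = "x + 1"

# flags=0 (the default) yields a code object usable with eval()/exec()
code = compile(src, "<example>", "eval")
assert eval(code, {"x": 41}) == 42

# PyCF_ONLY_AST makes compile() return the parsed AST instead
tree = compile(src, "<example>", "eval", flags=ast.PyCF_ONLY_AST)
assert isinstance(tree, ast.Expression)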
We fall back to Any for other values of flags.\n@overload\ndef compile(\n source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive,\n filename: str | ReadableBuffer | PathLike[Any],\n mode: str,\n flags: Literal[0],\n dont_inherit: bool = False,\n optimize: int = -1,\n *,\n _feature_version: int = -1,\n) -> CodeType:\n """"""\n Compile source into a code object that can be executed by exec() or eval().\n\n The source code may represent a Python module, statement or expression.\n The filename will be used for run-time error messages.\n The mode must be 'exec' to compile a module, 'single' to compile a\n single (interactive) statement, or 'eval' to compile an expression.\n The flags argument, if present, controls which future statements influence\n the compilation of the code.\n The dont_inherit argument, if true, stops the compilation inheriting\n the effects of any future statements in effect in the code calling\n compile; if absent or false these statements do influence the compilation,\n in addition to any features explicitly specified.\n """"""\n ...\n@overload\ndef compile(\n source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive,\n filename: str | ReadableBuffer | PathLike[Any],\n mode: str,\n *,\n dont_inherit: bool = False,\n optimize: int = -1,\n _feature_version: int = -1,\n) -> CodeType:\n """"""\n Compile source into a code object that can be executed by exec() or eval().\n\n The source code may represent a Python module, statement or expression.\n The filename will be used for run-time error messages.\n The mode must be 'exec' to compile a module, 'single' to compile a\n single (interactive) statement, or 'eval' to compile an expression.\n The flags argument, if present, controls which future statements influence\n the compilation of the code.\n The dont_inherit argument, if true, stops the compilation inheriting\n the effects of any future statements in effect in the code calling\n compile; if absent or false these statements do influence the compilation,\n in addition to any features explicitly specified.\n """"""\n ...\n@overload\ndef compile(\n source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive,\n filename: str | ReadableBuffer | PathLike[Any],\n mode: str,\n flags: Literal[1024],\n dont_inherit: bool = False,\n optimize: int = -1,\n *,\n _feature_version: int = -1,\n) -> _ast.AST:\n """"""\n Compile source into a code object that can be executed by exec() or eval().\n\n The source code may represent a Python module, statement or expression.\n The filename will be used for run-time error messages.\n The mode must be 'exec' to compile a module, 'single' to compile a\n single (interactive) statement, or 'eval' to compile an expression.\n The flags argument, if present, controls which future statements influence\n the compilation of the code.\n The dont_inherit argument, if true, stops the compilation inheriting\n the effects of any future statements in effect in the code calling\n compile; if absent or false these statements do influence the compilation,\n in addition to any features explicitly specified.\n """"""\n ...\n@overload\ndef compile(\n source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive,\n filename: str | ReadableBuffer | PathLike[Any],\n mode: str,\n flags: int,\n dont_inherit: bool = False,\n optimize: int = -1,\n *,\n _feature_version: int = -1,\n) -> Any:\n """"""\n Compile source into a code object that can be executed by exec() or eval().\n\n The source 
code may represent a Python module, statement or expression.\n The filename will be used for run-time error messages.\n The mode must be 'exec' to compile a module, 'single' to compile a\n single (interactive) statement, or 'eval' to compile an expression.\n The flags argument, if present, controls which future statements influence\n the compilation of the code.\n The dont_inherit argument, if true, stops the compilation inheriting\n the effects of any future statements in effect in the code calling\n compile; if absent or false these statements do influence the compilation,\n in addition to any features explicitly specified.\n """"""\n ...\n\ncopyright: _sitebuiltins._Printer\ncredits: _sitebuiltins._Printer\n\ndef delattr(obj: object, name: str, /) -> None:\n """"""\n Deletes the named attribute from the given object.\n\n delattr(x, 'y') is equivalent to ``del x.y``\n """"""\n ...\ndef dir(o: object = ..., /) -> list[str]:\n """"""\n Show attributes of an object.\n\n If called without an argument, return the names in the current scope.\n Else, return an alphabetized list of names comprising (some of) the attributes\n of the given object, and of attributes reachable from it.\n If the object supplies a method named __dir__, it will be used; otherwise\n the default dir() logic is used and returns:\n for a module object: the module's attributes.\n for a class object: its attributes, and recursively the attributes\n of its bases.\n for any other object: its attributes, its class's attributes, and\n recursively the attributes of its class's base classes.\n """"""\n ...\n@overload\ndef divmod(x: SupportsDivMod[_T_contra, _T_co], y: _T_contra, /) -> _T_co:\n """"""Return the tuple (x//y, x%y). Invariant: div*y + mod == x.""""""\n ...\n@overload\ndef divmod(x: _T_contra, y: SupportsRDivMod[_T_contra, _T_co], /) -> _T_co:\n """"""Return the tuple (x//y, x%y). Invariant: div*y + mod == x.""""""\n ...\n\n# The `globals` argument to `eval` has to be `dict[str, Any]` rather than `dict[str, object]` due to invariance.\n# (The `globals` argument has to be a ""real dict"", rather than any old mapping, unlike the `locals` argument.)\nif sys.version_info >= (3, 13):\n def eval(\n source: str | ReadableBuffer | CodeType,\n /,\n globals: dict[str, Any] | None = None,\n locals: Mapping[str, object] | None = None,\n ) -> Any: ...\n\nelse:\n def eval(\n source: str | ReadableBuffer | CodeType,\n globals: dict[str, Any] | None = None,\n locals: Mapping[str, object] | None = None,\n /,\n ) -> Any:\n """"""\n Evaluate the given source in the context of globals and locals.\n\n The source may be a string representing a Python expression\n or a code object as returned by compile().\n The globals must be a dictionary and locals can be any mapping,\n defaulting to the current globals and locals.\n If only globals is given, locals defaults to it.\n """"""\n ...\n\n# Comment above regarding `eval` applies to `exec` as well\nif sys.version_info >= (3, 13):\n def exec(\n source: str | ReadableBuffer | CodeType,\n /,\n globals: dict[str, Any] | None = None,\n locals: Mapping[str, object] | None = None,\n *,\n closure: tuple[CellType, ...] | None = None,\n ) -> None: ...\n\nelif sys.version_info >= (3, 11):\n def exec(\n source: str | ReadableBuffer | CodeType,\n globals: dict[str, Any] | None = None,\n locals: Mapping[str, object] | None = None,\n /,\n *,\n closure: tuple[CellType, ...] 
| None = None,\n ) -> None:\n """"""\n Execute the given source in the context of globals and locals.\n\n The source may be a string representing one or more Python statements\n or a code object as returned by compile().\n The globals must be a dictionary and locals can be any mapping,\n defaulting to the current globals and locals.\n If only globals is given, locals defaults to it.\n The closure must be a tuple of cellvars, and can only be used\n when source is a code object requiring exactly that many cellvars.\n """"""\n ...\n\nelse:\n def exec(\n source: str | ReadableBuffer | CodeType,\n globals: dict[str, Any] | None = None,\n locals: Mapping[str, object] | None = None,\n /,\n ) -> None: ...\n\nexit: _sitebuiltins.Quitter\n\nclass filter(Generic[_T]):\n """"""\n filter(function or None, iterable) --> filter object\n\n Return an iterator yielding those items of iterable for which function(item)\n is true. If function is None, return the items that are true.\n """"""\n @overload\n def __new__(cls, function: None, iterable: Iterable[_T | None], /) -> Self: ...\n @overload\n def __new__(cls, function: Callable[[_S], TypeGuard[_T]], iterable: Iterable[_S], /) -> Self: ...\n @overload\n def __new__(cls, function: Callable[[_S], TypeIs[_T]], iterable: Iterable[_S], /) -> Self: ...\n @overload\n def __new__(cls, function: Callable[[_T], Any], iterable: Iterable[_T], /) -> Self: ...\n def __iter__(self) -> Self:\n """"""Implement iter(self).""""""\n ...\n def __next__(self) -> _T:\n """"""Implement next(self).""""""\n ...\n\ndef format(value: object, format_spec: str = """", /) -> str:\n """"""\n Return type(value).__format__(value, format_spec)\n\n Many built-in types implement format_spec according to the\n Format Specification Mini-language. See help('FORMATTING').\n\n If type(value) does not supply a method named __format__\n and format_spec is empty, then str(value) is returned.\n See also help('SPECIALMETHODS').\n """"""\n ...\n@overload\ndef getattr(o: object, name: str, /) -> Any:\n """"""\n Get a named attribute from an object.\n\n getattr(x, 'y') is equivalent to x.y\n When a default argument is given, it is returned when the attribute doesn't\n exist; without it, an exception is raised in that case.\n """"""\n ...\n\n# While technically covered by the last overload, spelling out the types for None, bool\n# and basic containers help mypy out in some tricky situations involving type context\n# (aka bidirectional inference)\n@overload\ndef getattr(o: object, name: str, default: None, /) -> Any | None:\n """"""\n Get a named attribute from an object.\n\n getattr(x, 'y') is equivalent to x.y\n When a default argument is given, it is returned when the attribute doesn't\n exist; without it, an exception is raised in that case.\n """"""\n ...\n@overload\ndef getattr(o: object, name: str, default: bool, /) -> Any | bool:\n """"""\n Get a named attribute from an object.\n\n getattr(x, 'y') is equivalent to x.y\n When a default argument is given, it is returned when the attribute doesn't\n exist; without it, an exception is raised in that case.\n """"""\n ...\n@overload\ndef getattr(o: object, name: str, default: list[Any], /) -> Any | list[Any]:\n """"""\n Get a named attribute from an object.\n\n getattr(x, 'y') is equivalent to x.y\n When a default argument is given, it is returned when the attribute doesn't\n exist; without it, an exception is raised in that case.\n """"""\n ...\n@overload\ndef getattr(o: object, name: str, default: dict[Any, Any], /) -> Any | dict[Any, Any]:\n """"""\n 
Get a named attribute from an object.\n\n getattr(x, 'y') is equivalent to x.y\n When a default argument is given, it is returned when the attribute doesn't\n exist; without it, an exception is raised in that case.\n """"""\n ...\n@overload\ndef getattr(o: object, name: str, default: _T, /) -> Any | _T:\n """"""\n Get a named attribute from an object.\n\n getattr(x, 'y') is equivalent to x.y\n When a default argument is given, it is returned when the attribute doesn't\n exist; without it, an exception is raised in that case.\n """"""\n ...\ndef globals() -> dict[str, Any]:\n """"""\n Return the dictionary containing the current scope's global variables.\n\n NOTE: Updates to this dictionary *will* affect name lookups in the current\n global scope and vice-versa.\n """"""\n ...\ndef hasattr(obj: object, name: str, /) -> bool:\n """"""\n Return whether the object has an attribute with the given name.\n\n This is done by calling getattr(obj, name) and catching AttributeError.\n """"""\n ...\ndef hash(obj: object, /) -> int:\n """"""\n Return the hash value for the given object.\n\n Two objects that compare equal must also have the same hash value, but the\n reverse is not necessarily true.\n """"""\n ...\n\nhelp: _sitebuiltins._Helper\n\ndef hex(number: int | SupportsIndex, /) -> str:\n """"""\n Return the hexadecimal representation of an integer.\n\n >>> hex(12648430)\n '0xc0ffee'\n """"""\n ...\ndef id(obj: object, /) -> int:\n """"""\n Return the identity of an object.\n\n This is guaranteed to be unique among simultaneously existing objects.\n (CPython uses the object's memory address.)\n """"""\n ...\ndef input(prompt: object = """", /) -> str:\n """"""\n Read a string from standard input. The trailing newline is stripped.\n\n The prompt string, if given, is printed to standard output without a\n trailing newline before reading input.\n\n If the user hits EOF (*nix: Ctrl-D, Windows: Ctrl-Z+Return), raise EOFError.\n On *nix systems, readline is used if available.\n """"""\n ...\n\nclass _GetItemIterable(Protocol[_T_co]):\n def __getitem__(self, i: int, /) -> _T_co: ...\n\n@overload\ndef iter(object: SupportsIter[_SupportsNextT_co], /) -> _SupportsNextT_co:\n """"""\n Get an iterator from an object.\n\n In the first form, the argument must supply its own iterator, or be a sequence.\n In the second form, the callable is called until it returns the sentinel.\n """"""\n ...\n@overload\ndef iter(object: _GetItemIterable[_T], /) -> Iterator[_T]:\n """"""\n Get an iterator from an object.\n\n In the first form, the argument must supply its own iterator, or be a sequence.\n In the second form, the callable is called until it returns the sentinel.\n """"""\n ...\n@overload\ndef iter(object: Callable[[], _T | None], sentinel: None, /) -> Iterator[_T]:\n """"""\n Get an iterator from an object.\n\n In the first form, the argument must supply its own iterator, or be a sequence.\n In the second form, the callable is called until it returns the sentinel.\n """"""\n ...\n@overload\ndef iter(object: Callable[[], _T], sentinel: object, /) -> Iterator[_T]:\n """"""\n Get an iterator from an object.\n\n In the first form, the argument must supply its own iterator, or be a sequence.\n In the second form, the callable is called until it returns the sentinel.\n """"""\n ...\n\n# Keep this alias in sync with unittest.case._ClassInfo\nif sys.version_info >= (3, 10):\n _ClassInfo: TypeAlias = type | types.UnionType | tuple[_ClassInfo, ...]\nelse:\n _ClassInfo: TypeAlias = type | tuple[_ClassInfo, ...]\n\ndef 
isinstance(obj: object, class_or_tuple: _ClassInfo, /) -> bool:\n """"""\n Return whether an object is an instance of a class or of a subclass thereof.\n\n A tuple, as in ``isinstance(x, (A, B, ...))``, may be given as the target to\n check against. This is equivalent to ``isinstance(x, A) or isinstance(x, B)\n or ...`` etc.\n """"""\n ...\ndef issubclass(cls: type, class_or_tuple: _ClassInfo, /) -> bool:\n """"""\n Return whether 'cls' is derived from another class or is the same class.\n\n A tuple, as in ``issubclass(x, (A, B, ...))``, may be given as the target to\n check against. This is equivalent to ``issubclass(x, A) or issubclass(x, B)\n or ...``.\n """"""\n ...\ndef len(obj: Sized, /) -> int:\n """"""Return the number of items in a container.""""""\n ...\n\nlicense: _sitebuiltins._Printer\n\ndef locals() -> dict[str, Any]:\n """"""\n Return a dictionary containing the current scope's local variables.\n\n NOTE: Whether or not updates to this dictionary will affect name lookups in\n the local scope and vice-versa is *implementation dependent* and not\n covered by any backwards compatibility guarantees.\n """"""\n ...\n\nclass map(Generic[_S]):\n """"""\n map(func, *iterables) --> map object\n\n Make an iterator that computes the function using arguments from\n each of the iterables. Stops when the shortest iterable is exhausted.\n """"""\n # 3.14 adds `strict` argument.\n if sys.version_info >= (3, 14):\n @overload\n def __new__(cls, func: Callable[[_T1], _S], iterable: Iterable[_T1], /, *, strict: bool = False) -> Self: ...\n @overload\n def __new__(\n cls, func: Callable[[_T1, _T2], _S], iterable: Iterable[_T1], iter2: Iterable[_T2], /, *, strict: bool = False\n ) -> Self: ...\n @overload\n def __new__(\n cls,\n func: Callable[[_T1, _T2, _T3], _S],\n iterable: Iterable[_T1],\n iter2: Iterable[_T2],\n iter3: Iterable[_T3],\n /,\n *,\n strict: bool = False,\n ) -> Self: ...\n @overload\n def __new__(\n cls,\n func: Callable[[_T1, _T2, _T3, _T4], _S],\n iterable: Iterable[_T1],\n iter2: Iterable[_T2],\n iter3: Iterable[_T3],\n iter4: Iterable[_T4],\n /,\n *,\n strict: bool = False,\n ) -> Self: ...\n @overload\n def __new__(\n cls,\n func: Callable[[_T1, _T2, _T3, _T4, _T5], _S],\n iterable: Iterable[_T1],\n iter2: Iterable[_T2],\n iter3: Iterable[_T3],\n iter4: Iterable[_T4],\n iter5: Iterable[_T5],\n /,\n *,\n strict: bool = False,\n ) -> Self: ...\n @overload\n def __new__(\n cls,\n func: Callable[..., _S],\n iterable: Iterable[Any],\n iter2: Iterable[Any],\n iter3: Iterable[Any],\n iter4: Iterable[Any],\n iter5: Iterable[Any],\n iter6: Iterable[Any],\n /,\n *iterables: Iterable[Any],\n strict: bool = False,\n ) -> Self: ...\n else:\n @overload\n def __new__(cls, func: Callable[[_T1], _S], iterable: Iterable[_T1], /) -> Self: ...\n @overload\n def __new__(cls, func: Callable[[_T1, _T2], _S], iterable: Iterable[_T1], iter2: Iterable[_T2], /) -> Self: ...\n @overload\n def __new__(\n cls, func: Callable[[_T1, _T2, _T3], _S], iterable: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], /\n ) -> Self: ...\n @overload\n def __new__(\n cls,\n func: Callable[[_T1, _T2, _T3, _T4], _S],\n iterable: Iterable[_T1],\n iter2: Iterable[_T2],\n iter3: Iterable[_T3],\n iter4: Iterable[_T4],\n /,\n ) -> Self: ...\n @overload\n def __new__(\n cls,\n func: Callable[[_T1, _T2, _T3, _T4, _T5], _S],\n iterable: Iterable[_T1],\n iter2: Iterable[_T2],\n iter3: Iterable[_T3],\n iter4: Iterable[_T4],\n iter5: Iterable[_T5],\n /,\n ) -> Self: ...\n @overload\n def __new__(\n cls,\n func: 
Callable[..., _S],\n iterable: Iterable[Any],\n iter2: Iterable[Any],\n iter3: Iterable[Any],\n iter4: Iterable[Any],\n iter5: Iterable[Any],\n iter6: Iterable[Any],\n /,\n *iterables: Iterable[Any],\n ) -> Self: ...\n\n def __iter__(self) -> Self:\n """"""Implement iter(self).""""""\n ...\n def __next__(self) -> _S:\n """"""Implement next(self).""""""\n ...\n\n@overload\ndef max(\n arg1: SupportsRichComparisonT, arg2: SupportsRichComparisonT, /, *_args: SupportsRichComparisonT, key: None = None\n) -> SupportsRichComparisonT:\n """"""\n max(iterable, *[, default=obj, key=func]) -> value\n max(arg1, arg2, *args, *[, key=func]) -> value\n\n With a single iterable argument, return its biggest item. The\n default keyword-only argument specifies an object to return if\n the provided iterable is empty.\n With two or more arguments, return the largest argument.\n """"""\n ...\n@overload\ndef max(arg1: _T, arg2: _T, /, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T:\n """"""\n max(iterable, *[, default=obj, key=func]) -> value\n max(arg1, arg2, *args, *[, key=func]) -> value\n\n With a single iterable argument, return its biggest item. The\n default keyword-only argument specifies an object to return if\n the provided iterable is empty.\n With two or more arguments, return the largest argument.\n """"""\n ...\n@overload\ndef max(iterable: Iterable[SupportsRichComparisonT], /, *, key: None = None) -> SupportsRichComparisonT:\n """"""\n max(iterable, *[, default=obj, key=func]) -> value\n max(arg1, arg2, *args, *[, key=func]) -> value\n\n With a single iterable argument, return its biggest item. The\n default keyword-only argument specifies an object to return if\n the provided iterable is empty.\n With two or more arguments, return the largest argument.\n """"""\n ...\n@overload\ndef max(iterable: Iterable[_T], /, *, key: Callable[[_T], SupportsRichComparison]) -> _T:\n """"""\n max(iterable, *[, default=obj, key=func]) -> value\n max(arg1, arg2, *args, *[, key=func]) -> value\n\n With a single iterable argument, return its biggest item. The\n default keyword-only argument specifies an object to return if\n the provided iterable is empty.\n With two or more arguments, return the largest argument.\n """"""\n ...\n@overload\ndef max(iterable: Iterable[SupportsRichComparisonT], /, *, key: None = None, default: _T) -> SupportsRichComparisonT | _T:\n """"""\n max(iterable, *[, default=obj, key=func]) -> value\n max(arg1, arg2, *args, *[, key=func]) -> value\n\n With a single iterable argument, return its biggest item. The\n default keyword-only argument specifies an object to return if\n the provided iterable is empty.\n With two or more arguments, return the largest argument.\n """"""\n ...\n@overload\ndef max(iterable: Iterable[_T1], /, *, key: Callable[[_T1], SupportsRichComparison], default: _T2) -> _T1 | _T2:\n """"""\n max(iterable, *[, default=obj, key=func]) -> value\n max(arg1, arg2, *args, *[, key=func]) -> value\n\n With a single iterable argument, return its biggest item. 
The\n default keyword-only argument specifies an object to return if\n the provided iterable is empty.\n With two or more arguments, return the largest argument.\n """"""\n ...\n@overload\ndef min(\n arg1: SupportsRichComparisonT, arg2: SupportsRichComparisonT, /, *_args: SupportsRichComparisonT, key: None = None\n) -> SupportsRichComparisonT:\n """"""\n min(iterable, *[, default=obj, key=func]) -> value\n min(arg1, arg2, *args, *[, key=func]) -> value\n\n With a single iterable argument, return its smallest item. The\n default keyword-only argument specifies an object to return if\n the provided iterable is empty.\n With two or more arguments, return the smallest argument.\n """"""\n ...\n@overload\ndef min(arg1: _T, arg2: _T, /, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T:\n """"""\n min(iterable, *[, default=obj, key=func]) -> value\n min(arg1, arg2, *args, *[, key=func]) -> value\n\n With a single iterable argument, return its smallest item. The\n default keyword-only argument specifies an object to return if\n the provided iterable is empty.\n With two or more arguments, return the smallest argument.\n """"""\n ...\n@overload\ndef min(iterable: Iterable[SupportsRichComparisonT], /, *, key: None = None) -> SupportsRichComparisonT:\n """"""\n min(iterable, *[, default=obj, key=func]) -> value\n min(arg1, arg2, *args, *[, key=func]) -> value\n\n With a single iterable argument, return its smallest item. The\n default keyword-only argument specifies an object to return if\n the provided iterable is empty.\n With two or more arguments, return the smallest argument.\n """"""\n ...\n@overload\ndef min(iterable: Iterable[_T], /, *, key: Callable[[_T], SupportsRichComparison]) -> _T:\n """"""\n min(iterable, *[, default=obj, key=func]) -> value\n min(arg1, arg2, *args, *[, key=func]) -> value\n\n With a single iterable argument, return its smallest item. The\n default keyword-only argument specifies an object to return if\n the provided iterable is empty.\n With two or more arguments, return the smallest argument.\n """"""\n ...\n@overload\ndef min(iterable: Iterable[SupportsRichComparisonT], /, *, key: None = None, default: _T) -> SupportsRichComparisonT | _T:\n """"""\n min(iterable, *[, default=obj, key=func]) -> value\n min(arg1, arg2, *args, *[, key=func]) -> value\n\n With a single iterable argument, return its smallest item. The\n default keyword-only argument specifies an object to return if\n the provided iterable is empty.\n With two or more arguments, return the smallest argument.\n """"""\n ...\n@overload\ndef min(iterable: Iterable[_T1], /, *, key: Callable[[_T1], SupportsRichComparison], default: _T2) -> _T1 | _T2:\n """"""\n min(iterable, *[, default=obj, key=func]) -> value\n min(arg1, arg2, *args, *[, key=func]) -> value\n\n With a single iterable argument, return its smallest item. 
The\n default keyword-only argument specifies an object to return if\n the provided iterable is empty.\n With two or more arguments, return the smallest argument.\n """"""\n ...\n@overload\ndef next(i: SupportsNext[_T], /) -> _T:\n """"""\n Return the next item from the iterator.\n\n If default is given and the iterator is exhausted,\n it is returned instead of raising StopIteration.\n """"""\n ...\n@overload\ndef next(i: SupportsNext[_T], default: _VT, /) -> _T | _VT:\n """"""\n Return the next item from the iterator.\n\n If default is given and the iterator is exhausted,\n it is returned instead of raising StopIteration.\n """"""\n ...\ndef oct(number: int | SupportsIndex, /) -> str:\n """"""\n Return the octal representation of an integer.\n\n >>> oct(342391)\n '0o1234567'\n """"""\n ...\n\n_Opener: TypeAlias = Callable[[str, int], int]\n\n# Text mode: always returns a TextIOWrapper\n@overload\ndef open(\n file: FileDescriptorOrPath,\n mode: OpenTextMode = ""r"",\n buffering: int = -1,\n encoding: str | None = None,\n errors: str | None = None,\n newline: str | None = None,\n closefd: bool = True,\n opener: _Opener | None = None,\n) -> TextIOWrapper:\n r""""""\n Open file and return a stream. Raise OSError upon failure.\n\n file is either a text or byte string giving the name (and the path\n if the file isn't in the current working directory) of the file to\n be opened or an integer file descriptor of the file to be\n wrapped. (If a file descriptor is given, it is closed when the\n returned I/O object is closed, unless closefd is set to False.)\n\n mode is an optional string that specifies the mode in which the file\n is opened. It defaults to 'r' which means open for reading in text\n mode. Other common values are 'w' for writing (truncating the file if\n it already exists), 'x' for creating and writing to a new file, and\n 'a' for appending (which on some Unix systems, means that all writes\n append to the end of the file regardless of the current seek position).\n In text mode, if encoding is not specified the encoding used is platform\n dependent: locale.getencoding() is called to get the current locale encoding.\n (For reading and writing raw bytes use binary mode and leave encoding\n unspecified.) The available modes are:\n\n ========= ===============================================================\n Character Meaning\n --------- ---------------------------------------------------------------\n 'r' open for reading (default)\n 'w' open for writing, truncating the file first\n 'x' create a new file and open it for writing\n 'a' open for writing, appending to the end of the file if it exists\n 'b' binary mode\n 't' text mode (default)\n '+' open a disk file for updating (reading and writing)\n ========= ===============================================================\n\n The default mode is 'rt' (open for reading text). For binary random\n access, the mode 'w+b' opens and truncates the file to 0 bytes, while\n 'r+b' opens the file without truncation. The 'x' mode implies 'w' and\n raises an `FileExistsError` if the file already exists.\n\n Python distinguishes between files opened in binary and text modes,\n even when the underlying operating system doesn't. Files opened in\n binary mode (appending 'b' to the mode argument) return contents as\n bytes objects without any decoding. 
In text mode (the default, or when\n 't' is appended to the mode argument), the contents of the file are\n returned as strings, the bytes having been first decoded using a\n platform-dependent encoding or using the specified encoding if given.\n\n buffering is an optional integer used to set the buffering policy.\n Pass 0 to switch buffering off (only allowed in binary mode), 1 to select\n line buffering (only usable in text mode), and an integer > 1 to indicate\n the size of a fixed-size chunk buffer. When no buffering argument is\n given, the default buffering policy works as follows:\n\n * Binary files are buffered in fixed-size chunks; the size of the buffer\n is chosen using a heuristic trying to determine the underlying device's\n ""block size"" and falling back on `io.DEFAULT_BUFFER_SIZE`.\n On many systems, the buffer will typically be 4096 or 8192 bytes long.\n\n * ""Interactive"" text files (files for which isatty() returns True)\n use line buffering. Other text files use the policy described above\n for binary files.\n\n encoding is the name of the encoding used to decode or encode the\n file. This should only be used in text mode. The default encoding is\n platform dependent, but any encoding supported by Python can be\n passed. See the codecs module for the list of supported encodings.\n\n errors is an optional string that specifies how encoding errors are to\n be handled---this argument should not be used in binary mode. Pass\n 'strict' to raise a ValueError exception if there is an encoding error\n (the default of None has the same effect), or pass 'ignore' to ignore\n errors. (Note that ignoring encoding errors can lead to data loss.)\n See the documentation for codecs.register or run 'help(codecs.Codec)'\n for a list of the permitted encoding error strings.\n\n newline controls how universal newlines works (it only applies to text\n mode). It can be None, '', '\n', '\r', and '\r\n'. It works as\n follows:\n\n * On input, if newline is None, universal newlines mode is\n enabled. Lines in the input can end in '\n', '\r', or '\r\n', and\n these are translated into '\n' before being returned to the\n caller. If it is '', universal newline mode is enabled, but line\n endings are returned to the caller untranslated. If it has any of\n the other legal values, input lines are only terminated by the given\n string, and the line ending is returned to the caller untranslated.\n\n * On output, if newline is None, any '\n' characters written are\n translated to the system default line separator, os.linesep. If\n newline is '' or '\n', no translation takes place. If newline is any\n of the other legal values, any '\n' characters written are translated\n to the given string.\n\n If closefd is False, the underlying file descriptor will be kept open\n when the file is closed. This does not work when a file name is given\n and must be True in that case.\n\n A custom opener can be used by passing a callable as *opener*. The\n underlying file descriptor for the file object is then obtained by\n calling *opener* with (*file*, *flags*). *opener* must return an open\n file descriptor (passing os.open as *opener* results in functionality\n similar to passing None).\n\n open() returns a file object whose type depends on the mode, and\n through which the standard file operations such as reading and writing\n are performed. When open() is used to open a file in a text mode ('w',\n 'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. 
When used to open\n a file in a binary mode, the returned class varies: in read binary\n mode, it returns a BufferedReader; in write binary and append binary\n modes, it returns a BufferedWriter, and in read/write mode, it returns\n a BufferedRandom.\n\n It is also possible to use a string or bytearray as a file for both\n reading and writing. For strings StringIO can be used like a file\n opened in a text mode, and for bytes a BytesIO can be used like a file\n opened in a binary mode.\n """"""\n ...\n\n# Unbuffered binary mode: returns a FileIO\n@overload\ndef open(\n file: FileDescriptorOrPath,\n mode: OpenBinaryMode,\n buffering: Literal[0],\n encoding: None = None,\n errors: None = None,\n newline: None = None,\n closefd: bool = True,\n opener: _Opener | None = None,\n) -> FileIO:\n r""""""\n Open file and return a stream. Raise OSError upon failure.\n\n file is either a text or byte string giving the name (and the path\n if the file isn't in the current working directory) of the file to\n be opened or an integer file descriptor of the file to be\n wrapped. (If a file descriptor is given, it is closed when the\n returned I/O object is closed, unless closefd is set to False.)\n\n mode is an optional string that specifies the mode in which the file\n is opened. It defaults to 'r' which means open for reading in text\n mode. Other common values are 'w' for writing (truncating the file if\n it already exists), 'x' for creating and writing to a new file, and\n 'a' for appending (which on some Unix systems, means that all writes\n append to the end of the file regardless of the current seek position).\n In text mode, if encoding is not specified the encoding used is platform\n dependent: locale.getencoding() is called to get the current locale encoding.\n (For reading and writing raw bytes use binary mode and leave encoding\n unspecified.) The available modes are:\n\n ========= ===============================================================\n Character Meaning\n --------- ---------------------------------------------------------------\n 'r' open for reading (default)\n 'w' open for writing, truncating the file first\n 'x' create a new file and open it for writing\n 'a' open for writing, appending to the end of the file if it exists\n 'b' binary mode\n 't' text mode (default)\n '+' open a disk file for updating (reading and writing)\n ========= ===============================================================\n\n The default mode is 'rt' (open for reading text). For binary random\n access, the mode 'w+b' opens and truncates the file to 0 bytes, while\n 'r+b' opens the file without truncation. The 'x' mode implies 'w' and\n raises an `FileExistsError` if the file already exists.\n\n Python distinguishes between files opened in binary and text modes,\n even when the underlying operating system doesn't. Files opened in\n binary mode (appending 'b' to the mode argument) return contents as\n bytes objects without any decoding. In text mode (the default, or when\n 't' is appended to the mode argument), the contents of the file are\n returned as strings, the bytes having been first decoded using a\n platform-dependent encoding or using the specified encoding if given.\n\n buffering is an optional integer used to set the buffering policy.\n Pass 0 to switch buffering off (only allowed in binary mode), 1 to select\n line buffering (only usable in text mode), and an integer > 1 to indicate\n the size of a fixed-size chunk buffer. 
When no buffering argument is\n given, the default buffering policy works as follows:\n\n * Binary files are buffered in fixed-size chunks; the size of the buffer\n is chosen using a heuristic trying to determine the underlying device's\n ""block size"" and falling back on `io.DEFAULT_BUFFER_SIZE`.\n On many systems, the buffer will typically be 4096 or 8192 bytes long.\n\n * ""Interactive"" text files (files for which isatty() returns True)\n use line buffering. Other text files use the policy described above\n for binary files.\n\n encoding is the name of the encoding used to decode or encode the\n file. This should only be used in text mode. The default encoding is\n platform dependent, but any encoding supported by Python can be\n passed. See the codecs module for the list of supported encodings.\n\n errors is an optional string that specifies how encoding errors are to\n be handled---this argument should not be used in binary mode. Pass\n 'strict' to raise a ValueError exception if there is an encoding error\n (the default of None has the same effect), or pass 'ignore' to ignore\n errors. (Note that ignoring encoding errors can lead to data loss.)\n See the documentation for codecs.register or run 'help(codecs.Codec)'\n for a list of the permitted encoding error strings.\n\n newline controls how universal newlines works (it only applies to text\n mode). It can be None, '', '\n', '\r', and '\r\n'. It works as\n follows:\n\n * On input, if newline is None, universal newlines mode is\n enabled. Lines in the input can end in '\n', '\r', or '\r\n', and\n these are translated into '\n' before being returned to the\n caller. If it is '', universal newline mode is enabled, but line\n endings are returned to the caller untranslated. If it has any of\n the other legal values, input lines are only terminated by the given\n string, and the line ending is returned to the caller untranslated.\n\n * On output, if newline is None, any '\n' characters written are\n translated to the system default line separator, os.linesep. If\n newline is '' or '\n', no translation takes place. If newline is any\n of the other legal values, any '\n' characters written are translated\n to the given string.\n\n If closefd is False, the underlying file descriptor will be kept open\n when the file is closed. This does not work when a file name is given\n and must be True in that case.\n\n A custom opener can be used by passing a callable as *opener*. The\n underlying file descriptor for the file object is then obtained by\n calling *opener* with (*file*, *flags*). *opener* must return an open\n file descriptor (passing os.open as *opener* results in functionality\n similar to passing None).\n\n open() returns a file object whose type depends on the mode, and\n through which the standard file operations such as reading and writing\n are performed. When open() is used to open a file in a text mode ('w',\n 'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open\n a file in a binary mode, the returned class varies: in read binary\n mode, it returns a BufferedReader; in write binary and append binary\n modes, it returns a BufferedWriter, and in read/write mode, it returns\n a BufferedRandom.\n\n It is also possible to use a string or bytearray as a file for both\n reading and writing. 
For strings StringIO can be used like a file\n opened in a text mode, and for bytes a BytesIO can be used like a file\n opened in a binary mode.\n """"""\n ...\n\n# Buffering is on: return BufferedRandom, BufferedReader, or BufferedWriter\n@overload\ndef open(\n file: FileDescriptorOrPath,\n mode: OpenBinaryModeUpdating,\n buffering: Literal[-1, 1] = -1,\n encoding: None = None,\n errors: None = None,\n newline: None = None,\n closefd: bool = True,\n opener: _Opener | None = None,\n) -> BufferedRandom:\n r""""""\n Open file and return a stream. Raise OSError upon failure.\n\n file is either a text or byte string giving the name (and the path\n if the file isn't in the current working directory) of the file to\n be opened or an integer file descriptor of the file to be\n wrapped. (If a file descriptor is given, it is closed when the\n returned I/O object is closed, unless closefd is set to False.)\n\n mode is an optional string that specifies the mode in which the file\n is opened. It defaults to 'r' which means open for reading in text\n mode. Other common values are 'w' for writing (truncating the file if\n it already exists), 'x' for creating and writing to a new file, and\n 'a' for appending (which on some Unix systems, means that all writes\n append to the end of the file regardless of the current seek position).\n In text mode, if encoding is not specified the encoding used is platform\n dependent: locale.getencoding() is called to get the current locale encoding.\n (For reading and writing raw bytes use binary mode and leave encoding\n unspecified.) The available modes are:\n\n ========= ===============================================================\n Character Meaning\n --------- ---------------------------------------------------------------\n 'r' open for reading (default)\n 'w' open for writing, truncating the file first\n 'x' create a new file and open it for writing\n 'a' open for writing, appending to the end of the file if it exists\n 'b' binary mode\n 't' text mode (default)\n '+' open a disk file for updating (reading and writing)\n ========= ===============================================================\n\n The default mode is 'rt' (open for reading text). For binary random\n access, the mode 'w+b' opens and truncates the file to 0 bytes, while\n 'r+b' opens the file without truncation. The 'x' mode implies 'w' and\n raises an `FileExistsError` if the file already exists.\n\n Python distinguishes between files opened in binary and text modes,\n even when the underlying operating system doesn't. Files opened in\n binary mode (appending 'b' to the mode argument) return contents as\n bytes objects without any decoding. In text mode (the default, or when\n 't' is appended to the mode argument), the contents of the file are\n returned as strings, the bytes having been first decoded using a\n platform-dependent encoding or using the specified encoding if given.\n\n buffering is an optional integer used to set the buffering policy.\n Pass 0 to switch buffering off (only allowed in binary mode), 1 to select\n line buffering (only usable in text mode), and an integer > 1 to indicate\n the size of a fixed-size chunk buffer. 
When no buffering argument is\n given, the default buffering policy works as follows:\n\n * Binary files are buffered in fixed-size chunks; the size of the buffer\n is chosen using a heuristic trying to determine the underlying device's\n ""block size"" and falling back on `io.DEFAULT_BUFFER_SIZE`.\n On many systems, the buffer will typically be 4096 or 8192 bytes long.\n\n * ""Interactive"" text files (files for which isatty() returns True)\n use line buffering. Other text files use the policy described above\n for binary files.\n\n encoding is the name of the encoding used to decode or encode the\n file. This should only be used in text mode. The default encoding is\n platform dependent, but any encoding supported by Python can be\n passed. See the codecs module for the list of supported encodings.\n\n errors is an optional string that specifies how encoding errors are to\n be handled---this argument should not be used in binary mode. Pass\n 'strict' to raise a ValueError exception if there is an encoding error\n (the default of None has the same effect), or pass 'ignore' to ignore\n errors. (Note that ignoring encoding errors can lead to data loss.)\n See the documentation for codecs.register or run 'help(codecs.Codec)'\n for a list of the permitted encoding error strings.\n\n newline controls how universal newlines works (it only applies to text\n mode). It can be None, '', '\n', '\r', and '\r\n'. It works as\n follows:\n\n * On input, if newline is None, universal newlines mode is\n enabled. Lines in the input can end in '\n', '\r', or '\r\n', and\n these are translated into '\n' before being returned to the\n caller. If it is '', universal newline mode is enabled, but line\n endings are returned to the caller untranslated. If it has any of\n the other legal values, input lines are only terminated by the given\n string, and the line ending is returned to the caller untranslated.\n\n * On output, if newline is None, any '\n' characters written are\n translated to the system default line separator, os.linesep. If\n newline is '' or '\n', no translation takes place. If newline is any\n of the other legal values, any '\n' characters written are translated\n to the given string.\n\n If closefd is False, the underlying file descriptor will be kept open\n when the file is closed. This does not work when a file name is given\n and must be True in that case.\n\n A custom opener can be used by passing a callable as *opener*. The\n underlying file descriptor for the file object is then obtained by\n calling *opener* with (*file*, *flags*). *opener* must return an open\n file descriptor (passing os.open as *opener* results in functionality\n similar to passing None).\n\n open() returns a file object whose type depends on the mode, and\n through which the standard file operations such as reading and writing\n are performed. When open() is used to open a file in a text mode ('w',\n 'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open\n a file in a binary mode, the returned class varies: in read binary\n mode, it returns a BufferedReader; in write binary and append binary\n modes, it returns a BufferedWriter, and in read/write mode, it returns\n a BufferedRandom.\n\n It is also possible to use a string or bytearray as a file for both\n reading and writing. 
For strings StringIO can be used like a file\n opened in a text mode, and for bytes a BytesIO can be used like a file\n opened in a binary mode.\n """"""\n ...\n@overload\ndef open(\n file: FileDescriptorOrPath,\n mode: OpenBinaryModeWriting,\n buffering: Literal[-1, 1] = -1,\n encoding: None = None,\n errors: None = None,\n newline: None = None,\n closefd: bool = True,\n opener: _Opener | None = None,\n) -> BufferedWriter:\n r""""""\n Open file and return a stream. Raise OSError upon failure.\n\n file is either a text or byte string giving the name (and the path\n if the file isn't in the current working directory) of the file to\n be opened or an integer file descriptor of the file to be\n wrapped. (If a file descriptor is given, it is closed when the\n returned I/O object is closed, unless closefd is set to False.)\n\n mode is an optional string that specifies the mode in which the file\n is opened. It defaults to 'r' which means open for reading in text\n mode. Other common values are 'w' for writing (truncating the file if\n it already exists), 'x' for creating and writing to a new file, and\n 'a' for appending (which on some Unix systems, means that all writes\n append to the end of the file regardless of the current seek position).\n In text mode, if encoding is not specified the encoding used is platform\n dependent: locale.getencoding() is called to get the current locale encoding.\n (For reading and writing raw bytes use binary mode and leave encoding\n unspecified.) The available modes are:\n\n ========= ===============================================================\n Character Meaning\n --------- ---------------------------------------------------------------\n 'r' open for reading (default)\n 'w' open for writing, truncating the file first\n 'x' create a new file and open it for writing\n 'a' open for writing, appending to the end of the file if it exists\n 'b' binary mode\n 't' text mode (default)\n '+' open a disk file for updating (reading and writing)\n ========= ===============================================================\n\n The default mode is 'rt' (open for reading text). For binary random\n access, the mode 'w+b' opens and truncates the file to 0 bytes, while\n 'r+b' opens the file without truncation. The 'x' mode implies 'w' and\n raises an `FileExistsError` if the file already exists.\n\n Python distinguishes between files opened in binary and text modes,\n even when the underlying operating system doesn't. Files opened in\n binary mode (appending 'b' to the mode argument) return contents as\n bytes objects without any decoding. In text mode (the default, or when\n 't' is appended to the mode argument), the contents of the file are\n returned as strings, the bytes having been first decoded using a\n platform-dependent encoding or using the specified encoding if given.\n\n buffering is an optional integer used to set the buffering policy.\n Pass 0 to switch buffering off (only allowed in binary mode), 1 to select\n line buffering (only usable in text mode), and an integer > 1 to indicate\n the size of a fixed-size chunk buffer. 
When no buffering argument is\n given, the default buffering policy works as follows:\n\n * Binary files are buffered in fixed-size chunks; the size of the buffer\n is chosen using a heuristic trying to determine the underlying device's\n ""block size"" and falling back on `io.DEFAULT_BUFFER_SIZE`.\n On many systems, the buffer will typically be 4096 or 8192 bytes long.\n\n * ""Interactive"" text files (files for which isatty() returns True)\n use line buffering. Other text files use the policy described above\n for binary files.\n\n encoding is the name of the encoding used to decode or encode the\n file. This should only be used in text mode. The default encoding is\n platform dependent, but any encoding supported by Python can be\n passed. See the codecs module for the list of supported encodings.\n\n errors is an optional string that specifies how encoding errors are to\n be handled---this argument should not be used in binary mode. Pass\n 'strict' to raise a ValueError exception if there is an encoding error\n (the default of None has the same effect), or pass 'ignore' to ignore\n errors. (Note that ignoring encoding errors can lead to data loss.)\n See the documentation for codecs.register or run 'help(codecs.Codec)'\n for a list of the permitted encoding error strings.\n\n newline controls how universal newlines works (it only applies to text\n mode). It can be None, '', '\n', '\r', and '\r\n'. It works as\n follows:\n\n * On input, if newline is None, universal newlines mode is\n enabled. Lines in the input can end in '\n', '\r', or '\r\n', and\n these are translated into '\n' before being returned to the\n caller. If it is '', universal newline mode is enabled, but line\n endings are returned to the caller untranslated. If it has any of\n the other legal values, input lines are only terminated by the given\n string, and the line ending is returned to the caller untranslated.\n\n * On output, if newline is None, any '\n' characters written are\n translated to the system default line separator, os.linesep. If\n newline is '' or '\n', no translation takes place. If newline is any\n of the other legal values, any '\n' characters written are translated\n to the given string.\n\n If closefd is False, the underlying file descriptor will be kept open\n when the file is closed. This does not work when a file name is given\n and must be True in that case.\n\n A custom opener can be used by passing a callable as *opener*. The\n underlying file descriptor for the file object is then obtained by\n calling *opener* with (*file*, *flags*). *opener* must return an open\n file descriptor (passing os.open as *opener* results in functionality\n similar to passing None).\n\n open() returns a file object whose type depends on the mode, and\n through which the standard file operations such as reading and writing\n are performed. When open() is used to open a file in a text mode ('w',\n 'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open\n a file in a binary mode, the returned class varies: in read binary\n mode, it returns a BufferedReader; in write binary and append binary\n modes, it returns a BufferedWriter, and in read/write mode, it returns\n a BufferedRandom.\n\n It is also possible to use a string or bytearray as a file for both\n reading and writing. 
For strings StringIO can be used like a file\n opened in a text mode, and for bytes a BytesIO can be used like a file\n opened in a binary mode.\n """"""\n ...\n@overload\ndef open(\n file: FileDescriptorOrPath,\n mode: OpenBinaryModeReading,\n buffering: Literal[-1, 1] = -1,\n encoding: None = None,\n errors: None = None,\n newline: None = None,\n closefd: bool = True,\n opener: _Opener | None = None,\n) -> BufferedReader:\n r""""""\n Open file and return a stream. Raise OSError upon failure.\n\n file is either a text or byte string giving the name (and the path\n if the file isn't in the current working directory) of the file to\n be opened or an integer file descriptor of the file to be\n wrapped. (If a file descriptor is given, it is closed when the\n returned I/O object is closed, unless closefd is set to False.)\n\n mode is an optional string that specifies the mode in which the file\n is opened. It defaults to 'r' which means open for reading in text\n mode. Other common values are 'w' for writing (truncating the file if\n it already exists), 'x' for creating and writing to a new file, and\n 'a' for appending (which on some Unix systems, means that all writes\n append to the end of the file regardless of the current seek position).\n In text mode, if encoding is not specified the encoding used is platform\n dependent: locale.getencoding() is called to get the current locale encoding.\n (For reading and writing raw bytes use binary mode and leave encoding\n unspecified.) The available modes are:\n\n ========= ===============================================================\n Character Meaning\n --------- ---------------------------------------------------------------\n 'r' open for reading (default)\n 'w' open for writing, truncating the file first\n 'x' create a new file and open it for writing\n 'a' open for writing, appending to the end of the file if it exists\n 'b' binary mode\n 't' text mode (default)\n '+' open a disk file for updating (reading and writing)\n ========= ===============================================================\n\n The default mode is 'rt' (open for reading text). For binary random\n access, the mode 'w+b' opens and truncates the file to 0 bytes, while\n 'r+b' opens the file without truncation. The 'x' mode implies 'w' and\n raises an `FileExistsError` if the file already exists.\n\n Python distinguishes between files opened in binary and text modes,\n even when the underlying operating system doesn't. Files opened in\n binary mode (appending 'b' to the mode argument) return contents as\n bytes objects without any decoding. In text mode (the default, or when\n 't' is appended to the mode argument), the contents of the file are\n returned as strings, the bytes having been first decoded using a\n platform-dependent encoding or using the specified encoding if given.\n\n buffering is an optional integer used to set the buffering policy.\n Pass 0 to switch buffering off (only allowed in binary mode), 1 to select\n line buffering (only usable in text mode), and an integer > 1 to indicate\n the size of a fixed-size chunk buffer. 
When no buffering argument is\n given, the default buffering policy works as follows:\n\n * Binary files are buffered in fixed-size chunks; the size of the buffer\n is chosen using a heuristic trying to determine the underlying device's\n ""block size"" and falling back on `io.DEFAULT_BUFFER_SIZE`.\n On many systems, the buffer will typically be 4096 or 8192 bytes long.\n\n * ""Interactive"" text files (files for which isatty() returns True)\n use line buffering. Other text files use the policy described above\n for binary files.\n\n encoding is the name of the encoding used to decode or encode the\n file. This should only be used in text mode. The default encoding is\n platform dependent, but any encoding supported by Python can be\n passed. See the codecs module for the list of supported encodings.\n\n errors is an optional string that specifies how encoding errors are to\n be handled---this argument should not be used in binary mode. Pass\n 'strict' to raise a ValueError exception if there is an encoding error\n (the default of None has the same effect), or pass 'ignore' to ignore\n errors. (Note that ignoring encoding errors can lead to data loss.)\n See the documentation for codecs.register or run 'help(codecs.Codec)'\n for a list of the permitted encoding error strings.\n\n newline controls how universal newlines works (it only applies to text\n mode). It can be None, '', '\n', '\r', and '\r\n'. It works as\n follows:\n\n * On input, if newline is None, universal newlines mode is\n enabled. Lines in the input can end in '\n', '\r', or '\r\n', and\n these are translated into '\n' before being returned to the\n caller. If it is '', universal newline mode is enabled, but line\n endings are returned to the caller untranslated. If it has any of\n the other legal values, input lines are only terminated by the given\n string, and the line ending is returned to the caller untranslated.\n\n * On output, if newline is None, any '\n' characters written are\n translated to the system default line separator, os.linesep. If\n newline is '' or '\n', no translation takes place. If newline is any\n of the other legal values, any '\n' characters written are translated\n to the given string.\n\n If closefd is False, the underlying file descriptor will be kept open\n when the file is closed. This does not work when a file name is given\n and must be True in that case.\n\n A custom opener can be used by passing a callable as *opener*. The\n underlying file descriptor for the file object is then obtained by\n calling *opener* with (*file*, *flags*). *opener* must return an open\n file descriptor (passing os.open as *opener* results in functionality\n similar to passing None).\n\n open() returns a file object whose type depends on the mode, and\n through which the standard file operations such as reading and writing\n are performed. When open() is used to open a file in a text mode ('w',\n 'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open\n a file in a binary mode, the returned class varies: in read binary\n mode, it returns a BufferedReader; in write binary and append binary\n modes, it returns a BufferedWriter, and in read/write mode, it returns\n a BufferedRandom.\n\n It is also possible to use a string or bytearray as a file for both\n reading and writing. 
For strings StringIO can be used like a file\n opened in a text mode, and for bytes a BytesIO can be used like a file\n opened in a binary mode.\n """"""\n ...\n\n# Buffering cannot be determined: fall back to BinaryIO\n@overload\ndef open(\n file: FileDescriptorOrPath,\n mode: OpenBinaryMode,\n buffering: int = -1,\n encoding: None = None,\n errors: None = None,\n newline: None = None,\n closefd: bool = True,\n opener: _Opener | None = None,\n) -> BinaryIO:\n r""""""\n Open file and return a stream. Raise OSError upon failure.\n\n file is either a text or byte string giving the name (and the path\n if the file isn't in the current working directory) of the file to\n be opened or an integer file descriptor of the file to be\n wrapped. (If a file descriptor is given, it is closed when the\n returned I/O object is closed, unless closefd is set to False.)\n\n mode is an optional string that specifies the mode in which the file\n is opened. It defaults to 'r' which means open for reading in text\n mode. Other common values are 'w' for writing (truncating the file if\n it already exists), 'x' for creating and writing to a new file, and\n 'a' for appending (which on some Unix systems, means that all writes\n append to the end of the file regardless of the current seek position).\n In text mode, if encoding is not specified the encoding used is platform\n dependent: locale.getencoding() is called to get the current locale encoding.\n (For reading and writing raw bytes use binary mode and leave encoding\n unspecified.) The available modes are:\n\n ========= ===============================================================\n Character Meaning\n --------- ---------------------------------------------------------------\n 'r' open for reading (default)\n 'w' open for writing, truncating the file first\n 'x' create a new file and open it for writing\n 'a' open for writing, appending to the end of the file if it exists\n 'b' binary mode\n 't' text mode (default)\n '+' open a disk file for updating (reading and writing)\n ========= ===============================================================\n\n The default mode is 'rt' (open for reading text). For binary random\n access, the mode 'w+b' opens and truncates the file to 0 bytes, while\n 'r+b' opens the file without truncation. The 'x' mode implies 'w' and\n raises an `FileExistsError` if the file already exists.\n\n Python distinguishes between files opened in binary and text modes,\n even when the underlying operating system doesn't. Files opened in\n binary mode (appending 'b' to the mode argument) return contents as\n bytes objects without any decoding. In text mode (the default, or when\n 't' is appended to the mode argument), the contents of the file are\n returned as strings, the bytes having been first decoded using a\n platform-dependent encoding or using the specified encoding if given.\n\n buffering is an optional integer used to set the buffering policy.\n Pass 0 to switch buffering off (only allowed in binary mode), 1 to select\n line buffering (only usable in text mode), and an integer > 1 to indicate\n the size of a fixed-size chunk buffer. 
When no buffering argument is\n given, the default buffering policy works as follows:\n\n * Binary files are buffered in fixed-size chunks; the size of the buffer\n is chosen using a heuristic trying to determine the underlying device's\n ""block size"" and falling back on `io.DEFAULT_BUFFER_SIZE`.\n On many systems, the buffer will typically be 4096 or 8192 bytes long.\n\n * ""Interactive"" text files (files for which isatty() returns True)\n use line buffering. Other text files use the policy described above\n for binary files.\n\n encoding is the name of the encoding used to decode or encode the\n file. This should only be used in text mode. The default encoding is\n platform dependent, but any encoding supported by Python can be\n passed. See the codecs module for the list of supported encodings.\n\n errors is an optional string that specifies how encoding errors are to\n be handled---this argument should not be used in binary mode. Pass\n 'strict' to raise a ValueError exception if there is an encoding error\n (the default of None has the same effect), or pass 'ignore' to ignore\n errors. (Note that ignoring encoding errors can lead to data loss.)\n See the documentation for codecs.register or run 'help(codecs.Codec)'\n for a list of the permitted encoding error strings.\n\n newline controls how universal newlines works (it only applies to text\n mode). It can be None, '', '\n', '\r', and '\r\n'. It works as\n follows:\n\n * On input, if newline is None, universal newlines mode is\n enabled. Lines in the input can end in '\n', '\r', or '\r\n', and\n these are translated into '\n' before being returned to the\n caller. If it is '', universal newline mode is enabled, but line\n endings are returned to the caller untranslated. If it has any of\n the other legal values, input lines are only terminated by the given\n string, and the line ending is returned to the caller untranslated.\n\n * On output, if newline is None, any '\n' characters written are\n translated to the system default line separator, os.linesep. If\n newline is '' or '\n', no translation takes place. If newline is any\n of the other legal values, any '\n' characters written are translated\n to the given string.\n\n If closefd is False, the underlying file descriptor will be kept open\n when the file is closed. This does not work when a file name is given\n and must be True in that case.\n\n A custom opener can be used by passing a callable as *opener*. The\n underlying file descriptor for the file object is then obtained by\n calling *opener* with (*file*, *flags*). *opener* must return an open\n file descriptor (passing os.open as *opener* results in functionality\n similar to passing None).\n\n open() returns a file object whose type depends on the mode, and\n through which the standard file operations such as reading and writing\n are performed. When open() is used to open a file in a text mode ('w',\n 'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open\n a file in a binary mode, the returned class varies: in read binary\n mode, it returns a BufferedReader; in write binary and append binary\n modes, it returns a BufferedWriter, and in read/write mode, it returns\n a BufferedRandom.\n\n It is also possible to use a string or bytearray as a file for both\n reading and writing. 
For strings StringIO can be used like a file\n opened in a text mode, and for bytes a BytesIO can be used like a file\n opened in a binary mode.\n """"""\n ...\n\n# Fallback if mode is not specified\n@overload\ndef open(\n file: FileDescriptorOrPath,\n mode: str,\n buffering: int = -1,\n encoding: str | None = None,\n errors: str | None = None,\n newline: str | None = None,\n closefd: bool = True,\n opener: _Opener | None = None,\n) -> IO[Any]:\n r""""""\n Open file and return a stream. Raise OSError upon failure.\n\n file is either a text or byte string giving the name (and the path\n if the file isn't in the current working directory) of the file to\n be opened or an integer file descriptor of the file to be\n wrapped. (If a file descriptor is given, it is closed when the\n returned I/O object is closed, unless closefd is set to False.)\n\n mode is an optional string that specifies the mode in which the file\n is opened. It defaults to 'r' which means open for reading in text\n mode. Other common values are 'w' for writing (truncating the file if\n it already exists), 'x' for creating and writing to a new file, and\n 'a' for appending (which on some Unix systems, means that all writes\n append to the end of the file regardless of the current seek position).\n In text mode, if encoding is not specified the encoding used is platform\n dependent: locale.getencoding() is called to get the current locale encoding.\n (For reading and writing raw bytes use binary mode and leave encoding\n unspecified.) The available modes are:\n\n ========= ===============================================================\n Character Meaning\n --------- ---------------------------------------------------------------\n 'r' open for reading (default)\n 'w' open for writing, truncating the file first\n 'x' create a new file and open it for writing\n 'a' open for writing, appending to the end of the file if it exists\n 'b' binary mode\n 't' text mode (default)\n '+' open a disk file for updating (reading and writing)\n ========= ===============================================================\n\n The default mode is 'rt' (open for reading text). For binary random\n access, the mode 'w+b' opens and truncates the file to 0 bytes, while\n 'r+b' opens the file without truncation. The 'x' mode implies 'w' and\n raises an `FileExistsError` if the file already exists.\n\n Python distinguishes between files opened in binary and text modes,\n even when the underlying operating system doesn't. Files opened in\n binary mode (appending 'b' to the mode argument) return contents as\n bytes objects without any decoding. In text mode (the default, or when\n 't' is appended to the mode argument), the contents of the file are\n returned as strings, the bytes having been first decoded using a\n platform-dependent encoding or using the specified encoding if given.\n\n buffering is an optional integer used to set the buffering policy.\n Pass 0 to switch buffering off (only allowed in binary mode), 1 to select\n line buffering (only usable in text mode), and an integer > 1 to indicate\n the size of a fixed-size chunk buffer. 
When no buffering argument is\n given, the default buffering policy works as follows:\n\n * Binary files are buffered in fixed-size chunks; the size of the buffer\n is chosen using a heuristic trying to determine the underlying device's\n ""block size"" and falling back on `io.DEFAULT_BUFFER_SIZE`.\n On many systems, the buffer will typically be 4096 or 8192 bytes long.\n\n * ""Interactive"" text files (files for which isatty() returns True)\n use line buffering. Other text files use the policy described above\n for binary files.\n\n encoding is the name of the encoding used to decode or encode the\n file. This should only be used in text mode. The default encoding is\n platform dependent, but any encoding supported by Python can be\n passed. See the codecs module for the list of supported encodings.\n\n errors is an optional string that specifies how encoding errors are to\n be handled---this argument should not be used in binary mode. Pass\n 'strict' to raise a ValueError exception if there is an encoding error\n (the default of None has the same effect), or pass 'ignore' to ignore\n errors. (Note that ignoring encoding errors can lead to data loss.)\n See the documentation for codecs.register or run 'help(codecs.Codec)'\n for a list of the permitted encoding error strings.\n\n newline controls how universal newlines works (it only applies to text\n mode). It can be None, '', '\n', '\r', and '\r\n'. It works as\n follows:\n\n * On input, if newline is None, universal newlines mode is\n enabled. Lines in the input can end in '\n', '\r', or '\r\n', and\n these are translated into '\n' before being returned to the\n caller. If it is '', universal newline mode is enabled, but line\n endings are returned to the caller untranslated. If it has any of\n the other legal values, input lines are only terminated by the given\n string, and the line ending is returned to the caller untranslated.\n\n * On output, if newline is None, any '\n' characters written are\n translated to the system default line separator, os.linesep. If\n newline is '' or '\n', no translation takes place. If newline is any\n of the other legal values, any '\n' characters written are translated\n to the given string.\n\n If closefd is False, the underlying file descriptor will be kept open\n when the file is closed. This does not work when a file name is given\n and must be True in that case.\n\n A custom opener can be used by passing a callable as *opener*. The\n underlying file descriptor for the file object is then obtained by\n calling *opener* with (*file*, *flags*). *opener* must return an open\n file descriptor (passing os.open as *opener* results in functionality\n similar to passing None).\n\n open() returns a file object whose type depends on the mode, and\n through which the standard file operations such as reading and writing\n are performed. When open() is used to open a file in a text mode ('w',\n 'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open\n a file in a binary mode, the returned class varies: in read binary\n mode, it returns a BufferedReader; in write binary and append binary\n modes, it returns a BufferedWriter, and in read/write mode, it returns\n a BufferedRandom.\n\n It is also possible to use a string or bytearray as a file for both\n reading and writing. 
For strings StringIO can be used like a file\n opened in a text mode, and for bytes a BytesIO can be used like a file\n opened in a binary mode.\n """"""\n ...\ndef ord(c: str | bytes | bytearray, /) -> int:\n """"""Return the Unicode code point for a one-character string.""""""\n ...\n\nclass _SupportsWriteAndFlush(SupportsWrite[_T_contra], SupportsFlush, Protocol[_T_contra]): ...\n\n@overload\ndef print(\n *values: object,\n sep: str | None = "" "",\n end: str | None = ""\n"",\n file: SupportsWrite[str] | None = None,\n flush: Literal[False] = False,\n) -> None:\n """"""\n Prints the values to a stream, or to sys.stdout by default.\n\n sep\n string inserted between values, default a space.\n end\n string appended after the last value, default a newline.\n file\n a file-like object (stream); defaults to the current sys.stdout.\n flush\n whether to forcibly flush the stream.\n """"""\n ...\n@overload\ndef print(\n *values: object, sep: str | None = "" "", end: str | None = ""\n"", file: _SupportsWriteAndFlush[str] | None = None, flush: bool\n) -> None:\n """"""\n Prints the values to a stream, or to sys.stdout by default.\n\n sep\n string inserted between values, default a space.\n end\n string appended after the last value, default a newline.\n file\n a file-like object (stream); defaults to the current sys.stdout.\n flush\n whether to forcibly flush the stream.\n """"""\n ...\n\n_E_contra = TypeVar(""_E_contra"", contravariant=True)\n_M_contra = TypeVar(""_M_contra"", contravariant=True)\n\nclass _SupportsPow2(Protocol[_E_contra, _T_co]):\n def __pow__(self, other: _E_contra, /) -> _T_co: ...\n\nclass _SupportsPow3NoneOnly(Protocol[_E_contra, _T_co]):\n def __pow__(self, other: _E_contra, modulo: None = None, /) -> _T_co: ...\n\nclass _SupportsPow3(Protocol[_E_contra, _M_contra, _T_co]):\n def __pow__(self, other: _E_contra, modulo: _M_contra, /) -> _T_co: ...\n\n_SupportsSomeKindOfPow = ( # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed\n _SupportsPow2[Any, Any] | _SupportsPow3NoneOnly[Any, Any] | _SupportsPow3[Any, Any, Any]\n)\n\n# TODO: `pow(int, int, Literal[0])` fails at runtime,\n# but adding a `NoReturn` overload isn't a good solution for expressing that (see #8566).\n@overload\ndef pow(base: int, exp: int, mod: int) -> int:\n """"""\n Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments\n\n Some types, such as ints, are able to use a more efficient algorithm when\n invoked using the three argument form.\n """"""\n ...\n@overload\ndef pow(base: int, exp: Literal[0], mod: None = None) -> Literal[1]:\n """"""\n Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments\n\n Some types, such as ints, are able to use a more efficient algorithm when\n invoked using the three argument form.\n """"""\n ...\n@overload\ndef pow(base: int, exp: _PositiveInteger, mod: None = None) -> int:\n """"""\n Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments\n\n Some types, such as ints, are able to use a more efficient algorithm when\n invoked using the three argument form.\n """"""\n ...\n@overload\ndef pow(base: int, exp: _NegativeInteger, mod: None = None) -> float:\n """"""\n Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments\n\n Some types, such as ints, are able to use a more efficient algorithm when\n invoked using the three argument form.\n """"""\n ...\n\n# int base & positive-int exp -> int; int base & negative-int exp -> float\n# return type must be Any as `int | float` causes too many 
false-positive errors\n@overload\ndef pow(base: int, exp: int, mod: None = None) -> Any:\n """"""\n Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments\n\n Some types, such as ints, are able to use a more efficient algorithm when\n invoked using the three argument form.\n """"""\n ...\n@overload\ndef pow(base: _PositiveInteger, exp: float, mod: None = None) -> float:\n """"""\n Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments\n\n Some types, such as ints, are able to use a more efficient algorithm when\n invoked using the three argument form.\n """"""\n ...\n@overload\ndef pow(base: _NegativeInteger, exp: float, mod: None = None) -> complex:\n """"""\n Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments\n\n Some types, such as ints, are able to use a more efficient algorithm when\n invoked using the three argument form.\n """"""\n ...\n@overload\ndef pow(base: float, exp: int, mod: None = None) -> float:\n """"""\n Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments\n\n Some types, such as ints, are able to use a more efficient algorithm when\n invoked using the three argument form.\n """"""\n ...\n\n# float base & float exp could return float or complex\n# return type must be Any (same as complex base, complex exp),\n# as `float | complex` causes too many false-positive errors\n@overload\ndef pow(base: float, exp: complex | _SupportsSomeKindOfPow, mod: None = None) -> Any:\n """"""\n Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments\n\n Some types, such as ints, are able to use a more efficient algorithm when\n invoked using the three argument form.\n """"""\n ...\n@overload\ndef pow(base: complex, exp: complex | _SupportsSomeKindOfPow, mod: None = None) -> complex:\n """"""\n Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments\n\n Some types, such as ints, are able to use a more efficient algorithm when\n invoked using the three argument form.\n """"""\n ...\n@overload\ndef pow(base: _SupportsPow2[_E_contra, _T_co], exp: _E_contra, mod: None = None) -> _T_co:\n """"""\n Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments\n\n Some types, such as ints, are able to use a more efficient algorithm when\n invoked using the three argument form.\n """"""\n ...\n@overload\ndef pow(base: _SupportsPow3NoneOnly[_E_contra, _T_co], exp: _E_contra, mod: None = None) -> _T_co:\n """"""\n Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments\n\n Some types, such as ints, are able to use a more efficient algorithm when\n invoked using the three argument form.\n """"""\n ...\n@overload\ndef pow(base: _SupportsPow3[_E_contra, _M_contra, _T_co], exp: _E_contra, mod: _M_contra) -> _T_co:\n """"""\n Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments\n\n Some types, such as ints, are able to use a more efficient algorithm when\n invoked using the three argument form.\n """"""\n ...\n@overload\ndef pow(base: _SupportsSomeKindOfPow, exp: float, mod: None = None) -> Any:\n """"""\n Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments\n\n Some types, such as ints, are able to use a more efficient algorithm when\n invoked using the three argument form.\n """"""\n ...\n@overload\ndef pow(base: _SupportsSomeKindOfPow, exp: complex, mod: None = None) -> complex:\n """"""\n Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments\n\n Some types, such 
as ints, are able to use a more efficient algorithm when\n invoked using the three argument form.\n """"""\n ...\n\nquit: _sitebuiltins.Quitter\n\nclass reversed(Generic[_T]):\n """"""Return a reverse iterator over the values of the given sequence.""""""\n @overload\n def __new__(cls, sequence: Reversible[_T], /) -> Iterator[_T]: ... # type: ignore[misc]\n @overload\n def __new__(cls, sequence: SupportsLenAndGetItem[_T], /) -> Iterator[_T]: ... # type: ignore[misc]\n def __iter__(self) -> Self:\n """"""Implement iter(self).""""""\n ...\n def __next__(self) -> _T:\n """"""Implement next(self).""""""\n ...\n def __length_hint__(self) -> int:\n """"""Private method returning an estimate of len(list(it)).""""""\n ...\n\ndef repr(obj: object, /) -> str:\n """"""\n Return the canonical string representation of the object.\n\n For many object types, including most builtins, eval(repr(obj)) == obj.\n """"""\n ...\n\n# See https://github.com/python/typeshed/pull/9141\n# and https://github.com/python/typeshed/pull/9151\n# on why we don't use `SupportsRound` from `typing.pyi`\n\nclass _SupportsRound1(Protocol[_T_co]):\n def __round__(self) -> _T_co: ...\n\nclass _SupportsRound2(Protocol[_T_co]):\n def __round__(self, ndigits: int, /) -> _T_co: ...\n\n@overload\ndef round(number: _SupportsRound1[_T], ndigits: None = None) -> _T:\n """"""\n Round a number to a given precision in decimal digits.\n\n The return value is an integer if ndigits is omitted or None. Otherwise\n the return value has the same type as the number. ndigits may be negative.\n """"""\n ...\n@overload\ndef round(number: _SupportsRound2[_T], ndigits: SupportsIndex) -> _T:\n """"""\n Round a number to a given precision in decimal digits.\n\n The return value is an integer if ndigits is omitted or None. Otherwise\n the return value has the same type as the number. 
ndigits may be negative.\n """"""\n ...\n\n# See https://github.com/python/typeshed/pull/6292#discussion_r748875189\n# for why arg 3 of `setattr` should be annotated with `Any` and not `object`\ndef setattr(obj: object, name: str, value: Any, /) -> None:\n """"""\n Sets the named attribute on the given object to the specified value.\n\n setattr(x, 'y', v) is equivalent to ``x.y = v``\n """"""\n ...\n@overload\ndef sorted(\n iterable: Iterable[SupportsRichComparisonT], /, *, key: None = None, reverse: bool = False\n) -> list[SupportsRichComparisonT]:\n """"""\n Return a new list containing all items from the iterable in ascending order.\n\n A custom key function can be supplied to customize the sort order, and the\n reverse flag can be set to request the result in descending order.\n """"""\n ...\n@overload\ndef sorted(iterable: Iterable[_T], /, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> list[_T]:\n """"""\n Return a new list containing all items from the iterable in ascending order.\n\n A custom key function can be supplied to customize the sort order, and the\n reverse flag can be set to request the result in descending order.\n """"""\n ...\n\n_AddableT1 = TypeVar(""_AddableT1"", bound=SupportsAdd[Any, Any])\n_AddableT2 = TypeVar(""_AddableT2"", bound=SupportsAdd[Any, Any])\n\nclass _SupportsSumWithNoDefaultGiven(SupportsAdd[Any, Any], SupportsRAdd[int, Any], Protocol): ...\n\n_SupportsSumNoDefaultT = TypeVar(""_SupportsSumNoDefaultT"", bound=_SupportsSumWithNoDefaultGiven)\n\n# In general, the return type of `x + x` is *not* guaranteed to be the same type as x.\n# However, we can't express that in the stub for `sum()`\n# without creating many false-positive errors (see #7578).\n# Instead, we special-case the most common examples of this: bool and literal integers.\n@overload\ndef sum(iterable: Iterable[bool | _LiteralInteger], /, start: int = 0) -> int:\n """"""\n Return the sum of a 'start' value (default: 0) plus an iterable of numbers\n\n When the iterable is empty, return the start value.\n This function is intended specifically for use with numeric values and may\n reject non-numeric types.\n """"""\n ...\n@overload\ndef sum(iterable: Iterable[_SupportsSumNoDefaultT], /) -> _SupportsSumNoDefaultT | Literal[0]:\n """"""\n Return the sum of a 'start' value (default: 0) plus an iterable of numbers\n\n When the iterable is empty, return the start value.\n This function is intended specifically for use with numeric values and may\n reject non-numeric types.\n """"""\n ...\n@overload\ndef sum(iterable: Iterable[_AddableT1], /, start: _AddableT2) -> _AddableT1 | _AddableT2:\n """"""\n Return the sum of a 'start' value (default: 0) plus an iterable of numbers\n\n When the iterable is empty, return the start value.\n This function is intended specifically for use with numeric values and may\n reject non-numeric types.\n """"""\n ...\n\n# The argument to `vars()` has to have a `__dict__` attribute, so the second overload can't be annotated with `object`\n# (A ""SupportsDunderDict"" protocol doesn't work)\n@overload\ndef vars(object: type, /) -> types.MappingProxyType[str, Any]:\n """"""\n Show vars.\n\n Without arguments, equivalent to locals().\n With an argument, equivalent to object.__dict__.\n """"""\n ...\n@overload\ndef vars(object: Any = ..., /) -> dict[str, Any]:\n """"""\n Show vars.\n\n Without arguments, equivalent to locals().\n With an argument, equivalent to object.__dict__.\n """"""\n ...\n\nclass zip(Generic[_T_co]):\n """"""\n zip(*iterables, 
strict=False) --> Yield tuples until an input is exhausted.\n\n >>> list(zip('abcdefg', range(3), range(4)))\n [('a', 0, 0), ('b', 1, 1), ('c', 2, 2)]\n\n The zip object yields n-length tuples, where n is the number of iterables\n passed as positional arguments to zip(). The i-th element in every tuple\n comes from the i-th iterable argument to zip(). This continues until the\n shortest argument is exhausted.\n\n If strict is true and one of the arguments is exhausted before the others,\n raise a ValueError.\n """"""\n if sys.version_info >= (3, 10):\n @overload\n def __new__(cls, *, strict: bool = ...) -> zip[Any]: ...\n @overload\n def __new__(cls, iter1: Iterable[_T1], /, *, strict: bool = ...) -> zip[tuple[_T1]]: ...\n @overload\n def __new__(cls, iter1: Iterable[_T1], iter2: Iterable[_T2], /, *, strict: bool = ...) -> zip[tuple[_T1, _T2]]: ...\n @overload\n def __new__(\n cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], /, *, strict: bool = ...\n ) -> zip[tuple[_T1, _T2, _T3]]: ...\n @overload\n def __new__(\n cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], /, *, strict: bool = ...\n ) -> zip[tuple[_T1, _T2, _T3, _T4]]: ...\n @overload\n def __new__(\n cls,\n iter1: Iterable[_T1],\n iter2: Iterable[_T2],\n iter3: Iterable[_T3],\n iter4: Iterable[_T4],\n iter5: Iterable[_T5],\n /,\n *,\n strict: bool = ...,\n ) -> zip[tuple[_T1, _T2, _T3, _T4, _T5]]: ...\n @overload\n def __new__(\n cls,\n iter1: Iterable[Any],\n iter2: Iterable[Any],\n iter3: Iterable[Any],\n iter4: Iterable[Any],\n iter5: Iterable[Any],\n iter6: Iterable[Any],\n /,\n *iterables: Iterable[Any],\n strict: bool = ...,\n ) -> zip[tuple[Any, ...]]: ...\n else:\n @overload\n def __new__(cls) -> zip[Any]: ...\n @overload\n def __new__(cls, iter1: Iterable[_T1], /) -> zip[tuple[_T1]]: ...\n @overload\n def __new__(cls, iter1: Iterable[_T1], iter2: Iterable[_T2], /) -> zip[tuple[_T1, _T2]]: ...\n @overload\n def __new__(cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], /) -> zip[tuple[_T1, _T2, _T3]]: ...\n @overload\n def __new__(\n cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], /\n ) -> zip[tuple[_T1, _T2, _T3, _T4]]: ...\n @overload\n def __new__(\n cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], iter5: Iterable[_T5], /\n ) -> zip[tuple[_T1, _T2, _T3, _T4, _T5]]: ...\n @overload\n def __new__(\n cls,\n iter1: Iterable[Any],\n iter2: Iterable[Any],\n iter3: Iterable[Any],\n iter4: Iterable[Any],\n iter5: Iterable[Any],\n iter6: Iterable[Any],\n /,\n *iterables: Iterable[Any],\n ) -> zip[tuple[Any, ...]]: ...\n\n def __iter__(self) -> Self:\n """"""Implement iter(self).""""""\n ...\n def __next__(self) -> _T_co:\n """"""Implement next(self).""""""\n ...\n\n# Signature of `builtins.__import__` should be kept identical to `importlib.__import__`\n# Return type of `__import__` should be kept the same as return type of `importlib.import_module`\ndef __import__(\n name: str,\n globals: Mapping[str, object] | None = None,\n locals: Mapping[str, object] | None = None,\n fromlist: Sequence[str] = (),\n level: int = 0,\n) -> types.ModuleType:\n """"""\n Import a module.\n\n Because this function is meant for use by the Python\n interpreter and not for general use, it is better to use\n importlib.import_module() to programmatically import a module.\n\n The globals argument is only used to determine the context;\n they are not modified. 
The locals argument is unused. The fromlist\n should be a list of names to emulate ``from name import ...``, or an\n empty list to emulate ``import name``.\n When importing a module from a package, note that __import__('A.B', ...)\n returns package A when fromlist is empty, but its submodule B when\n fromlist is not empty. The level argument is used to determine whether to\n perform absolute or relative imports: 0 is absolute, while a positive number\n is the number of parent directories to search relative to the current module.\n """"""\n ...\ndef __build_class__(func: Callable[[], CellType | Any], name: str, /, *bases: Any, metaclass: Any = ..., **kwds: Any) -> Any:\n """"""\n __build_class__(func, name, /, *bases, [metaclass], **kwds) -> class\n\n Internal helper function used by the class statement.\n """"""\n ...\n\nif sys.version_info >= (3, 10):\n from types import EllipsisType\n\n # Backwards compatibility hack for folks who relied on the ellipsis type\n # existing in typeshed in Python 3.9 and earlier.\n ellipsis = EllipsisType\n\n Ellipsis: EllipsisType\n\nelse:\n # Actually the type of Ellipsis is , but since it's\n # not exposed anywhere under that name, we make it private here.\n @final\n @type_check_only\n class ellipsis: ...\n\n Ellipsis: ellipsis\n\nclass BaseException:\n """"""Common base class for all exceptions""""""\n args: tuple[Any, ...]\n __cause__: BaseException | None\n __context__: BaseException | None\n __suppress_context__: bool\n __traceback__: TracebackType | None\n def __init__(self, *args: object) -> None: ...\n def __new__(cls, *args: Any, **kwds: Any) -> Self: ...\n def __setstate__(self, state: dict[str, Any] | None, /) -> None: ...\n def with_traceback(self, tb: TracebackType | None, /) -> Self:\n """"""\n Exception.with_traceback(tb) --\n set self.__traceback__ to tb and return self.\n """"""\n ...\n if sys.version_info >= (3, 11):\n # only present after add_note() is called\n __notes__: list[str]\n def add_note(self, note: str, /) -> None:\n """"""\n Exception.add_note(note) --\n add a note to the exception\n """"""\n ...\n\nclass GeneratorExit(BaseException):\n """"""Request that a generator exit.""""""\n ...\nclass KeyboardInterrupt(BaseException):\n """"""Program interrupted by user.""""""\n ...\n\nclass SystemExit(BaseException):\n """"""Request to exit from the interpreter.""""""\n code: sys._ExitCode\n\nclass Exception(BaseException):\n """"""Common base class for all non-exit exceptions.""""""\n ...\n\nclass StopIteration(Exception):\n """"""Signal the end from iterator.__next__().""""""\n value: Any\n\nclass OSError(Exception):\n """"""Base class for I/O related errors.""""""\n errno: int | None\n strerror: str | None\n # filename, filename2 are actually str | bytes | None\n filename: Any\n filename2: Any\n if sys.platform == ""win32"":\n winerror: int\n\nEnvironmentError = OSError\nIOError = OSError\nif sys.platform == ""win32"":\n WindowsError = OSError\n\nclass ArithmeticError(Exception):\n """"""Base class for arithmetic errors.""""""\n ...\nclass AssertionError(Exception):\n """"""Assertion failed.""""""\n ...\n\nclass AttributeError(Exception):\n """"""Attribute not found.""""""\n if sys.version_info >= (3, 10):\n def __init__(self, *args: object, name: str | None = ..., obj: object = ...) 
-> None: ...\n name: str\n obj: object\n\nclass BufferError(Exception):\n """"""Buffer error.""""""\n ...\nclass EOFError(Exception):\n """"""Read beyond end of file.""""""\n ...\n\nclass ImportError(Exception):\n """"""Import can't find module, or can't find name in module.""""""\n def __init__(self, *args: object, name: str | None = ..., path: str | None = ...) -> None: ...\n name: str | None\n path: str | None\n msg: str # undocumented\n if sys.version_info >= (3, 12):\n name_from: str | None # undocumented\n\nclass LookupError(Exception):\n """"""Base class for lookup errors.""""""\n ...\nclass MemoryError(Exception):\n """"""Out of memory.""""""\n ...\n\nclass NameError(Exception):\n """"""Name not found globally.""""""\n if sys.version_info >= (3, 10):\n def __init__(self, *args: object, name: str | None = ...) -> None: ...\n name: str\n\nclass ReferenceError(Exception):\n """"""Weak ref proxy used after referent went away.""""""\n ...\nclass RuntimeError(Exception):\n """"""Unspecified run-time error.""""""\n ...\nclass StopAsyncIteration(Exception):\n """"""Signal the end from iterator.__anext__().""""""\n ...\n\nclass SyntaxError(Exception):\n """"""Invalid syntax.""""""\n msg: str\n filename: str | None\n lineno: int | None\n offset: int | None\n text: str | None\n # Errors are displayed differently if this attribute exists on the exception.\n # The value is always None.\n print_file_and_line: None\n if sys.version_info >= (3, 10):\n end_lineno: int | None\n end_offset: int | None\n\n @overload\n def __init__(self) -> None: ...\n @overload\n def __init__(self, msg: object, /) -> None: ...\n # Second argument is the tuple (filename, lineno, offset, text)\n @overload\n def __init__(self, msg: str, info: tuple[str | None, int | None, int | None, str | None], /) -> None: ...\n if sys.version_info >= (3, 10):\n # end_lineno and end_offset must both be provided if one is.\n @overload\n def __init__(\n self, msg: str, info: tuple[str | None, int | None, int | None, str | None, int | None, int | None], /\n ) -> None: ...\n # If you provide more than two arguments, it still creates the SyntaxError, but\n # the arguments from the info tuple are not parsed. 
This form is omitted.\n\nclass SystemError(Exception):\n """"""\n Internal error in the Python interpreter.\n\n Please report this to the Python maintainer, along with the traceback,\n the Python version, and the hardware/OS platform and version.\n """"""\n ...\nclass TypeError(Exception):\n """"""Inappropriate argument type.""""""\n ...\nclass ValueError(Exception):\n """"""Inappropriate argument value (of correct type).""""""\n ...\nclass FloatingPointError(ArithmeticError):\n """"""Floating point operation failed.""""""\n ...\nclass OverflowError(ArithmeticError):\n """"""Result too large to be represented.""""""\n ...\nclass ZeroDivisionError(ArithmeticError):\n """"""Second argument to a division or modulo operation was zero.""""""\n ...\nclass ModuleNotFoundError(ImportError):\n """"""Module not found.""""""\n ...\nclass IndexError(LookupError):\n """"""Sequence index out of range.""""""\n ...\nclass KeyError(LookupError):\n """"""Mapping key not found.""""""\n ...\nclass UnboundLocalError(NameError):\n """"""Local name referenced but not bound to a value.""""""\n ...\n\nclass BlockingIOError(OSError):\n """"""I/O operation would block.""""""\n characters_written: int\n\nclass ChildProcessError(OSError):\n """"""Child process error.""""""\n ...\nclass ConnectionError(OSError):\n """"""Connection error.""""""\n ...\nclass BrokenPipeError(ConnectionError):\n """"""Broken pipe.""""""\n ...\nclass ConnectionAbortedError(ConnectionError):\n """"""Connection aborted.""""""\n ...\nclass ConnectionRefusedError(ConnectionError):\n """"""Connection refused.""""""\n ...\nclass ConnectionResetError(ConnectionError):\n """"""Connection reset.""""""\n ...\nclass FileExistsError(OSError):\n """"""File already exists.""""""\n ...\nclass FileNotFoundError(OSError):\n """"""File not found.""""""\n ...\nclass InterruptedError(OSError):\n """"""Interrupted by signal.""""""\n ...\nclass IsADirectoryError(OSError):\n """"""Operation doesn't work on directories.""""""\n ...\nclass NotADirectoryError(OSError):\n """"""Operation only works on directories.""""""\n ...\nclass PermissionError(OSError):\n """"""Not enough permissions.""""""\n ...\nclass ProcessLookupError(OSError):\n """"""Process not found.""""""\n ...\nclass TimeoutError(OSError):\n """"""Timeout expired.""""""\n ...\nclass NotImplementedError(RuntimeError):\n """"""Method or function hasn't been implemented yet.""""""\n ...\nclass RecursionError(RuntimeError):\n """"""Recursion limit exceeded.""""""\n ...\nclass IndentationError(SyntaxError):\n """"""Improper indentation.""""""\n ...\nclass TabError(IndentationError):\n """"""Improper mixture of spaces and tabs.""""""\n ...\nclass UnicodeError(ValueError):\n """"""Unicode related error.""""""\n ...\n\nclass UnicodeDecodeError(UnicodeError):\n """"""Unicode decoding error.""""""\n encoding: str\n object: bytes\n start: int\n end: int\n reason: str\n def __init__(self, encoding: str, object: ReadableBuffer, start: int, end: int, reason: str, /) -> None: ...\n\nclass UnicodeEncodeError(UnicodeError):\n """"""Unicode encoding error.""""""\n encoding: str\n object: str\n start: int\n end: int\n reason: str\n def __init__(self, encoding: str, object: str, start: int, end: int, reason: str, /) -> None: ...\n\nclass UnicodeTranslateError(UnicodeError):\n """"""Unicode translation error.""""""\n encoding: None\n object: str\n start: int\n end: int\n reason: str\n def __init__(self, object: str, start: int, end: int, reason: str, /) -> None: ...\n\nclass Warning(Exception):\n """"""Base class for 
warning categories.""""""\n ...\nclass UserWarning(Warning):\n """"""Base class for warnings generated by user code.""""""\n ...\nclass DeprecationWarning(Warning):\n """"""Base class for warnings about deprecated features.""""""\n ...\nclass SyntaxWarning(Warning):\n """"""Base class for warnings about dubious syntax.""""""\n ...\nclass RuntimeWarning(Warning):\n """"""Base class for warnings about dubious runtime behavior.""""""\n ...\nclass FutureWarning(Warning):\n """"""\n Base class for warnings about constructs that will change semantically\n in the future.\n """"""\n ...\nclass PendingDeprecationWarning(Warning):\n """"""\n Base class for warnings about features which will be deprecated\n in the future.\n """"""\n ...\nclass ImportWarning(Warning):\n """"""Base class for warnings about probable mistakes in module imports""""""\n ...\nclass UnicodeWarning(Warning):\n """"""\n Base class for warnings about Unicode related problems, mostly\n related to conversion problems.\n """"""\n ...\nclass BytesWarning(Warning):\n """"""\n Base class for warnings about bytes and buffer related problems, mostly\n related to conversion from str or comparing to str.\n """"""\n ...\nclass ResourceWarning(Warning):\n """"""Base class for warnings about resource usage.""""""\n ...\n\nif sys.version_info >= (3, 10):\n class EncodingWarning(Warning):\n """"""Base class for warnings about encodings.""""""\n ...\n\nif sys.version_info >= (3, 11):\n _BaseExceptionT_co = TypeVar(""_BaseExceptionT_co"", bound=BaseException, covariant=True, default=BaseException)\n _BaseExceptionT = TypeVar(""_BaseExceptionT"", bound=BaseException)\n _ExceptionT_co = TypeVar(""_ExceptionT_co"", bound=Exception, covariant=True, default=Exception)\n _ExceptionT = TypeVar(""_ExceptionT"", bound=Exception)\n\n # See `check_exception_group.py` for use-cases and comments.\n class BaseExceptionGroup(BaseException, Generic[_BaseExceptionT_co]):\n """"""A combination of multiple unrelated exceptions.""""""\n def __new__(cls, message: str, exceptions: Sequence[_BaseExceptionT_co], /) -> Self: ...\n def __init__(self, message: str, exceptions: Sequence[_BaseExceptionT_co], /) -> None: ...\n @property\n def message(self) -> str:\n """"""exception message""""""\n ...\n @property\n def exceptions(self) -> tuple[_BaseExceptionT_co | BaseExceptionGroup[_BaseExceptionT_co], ...]:\n """"""nested exceptions""""""\n ...\n @overload\n def subgroup(\n self, matcher_value: type[_ExceptionT] | tuple[type[_ExceptionT], ...], /\n ) -> ExceptionGroup[_ExceptionT] | None: ...\n @overload\n def subgroup(\n self, matcher_value: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...], /\n ) -> BaseExceptionGroup[_BaseExceptionT] | None: ...\n @overload\n def subgroup(\n self, matcher_value: Callable[[_BaseExceptionT_co | Self], bool], /\n ) -> BaseExceptionGroup[_BaseExceptionT_co] | None: ...\n @overload\n def split(\n self, matcher_value: type[_ExceptionT] | tuple[type[_ExceptionT], ...], /\n ) -> tuple[ExceptionGroup[_ExceptionT] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ...\n @overload\n def split(\n self, matcher_value: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...], /\n ) -> tuple[BaseExceptionGroup[_BaseExceptionT] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ...\n @overload\n def split(\n self, matcher_value: Callable[[_BaseExceptionT_co | Self], bool], /\n ) -> tuple[BaseExceptionGroup[_BaseExceptionT_co] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ...\n # In reality it is `NonEmptySequence`:\n 
@overload\n def derive(self, excs: Sequence[_ExceptionT], /) -> ExceptionGroup[_ExceptionT]: ...\n @overload\n def derive(self, excs: Sequence[_BaseExceptionT], /) -> BaseExceptionGroup[_BaseExceptionT]: ...\n def __class_getitem__(cls, item: Any, /) -> GenericAlias:\n """"""See PEP 585""""""\n ...\n\n class ExceptionGroup(BaseExceptionGroup[_ExceptionT_co], Exception):\n def __new__(cls, message: str, exceptions: Sequence[_ExceptionT_co], /) -> Self: ...\n def __init__(self, message: str, exceptions: Sequence[_ExceptionT_co], /) -> None: ...\n @property\n def exceptions(self) -> tuple[_ExceptionT_co | ExceptionGroup[_ExceptionT_co], ...]:\n """"""nested exceptions""""""\n ...\n # We accept a narrower type, but that's OK.\n @overload # type: ignore[override]\n def subgroup(\n self, matcher_value: type[_ExceptionT] | tuple[type[_ExceptionT], ...], /\n ) -> ExceptionGroup[_ExceptionT] | None: ...\n @overload\n def subgroup(\n self, matcher_value: Callable[[_ExceptionT_co | Self], bool], /\n ) -> ExceptionGroup[_ExceptionT_co] | None: ...\n @overload # type: ignore[override]\n def split(\n self, matcher_value: type[_ExceptionT] | tuple[type[_ExceptionT], ...], /\n ) -> tuple[ExceptionGroup[_ExceptionT] | None, ExceptionGroup[_ExceptionT_co] | None]: ...\n @overload\n def split(\n self, matcher_value: Callable[[_ExceptionT_co | Self], bool], /\n ) -> tuple[ExceptionGroup[_ExceptionT_co] | None, ExceptionGroup[_ExceptionT_co] | None]: ...\n\nif sys.version_info >= (3, 13):\n class PythonFinalizationError(RuntimeError): ...\n",python,tab +61,115464,"/fast/home/franz.srambical/.cursor-server/extensions/anysphere.cursorpyright-1.0.9/dist/typeshed-fallback/stdlib/builtins.pyi",117331,0,"",python,selection_command +62,116204,"input_pipeline/generate_atari_dataset.py",0,0,"",python,tab +63,116486,"input_pipeline/generate_atari_dataset.py",19289,0,"",python,selection_command +64,116774,"input_pipeline/generate_atari_dataset.py",18301,0,"",python,selection_command +65,117215,"TERMINAL",0,0,"",,terminal_command +66,126661,"input_pipeline/generate_coinrun_dataset.py",0,0,"",python,tab +67,127384,"input_pipeline/generate_coinrun_dataset.py",1945,0,"",python,selection_command +68,127550,"input_pipeline/generate_coinrun_dataset.py",1946,0,"",python,selection_command +69,127733,"input_pipeline/generate_coinrun_dataset.py",1952,0,"",python,selection_command +70,127901,"input_pipeline/generate_coinrun_dataset.py",1953,0,"",python,selection_command +71,130861,"input_pipeline/generate_coinrun_dataset.py",1952,0,"",python,selection_command +72,131002,"input_pipeline/generate_coinrun_dataset.py",1946,0,"",python,selection_command +73,131157,"input_pipeline/generate_coinrun_dataset.py",1945,0,"",python,selection_command +74,131530,"input_pipeline/generate_coinrun_dataset.py",1927,0,"",python,selection_command +75,135627,"input_pipeline/generate_atari_dataset.py",0,0,"",python,tab +76,146583,"input_pipeline/generate_atari_dataset.py",18970,0,"",python,selection_command +77,146899,"input_pipeline/generate_atari_dataset.py",19289,0,"",python,selection_command +78,147142,"input_pipeline/generate_atari_dataset.py",1852,0,"",python,selection_command +79,147689,"input_pipeline/generate_atari_dataset.py",19289,0,"",python,selection_command +80,148305,"input_pipeline/generate_atari_dataset.py",19295,0,"",python,selection_command +81,148458,"input_pipeline/generate_atari_dataset.py",19296,0,"",python,selection_command +82,148648,"input_pipeline/generate_atari_dataset.py",19305,0,"",python,selection_command 
+83,148779,"input_pipeline/generate_atari_dataset.py",19306,0,"",python,selection_command +84,148945,"input_pipeline/generate_atari_dataset.py",19313,0,"",python,selection_command +85,149594,"input_pipeline/generate_atari_dataset.py",19315,0,"",python,selection_command +86,149907,"input_pipeline/generate_atari_dataset.py",19314,0,"",python,selection_command +87,150028,"input_pipeline/generate_atari_dataset.py",19314,1,"[",python,selection_command +88,150102,"input_pipeline/generate_atari_dataset.py",19314,5,"[None",python,selection_command +89,150308,"input_pipeline/generate_atari_dataset.py",19314,6,"[None,",python,selection_command +90,150460,"input_pipeline/generate_atari_dataset.py",19314,12,"[None, ...])",python,selection_command +91,150806,"input_pipeline/generate_atari_dataset.py",19314,11,"[None, ...]",python,selection_command +92,150969,"input_pipeline/generate_atari_dataset.py",19314,11,"",python,content +93,151286,"input_pipeline/generate_atari_dataset.py",19399,0,"",python,selection_command +94,151896,"input_pipeline/generate_atari_dataset.py",19398,0,"",python,selection_command +95,152252,"input_pipeline/generate_atari_dataset.py",19397,0,"",python,selection_command +96,152668,"input_pipeline/generate_atari_dataset.py",19397,11,"",python,content +97,160920,"input_pipeline/generate_atari_dataset.py",19449,0,"",python,selection_command +98,161110,"input_pipeline/generate_atari_dataset.py",19451,0,"",python,selection_command +99,161147,"input_pipeline/generate_atari_dataset.py",19498,0,"",python,selection_command +100,161184,"input_pipeline/generate_atari_dataset.py",19581,0,"",python,selection_command +101,161213,"input_pipeline/generate_atari_dataset.py",19635,0,"",python,selection_command +102,161351,"input_pipeline/generate_atari_dataset.py",19718,0,"",python,selection_command +103,161509,"input_pipeline/generate_atari_dataset.py",19804,0,"",python,selection_command +104,161664,"input_pipeline/generate_atari_dataset.py",19867,0,"",python,selection_command +105,161957,"input_pipeline/generate_atari_dataset.py",19868,0,"\n ",python,content +106,162365,"input_pipeline/generate_atari_dataset.py",19897,0,"obs_chunks_data = [seq.astype(np.uint8) for seq in episode_obs_chunks]\nact_chunks_data = [act for act in episode_act_chunks]",python,content +107,162740,"input_pipeline/generate_atari_dataset.py",20020,0,"",python,selection_command +108,164337,"input_pipeline/generate_atari_dataset.py",19968,0,"",python,selection_command +109,165464,"input_pipeline/generate_atari_dataset.py",19968,0," ",python,content +110,165903,"input_pipeline/generate_atari_dataset.py",19972,0," ",python,content +111,166092,"input_pipeline/generate_atari_dataset.py",19976,0," ",python,content +112,166294,"input_pipeline/generate_atari_dataset.py",19980,0," ",python,content +113,166440,"input_pipeline/generate_atari_dataset.py",19984,0," ",python,content +114,166617,"input_pipeline/generate_atari_dataset.py",19988,0," ",python,content +115,166813,"input_pipeline/generate_atari_dataset.py",19992,0," ",python,content +116,167751,"input_pipeline/generate_atari_dataset.py",19995,0,"",python,selection_command +117,170657,"input_pipeline/generate_atari_dataset.py",19968,28,"",python,content +118,170659,"input_pipeline/generate_atari_dataset.py",19996,0,"",python,selection_command +119,170974,"input_pipeline/generate_atari_dataset.py",19869,153,"",python,content +120,170987,"input_pipeline/generate_atari_dataset.py",19867,0,"",python,selection_command 
+121,178453,"input_pipeline/generate_atari_dataset.py",19804,0,"",python,selection_command +122,178536,"input_pipeline/generate_atari_dataset.py",19698,0,"",python,selection_command +123,178981,"input_pipeline/generate_atari_dataset.py",19635,0,"",python,selection_command +124,179023,"input_pipeline/generate_atari_dataset.py",19561,0,"",python,selection_command +125,179188,"input_pipeline/generate_atari_dataset.py",19498,0,"",python,selection_command +126,187549,"input_pipeline/generate_atari_dataset.py",19452,47," obs_chunks_data = [",python,selection_command +127,187811,"input_pipeline/generate_atari_dataset.py",19452,154," obs_chunks_data = [\n np.concatenate(seq, axis=0).astype(np.uint8) for seq in episode_obs_chunks",python,selection_command +128,187853,"input_pipeline/generate_atari_dataset.py",19452,184," obs_chunks_data = [\n np.concatenate(seq, axis=0).astype(np.uint8) for seq in episode_obs_chunks\n ]",python,selection_command +129,187980,"input_pipeline/generate_atari_dataset.py",19452,290," obs_chunks_data = [\n np.concatenate(seq, axis=0).astype(np.uint8) for seq in episode_obs_chunks\n ]\n act_chunks_data = [np.concatenate(act, axis=0) for act in episode_act_chunks]",python,selection_command +130,188571,"input_pipeline/generate_atari_dataset.py",19452,290,"obs_chunks_data = [seq.astype(np.uint8) for seq in episode_obs_chunks]\nact_chunks_data = [act for act in episode_act_chunks]",python,content +131,189084,"input_pipeline/generate_atari_dataset.py",19686,0,"",python,selection_command +132,189879,"input_pipeline/generate_atari_dataset.py",19623,0,"",python,selection_command +133,190041,"input_pipeline/generate_atari_dataset.py",19569,0,"",python,selection_command +134,190172,"input_pipeline/generate_atari_dataset.py",19498,0,"",python,selection_command +135,191293,"input_pipeline/generate_atari_dataset.py",19500,0,"",python,selection_command +136,193834,"input_pipeline/generate_atari_dataset.py",19452,0,"",python,selection_command +137,194974,"input_pipeline/generate_atari_dataset.py",19452,0," ",python,content +138,209812,"input_pipeline/generate_atari_dataset.py",19456,0," ",python,content +139,209815,"input_pipeline/generate_atari_dataset.py",19460,0," ",python,content +140,209818,"input_pipeline/generate_atari_dataset.py",19464,0," ",python,content +141,209820,"input_pipeline/generate_atari_dataset.py",19468,0," ",python,content +142,210279,"input_pipeline/generate_atari_dataset.py",19472,0," ",python,content +143,210285,"input_pipeline/generate_atari_dataset.py",19476,0," ",python,content +144,210297,"input_pipeline/generate_atari_dataset.py",19479,0,"",python,selection_command +145,210298,"input_pipeline/generate_atari_dataset.py",19578,0,"",python,selection_command +146,210299,"input_pipeline/generate_atari_dataset.py",19551,0,"",python,selection_command +147,210300,"input_pipeline/generate_atari_dataset.py",19551,0," ",python,content +148,210303,"input_pipeline/generate_atari_dataset.py",19555,0," ",python,content +149,210305,"input_pipeline/generate_atari_dataset.py",19559,0," ",python,content +150,210308,"input_pipeline/generate_atari_dataset.py",19563,0," ",python,content +151,210312,"input_pipeline/generate_atari_dataset.py",19567,0," ",python,content +152,210314,"input_pipeline/generate_atari_dataset.py",19571,0," ",python,content +153,210317,"input_pipeline/generate_atari_dataset.py",19575,0," ",python,content +154,210319,"input_pipeline/generate_atari_dataset.py",19579,0," ",python,content +155,210323,"input_pipeline/generate_atari_dataset.py",19551,32," 
",python,content +156,210326,"input_pipeline/generate_atari_dataset.py",19578,0,"",python,selection_command +157,214542,"input_pipeline/generate_atari_dataset.py",19479,0,"",python,selection_command +158,215257,"input_pipeline/generate_atari_dataset.py",19480,0,"",python,selection_command +159,363603,"input_pipeline/generate_atari_dataset.py",20003,0,"",python,selection_command +160,363958,"input_pipeline/generate_atari_dataset.py",20236,0,"",python,selection_command +161,364172,"input_pipeline/generate_atari_dataset.py",20321,0,"",python,selection_command +162,364531,"input_pipeline/generate_atari_dataset.py",20413,0,"",python,selection_command +163,364597,"input_pipeline/generate_atari_dataset.py",20493,0,"",python,selection_command +164,364775,"input_pipeline/generate_atari_dataset.py",20525,0,"",python,selection_command +165,365200,"input_pipeline/generate_atari_dataset.py",20752,0,"",python,selection_command +166,365758,"input_pipeline/generate_atari_dataset.py",21120,0,"",python,selection_command +167,366271,"input_pipeline/generate_atari_dataset.py",21202,0,"",python,selection_command +168,366474,"input_pipeline/generate_atari_dataset.py",21261,0,"",python,selection_command +169,366481,"input_pipeline/generate_atari_dataset.py",21293,0,"",python,selection_command +170,366679,"input_pipeline/generate_atari_dataset.py",21390,0,"",python,selection_command +171,368038,"input_pipeline/generate_atari_dataset.py",21293,0,"",python,selection_command +172,368112,"input_pipeline/generate_atari_dataset.py",21261,0,"",python,selection_command +173,368963,"input_pipeline/generate_atari_dataset.py",21202,0,"",python,selection_command +174,369244,"input_pipeline/generate_atari_dataset.py",21166,58," current_split_idx += 1",python,selection_command +175,478785,"input_pipeline/generate_atari_dataset.py",21202,0,"",python,selection_command +176,479022,"input_pipeline/generate_atari_dataset.py",0,0,"",python,selection_command +177,483575,"input_pipeline/generate_atari_dataset.py",3743,0,"",python,selection_command +178,488689,"input_pipeline/generate_atari_dataset.py",28554,0,"",python,selection_command +179,490179,"input_pipeline/generate_atari_dataset.py",28550,0,"",python,selection_command +180,490424,"input_pipeline/generate_atari_dataset.py",28468,0,"",python,selection_command +181,490490,"input_pipeline/generate_atari_dataset.py",28464,0,"",python,selection_command +182,490491,"input_pipeline/generate_atari_dataset.py",28463,0,"",python,selection_command +183,490518,"input_pipeline/generate_atari_dataset.py",28462,0,"",python,selection_command +184,490543,"input_pipeline/generate_atari_dataset.py",28416,0,"",python,selection_command +185,490582,"input_pipeline/generate_atari_dataset.py",28414,0,"",python,selection_command +186,490610,"input_pipeline/generate_atari_dataset.py",28396,0,"",python,selection_command +187,490650,"input_pipeline/generate_atari_dataset.py",28377,0,"",python,selection_command +188,490698,"input_pipeline/generate_atari_dataset.py",28359,0,"",python,selection_command +189,490734,"input_pipeline/generate_atari_dataset.py",28338,0,"",python,selection_command +190,490750,"input_pipeline/generate_atari_dataset.py",28319,0,"",python,selection_command +191,490793,"input_pipeline/generate_atari_dataset.py",28276,0,"",python,selection_command +192,490817,"input_pipeline/generate_atari_dataset.py",28266,0,"",python,selection_command +193,490920,"input_pipeline/generate_atari_dataset.py",27009,0,"",python,selection_keyboard 
+194,492249,"input_pipeline/generate_atari_dataset.py",0,0,"",python,selection_command +195,494673,"input_pipeline/generate_atari_dataset.py",207,0,"",python,selection_command +196,495085,"input_pipeline/generate_atari_dataset.py",28554,0,"",python,selection_command +197,495731,"input_pipeline/generate_atari_dataset.py",27755,0,"",python,selection_keyboard +198,500499,"input_pipeline/generate_atari_dataset.py",26326,0,"",python,selection_keyboard +199,501672,"input_pipeline/generate_atari_dataset.py",24954,0,"",python,selection_keyboard +200,502012,"input_pipeline/generate_atari_dataset.py",23263,0,"",python,selection_keyboard +201,502245,"input_pipeline/generate_atari_dataset.py",21691,0,"",python,selection_keyboard +202,502408,"input_pipeline/generate_atari_dataset.py",19815,0,"",python,selection_keyboard +203,502631,"input_pipeline/generate_atari_dataset.py",17875,0,"",python,selection_keyboard +204,502727,"input_pipeline/generate_atari_dataset.py",16485,0,"",python,selection_keyboard +205,502874,"input_pipeline/generate_atari_dataset.py",15408,0,"",python,selection_keyboard +206,503486,"input_pipeline/generate_atari_dataset.py",14102,0,"",python,selection_keyboard +207,503713,"input_pipeline/generate_atari_dataset.py",12916,0,"",python,selection_keyboard +208,504026,"input_pipeline/generate_atari_dataset.py",11734,0,"",python,selection_keyboard +209,504553,"input_pipeline/generate_atari_dataset.py",12916,0,"",python,selection_keyboard +210,505036,"input_pipeline/generate_atari_dataset.py",14102,0,"",python,selection_keyboard +211,506012,"input_pipeline/generate_atari_dataset.py",28554,0,"",python,selection_command +212,506472,"input_pipeline/generate_atari_dataset.py",27755,0,"",python,selection_keyboard +213,507551,"input_pipeline/generate_atari_dataset.py",27856,0,"",python,selection_command +214,507705,"input_pipeline/generate_atari_dataset.py",27874,0,"",python,selection_command +215,507844,"input_pipeline/generate_atari_dataset.py",27875,0,"",python,selection_command +216,507979,"input_pipeline/generate_atari_dataset.py",27919,0,"",python,selection_command +217,508152,"input_pipeline/generate_atari_dataset.py",27954,0,"",python,selection_command +218,508672,"input_pipeline/generate_atari_dataset.py",27954,3,"""""""",python,selection_command +219,509302,"input_pipeline/generate_atari_dataset.py",27954,600,"""""""\nGenerates a dataset of random-action Atari episodes.\nEpisodes are saved individually as memory-mapped files for efficient loading.\nReplicates the behavior of generate_coinrun_dataset.py but for Atari.\n""""""\n\nfrom dataclasses import dataclass\n\nimport gymnasium as gym\nimport numpy as np\nimport tyro\nimport json\nimport os\nfrom cleanrl_utils.atari_wrappers import (\n ClipRewardEnv,\n EpisodicLifeEnv,\n FireResetEnv,\n MaxAndSkipEnv,\n NoopResetEnv,\n)\nfrom utils import save_chunks # type: ignore\n\n\n""""""\nOld dataset-only generator removed in favor of integrated Rainbow + capture mode.\n""""""\n",python,selection_command +220,591522,"input_pipeline/generate_atari_dataset.py",27953,601,"",python,content +221,591534,"input_pipeline/generate_atari_dataset.py",27931,0,"",python,selection_command +222,592452,"input_pipeline/generate_atari_dataset.py",27953,0,"\n""""""\nGenerates a dataset of random-action Atari episodes.\nEpisodes are saved individually as memory-mapped files for efficient loading.\nReplicates the behavior of generate_coinrun_dataset.py but for Atari.\n""""""\n\nfrom dataclasses import dataclass\n\nimport gymnasium as gym\nimport numpy as 
np\nimport tyro\nimport json\nimport os\nfrom cleanrl_utils.atari_wrappers import (\n ClipRewardEnv,\n EpisodicLifeEnv,\n FireResetEnv,\n MaxAndSkipEnv,\n NoopResetEnv,\n)\nfrom utils import save_chunks # type: ignore\n\n\n""""""\nOld dataset-only generator removed in favor of integrated Rainbow + capture mode.\n""""""\n",python,content +223,592456,"input_pipeline/generate_atari_dataset.py",27954,0,"",python,selection_command +224,593224,"input_pipeline/generate_atari_dataset.py",27954,3,"""""""",python,selection_command +225,593644,"input_pipeline/generate_atari_dataset.py",27954,600,"""""""\nGenerates a dataset of random-action Atari episodes.\nEpisodes are saved individually as memory-mapped files for efficient loading.\nReplicates the behavior of generate_coinrun_dataset.py but for Atari.\n""""""\n\nfrom dataclasses import dataclass\n\nimport gymnasium as gym\nimport numpy as np\nimport tyro\nimport json\nimport os\nfrom cleanrl_utils.atari_wrappers import (\n ClipRewardEnv,\n EpisodicLifeEnv,\n FireResetEnv,\n MaxAndSkipEnv,\n NoopResetEnv,\n)\nfrom utils import save_chunks # type: ignore\n\n\n""""""\nOld dataset-only generator removed in favor of integrated Rainbow + capture mode.\n""""""\n",python,selection_command +226,593973,"input_pipeline/generate_atari_dataset.py",27953,601,"",python,content +227,593992,"input_pipeline/generate_atari_dataset.py",27931,0,"",python,selection_command +228,598346,"input_pipeline/generate_atari_dataset.py",0,0,"",python,selection_command +229,623604,"TERMINAL",0,0,"",,terminal_focus +230,624316,"TERMINAL",0,0,"source /home/franz.srambical/cleanrl/.venv/bin/activate",,terminal_command +231,624316,"TERMINAL",0,0,"]633;C]0;franz.srambical@hai-login2:~/jafar",,terminal_output +232,626720,"TERMINAL",0,0,"git stash",,terminal_command +233,626764,"TERMINAL",0,0,"]633;C",,terminal_output +234,626884,"TERMINAL",0,0,"Saved working directory and index state WIP on gt-actions: 1b6b878 Update input_pipeline/generate_coinrun_dataset.py\r\n",,terminal_output +235,626910,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar",,terminal_output +236,645260,"input_pipeline/generate_atari_dataset.py",0,0,"Switched from branch 'gt-actions' to 'main'",python,git_branch_checkout +237,654605,"TERMINAL",0,0,"git stash pop",,terminal_command +238,654690,"TERMINAL",0,0,"]633;COn branch main\r\nYour branch is up to date with 'origin/main'.\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: .gitignore\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tfreeze.freeze\r\n\tinput_pipeline/generate_atari_dataset.py\r\n\tslurm/\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\nDropped refs/stash@{0} (8b3b07692810277ec73194a0a443ec19a82f2bce)\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +239,683739,"input_pipeline/generate_atari_dataset.py",240,0,"",python,selection_mouse +240,683747,"input_pipeline/generate_atari_dataset.py",239,0,"",python,selection_command +241,685266,"input_pipeline/generate_atari_dataset.py",0,0,"Switched from branch 'main' to 'generate-atari-dataset'",python,git_branch_checkout +242,686311,"input_pipeline/generate_coinrun_dataset.py",0,0,"""""""\nGenerates a dataset of random-action CoinRun episodes.\nEpisodes are saved individually as memory-mapped files for efficient loading.\n""""""\n\nfrom dataclasses import dataclass\n\nfrom gym3 import 
types_np\nimport numpy as np\nfrom procgen import ProcgenGym3Env\nimport tyro\nimport json\nimport os\nfrom utils import save_chunks\n\n\n@dataclass\nclass Args:\n num_episodes_train: int = 10000\n num_episodes_val: int = 500\n num_episodes_test: int = 500\n output_dir: str = ""data/coinrun_episodes""\n min_episode_length: int = 1000\n max_episode_length: int = 1000\n chunk_size: int = 160\n chunks_per_file: int = 100\n seed: int = 0\n\n\nargs = tyro.cli(Args)\nassert (\n args.max_episode_length >= args.min_episode_length\n), ""Maximum episode length must be greater than or equal to minimum episode length.""\n\nif args.min_episode_length < args.chunk_size:\n print(\n ""Warning: Minimum episode length is smaller than chunk size. Note that episodes shorter than the chunk size will be discarded.""\n )\n\n\n# --- Generate episodes ---\ndef generate_episodes(num_episodes, split):\n episode_idx = 0\n episode_metadata = []\n obs_chunks = []\n act_chunks = []\n file_idx = 0\n output_dir_split = os.path.join(args.output_dir, split)\n while episode_idx < num_episodes:\n seed = np.random.randint(0, 10000)\n env = ProcgenGym3Env(num=1, env_name=""coinrun"", start_level=seed)\n\n observations_seq = []\n actions_seq = []\n episode_obs_chunks = []\n episode_act_chunks = []\n\n # --- Run episode ---\n step_t = 0\n for step_t in range(args.max_episode_length):\n action = types_np.sample(env.ac_space, bshape=(env.num,))\n env.act(action)\n _, obs, first = env.observe()\n observations_seq.append(obs[""rgb""])\n actions_seq.append(action)\n if len(observations_seq) == args.chunk_size:\n episode_obs_chunks.append(observations_seq)\n episode_act_chunks.append(actions_seq)\n observations_seq = []\n actions_seq = []\n if first:\n break\n\n # --- Save episode ---\n if step_t + 1 >= args.min_episode_length:\n if observations_seq:\n if len(observations_seq) < args.chunk_size:\n print(\n f""Warning: Inconsistent chunk_sizes. Episode has {len(observations_seq)} frames, ""\n f""which is smaller than the requested chunk_size: {args.chunk_size}. 
""\n ""This might lead to performance degradation during training.""\n )\n episode_obs_chunks.append(observations_seq)\n episode_act_chunks.append(actions_seq)\n\n obs_chunks_data = [\n np.concatenate(seq, axis=0).astype(np.uint8)\n for seq in episode_obs_chunks\n ]\n act_chunks_data = [\n np.concatenate(act, axis=0) for act in episode_act_chunks\n ]\n\n obs_chunks.extend(obs_chunks_data)\n act_chunks.extend(act_chunks_data)\n\n ep_metadata, obs_chunks, file_idx, act_chunks = save_chunks(\n obs_chunks, file_idx, args.chunks_per_file, output_dir_split, act_chunks\n )\n episode_metadata.extend(ep_metadata)\n\n print(f""Episode {episode_idx} completed, length: {step_t + 1}."")\n episode_idx += 1\n else:\n print(f""Episode too short ({step_t + 1}), resampling..."")\n\n if len(obs_chunks) > 0:\n print(\n f""Warning: Dropping {len(obs_chunks)} chunks for consistent number of chunks per file."",\n ""Consider changing the chunk_size and chunks_per_file parameters to prevent data-loss."",\n )\n\n print(f""Done generating {split} split"")\n return episode_metadata\n\n\ndef get_action_space():\n env = ProcgenGym3Env(num=1, env_name=""coinrun"", start_level=0)\n return env.ac_space.eltype.n\n\n\ndef main():\n # Set random seed and create dataset directories\n np.random.seed(args.seed)\n # --- Generate episodes ---\n train_episode_metadata = generate_episodes(args.num_episodes_train, ""train"")\n val_episode_metadata = generate_episodes(args.num_episodes_val, ""val"")\n test_episode_metadata = generate_episodes(args.num_episodes_test, ""test"")\n\n # --- Save metadata ---\n metadata = {\n ""env"": ""coinrun"",\n ""num_actions"": get_action_space(),\n ""num_episodes_train"": args.num_episodes_train,\n ""num_episodes_val"": args.num_episodes_val,\n ""num_episodes_test"": args.num_episodes_test,\n ""avg_episode_len_train"": np.mean(\n [ep[""avg_seq_len""] for ep in train_episode_metadata]\n ),\n ""avg_episode_len_val"": np.mean(\n [ep[""avg_seq_len""] for ep in val_episode_metadata]\n ),\n ""avg_episode_len_test"": np.mean(\n [ep[""avg_seq_len""] for ep in test_episode_metadata]\n ),\n ""episode_metadata_train"": train_episode_metadata,\n ""episode_metadata_val"": val_episode_metadata,\n ""episode_metadata_test"": test_episode_metadata,\n }\n with open(os.path.join(args.output_dir, ""metadata.json""), ""w"") as f:\n json.dump(metadata, f)\n\n print(f""Done generating dataset."")\n\n\nif __name__ == ""__main__"":\n main()\n",python,tab +243,689037,"input_pipeline/generate_coinrun_dataset.py",662,4690,"assert args.max_episode_length >= args.min_episode_length, ""Maximum episode length must be greater than or equal to minimum episode length.""\n\nif args.min_episode_length < args.chunk_size:\n print(""Warning: Minimum episode length is smaller than chunk size. 
Note that episodes shorter than the chunk size will be discarded."")\n\n# --- Generate episodes ---\ndef generate_episodes(num_episodes, split):\n episode_idx = 0\n episode_metadata = []\n chunks = []\n file_idx = 0\n output_dir_split = os.path.join(args.output_dir, split)\n while episode_idx < num_episodes:\n seed = np.random.randint(0, 10000)\n env = ProcgenGym3Env(num=1, env_name=""coinrun"", start_level=seed)\n \n observations_seq = []\n episode_chunks = []\n\n # --- Run episode ---\n for step_t in range(args.max_episode_length):\n env.act(types_np.sample(env.ac_space, bshape=(env.num,)))\n _, obs, first = env.observe()\n observations_seq.append(obs[""rgb""])\n if len(observations_seq) == args.chunk_size:\n episode_chunks.append(observations_seq)\n observations_seq = []\n if first:\n break\n\n # --- Save episode ---\n if step_t + 1 >= args.min_episode_length:\n if len(observations_seq) < args.chunk_size:\n print(\n f""Warning: Inconsistent chunk_sizes. Episode has {len(observations_seq)} frames, ""\n f""which is smaller than the requested chunk_size: {args.chunk_size}. ""\n ""This might lead to performance degradation during training.""\n )\n episode_chunks.append(observations_seq)\n chunks_data = [np.concatenate(seq, axis=0).astype(np.uint8) for seq in episode_chunks]\n chunks.extend(chunks_data)\n\n\n ep_metadata, chunks, file_idx = save_chunks(chunks, file_idx, args.chunks_per_file, output_dir_split)\n episode_metadata.extend(ep_metadata)\n\n print(f""Episode {episode_idx} completed, length: {step_t + 1}."")\n episode_idx += 1\n else:\n print(f""Episode too short ({step_t + 1}), resampling..."")\n\n if len(chunks) > 0:\n print(f""Warning: Dropping {len(chunks)} chunks for consistent number of chunks per file."",\n ""Consider changing the chunk_size and chunks_per_file parameters to prevent data-loss."")\n\n print(f""Done generating {split} split"")\n return episode_metadata\n\n\ndef main():\n # Set random seed and create dataset directories\n np.random.seed(args.seed)\n # --- Generate episodes ---\n train_episode_metadata = generate_episodes(args.num_episodes_train, ""train"")\n val_episode_metadata = generate_episodes(args.num_episodes_val, ""val"")\n test_episode_metadata = generate_episodes(args.num_episodes_test, ""test"")\n\n # --- Save metadata ---\n metadata = {\n ""env"": ""coinrun"",\n ""num_episodes_train"": args.num_episodes_train,\n ""num_episodes_val"": args.num_episodes_val,\n ""num_episodes_test"": args.num_episodes_test,\n ""avg_episode_len_train"": np.mean([ep[""avg_seq_len""] for ep in train_episode_metadata]),\n ""avg_episode_len_val"": np.mean([ep[""avg_seq_len""] for ep in val_episode_metadata]),\n ""avg_episode_len_test"": np.mean([ep[""avg_seq_len""] for ep in test_episode_metadata]),\n ""episode_metadata_train"": train_episode_metadata,\n ""episode_metadata_val"": val_episode_metadata,\n ""episode_metadata_test"": test_episode_metadata,\n\n }\n with open(os.path.join(args.output_dir, ""metadata.json""), ""w"") as f:\n json.dump(metadata, f)\n\n print(f""Done generating dataset."")\n\nif __name__ == ""__main__"":\n main()\n\n",python,content +244,689042,"input_pipeline/generate_atari_dataset.py",0,0,"# docs and experiment results can be found at https://docs.cleanrl.dev/rl-algorithms/rainbow/#rainbow_ataripy\nimport collections\nimport math\nimport os\nimport random\nimport time\nfrom collections import deque\nfrom dataclasses import dataclass\n\nimport gymnasium as gym\nimport numpy as np\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as 
F\nimport torch.optim as optim\nimport tyro\nfrom typing import Optional, Any\nfrom torch.utils.tensorboard.writer import SummaryWriter\n\nfrom cleanrl_utils.atari_wrappers import (\n ClipRewardEnv,\n EpisodicLifeEnv,\n FireResetEnv,\n MaxAndSkipEnv,\n NoopResetEnv,\n)\ntry:\n from utils import save_chunks # type: ignore\nexcept Exception: # pragma: no cover\n from input_pipeline.utils import save_chunks # type: ignore\nimport json\n\n\n@dataclass\nclass Args:\n exp_name: str = os.path.basename(__file__)[: -len("".py"")]\n """"""the name of this experiment""""""\n seed: int = 1\n """"""seed of the experiment""""""\n torch_deterministic: bool = True\n """"""if toggled, `torch.backends.cudnn.deterministic=False`""""""\n cuda: bool = True\n """"""if toggled, cuda will be enabled by default""""""\n track: bool = False\n """"""if toggled, this experiment will be tracked with Weights and Biases""""""\n wandb_project_name: str = ""cleanRL""\n """"""the wandb's project name""""""\n wandb_entity: Optional[str] = None\n """"""the entity (team) of wandb's project""""""\n capture_video: bool = False\n """"""whether to capture videos of the agent performances (check out `videos` folder)""""""\n save_model: bool = False\n """"""whether to save model into the `runs/{run_name}` folder""""""\n upload_model: bool = False\n """"""whether to upload the saved model to huggingface""""""\n hf_entity: str = """"\n """"""the user or org name of the model repository from the Hugging Face Hub""""""\n\n env_id: str = ""BreakoutNoFrameskip-v4""\n """"""the id of the environment""""""\n total_timesteps: int = 10000000\n """"""total timesteps of the experiments""""""\n learning_rate: float = 0.0000625\n """"""the learning rate of the optimizer""""""\n num_envs: int = 1\n """"""the number of parallel game environments""""""\n buffer_size: int = 1000000\n """"""the replay memory buffer size""""""\n gamma: float = 0.99\n """"""the discount factor gamma""""""\n tau: float = 1.0\n """"""the target network update rate""""""\n target_network_frequency: int = 8000\n """"""the timesteps it takes to update the target network""""""\n batch_size: int = 32\n """"""the batch size of sample from the reply memory""""""\n start_e: float = 1\n """"""the starting epsilon for exploration""""""\n end_e: float = 0.01\n """"""the ending epsilon for exploration""""""\n exploration_fraction: float = 0.10\n """"""the fraction of `total-timesteps` it takes from start-e to go end-e""""""\n learning_starts: int = 80000\n """"""timestep to start learning""""""\n train_frequency: int = 4\n """"""the frequency of training""""""\n n_step: int = 3\n """"""the number of steps to look ahead for n-step Q learning""""""\n prioritized_replay_alpha: float = 0.5\n """"""alpha parameter for prioritized replay buffer""""""\n prioritized_replay_beta: float = 0.4\n """"""beta parameter for prioritized replay buffer""""""\n prioritized_replay_eps: float = 1e-6\n """"""epsilon parameter for prioritized replay buffer""""""\n n_atoms: int = 51\n """"""the number of atoms""""""\n v_min: float = -10\n """"""the return lower bound""""""\n v_max: float = 10\n """"""the return upper bound""""""\n\n # Dataset capture\n capture_dataset: bool = True\n num_episodes_train: int = 10000\n num_episodes_val: int = 500\n num_episodes_test: int = 500\n output_dir: str = ""data/atari_episodes""\n min_episode_length: int = 1\n chunk_size: int = 160\n chunks_per_file: int = 100\n stop_on_complete: bool = True\n\n\ndef make_env(env_id, seed, idx, capture_video, run_name):\n def 
thunk():\n if capture_video and idx == 0:\n env = gym.make(env_id, render_mode=""rgb_array"")\n env = gym.wrappers.RecordVideo(env, f""videos/{run_name}"")\n else:\n env = gym.make(env_id)\n env = gym.wrappers.RecordEpisodeStatistics(env)\n\n env = NoopResetEnv(env, noop_max=30)\n env = MaxAndSkipEnv(env, skip=4)\n env = EpisodicLifeEnv(env)\n if ""FIRE"" in env.unwrapped.get_action_meanings():\n env = FireResetEnv(env)\n env = ClipRewardEnv(env)\n env = gym.wrappers.ResizeObservation(env, (84, 84))\n env = gym.wrappers.GrayScaleObservation(env)\n env = gym.wrappers.FrameStack(env, 4)\n\n env.action_space.seed(seed)\n return env\n\n return thunk\n\n\nclass NoisyLinear(nn.Module):\n def __init__(self, in_features, out_features, std_init=0.5):\n super().__init__()\n self.in_features = in_features\n self.out_features = out_features\n self.std_init = std_init\n\n self.weight_mu = nn.Parameter(torch.FloatTensor(out_features, in_features))\n self.weight_sigma = nn.Parameter(torch.FloatTensor(out_features, in_features))\n self.register_buffer(""weight_epsilon"", torch.FloatTensor(out_features, in_features))\n self.bias_mu = nn.Parameter(torch.FloatTensor(out_features))\n self.bias_sigma = nn.Parameter(torch.FloatTensor(out_features))\n self.register_buffer(""bias_epsilon"", torch.FloatTensor(out_features))\n # factorized gaussian noise\n self.reset_parameters()\n self.reset_noise()\n\n def reset_parameters(self):\n mu_range = 1 / math.sqrt(self.in_features)\n self.weight_mu.data.uniform_(-mu_range, mu_range)\n self.weight_sigma.data.fill_(self.std_init / math.sqrt(self.in_features))\n self.bias_mu.data.uniform_(-mu_range, mu_range)\n self.bias_sigma.data.fill_(self.std_init / math.sqrt(self.out_features))\n\n def reset_noise(self):\n self.weight_epsilon.normal_()\n self.bias_epsilon.normal_()\n\n def forward(self, input):\n if self.training:\n weight = self.weight_mu + self.weight_sigma * self.weight_epsilon\n bias = self.bias_mu + self.bias_sigma * self.bias_epsilon\n else:\n weight = self.weight_mu\n bias = self.bias_mu\n return F.linear(input, weight, bias)\n\n\n# ALGO LOGIC: initialize agent here:\nclass NoisyDuelingDistributionalNetwork(nn.Module):\n def __init__(self, env, n_atoms, v_min, v_max):\n super().__init__()\n self.n_atoms = n_atoms\n self.v_min = v_min\n self.v_max = v_max\n self.delta_z = (v_max - v_min) / (n_atoms - 1)\n self.n_actions = env.single_action_space.n\n self.register_buffer(""support"", torch.linspace(v_min, v_max, n_atoms))\n\n self.network = nn.Sequential(\n nn.Conv2d(4, 32, 8, stride=4),\n nn.ReLU(),\n nn.Conv2d(32, 64, 4, stride=2),\n nn.ReLU(),\n nn.Conv2d(64, 64, 3, stride=1),\n nn.ReLU(),\n nn.Flatten(),\n )\n conv_output_size = 3136\n\n self.value_head = nn.Sequential(NoisyLinear(conv_output_size, 512), nn.ReLU(), NoisyLinear(512, n_atoms))\n\n self.advantage_head = nn.Sequential(\n NoisyLinear(conv_output_size, 512), nn.ReLU(), NoisyLinear(512, n_atoms * self.n_actions)\n )\n\n def forward(self, x):\n h = self.network(x / 255.0)\n value = self.value_head(h).view(-1, 1, self.n_atoms)\n advantage = self.advantage_head(h).view(-1, self.n_actions, self.n_atoms)\n q_atoms = value + advantage - advantage.mean(dim=1, keepdim=True)\n q_dist = F.softmax(q_atoms, dim=2)\n return q_dist\n\n def reset_noise(self):\n for layer in self.value_head:\n if isinstance(layer, NoisyLinear):\n layer.reset_noise()\n for layer in self.advantage_head:\n if isinstance(layer, NoisyLinear):\n layer.reset_noise()\n\n\nPrioritizedBatch = collections.namedtuple(\n ""PrioritizedBatch"", 
[""observations"", ""actions"", ""rewards"", ""next_observations"", ""dones"", ""indices"", ""weights""]\n)\n\n\n# adapted from: https://github.com/openai/baselines/blob/master/baselines/common/segment_tree.py\nclass SumSegmentTree:\n def __init__(self, capacity):\n self.capacity = capacity\n self.tree_size = 2 * capacity - 1\n self.tree = np.zeros(self.tree_size, dtype=np.float32)\n\n def _propagate(self, idx):\n parent = (idx - 1) // 2\n while parent >= 0:\n self.tree[parent] = self.tree[parent * 2 + 1] + self.tree[parent * 2 + 2]\n parent = (parent - 1) // 2\n\n def update(self, idx, value):\n tree_idx = idx + self.capacity - 1\n self.tree[tree_idx] = value\n self._propagate(tree_idx)\n\n def total(self):\n return self.tree[0]\n\n def retrieve(self, value):\n idx = 0\n while idx * 2 + 1 < self.tree_size:\n left = idx * 2 + 1\n right = left + 1\n if value <= self.tree[left]:\n idx = left\n else:\n value -= self.tree[left]\n idx = right\n return idx - (self.capacity - 1)\n\n\n# adapted from: https://github.com/openai/baselines/blob/master/baselines/common/segment_tree.py\nclass MinSegmentTree:\n def __init__(self, capacity):\n self.capacity = capacity\n self.tree_size = 2 * capacity - 1\n self.tree = np.full(self.tree_size, float(""inf""), dtype=np.float32)\n\n def _propagate(self, idx):\n parent = (idx - 1) // 2\n while parent >= 0:\n self.tree[parent] = np.minimum(self.tree[parent * 2 + 1], self.tree[parent * 2 + 2])\n parent = (parent - 1) // 2\n\n def update(self, idx, value):\n tree_idx = idx + self.capacity - 1\n self.tree[tree_idx] = value\n self._propagate(tree_idx)\n\n def min(self):\n return self.tree[0]\n\n\nclass PrioritizedReplayBuffer:\n def __init__(self, capacity, obs_shape, device, n_step, gamma, alpha=0.6, beta=0.4, eps=1e-6):\n self.capacity = capacity\n self.device = device\n self.n_step = n_step\n self.gamma = gamma\n self.alpha = alpha\n self.beta = beta\n self.eps = eps\n\n self.buffer_obs = np.zeros((capacity,) + obs_shape, dtype=np.uint8)\n self.buffer_next_obs = np.zeros((capacity,) + obs_shape, dtype=np.uint8)\n self.buffer_actions = np.zeros(capacity, dtype=np.int64)\n self.buffer_rewards = np.zeros(capacity, dtype=np.float32)\n self.buffer_dones = np.zeros(capacity, dtype=np.bool_)\n\n self.pos = 0\n self.size = 0\n self.max_priority = 1.0\n\n self.sum_tree = SumSegmentTree(capacity)\n self.min_tree = MinSegmentTree(capacity)\n\n # For n-step returns\n self.n_step_buffer = deque(maxlen=n_step)\n\n def _get_n_step_info(self):\n reward = 0.0\n next_obs = self.n_step_buffer[-1][3]\n done = self.n_step_buffer[-1][4]\n\n for i in range(len(self.n_step_buffer)):\n reward += self.gamma**i * self.n_step_buffer[i][2]\n if self.n_step_buffer[i][4]:\n next_obs = self.n_step_buffer[i][3]\n done = True\n break\n return reward, next_obs, done\n\n def add(self, obs, action, reward, next_obs, done):\n self.n_step_buffer.append((obs, action, reward, next_obs, done))\n\n if len(self.n_step_buffer) < self.n_step:\n return\n\n reward, next_obs, done = self._get_n_step_info()\n obs = self.n_step_buffer[0][0]\n action = self.n_step_buffer[0][1]\n\n idx = self.pos\n self.buffer_obs[idx] = obs\n self.buffer_next_obs[idx] = next_obs\n self.buffer_actions[idx] = action\n self.buffer_rewards[idx] = reward\n self.buffer_dones[idx] = done\n\n priority = self.max_priority**self.alpha\n self.sum_tree.update(idx, priority)\n self.min_tree.update(idx, priority)\n\n self.pos = (self.pos + 1) % self.capacity\n self.size = min(self.size + 1, self.capacity)\n\n if done:\n 
self.n_step_buffer.clear()\n\n def sample(self, batch_size):\n indices = []\n p_total = self.sum_tree.total()\n segment = p_total / batch_size\n\n for i in range(batch_size):\n a = segment * i\n b = segment * (i + 1)\n upperbound = np.random.uniform(a, b)\n idx = self.sum_tree.retrieve(upperbound)\n indices.append(idx)\n\n samples = {\n ""observations"": torch.from_numpy(self.buffer_obs[indices]).to(self.device),\n ""actions"": torch.from_numpy(self.buffer_actions[indices]).to(self.device).unsqueeze(1),\n ""rewards"": torch.from_numpy(self.buffer_rewards[indices]).to(self.device).unsqueeze(1),\n ""next_observations"": torch.from_numpy(self.buffer_next_obs[indices]).to(self.device),\n ""dones"": torch.from_numpy(self.buffer_dones[indices]).to(self.device).unsqueeze(1),\n }\n\n probs = np.array([self.sum_tree.tree[idx + self.capacity - 1] for idx in indices])\n weights = (self.size * probs / p_total) ** -self.beta\n weights = weights / weights.max()\n samples[""weights""] = torch.from_numpy(weights).to(self.device).unsqueeze(1)\n samples[""indices""] = indices\n\n return PrioritizedBatch(**samples)\n\n def update_priorities(self, indices, priorities):\n priorities = np.abs(priorities) + self.eps\n self.max_priority = max(self.max_priority, priorities.max())\n\n for idx, priority in zip(indices, priorities):\n priority = priority**self.alpha\n self.sum_tree.update(idx, priority)\n self.min_tree.update(idx, priority)\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n assert args.num_envs == 1, ""vectorized envs are not supported at the moment""\n run_name = f""{args.env_id}__{args.exp_name}__{args.seed}__{int(time.time())}""\n if args.track:\n import wandb\n\n wandb.init(\n project=args.wandb_project_name,\n entity=args.wandb_entity,\n sync_tensorboard=True,\n config=vars(args),\n name=run_name,\n monitor_gym=True,\n save_code=True,\n )\n writer = SummaryWriter(f""runs/{run_name}"")\n writer.add_text(\n ""hyperparameters"",\n ""|param|value|\n|-|-|\n%s"" % (""\n"".join([f""|{key}|{value}|"" for key, value in vars(args).items()])),\n )\n\n # TRY NOT TO MODIFY: seeding\n random.seed(args.seed)\n np.random.seed(args.seed)\n torch.manual_seed(args.seed)\n torch.backends.cudnn.deterministic = args.torch_deterministic\n\n device = torch.device(""cuda"" if torch.cuda.is_available() and args.cuda else ""cpu"")\n\n # env setup\n envs = gym.vector.SyncVectorEnv(\n [make_env(args.env_id, args.seed + i, i, args.capture_video, run_name) for i in range(args.num_envs)]\n )\n assert isinstance(envs.single_action_space, gym.spaces.Discrete), ""only discrete action space is supported""\n\n q_network = NoisyDuelingDistributionalNetwork(envs, args.n_atoms, args.v_min, args.v_max).to(device)\n optimizer = optim.Adam(q_network.parameters(), lr=args.learning_rate, eps=1.5e-4)\n target_network = NoisyDuelingDistributionalNetwork(envs, args.n_atoms, args.v_min, args.v_max).to(device)\n target_network.load_state_dict(q_network.state_dict())\n\n rb = PrioritizedReplayBuffer(\n args.buffer_size,\n envs.single_observation_space.shape,\n device,\n args.n_step,\n args.gamma,\n args.prioritized_replay_alpha,\n args.prioritized_replay_beta,\n args.prioritized_replay_eps,\n )\n\n # dataset capture state\n split_targets = {\n ""train"": args.num_episodes_train,\n ""val"": args.num_episodes_val,\n ""test"": args.num_episodes_test,\n }\n # Determine splits to run (order: train -> val -> test)\n splits_in_order = [s for s in [""train"", ""val"", ""test""] if split_targets[s] > 0]\n\n episodes_captured_per_split: dict[str, 
int] = {s: 0 for s in [""train"", ""val"", ""test""]}\n file_idx_by_split: dict[str, int] = {s: 0 for s in [""train"", ""val"", ""test""]}\n episode_metadata_by_split: dict[str, list[dict]] = {s: [] for s in [""train"", ""val"", ""test""]}\n\n obs_chunks: list[np.ndarray] = []\n act_chunks: list[np.ndarray] = []\n\n current_split_idx = 0\n current_split = splits_in_order[0]\n split_dir = os.path.join(args.output_dir, current_split)\n if args.capture_dataset:\n os.makedirs(split_dir, exist_ok=True)\n\n start_time = time.time()\n\n # TRY NOT TO MODIFY: start the game\n obs, _ = envs.reset(seed=args.seed)\n observations_seq: list[np.ndarray] = []\n actions_seq: list[np.ndarray] = []\n for global_step in range(args.total_timesteps):\n # anneal PER beta to 1\n rb.beta = min(\n 1.0, args.prioritized_replay_beta + global_step * (1.0 - args.prioritized_replay_beta) / args.total_timesteps\n )\n\n # ALGO LOGIC: put action logic here\n with torch.no_grad():\n q_dist = q_network(torch.Tensor(obs).to(device))\n q_values = torch.sum(q_dist * q_network.support, dim=2)\n actions = torch.argmax(q_values, dim=1).cpu().numpy()\n\n # TRY NOT TO MODIFY: execute the game and log data.\n next_obs, rewards, terminations, truncations, infos = envs.step(actions)\n\n if args.capture_dataset:\n observations_seq.append(next_obs.astype(np.uint8))\n actions_seq.append(actions.astype(np.int64))\n\n if ""final_info"" in infos:\n for info in infos[""final_info""]:\n if info and ""episode"" in info:\n print(f""global_step={global_step}, episodic_return={info['episode']['r']}"")\n writer.add_scalar(""charts/episodic_return"", info[""episode""][""r""], global_step)\n writer.add_scalar(""charts/episodic_length"", info[""episode""][""l""], global_step)\n\n continue_capturing_multi = any(\n episodes_captured_per_split[s] < split_targets[s]\n for s in splits_in_order\n )\n if args.capture_dataset and continue_capturing_multi:\n current_len = len(observations_seq)\n if current_len >= args.min_episode_length:\n frames = np.concatenate(observations_seq, axis=0).astype(np.uint8)\n acts = np.concatenate(actions_seq, axis=0).astype(np.int64)\n\n episode_obs_chunks = []\n episode_act_chunks = []\n start_idx = 0\n while start_idx < current_len:\n end_idx = min(start_idx + args.chunk_size, current_len)\n if end_idx - start_idx < args.chunk_size:\n print(\n f""Warning: Inconsistent chunk_sizes. Episode has {current_len} frames, ""\n f""which is smaller than the requested chunk_size: {args.chunk_size}. 
""\n ""This might lead to performance degradation during training.""\n )\n episode_obs_chunks.append(frames[start_idx:end_idx])\n episode_act_chunks.append(acts[start_idx:end_idx])\n start_idx = end_idx\n\n obs_chunks_data = [seq.astype(np.uint8) for seq in episode_obs_chunks]\n act_chunks_data = [act for act in episode_act_chunks]\n obs_chunks.extend(obs_chunks_data)\n act_chunks.extend(act_chunks_data)\n\n # Save to the active split\n ep_metadata, obs_chunks, next_file_idx, act_chunks = save_chunks(\n obs_chunks,\n file_idx_by_split[current_split],\n args.chunks_per_file,\n split_dir,\n act_chunks,\n )\n file_idx_by_split[current_split] = next_file_idx\n episode_metadata_by_split[current_split].extend(ep_metadata)\n\n episodes_captured_per_split[current_split] += 1\n\n if episodes_captured_per_split[current_split] >= split_targets[current_split]:\n if len(obs_chunks) > 0:\n print(\n f""Warning: Dropping {len(obs_chunks)} chunks before switching split '"",{current_split},""' for consistent number of chunks per file."",\n ""Consider changing the chunk_size and chunks_per_file parameters to prevent data-loss."",\n )\n obs_chunks = []\n act_chunks = []\n if current_split_idx + 1 < len(splits_in_order):\n current_split_idx += 1\n current_split = splits_in_order[current_split_idx]\n split_dir = os.path.join(args.output_dir, current_split)\n os.makedirs(split_dir, exist_ok=True)\n else:\n print(f""Episode too short ({current_len}), skipping capture..."")\n\n observations_seq = []\n actions_seq = []\n\n # TRY NOT TO MODIFY: save data to reply buffer; handle `final_observation`\n real_next_obs = next_obs.copy()\n for idx, trunc in enumerate(truncations):\n if trunc:\n real_next_obs[idx] = infos[""final_observation""][idx]\n rb.add(obs, actions, rewards, real_next_obs, terminations)\n\n # TRY NOT TO MODIFY: CRUCIAL step easy to overlook\n obs = next_obs\n\n # ALGO LOGIC: training.\n if global_step > args.learning_starts:\n if global_step % args.train_frequency == 0:\n # reset the noise for both networks\n q_network.reset_noise()\n target_network.reset_noise()\n data = rb.sample(args.batch_size)\n\n with torch.no_grad():\n next_dist = target_network(data.next_observations) # [B, num_actions, n_atoms]\n support = target_network.support # [n_atoms]\n next_q_values = torch.sum(next_dist * support, dim=2) # [B, num_actions]\n\n # double q-learning\n next_dist_online = q_network(data.next_observations) # [B, num_actions, n_atoms]\n next_q_online = torch.sum(next_dist_online * support, dim=2) # [B, num_actions]\n best_actions = torch.argmax(next_q_online, dim=1) # [B]\n next_pmfs = next_dist[torch.arange(args.batch_size), best_actions] # [B, n_atoms]\n\n # compute the n-step Bellman update.\n gamma_n = args.gamma**args.n_step\n next_atoms = data.rewards + gamma_n * support * (1 - data.dones.float())\n tz = next_atoms.clamp(q_network.v_min, q_network.v_max)\n\n # projection\n delta_z = q_network.delta_z\n b = (tz - q_network.v_min) / delta_z # shape: [B, n_atoms]\n l = b.floor().clamp(0, args.n_atoms - 1)\n u = b.ceil().clamp(0, args.n_atoms - 1)\n\n # (l == u).float() handles the case where bj is exactly an integer\n # example bj = 1, then the upper ceiling should be uj= 2, and lj= 1\n d_m_l = (u.float() + (l == b).float() - b) * next_pmfs # [B, n_atoms]\n d_m_u = (b - l) * next_pmfs # [B, n_atoms]\n\n target_pmfs = torch.zeros_like(next_pmfs)\n for i in range(target_pmfs.size(0)):\n target_pmfs[i].index_add_(0, l[i].long(), d_m_l[i])\n target_pmfs[i].index_add_(0, u[i].long(), d_m_u[i])\n\n dist = 
q_network(data.observations) # [B, num_actions, n_atoms]\n pred_dist = dist.gather(1, data.actions.unsqueeze(-1).expand(-1, -1, args.n_atoms)).squeeze(1)\n log_pred = torch.log(pred_dist.clamp(min=1e-5, max=1 - 1e-5))\n\n loss_per_sample = -(target_pmfs * log_pred).sum(dim=1)\n loss = (loss_per_sample * data.weights.squeeze()).mean()\n\n # update priorities\n new_priorities = loss_per_sample.detach().cpu().numpy()\n rb.update_priorities(data.indices, new_priorities)\n\n if global_step % 100 == 0:\n writer.add_scalar(""losses/td_loss"", loss.item(), global_step)\n q_values = (pred_dist * q_network.support).sum(dim=1) # [B]\n writer.add_scalar(""losses/q_values"", q_values.mean().item(), global_step)\n sps = int(global_step / (time.time() - start_time))\n print(""SPS:"", sps)\n writer.add_scalar(""charts/SPS"", sps, global_step)\n writer.add_scalar(""charts/beta"", rb.beta, global_step)\n\n # optimize the model\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n\n # update target network\n if global_step % args.target_network_frequency == 0:\n for target_param, param in zip(target_network.parameters(), q_network.parameters()):\n target_param.data.copy_(args.tau * param.data + (1.0 - args.tau) * target_param.data)\n\n # optional early stop on dataset completion\n if args.capture_dataset and args.stop_on_complete:\n all_done = all(\n episodes_captured_per_split[s] >= split_targets[s]\n for s in splits_in_order\n ) and len(splits_in_order) > 0\n if all_done:\n break\n\n envs.close()\n writer.close()\n\n # write metadata for dataset\n if args.capture_dataset:\n if len(obs_chunks) > 0:\n print(\n f""Warning: Dropping {len(obs_chunks)} chunks for consistent number of chunks per file."",\n ""Consider changing the chunk_size and chunks_per_file parameters to prevent data-loss."",\n )\n\n os.makedirs(args.output_dir, exist_ok=True)\n metadata_path = os.path.join(args.output_dir, ""metadata.json"")\n if os.path.exists(metadata_path):\n try:\n with open(metadata_path, ""r"") as f:\n metadata = json.load(f)\n except Exception:\n metadata = {}\n else:\n metadata = {}\n\n metadata.setdefault(""env"", args.env_id)\n metadata.setdefault(""num_actions"", int(envs.single_action_space.n))\n for split in [""train"", ""val"", ""test""]:\n metadata.setdefault(f""num_episodes_{split}"", 0)\n metadata.setdefault(f""avg_episode_len_{split}"", 0.0)\n metadata.setdefault(f""episode_metadata_{split}"", [])\n\n for split_key in splits_in_order:\n ep_meta_list = episode_metadata_by_split[split_key]\n if ep_meta_list:\n metadata[f""episode_metadata_{split_key}""].extend(ep_meta_list)\n metadata[f""num_episodes_{split_key}""] = len(metadata[f""episode_metadata_{split_key}""])\n metadata[f""avg_episode_len_{split_key}""] = float(\n np.mean([ep[""avg_seq_len""] for ep in metadata[f""episode_metadata_{split_key}""]])\n )\n\n with open(metadata_path, ""w"") as f:\n json.dump(metadata, f)",python,tab +245,689687,"input_pipeline/generate_atari_dataset.py",968,0,"",python,selection_keyboard +246,689842,"input_pipeline/generate_atari_dataset.py",2265,0,"",python,selection_keyboard +247,690385,"input_pipeline/generate_atari_dataset.py",3449,0,"",python,selection_keyboard +248,690595,"input_pipeline/generate_atari_dataset.py",4532,0,"",python,selection_keyboard +249,691916,"input_pipeline/generate_atari_dataset.py",5810,0,"",python,selection_keyboard +250,692439,"input_pipeline/generate_atari_dataset.py",6895,0,"",python,selection_keyboard 
+251,692598,"input_pipeline/generate_atari_dataset.py",8018,0,"",python,selection_keyboard +252,692756,"input_pipeline/generate_atari_dataset.py",8995,0,"",python,selection_keyboard +253,692876,"input_pipeline/generate_atari_dataset.py",10024,0,"",python,selection_keyboard +254,693024,"input_pipeline/generate_atari_dataset.py",11108,0,"",python,selection_keyboard +255,693177,"input_pipeline/generate_atari_dataset.py",12067,0,"",python,selection_keyboard +256,693348,"input_pipeline/generate_atari_dataset.py",13451,0,"",python,selection_keyboard +257,693456,"input_pipeline/generate_atari_dataset.py",14470,0,"",python,selection_keyboard +258,693571,"input_pipeline/generate_atari_dataset.py",15710,0,"",python,selection_keyboard +259,693825,"input_pipeline/generate_atari_dataset.py",16920,0,"",python,selection_keyboard +260,693852,"input_pipeline/generate_atari_dataset.py",18541,0,"",python,selection_keyboard +261,693889,"input_pipeline/generate_atari_dataset.py",20433,0,"",python,selection_keyboard +262,693916,"input_pipeline/generate_atari_dataset.py",22170,0,"",python,selection_keyboard +263,693967,"input_pipeline/generate_atari_dataset.py",23923,0,"",python,selection_keyboard +264,693994,"input_pipeline/generate_atari_dataset.py",25607,0,"",python,selection_keyboard +265,694043,"input_pipeline/generate_atari_dataset.py",26845,0,"",python,selection_keyboard +266,694049,"input_pipeline/generate_atari_dataset.py",27953,0,"",python,selection_keyboard +267,694373,"input_pipeline/generate_atari_dataset.py",26545,0,"",python,selection_keyboard +268,694627,"input_pipeline/generate_atari_dataset.py",25317,0,"",python,selection_keyboard +269,694681,"input_pipeline/generate_atari_dataset.py",23500,0,"",python,selection_keyboard +270,694697,"input_pipeline/generate_atari_dataset.py",21920,0,"",python,selection_keyboard +271,694735,"input_pipeline/generate_atari_dataset.py",20107,0,"",python,selection_keyboard +272,694798,"input_pipeline/generate_atari_dataset.py",18106,0,"",python,selection_keyboard +273,694800,"input_pipeline/generate_atari_dataset.py",16644,0,"",python,selection_keyboard +274,694838,"input_pipeline/generate_atari_dataset.py",15538,0,"",python,selection_keyboard +275,694861,"input_pipeline/generate_atari_dataset.py",14237,0,"",python,selection_keyboard +276,694901,"input_pipeline/generate_atari_dataset.py",13229,0,"",python,selection_keyboard +277,694927,"input_pipeline/generate_atari_dataset.py",11872,0,"",python,selection_keyboard +278,694971,"input_pipeline/generate_atari_dataset.py",10823,0,"",python,selection_keyboard +279,695023,"input_pipeline/generate_atari_dataset.py",9801,0,"",python,selection_keyboard +280,695044,"input_pipeline/generate_atari_dataset.py",8772,0,"",python,selection_keyboard +281,695060,"input_pipeline/generate_atari_dataset.py",7807,0,"",python,selection_keyboard +282,695089,"input_pipeline/generate_atari_dataset.py",6657,0,"",python,selection_keyboard +283,695128,"input_pipeline/generate_atari_dataset.py",5454,0,"",python,selection_keyboard +284,695158,"input_pipeline/generate_atari_dataset.py",4245,0,"",python,selection_keyboard +285,695205,"input_pipeline/generate_atari_dataset.py",3262,0,"",python,selection_keyboard +286,695243,"input_pipeline/generate_atari_dataset.py",2017,0,"",python,selection_keyboard +287,695278,"input_pipeline/generate_atari_dataset.py",793,0,"",python,selection_keyboard +288,695315,"input_pipeline/generate_atari_dataset.py",34,0,"",python,selection_keyboard 
+289,695343,"input_pipeline/generate_atari_dataset.py",0,0,"",python,selection_keyboard +290,696762,"input_pipeline/generate_atari_dataset.py",793,0,"",python,selection_keyboard +291,698287,"input_pipeline/generate_atari_dataset.py",1983,0,"",python,selection_keyboard +292,699235,"input_pipeline/generate_atari_dataset.py",3228,0,"",python,selection_keyboard +293,699597,"input_pipeline/generate_atari_dataset.py",4211,0,"",python,selection_keyboard +294,702664,"input_pipeline/generate_atari_dataset.py",0,0,"",python,selection_command +295,703619,"input_pipeline/generate_atari_dataset.py",2117,0,"",python,selection_command +296,704807,"input_pipeline/generate_atari_dataset.py",16521,0,"",python,selection_command +297,704990,"input_pipeline/generate_atari_dataset.py",17199,0,"",python,selection_command +298,705392,"input_pipeline/generate_atari_dataset.py",2117,0,"",python,selection_command +299,705398,"input_pipeline/generate_atari_dataset.py",16521,0,"",python,selection_command +300,707256,"input_pipeline/generate_atari_dataset.py",24225,0,"",python,selection_command +301,708728,"input_pipeline/generate_atari_dataset.py",0,0,"",python,selection_command +302,711417,"input_pipeline/generate_atari_dataset.py",1842,0,"",python,selection_command +303,718259,"input_pipeline/generate_atari_dataset.py",1362,0,"",python,selection_mouse +304,718259,"input_pipeline/generate_atari_dataset.py",1361,0,"",python,selection_command +305,718762,"input_pipeline/generate_atari_dataset.py",0,0,"",python,selection_command +306,724008,"input_pipeline/generate_atari_dataset.py",37,0,"",python,selection_command +307,724036,"input_pipeline/generate_atari_dataset.py",109,0,"\n",python,content +308,725378,"input_pipeline/generate_atari_dataset.py",110,1,"",python,content +309,725495,"input_pipeline/generate_atari_dataset.py",0,0,"",python,selection_command +310,726058,"input_pipeline/generate_atari_dataset.py",0,0,"\n",python,content +311,728083,"input_pipeline/generate_atari_dataset.py",0,0,"#",python,content +312,728083,"input_pipeline/generate_atari_dataset.py",1,0,"",python,selection_keyboard +313,728530,"input_pipeline/generate_atari_dataset.py",1,0," ",python,content +314,728530,"input_pipeline/generate_atari_dataset.py",2,0,"",python,selection_keyboard +315,728685,"input_pipeline/generate_atari_dataset.py",2,0,"f",python,content +316,728685,"input_pipeline/generate_atari_dataset.py",3,0,"",python,selection_keyboard +317,728802,"input_pipeline/generate_atari_dataset.py",3,0,"r",python,content +318,728802,"input_pipeline/generate_atari_dataset.py",4,0,"",python,selection_keyboard +319,728804,"input_pipeline/generate_atari_dataset.py",4,0,"o",python,content +320,728804,"input_pipeline/generate_atari_dataset.py",5,0,"",python,selection_keyboard +321,728909,"input_pipeline/generate_atari_dataset.py",5,0,"m",python,content +322,728909,"input_pipeline/generate_atari_dataset.py",6,0,"",python,selection_keyboard +323,729046,"input_pipeline/generate_atari_dataset.py",6,0," ",python,content +324,729046,"input_pipeline/generate_atari_dataset.py",7,0,"",python,selection_keyboard +325,730986,"input_pipeline/generate_atari_dataset.py",6,0,"",python,selection_command +326,732579,"input_pipeline/generate_atari_dataset.py",2,0,"",python,selection_command +327,733081,"input_pipeline/generate_atari_dataset.py",2,0,"a",python,content +328,733081,"input_pipeline/generate_atari_dataset.py",3,0,"",python,selection_keyboard +329,733090,"input_pipeline/generate_atari_dataset.py",3,0,"d",python,content 
+330,733091,"input_pipeline/generate_atari_dataset.py",4,0,"",python,selection_keyboard +331,733236,"input_pipeline/generate_atari_dataset.py",4,0,"a",python,content +332,733236,"input_pipeline/generate_atari_dataset.py",5,0,"",python,selection_keyboard +333,733340,"input_pipeline/generate_atari_dataset.py",5,0,"p",python,content +334,733341,"input_pipeline/generate_atari_dataset.py",6,0,"",python,selection_keyboard +335,733516,"input_pipeline/generate_atari_dataset.py",6,0,"t",python,content +336,733516,"input_pipeline/generate_atari_dataset.py",7,0,"",python,selection_keyboard +337,733691,"input_pipeline/generate_atari_dataset.py",7,0,"e",python,content +338,733691,"input_pipeline/generate_atari_dataset.py",8,0,"",python,selection_keyboard +339,733887,"input_pipeline/generate_atari_dataset.py",8,0,"d",python,content +340,733888,"input_pipeline/generate_atari_dataset.py",9,0,"",python,selection_keyboard +341,733976,"input_pipeline/generate_atari_dataset.py",9,0," ",python,content +342,733977,"input_pipeline/generate_atari_dataset.py",10,0,"",python,selection_keyboard +343,734401,"input_pipeline/generate_atari_dataset.py",9,0,"",python,selection_command +344,734853,"input_pipeline/generate_atari_dataset.py",15,0,"",python,selection_command +345,735586,"input_pipeline/generate_atari_dataset.py",14,0,"",python,selection_command +346,752362,"input_pipeline/generate_atari_dataset.py",15,0,"",python,selection_command +347,752697,"input_pipeline/generate_atari_dataset.py",15,0,"https://github.com/vwxyzjn/cleanrl/blob/master/cleanrl/rainbow_atari.py",python,content +348,752697,"input_pipeline/generate_atari_dataset.py",86,0,"",python,selection_keyboard +349,752912,"input_pipeline/generate_atari_dataset.py",85,0,"",python,selection_command +350,759803,"input_pipeline/generate_atari_dataset.py",172,0,"",python,selection_command +351,760192,"input_pipeline/generate_atari_dataset.py",87,110,"",python,content +352,801269,"input_pipeline/generate_atari_dataset.py",599,27331,"\n\ntry:\n from utils import save_chunks # type: ignore\nexcept Exception: # pragma: no cover\n from input_pipeline.utils import save_chunks # type: ignore\nimport json\n\n\n@dataclass\nclass Args:\n exp_name: str = os.path.basename(__file__)[: -len("".py"")]\n """"""the name of this experiment""""""\n seed: int = 1\n """"""seed of the experiment""""""\n torch_deterministic: bool = True\n """"""if toggled, `torch.backends.cudnn.deterministic=False`""""""\n cuda: bool = True\n """"""if toggled, cuda will be enabled by default""""""\n track: bool = False\n """"""if toggled, this experiment will be tracked with Weights and Biases""""""\n wandb_project_name: str = ""cleanRL""\n """"""the wandb's project name""""""\n wandb_entity: Optional[str] = None\n """"""the entity (team) of wandb's project""""""\n capture_video: bool = False\n """"""whether to capture videos of the agent performances (check out `videos` folder)""""""\n save_model: bool = False\n """"""whether to save model into the `runs/{run_name}` folder""""""\n upload_model: bool = False\n """"""whether to upload the saved model to huggingface""""""\n hf_entity: str = """"\n """"""the user or org name of the model repository from the Hugging Face Hub""""""\n\n env_id: str = ""BreakoutNoFrameskip-v4""\n """"""the id of the environment""""""\n total_timesteps: int = 10000000\n """"""total timesteps of the experiments""""""\n learning_rate: float = 0.0000625\n """"""the learning rate of the optimizer""""""\n num_envs: int = 1\n """"""the number of parallel game 
environments""""""\n buffer_size: int = 1000000\n """"""the replay memory buffer size""""""\n gamma: float = 0.99\n """"""the discount factor gamma""""""\n tau: float = 1.0\n """"""the target network update rate""""""\n target_network_frequency: int = 8000\n """"""the timesteps it takes to update the target network""""""\n batch_size: int = 32\n """"""the batch size of sample from the reply memory""""""\n start_e: float = 1\n """"""the starting epsilon for exploration""""""\n end_e: float = 0.01\n """"""the ending epsilon for exploration""""""\n exploration_fraction: float = 0.10\n """"""the fraction of `total-timesteps` it takes from start-e to go end-e""""""\n learning_starts: int = 80000\n """"""timestep to start learning""""""\n train_frequency: int = 4\n """"""the frequency of training""""""\n n_step: int = 3\n """"""the number of steps to look ahead for n-step Q learning""""""\n prioritized_replay_alpha: float = 0.5\n """"""alpha parameter for prioritized replay buffer""""""\n prioritized_replay_beta: float = 0.4\n """"""beta parameter for prioritized replay buffer""""""\n prioritized_replay_eps: float = 1e-6\n """"""epsilon parameter for prioritized replay buffer""""""\n n_atoms: int = 51\n """"""the number of atoms""""""\n v_min: float = -10\n """"""the return lower bound""""""\n v_max: float = 10\n """"""the return upper bound""""""\n\n # Dataset capture\n capture_dataset: bool = True\n num_episodes_train: int = 10000\n num_episodes_val: int = 500\n num_episodes_test: int = 500\n output_dir: str = ""data/atari_episodes""\n min_episode_length: int = 1\n chunk_size: int = 160\n chunks_per_file: int = 100\n stop_on_complete: bool = True\n\n\ndef make_env(env_id, seed, idx, capture_video, run_name):\n def thunk():\n if capture_video and idx == 0:\n env = gym.make(env_id, render_mode=""rgb_array"")\n env = gym.wrappers.RecordVideo(env, f""videos/{run_name}"")\n else:\n env = gym.make(env_id)\n env = gym.wrappers.RecordEpisodeStatistics(env)\n\n env = NoopResetEnv(env, noop_max=30)\n env = MaxAndSkipEnv(env, skip=4)\n env = EpisodicLifeEnv(env)\n if ""FIRE"" in env.unwrapped.get_action_meanings():\n env = FireResetEnv(env)\n env = ClipRewardEnv(env)\n env = gym.wrappers.ResizeObservation(env, (84, 84))\n env = gym.wrappers.GrayScaleObservation(env)\n env = gym.wrappers.FrameStack(env, 4)\n\n env.action_space.seed(seed)\n return env\n\n return thunk\n\n\nclass NoisyLinear(nn.Module):\n def __init__(self, in_features, out_features, std_init=0.5):\n super().__init__()\n self.in_features = in_features\n self.out_features = out_features\n self.std_init = std_init\n\n self.weight_mu = nn.Parameter(torch.FloatTensor(out_features, in_features))\n self.weight_sigma = nn.Parameter(torch.FloatTensor(out_features, in_features))\n self.register_buffer(\n ""weight_epsilon"", torch.FloatTensor(out_features, in_features)\n )\n self.bias_mu = nn.Parameter(torch.FloatTensor(out_features))\n self.bias_sigma = nn.Parameter(torch.FloatTensor(out_features))\n self.register_buffer(""bias_epsilon"", torch.FloatTensor(out_features))\n # factorized gaussian noise\n self.reset_parameters()\n self.reset_noise()\n\n def reset_parameters(self):\n mu_range = 1 / math.sqrt(self.in_features)\n self.weight_mu.data.uniform_(-mu_range, mu_range)\n self.weight_sigma.data.fill_(self.std_init / math.sqrt(self.in_features))\n self.bias_mu.data.uniform_(-mu_range, mu_range)\n self.bias_sigma.data.fill_(self.std_init / math.sqrt(self.out_features))\n\n def reset_noise(self):\n self.weight_epsilon.normal_()\n 
self.bias_epsilon.normal_()\n\n def forward(self, input):\n if self.training:\n weight = self.weight_mu + self.weight_sigma * self.weight_epsilon\n bias = self.bias_mu + self.bias_sigma * self.bias_epsilon\n else:\n weight = self.weight_mu\n bias = self.bias_mu\n return F.linear(input, weight, bias)\n\n\n# ALGO LOGIC: initialize agent here:\nclass NoisyDuelingDistributionalNetwork(nn.Module):\n def __init__(self, env, n_atoms, v_min, v_max):\n super().__init__()\n self.n_atoms = n_atoms\n self.v_min = v_min\n self.v_max = v_max\n self.delta_z = (v_max - v_min) / (n_atoms - 1)\n self.n_actions = env.single_action_space.n\n self.register_buffer(""support"", torch.linspace(v_min, v_max, n_atoms))\n\n self.network = nn.Sequential(\n nn.Conv2d(4, 32, 8, stride=4),\n nn.ReLU(),\n nn.Conv2d(32, 64, 4, stride=2),\n nn.ReLU(),\n nn.Conv2d(64, 64, 3, stride=1),\n nn.ReLU(),\n nn.Flatten(),\n )\n conv_output_size = 3136\n\n self.value_head = nn.Sequential(\n NoisyLinear(conv_output_size, 512), nn.ReLU(), NoisyLinear(512, n_atoms)\n )\n\n self.advantage_head = nn.Sequential(\n NoisyLinear(conv_output_size, 512),\n nn.ReLU(),\n NoisyLinear(512, n_atoms * self.n_actions),\n )\n\n def forward(self, x):\n h = self.network(x / 255.0)\n value = self.value_head(h).view(-1, 1, self.n_atoms)\n advantage = self.advantage_head(h).view(-1, self.n_actions, self.n_atoms)\n q_atoms = value + advantage - advantage.mean(dim=1, keepdim=True)\n q_dist = F.softmax(q_atoms, dim=2)\n return q_dist\n\n def reset_noise(self):\n for layer in self.value_head:\n if isinstance(layer, NoisyLinear):\n layer.reset_noise()\n for layer in self.advantage_head:\n if isinstance(layer, NoisyLinear):\n layer.reset_noise()\n\n\nPrioritizedBatch = collections.namedtuple(\n ""PrioritizedBatch"",\n [\n ""observations"",\n ""actions"",\n ""rewards"",\n ""next_observations"",\n ""dones"",\n ""indices"",\n ""weights"",\n ],\n)\n\n\n# adapted from: https://github.com/openai/baselines/blob/master/baselines/common/segment_tree.py\nclass SumSegmentTree:\n def __init__(self, capacity):\n self.capacity = capacity\n self.tree_size = 2 * capacity - 1\n self.tree = np.zeros(self.tree_size, dtype=np.float32)\n\n def _propagate(self, idx):\n parent = (idx - 1) // 2\n while parent >= 0:\n self.tree[parent] = self.tree[parent * 2 + 1] + self.tree[parent * 2 + 2]\n parent = (parent - 1) // 2\n\n def update(self, idx, value):\n tree_idx = idx + self.capacity - 1\n self.tree[tree_idx] = value\n self._propagate(tree_idx)\n\n def total(self):\n return self.tree[0]\n\n def retrieve(self, value):\n idx = 0\n while idx * 2 + 1 < self.tree_size:\n left = idx * 2 + 1\n right = left + 1\n if value <= self.tree[left]:\n idx = left\n else:\n value -= self.tree[left]\n idx = right\n return idx - (self.capacity - 1)\n\n\n# adapted from: https://github.com/openai/baselines/blob/master/baselines/common/segment_tree.py\nclass MinSegmentTree:\n def __init__(self, capacity):\n self.capacity = capacity\n self.tree_size = 2 * capacity - 1\n self.tree = np.full(self.tree_size, float(""inf""), dtype=np.float32)\n\n def _propagate(self, idx):\n parent = (idx - 1) // 2\n while parent >= 0:\n self.tree[parent] = np.minimum(\n self.tree[parent * 2 + 1], self.tree[parent * 2 + 2]\n )\n parent = (parent - 1) // 2\n\n def update(self, idx, value):\n tree_idx = idx + self.capacity - 1\n self.tree[tree_idx] = value\n self._propagate(tree_idx)\n\n def min(self):\n return self.tree[0]\n\n\nclass PrioritizedReplayBuffer:\n def __init__(\n self, capacity, obs_shape, device, n_step, gamma, 
alpha=0.6, beta=0.4, eps=1e-6\n ):\n self.capacity = capacity\n self.device = device\n self.n_step = n_step\n self.gamma = gamma\n self.alpha = alpha\n self.beta = beta\n self.eps = eps\n\n self.buffer_obs = np.zeros((capacity,) + obs_shape, dtype=np.uint8)\n self.buffer_next_obs = np.zeros((capacity,) + obs_shape, dtype=np.uint8)\n self.buffer_actions = np.zeros(capacity, dtype=np.int64)\n self.buffer_rewards = np.zeros(capacity, dtype=np.float32)\n self.buffer_dones = np.zeros(capacity, dtype=np.bool_)\n\n self.pos = 0\n self.size = 0\n self.max_priority = 1.0\n\n self.sum_tree = SumSegmentTree(capacity)\n self.min_tree = MinSegmentTree(capacity)\n\n # For n-step returns\n self.n_step_buffer = deque(maxlen=n_step)\n\n def _get_n_step_info(self):\n reward = 0.0\n next_obs = self.n_step_buffer[-1][3]\n done = self.n_step_buffer[-1][4]\n\n for i in range(len(self.n_step_buffer)):\n reward += self.gamma**i * self.n_step_buffer[i][2]\n if self.n_step_buffer[i][4]:\n next_obs = self.n_step_buffer[i][3]\n done = True\n break\n return reward, next_obs, done\n\n def add(self, obs, action, reward, next_obs, done):\n self.n_step_buffer.append((obs, action, reward, next_obs, done))\n\n if len(self.n_step_buffer) < self.n_step:\n return\n\n reward, next_obs, done = self._get_n_step_info()\n obs = self.n_step_buffer[0][0]\n action = self.n_step_buffer[0][1]\n\n idx = self.pos\n self.buffer_obs[idx] = obs\n self.buffer_next_obs[idx] = next_obs\n self.buffer_actions[idx] = action\n self.buffer_rewards[idx] = reward\n self.buffer_dones[idx] = done\n\n priority = self.max_priority**self.alpha\n self.sum_tree.update(idx, priority)\n self.min_tree.update(idx, priority)\n\n self.pos = (self.pos + 1) % self.capacity\n self.size = min(self.size + 1, self.capacity)\n\n if done:\n self.n_step_buffer.clear()\n\n def sample(self, batch_size):\n indices = []\n p_total = self.sum_tree.total()\n segment = p_total / batch_size\n\n for i in range(batch_size):\n a = segment * i\n b = segment * (i + 1)\n upperbound = np.random.uniform(a, b)\n idx = self.sum_tree.retrieve(upperbound)\n indices.append(idx)\n\n samples = {\n ""observations"": torch.from_numpy(self.buffer_obs[indices]).to(self.device),\n ""actions"": torch.from_numpy(self.buffer_actions[indices])\n .to(self.device)\n .unsqueeze(1),\n ""rewards"": torch.from_numpy(self.buffer_rewards[indices])\n .to(self.device)\n .unsqueeze(1),\n ""next_observations"": torch.from_numpy(self.buffer_next_obs[indices]).to(\n self.device\n ),\n ""dones"": torch.from_numpy(self.buffer_dones[indices])\n .to(self.device)\n .unsqueeze(1),\n }\n\n probs = np.array(\n [self.sum_tree.tree[idx + self.capacity - 1] for idx in indices]\n )\n weights = (self.size * probs / p_total) ** -self.beta\n weights = weights / weights.max()\n samples[""weights""] = torch.from_numpy(weights).to(self.device).unsqueeze(1)\n samples[""indices""] = indices\n\n return PrioritizedBatch(**samples)\n\n def update_priorities(self, indices, priorities):\n priorities = np.abs(priorities) + self.eps\n self.max_priority = max(self.max_priority, priorities.max())\n\n for idx, priority in zip(indices, priorities):\n priority = priority**self.alpha\n self.sum_tree.update(idx, priority)\n self.min_tree.update(idx, priority)\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n assert args.num_envs == 1, ""vectorized envs are not supported at the moment""\n run_name = f""{args.env_id}__{args.exp_name}__{args.seed}__{int(time.time())}""\n if args.track:\n import wandb\n\n wandb.init(\n 
project=args.wandb_project_name,\n entity=args.wandb_entity,\n sync_tensorboard=True,\n config=vars(args),\n name=run_name,\n monitor_gym=True,\n save_code=True,\n )\n writer = SummaryWriter(f""runs/{run_name}"")\n writer.add_text(\n ""hyperparameters"",\n ""|param|value|\n|-|-|\n%s""\n % (""\n"".join([f""|{key}|{value}|"" for key, value in vars(args).items()])),\n )\n\n # TRY NOT TO MODIFY: seeding\n random.seed(args.seed)\n np.random.seed(args.seed)\n torch.manual_seed(args.seed)\n torch.backends.cudnn.deterministic = args.torch_deterministic\n\n device = torch.device(""cuda"" if torch.cuda.is_available() and args.cuda else ""cpu"")\n\n # env setup\n envs = gym.vector.SyncVectorEnv(\n [\n make_env(args.env_id, args.seed + i, i, args.capture_video, run_name)\n for i in range(args.num_envs)\n ]\n )\n assert isinstance(\n envs.single_action_space, gym.spaces.Discrete\n ), ""only discrete action space is supported""\n\n q_network = NoisyDuelingDistributionalNetwork(\n envs, args.n_atoms, args.v_min, args.v_max\n ).to(device)\n optimizer = optim.Adam(q_network.parameters(), lr=args.learning_rate, eps=1.5e-4)\n target_network = NoisyDuelingDistributionalNetwork(\n envs, args.n_atoms, args.v_min, args.v_max\n ).to(device)\n target_network.load_state_dict(q_network.state_dict())\n\n rb = PrioritizedReplayBuffer(\n args.buffer_size,\n envs.single_observation_space.shape,\n device,\n args.n_step,\n args.gamma,\n args.prioritized_replay_alpha,\n args.prioritized_replay_beta,\n args.prioritized_replay_eps,\n )\n\n # dataset capture state\n split_targets = {\n ""train"": args.num_episodes_train,\n ""val"": args.num_episodes_val,\n ""test"": args.num_episodes_test,\n }\n # Determine splits to run (order: train -> val -> test)\n splits_in_order = [s for s in [""train"", ""val"", ""test""] if split_targets[s] > 0]\n\n episodes_captured_per_split: dict[str, int] = {\n s: 0 for s in [""train"", ""val"", ""test""]\n }\n file_idx_by_split: dict[str, int] = {s: 0 for s in [""train"", ""val"", ""test""]}\n episode_metadata_by_split: dict[str, list[dict]] = {\n s: [] for s in [""train"", ""val"", ""test""]\n }\n\n obs_chunks: list[np.ndarray] = []\n act_chunks: list[np.ndarray] = []\n\n current_split_idx = 0\n current_split = splits_in_order[0]\n split_dir = os.path.join(args.output_dir, current_split)\n if args.capture_dataset:\n os.makedirs(split_dir, exist_ok=True)\n\n start_time = time.time()\n\n # TRY NOT TO MODIFY: start the game\n obs, _ = envs.reset(seed=args.seed)\n observations_seq: list[np.ndarray] = []\n actions_seq: list[np.ndarray] = []\n for global_step in range(args.total_timesteps):\n # anneal PER beta to 1\n rb.beta = min(\n 1.0,\n args.prioritized_replay_beta\n + global_step * (1.0 - args.prioritized_replay_beta) / args.total_timesteps,\n )\n\n # ALGO LOGIC: put action logic here\n with torch.no_grad():\n q_dist = q_network(torch.Tensor(obs).to(device))\n q_values = torch.sum(q_dist * q_network.support, dim=2)\n actions = torch.argmax(q_values, dim=1).cpu().numpy()\n\n # TRY NOT TO MODIFY: execute the game and log data.\n next_obs, rewards, terminations, truncations, infos = envs.step(actions)\n\n if args.capture_dataset:\n observations_seq.append(next_obs.astype(np.uint8))\n actions_seq.append(actions.astype(np.int64))\n\n if ""final_info"" in infos:\n for info in infos[""final_info""]:\n if info and ""episode"" in info:\n print(\n f""global_step={global_step}, episodic_return={info['episode']['r']}""\n )\n writer.add_scalar(\n ""charts/episodic_return"", info[""episode""][""r""], 
global_step\n )\n writer.add_scalar(\n ""charts/episodic_length"", info[""episode""][""l""], global_step\n )\n\n continue_capturing_multi = any(\n episodes_captured_per_split[s] < split_targets[s]\n for s in splits_in_order\n )\n if args.capture_dataset and continue_capturing_multi:\n current_len = len(observations_seq)\n if current_len >= args.min_episode_length:\n frames = np.concatenate(observations_seq, axis=0).astype(\n np.uint8\n )\n acts = np.concatenate(actions_seq, axis=0).astype(np.int64)\n\n episode_obs_chunks = []\n episode_act_chunks = []\n start_idx = 0\n while start_idx < current_len:\n end_idx = min(start_idx + args.chunk_size, current_len)\n if end_idx - start_idx < args.chunk_size:\n print(\n f""Warning: Inconsistent chunk_sizes. Episode has {current_len} frames, ""\n f""which is smaller than the requested chunk_size: {args.chunk_size}. ""\n ""This might lead to performance degradation during training.""\n )\n episode_obs_chunks.append(frames[start_idx:end_idx])\n episode_act_chunks.append(acts[start_idx:end_idx])\n start_idx = end_idx\n\n obs_chunks_data = [\n seq.astype(np.uint8) for seq in episode_obs_chunks\n ]\n act_chunks_data = [act for act in episode_act_chunks]\n obs_chunks.extend(obs_chunks_data)\n act_chunks.extend(act_chunks_data)\n\n # Save to the active split\n ep_metadata, obs_chunks, next_file_idx, act_chunks = (\n save_chunks(\n obs_chunks,\n file_idx_by_split[current_split],\n args.chunks_per_file,\n split_dir,\n act_chunks,\n )\n )\n file_idx_by_split[current_split] = next_file_idx\n episode_metadata_by_split[current_split].extend(ep_metadata)\n\n episodes_captured_per_split[current_split] += 1\n\n if (\n episodes_captured_per_split[current_split]\n >= split_targets[current_split]\n ):\n if len(obs_chunks) > 0:\n print(\n f""Warning: Dropping {len(obs_chunks)} chunks before switching split '"",\n {current_split},\n ""' for consistent number of chunks per file."",\n ""Consider changing the chunk_size and chunks_per_file parameters to prevent data-loss."",\n )\n obs_chunks = []\n act_chunks = []\n if current_split_idx + 1 < len(splits_in_order):\n current_split_idx += 1\n current_split = splits_in_order[current_split_idx]\n split_dir = os.path.join(\n args.output_dir, current_split\n )\n os.makedirs(split_dir, exist_ok=True)\n else:\n print(\n f""Episode too short ({current_len}), skipping capture...""\n )\n\n observations_seq = []\n actions_seq = []\n\n # TRY NOT TO MODIFY: save data to reply buffer; handle `final_observation`\n real_next_obs = next_obs.copy()\n for idx, trunc in enumerate(truncations):\n if trunc:\n real_next_obs[idx] = infos[""final_observation""][idx]\n rb.add(obs, actions, rewards, real_next_obs, terminations)\n\n # TRY NOT TO MODIFY: CRUCIAL step easy to overlook\n obs = next_obs\n\n # ALGO LOGIC: training.\n if global_step > args.learning_starts:\n if global_step % args.train_frequency == 0:\n # reset the noise for both networks\n q_network.reset_noise()\n target_network.reset_noise()\n data = rb.sample(args.batch_size)\n\n with torch.no_grad():\n next_dist = target_network(\n data.next_observations\n ) # [B, num_actions, n_atoms]\n support = target_network.support # [n_atoms]\n next_q_values = torch.sum(\n next_dist * support, dim=2\n ) # [B, num_actions]\n\n # double q-learning\n next_dist_online = q_network(\n data.next_observations\n ) # [B, num_actions, n_atoms]\n next_q_online = torch.sum(\n next_dist_online * support, dim=2\n ) # [B, num_actions]\n best_actions = torch.argmax(next_q_online, dim=1) # [B]\n next_pmfs = 
next_dist[\n torch.arange(args.batch_size), best_actions\n ] # [B, n_atoms]\n\n # compute the n-step Bellman update.\n gamma_n = args.gamma**args.n_step\n next_atoms = data.rewards + gamma_n * support * (\n 1 - data.dones.float()\n )\n tz = next_atoms.clamp(q_network.v_min, q_network.v_max)\n\n # projection\n delta_z = q_network.delta_z\n b = (tz - q_network.v_min) / delta_z # shape: [B, n_atoms]\n l = b.floor().clamp(0, args.n_atoms - 1)\n u = b.ceil().clamp(0, args.n_atoms - 1)\n\n # (l == u).float() handles the case where bj is exactly an integer\n # example bj = 1, then the upper ceiling should be uj= 2, and lj= 1\n d_m_l = (\n u.float() + (l == b).float() - b\n ) * next_pmfs # [B, n_atoms]\n d_m_u = (b - l) * next_pmfs # [B, n_atoms]\n\n target_pmfs = torch.zeros_like(next_pmfs)\n for i in range(target_pmfs.size(0)):\n target_pmfs[i].index_add_(0, l[i].long(), d_m_l[i])\n target_pmfs[i].index_add_(0, u[i].long(), d_m_u[i])\n\n dist = q_network(data.observations) # [B, num_actions, n_atoms]\n pred_dist = dist.gather(\n 1, data.actions.unsqueeze(-1).expand(-1, -1, args.n_atoms)\n ).squeeze(1)\n log_pred = torch.log(pred_dist.clamp(min=1e-5, max=1 - 1e-5))\n\n loss_per_sample = -(target_pmfs * log_pred).sum(dim=1)\n loss = (loss_per_sample * data.weights.squeeze()).mean()\n\n # update priorities\n new_priorities = loss_per_sample.detach().cpu().numpy()\n rb.update_priorities(data.indices, new_priorities)\n\n if global_step % 100 == 0:\n writer.add_scalar(""losses/td_loss"", loss.item(), global_step)\n q_values = (pred_dist * q_network.support).sum(dim=1) # [B]\n writer.add_scalar(\n ""losses/q_values"", q_values.mean().item(), global_step\n )\n sps = int(global_step / (time.time() - start_time))\n print(""SPS:"", sps)\n writer.add_scalar(""charts/SPS"", sps, global_step)\n writer.add_scalar(""charts/beta"", rb.beta, global_step)\n\n # optimize the model\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n\n # update target network\n if global_step % args.target_network_frequency == 0:\n for target_param, param in zip(\n target_network.parameters(), q_network.parameters()\n ):\n target_param.data.copy_(\n args.tau * param.data + (1.0 - args.tau) * target_param.data\n )\n\n # optional early stop on dataset completion\n if args.capture_dataset and args.stop_on_complete:\n all_done = (\n all(\n episodes_captured_per_split[s] >= split_targets[s]\n for s in splits_in_order\n )\n and len(splits_in_order) > 0\n )\n if all_done:\n break\n\n envs.close()\n writer.close()\n\n # write metadata for dataset\n if args.capture_dataset:\n if len(obs_chunks) > 0:\n print(\n f""Warning: Dropping {len(obs_chunks)} chunks for consistent number of chunks per file."",\n ""Consider changing the chunk_size and chunks_per_file parameters to prevent data-loss."",\n )\n\n os.makedirs(args.output_dir, exist_ok=True)\n metadata_path = os.path.join(args.output_dir, ""metadata.json"")\n if os.path.exists(metadata_path):\n try:\n with open(metadata_path, ""r"") as f:\n metadata = json.load(f)\n except Exception:\n metadata = {}\n else:\n metadata = {}\n\n metadata.setdefault(""env"", args.env_id)\n metadata.setdefault(""num_actions"", int(envs.single_action_space.n))\n for split in [""train"", ""val"", ""test""]:\n metadata.setdefault(f""num_episodes_{split}"", 0)\n metadata.setdefault(f""avg_episode_len_{split}"", 0.0)\n metadata.setdefault(f""episode_metadata_{split}"", [])\n\n for split_key in splits_in_order:\n ep_meta_list = episode_metadata_by_split[split_key]\n if ep_meta_list:\n 
metadata[f""episode_metadata_{split_key}""].extend(ep_meta_list)\n metadata[f""num_episodes_{split_key}""] = len(\n metadata[f""episode_metadata_{split_key}""]\n )\n metadata[f""avg_episode_len_{split_key}""] = float(\n np.mean(\n [\n ep[""avg_seq_len""]\n for ep in metadata[f""episode_metadata_{split_key}""]\n ]\n )\n )\n\n with open(metadata_path, ""w"") as f:\n json.dump(metadata, f)\n",python,content +353,1620752,"TERMINAL",0,0,"salloc --gpus=1 --ntasks-per-node=1 --cpus-per-task=64 --mem=100G",,terminal_command +354,1620801,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 29267\r\n",,terminal_output +355,1620903,"TERMINAL",0,0,"salloc: Nodes hai001 are ready for job\r\n",,terminal_output +356,1621296,"TERMINAL",0,0,"Running inside SLURM, Job ID 29267.\r\n",,terminal_output +357,1621377,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai001.haicore.berlin:~/jafar] $ ",,terminal_output +358,1623693,"TERMINAL",0,0,"\r[franz.srambical@hai001.haicore.berlin:~/jafar] $ \r[franz.srambical@hai001.haicore.berlin:~/jafar] $ \r[franz.srambical@hai001.haicore.berlin:~/jafar] $ \r[franz.srambical@hai001.haicore.berlin:~/jafar] $ ",,terminal_output +359,1626376,"TERMINAL",0,0,"s",,terminal_output +360,1626515,"TERMINAL",0,0,"q",,terminal_output +361,1626636,"TERMINAL",0,0,"u",,terminal_output +362,1626748,"TERMINAL",0,0,"eu",,terminal_output +363,1626905,"TERMINAL",0,0,"e",,terminal_output +364,1626979,"TERMINAL",0,0,"\r\n[?2004l\r JOBID USER PARTITION NODES CPUS ST SUBMIT_TIME START_TIME TIME TIME_LIMIT NODELIST(REASON)\r\n 29267 franz.sram interacti 1 128 R 2025-09-20T17:20:41 2025-09-20T17:20:41 0:06 1-00:00:00 hai001\r\n 29266 xiao.liu interacti 1 64 R 2025-09-20T13:50:24 2025-09-20T13:50:24 3:30:23 23:59:00 hai004\r\n 29265 xiao.liu interacti 1 64 R 2025-09-20T13:48:52 2025-09-20T13:48:52 3:31:55 23:59:00 hai005\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai001.haicore.berlin:~/jafar] $ ",,terminal_output +365,1670074,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/ali/coinrun/tokenizer/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/ali/coinrun/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_coinrun_reproduction\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n\npython generate_dataset_arr_records.py --num_episodes 10000 --output_dir /fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_10k",shellscript,tab +366,1670616,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",20,0,"",shellscript,selection_command +367,1670870,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",21,0,"",shellscript,selection_command +368,1670892,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",39,0,"",shellscript,selection_command +369,1670921,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",67,0,"",shellscript,selection_command +370,1670959,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",91,0,"",shellscript,selection_command +371,1670989,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",117,0,"",shellscript,selection_command 
+372,1671021,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",138,0,"",shellscript,selection_command +373,1671056,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",232,0,"",shellscript,selection_command +374,1671115,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",325,0,"",shellscript,selection_command +375,1671139,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",381,0,"",shellscript,selection_command +376,1671153,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",399,0,"",shellscript,selection_command +377,1671194,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",450,0,"",shellscript,selection_command +378,1671220,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",451,0,"",shellscript,selection_command +379,1671256,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",452,0,"",shellscript,selection_command +380,1673399,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",459,0,"",shellscript,selection_command +381,1673572,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",487,0,"",shellscript,selection_command +382,1674978,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,0,"",shellscript,selection_mouse +383,1675136,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,2,"/f",shellscript,selection_mouse +384,1675143,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,6,"/fast/",shellscript,selection_mouse +385,1675162,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,13,"/fast/project",shellscript,selection_mouse +386,1675175,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,19,"/fast/project/HFMI_",shellscript,selection_mouse +387,1675180,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,70,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_10k",shellscript,selection_mouse +388,1675422,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,32,"/fast/project/HFMI_SynergyUnit/j",shellscript,selection_mouse +389,1675423,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,38,"/fast/project/HFMI_SynergyUnit/jafar_w",shellscript,selection_mouse +390,1675437,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,44,"/fast/project/HFMI_SynergyUnit/jafar_ws/data",shellscript,selection_mouse +391,1675444,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,48,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coi",shellscript,selection_mouse +392,1675463,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,51,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinru",shellscript,selection_mouse +393,1675636,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,52,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun",shellscript,selection_mouse +394,1675658,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,53,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/",shellscript,selection_mouse +395,1675671,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,56,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/arr",shellscript,selection_mouse 
+396,1675678,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,60,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_r",shellscript,selection_mouse +397,1675695,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,63,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_reco",shellscript,selection_mouse +398,1675727,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,65,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_record",shellscript,selection_mouse +399,1675730,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,67,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_",shellscript,selection_mouse +400,1675761,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,68,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_1",shellscript,selection_mouse +401,1675931,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,69,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_10",shellscript,selection_mouse +402,1675965,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",525,70,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_10k",shellscript,selection_mouse +403,1676421,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",594,0,"",shellscript,selection_command +404,1676651,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",595,0,"",shellscript,selection_mouse +405,1676653,"slurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch",594,0,"",shellscript,selection_command +406,1678211,"TERMINAL",0,0,"c",,terminal_output +407,1678274,"TERMINAL",0,0,"d ",,terminal_output +408,1678791,"TERMINAL",0,0,"",,terminal_output +409,1681078,"TERMINAL",0,0,"c",,terminal_output +410,1681139,"TERMINAL",0,0,"d",,terminal_output +411,1681186,"TERMINAL",0,0," ",,terminal_output +412,1681450,"TERMINAL",0,0,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_10k",,terminal_output +413,1681655,"TERMINAL",0,0,"\r/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_10k\r\n[?2004l\r]0;franz.srambical@hai-login2:/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_10k[?2004h[franz.srambical@hai001.haicore.berlin:/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_10k] $ ",,terminal_output +414,1682559,"TERMINAL",0,0,"cd ",,terminal_output +415,1682661,"TERMINAL",0,0,".",,terminal_output +416,1682831,"TERMINAL",0,0,".",,terminal_output +417,1683066,"TERMINAL",0,0,"\r\n[?2004l\r]0;franz.srambical@hai-login2:/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun[?2004h[franz.srambical@hai001.haicore.berlin:/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun] $ ",,terminal_output +418,1683451,"TERMINAL",0,0,"cd ",,terminal_output +419,1683560,"TERMINAL",0,0,".",,terminal_output +420,1683694,"TERMINAL",0,0,".",,terminal_output +421,1683835,"TERMINAL",0,0,"\r\n[?2004l\r]0;franz.srambical@hai-login2:/fast/project/HFMI_SynergyUnit/jafar_ws/data[?2004h[franz.srambical@hai001.haicore.berlin:/fast/project/HFMI_SynergyUnit/jafar_ws/data] $ ",,terminal_output +422,1684055,"TERMINAL",0,0,"l",,terminal_output +423,1684136,"TERMINAL",0,0,"s",,terminal_output +424,1684255,"TERMINAL",0,0,"\r\n[?2004l\rcoinrun pong tars\r\n]0;franz.srambical@hai-login2:/fast/project/HFMI_SynergyUnit/jafar_ws/data[?2004h[franz.srambical@hai001.haicore.berlin:/fast/project/HFMI_SynergyUnit/jafar_ws/data] $ 
",,terminal_output +425,1686594,"TERMINAL",0,0,"cd ",,terminal_output +426,1686709,"TERMINAL",0,0,"p",,terminal_output +427,1686783,"TERMINAL",0,0,"o",,terminal_output +428,1686858,"TERMINAL",0,0,"n",,terminal_output +429,1686966,"TERMINAL",0,0,"g/",,terminal_output +430,1687076,"TERMINAL",0,0,"\r\n[?2004l\r]0;franz.srambical@hai-login2:/fast/project/HFMI_SynergyUnit/jafar_ws/data/pong[?2004h[franz.srambical@hai001.haicore.berlin:/fast/project/HFMI_SynergyUnit/jafar_ws/data/pong] $ ",,terminal_output +431,1687321,"TERMINAL",0,0,"ls",,terminal_output +432,1687455,"TERMINAL",0,0,"\r\n[?2004l\r]0;franz.srambical@hai-login2:/fast/project/HFMI_SynergyUnit/jafar_ws/data/pong[?2004h[franz.srambical@hai001.haicore.berlin:/fast/project/HFMI_SynergyUnit/jafar_ws/data/pong] $ ",,terminal_output +433,1688234,"TERMINAL",0,0,"c",,terminal_output +434,1688338,"TERMINAL",0,0," d",,terminal_output +435,1688427,"TERMINAL",0,0,".",,terminal_output +436,1688605,"TERMINAL",0,0,".",,terminal_output +437,1688780,"TERMINAL",0,0,"\r\n[?2004l\rbash: c: command not found\r\n]0;franz.srambical@hai-login2:/fast/project/HFMI_SynergyUnit/jafar_ws/data/pong[?2004h[franz.srambical@hai001.haicore.berlin:/fast/project/HFMI_SynergyUnit/jafar_ws/data/pong] $ ",,terminal_output +438,1689354,"TERMINAL",0,0,"c",,terminal_output +439,1689455,"TERMINAL",0,0,"d ",,terminal_output +440,1689543,"TERMINAL",0,0,".",,terminal_output +441,1689715,"TERMINAL",0,0,".",,terminal_output +442,1689890,"TERMINAL",0,0,"\r\n[?2004l\r]0;franz.srambical@hai-login2:/fast/project/HFMI_SynergyUnit/jafar_ws/data[?2004h[franz.srambical@hai001.haicore.berlin:/fast/project/HFMI_SynergyUnit/jafar_ws/data] $ ",,terminal_output +443,1690839,"TERMINAL",0,0,"l",,terminal_output +444,1690939,"TERMINAL",0,0,"s",,terminal_output +445,1691058,"TERMINAL",0,0,"\r\n[?2004l\rcoinrun pong tars\r\n]0;franz.srambical@hai-login2:/fast/project/HFMI_SynergyUnit/jafar_ws/data[?2004h[franz.srambical@hai001.haicore.berlin:/fast/project/HFMI_SynergyUnit/jafar_ws/data] $ ",,terminal_output +446,1713712,"slurm/jobs/franz/berlin/coinrun/coinrun_dynamics/coinrun_dynamics_fp32_layernorm.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:4\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=train_dynamics_fp32_layernorm_coinrun\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nDYNATYPE=""maskgit""\ntags=""dynamics coinrun 38M 
${DYNATYPE}""\n\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/train_tokenizer_1e-4_3414046""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/train_lam_model_size_scaling_38M_18742""\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=${DYNATYPE} \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --lam_checkpoint=""${lam_ckpt_dir}"" \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +447,1714490,"slurm/jobs/franz/berlin/coinrun/coinrun_dynamics/coinrun_dynamics_fp32_layernorm.sh",0,0,"",shellscript,selection_command +448,1714855,"slurm/jobs/franz/berlin/coinrun/coinrun_dynamics/coinrun_dynamics_fp32_layernorm.sh",20,0,"",shellscript,selection_command +449,1715109,"slurm/jobs/franz/berlin/coinrun/coinrun_dynamics/coinrun_dynamics_fp32_layernorm.sh",21,0,"",shellscript,selection_command +450,1715143,"slurm/jobs/franz/berlin/coinrun/coinrun_dynamics/coinrun_dynamics_fp32_layernorm.sh",39,0,"",shellscript,selection_command +451,1717127,"slurm/jobs/franz/berlin/coinrun/coinrun_dynamics/coinrun_dynamics_fp32_layernorm.sh",156,0,"",shellscript,selection_command +452,1724099,"TERMINAL",0,0,"l",,terminal_output +453,1724218,"TERMINAL",0,0,"s",,terminal_output +454,1724277,"TERMINAL",0,0,"\r\n[?2004l\rcoinrun pong tars\r\n]0;franz.srambical@hai-login2:/fast/project/HFMI_SynergyUnit/jafar_ws/data[?2004h[franz.srambical@hai001.haicore.berlin:/fast/project/HFMI_SynergyUnit/jafar_ws/data] $ ",,terminal_output +455,1725765,"TERMINAL",0,0,"mk",,terminal_output +456,1725835,"TERMINAL",0,0,"d",,terminal_output +457,1726050,"TERMINAL",0,0,"ir ",,terminal_output +458,1726129,"TERMINAL",0,0,"b",,terminal_output +459,1726217,"TERMINAL",0,0,"r",,terminal_output +460,1726330,"TERMINAL",0,0,"e",,terminal_output +461,1727020,"TERMINAL",0,0,"a",,terminal_output +462,1727098,"TERMINAL",0,0,"k",,terminal_output +463,1727230,"TERMINAL",0,0,"o",,terminal_output +464,1727308,"TERMINAL",0,0,"u",,terminal_output +465,1727390,"TERMINAL",0,0,"t",,terminal_output +466,1727696,"TERMINAL",0,0,"\r\n[?2004l\r]0;franz.srambical@hai-login2:/fast/project/HFMI_SynergyUnit/jafar_ws/data[?2004h[franz.srambical@hai001.haicore.berlin:/fast/project/HFMI_SynergyUnit/jafar_ws/data] $ ",,terminal_output +467,1728192,"TERMINAL",0,0,"c",,terminal_output +468,1728269,"TERMINAL",0,0,"d ",,terminal_output +469,1728674,"TERMINAL",0,0,"br",,terminal_output +470,1728778,"TERMINAL",0,0,"eak",,terminal_output +471,1728956,"TERMINAL",0,0,"out/",,terminal_output +472,1729072,"TERMINAL",0,0,"\r\n[?2004l\r]0;franz.srambical@hai-login2:/fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout[?2004h[franz.srambical@hai001.haicore.berlin:/fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout] $ ",,terminal_output +473,1730399,"TERMINAL",0,0,"pw",,terminal_output +474,1730478,"TERMINAL",0,0,"d",,terminal_output 
+475,1730590,"TERMINAL",0,0,"\r\n[?2004l\r/fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout\r\n]0;franz.srambical@hai-login2:/fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout[?2004h[franz.srambical@hai001.haicore.berlin:/fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout] $ ",,terminal_output +476,1734565,"TERMINAL",0,0,"c",,terminal_output +477,1734661,"TERMINAL",0,0,"d",,terminal_output +478,1734718,"TERMINAL",0,0,"\r\n[?2004l\r]0;franz.srambical@hai-login2:~[?2004h[franz.srambical@hai001.haicore.berlin:~] $ ",,terminal_output +479,1734867,"TERMINAL",0,0,"c",,terminal_output +480,1734953,"TERMINAL",0,0,"d",,terminal_output +481,1735012,"TERMINAL",0,0," ",,terminal_output +482,1735844,"TERMINAL",0,0,"ja",,terminal_output +483,1735902,"TERMINAL",0,0,"f",,terminal_output +484,1736473,"TERMINAL",0,0,"",,terminal_output +485,1736565,"TERMINAL",0,0,"",,terminal_output +486,1736670,"TERMINAL",0,0,"\r\n[?2004l\rbash: cd: ja: No such file or directory\r\n]0;franz.srambical@hai-login2:~[?2004h[franz.srambical@hai001.haicore.berlin:~] $ ",,terminal_output +487,1737999,"TERMINAL",0,0,"c",,terminal_output +488,1738088,"TERMINAL",0,0,"d ",,terminal_output +489,1738217,"TERMINAL",0,0,"j",,terminal_output +490,1738344,"TERMINAL",0,0,"af",,terminal_output +491,1738479,"TERMINAL",0,0,"a",,terminal_output +492,1738520,"TERMINAL",0,0,"r",,terminal_output +493,1738665,"TERMINAL",0,0,"\r\n[?2004l\r]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai001.haicore.berlin:~/jafar] $ ",,terminal_output +494,1747130,"TERMINAL",0,0,"l",,terminal_output +495,1747198,"TERMINAL",0,0,"s",,terminal_output +496,1747344,"TERMINAL",0,0,"\r\n[?2004l\r'$PWD' data generation_1754228185.703112.gif models test utils\r\n LICENSE data_arrayrecord generation_1754228581.3146486.gif requirements.txt tests wandb\r\n README.md experiments generation_1754233948.4598346.gif sample.py train_dynamics.py\r\n __pycache__ freeze.freeze genie.py slurm train_lam.py\r\n checkpoints generation_1754227585.831618.gif input_pipeline tensorboard train_tokenizer.py\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h[franz.srambical@hai001.haicore.berlin:~/jafar] $ ",,terminal_output +497,1750300,"TERMINAL",0,0,"p",,terminal_output +498,1750385,"TERMINAL",0,0,"y",,terminal_output +499,1750492,"TERMINAL",0,0,"t",,terminal_output +500,1750679,"TERMINAL",0,0,"o",,terminal_output +501,1751390,"TERMINAL",0,0,"",,terminal_output +502,1752557,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +503,1752726,"TERMINAL",0,0,"s': lso': . 
""/fast/home/franz.srambical/.cursor-server/bin/d750e54bba5cffada6d7b3d18e5688ba5e944ad0/out/vs/workbench/contrib/terminal/common/scripts/shellIntegration-bash.sh""",,terminal_output +504,1752772,"TERMINAL",0,0,"\ru': source /home/franz.srambical/cleanrl/.venv/bin/activate\r\n\r",,terminal_output +505,1752833,"TERMINAL",0,0,"[1@r': sour",,terminal_output +506,1753156,"TERMINAL",0,0,"[1@c': sourc[1@e': source",,terminal_output +507,1754829,"TERMINAL",0,0,"\r[22@[franz.srambical@hai001.haicore.berlin:~/jafar] $ source",,terminal_output +508,1755714,"TERMINAL",0,0,"\r",,terminal_output +509,1755958,"TERMINAL",0,0,"c",,terminal_output +510,1756094,"TERMINAL",0,0,"d ",,terminal_output +511,1756198,"TERMINAL",0,0,".",,terminal_output +512,1756352,"TERMINAL",0,0,".",,terminal_output +513,1756403,"TERMINAL",0,0,"/",,terminal_output +514,1757369,"TERMINAL",0,0,"c",,terminal_output +515,1757659,"TERMINAL",0,0,"l",,terminal_output +516,1757717,"TERMINAL",0,0,"e",,terminal_output +517,1757786,"TERMINAL",0,0,"a",,terminal_output +518,1757867,"TERMINAL",0,0,"n",,terminal_output +519,1758008,"TERMINAL",0,0,"rl/",,terminal_output +520,1758421,"TERMINAL",0,0,"\r\n[?2004l\r]0;franz.srambical@hai-login2:~/cleanrl[?2004h[franz.srambical@hai001.haicore.berlin:~/cleanrl] $ ",,terminal_output +521,1759169,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +522,1759307,"TERMINAL",0,0,"s': ls",,terminal_output +523,1759489,"TERMINAL",0,0,"o': . ""/fast/home/franz.srambical/.cursor-server/bin/d750e54bba5cffada6d7b3d18e5688ba5e944ad0/out/vs/workbench/contrib/terminal/common/scripts/shellIntegration-bash.sh""\ru': source /home/franz.srambical/cleanrl/.venv/bin/activate\r\n\r[1@r': sour",,terminal_output +524,1759683,"TERMINAL",0,0,"[1@c': sourc",,terminal_output +525,1759740,"TERMINAL",0,0,"[1@e': source",,terminal_output +526,1760100,"TERMINAL",0,0," ': source /home/franz.srambical/cleanrl/.venv/bin/activate\r",,terminal_output +527,1761261,"TERMINAL",0,0,"\r[franz.srambical@hai001.haicore.berlin:~/cleanrl] $ source /home/franz.srambical/cleanrl/.venv/bin/activate\r",,terminal_output +528,1761372,"TERMINAL",0,0,"\r",,terminal_output +529,1761808,"TERMINAL",0,0,"s",,terminal_output +530,1761870,"TERMINAL",0,0,"o",,terminal_output +531,1761919,"TERMINAL",0,0,"u",,terminal_output +532,1761988,"TERMINAL",0,0,"r",,terminal_output +533,1762193,"TERMINAL",0,0,"ce",,terminal_output +534,1762264,"TERMINAL",0,0," ",,terminal_output +535,1762599,"TERMINAL",0,0,".v",,terminal_output +536,1762690,"TERMINAL",0,0,"e",,terminal_output +537,1762817,"TERMINAL",0,0,"n",,terminal_output +538,1762990,"TERMINAL",0,0,"v/",,terminal_output +539,1763127,"TERMINAL",0,0,"b",,terminal_output +540,1763179,"TERMINAL",0,0,"in/",,terminal_output +541,1763318,"TERMINAL",0,0,"in",,terminal_output +542,1763399,"TERMINAL",0,0,"a",,terminal_output +543,1763482,"TERMINAL",0,0,"c",,terminal_output +544,1763713,"TERMINAL",0,0,"",,terminal_output +545,1763817,"TERMINAL",0,0,"\r\n[?2004l\rbash: .venv/bin/inac: No such file or directory\r\n]0;franz.srambical@hai-login2:~/cleanrl[?2004h[franz.srambical@hai001.haicore.berlin:~/cleanrl] $ ",,terminal_output +546,1764440,"TERMINAL",0,0,"source .venv/bin/inac",,terminal_output +547,1764719,"TERMINAL",0,0,"",,terminal_output +548,1764863,"TERMINAL",0,0,"",,terminal_output +549,1765029,"TERMINAL",0,0,"",,terminal_output +550,1765209,"TERMINAL",0,0,"",,terminal_output +551,1765386,"TERMINAL",0,0,"a",,terminal_output +552,1765433,"TERMINAL",0,0,"c",,terminal_output 
+553,1765582,"TERMINAL",0,0,"tivate",,terminal_output +554,1765662,"TERMINAL",0,0,"\r\n[?2004l\r]0;franz.srambical@hai-login2:~/cleanrl[?2004h(cleanrl) [franz.srambical@hai001.haicore.berlin:~/cleanrl] $ ",,terminal_output +555,1766275,"TERMINAL",0,0,"cd ",,terminal_output +556,1766339,"TERMINAL",0,0,".",,terminal_output +557,1766529,"TERMINAL",0,0,"./",,terminal_output +558,1766649,"TERMINAL",0,0," ",,terminal_output +559,1767239,"TERMINAL",0,0,"",,terminal_output +560,1767440,"TERMINAL",0,0,"jaf",,terminal_output +561,1767597,"TERMINAL",0,0,"a",,terminal_output +562,1767749,"TERMINAL",0,0,"r/",,terminal_output +563,1767860,"TERMINAL",0,0,"\r\n[?2004l\r]0;franz.srambical@hai-login2:~/jafar[?2004h(cleanrl) [franz.srambical@hai001.haicore.berlin:~/jafar] $ ",,terminal_output +564,1768251,"TERMINAL",0,0,"p",,terminal_output +565,1768322,"TERMINAL",0,0,"y",,terminal_output +566,1768387,"TERMINAL",0,0,"t",,terminal_output +567,1768504,"TERMINAL",0,0,"o",,terminal_output +568,1768556,"TERMINAL",0,0,"n",,terminal_output +569,1768869,"TERMINAL",0,0,"",,terminal_output +570,1768999,"TERMINAL",0,0,"",,terminal_output +571,1769130,"TERMINAL",0,0,"h",,terminal_output +572,1769197,"TERMINAL",0,0,"o",,terminal_output +573,1769304,"TERMINAL",0,0,"n",,terminal_output +574,1769433,"TERMINAL",0,0," i",,terminal_output +575,1769487,"TERMINAL",0,0,"n",,terminal_output +576,1769573,"TERMINAL",0,0,"p",,terminal_output +577,1769676,"TERMINAL",0,0,"u",,terminal_output +578,1769769,"TERMINAL",0,0,"t_pipeline/",,terminal_output +579,1770918,"TERMINAL",0,0,"ge",,terminal_output +580,1770993,"TERMINAL",0,0,"n",,terminal_output +581,1771057,"TERMINAL",0,0,"e",,terminal_output +582,1771133,"TERMINAL",0,0,"r",,terminal_output +583,1771319,"TERMINAL",0,0,"ate_",,terminal_output +584,1772540,"TERMINAL",0,0,"da",,terminal_output +585,1772749,"TERMINAL",0,0,"",,terminal_output +586,1772897,"TERMINAL",0,0,"a",,terminal_output +587,1772969,"TERMINAL",0,0,"t",,terminal_output +588,1773075,"TERMINAL",0,0,"a",,terminal_output +589,1773180,"TERMINAL",0,0,"r",,terminal_output +590,1773359,"TERMINAL",0,0,"i_dataset.py ",,terminal_output +591,1776752,"input_pipeline/generate_atari_dataset.py",0,0,"",python,tab +592,1777705,"input_pipeline/generate_atari_dataset.py",800,0,"",python,selection_command +593,1778434,"input_pipeline/generate_atari_dataset.py",2251,0,"",python,selection_command +594,1778868,"input_pipeline/generate_atari_dataset.py",3555,0,"",python,selection_command +595,1779665,"input_pipeline/generate_atari_dataset.py",3523,0,"",python,selection_command +596,1779919,"input_pipeline/generate_atari_dataset.py",3487,0,"",python,selection_command +597,1779999,"input_pipeline/generate_atari_dataset.py",3454,0,"",python,selection_command +598,1780005,"input_pipeline/generate_atari_dataset.py",3432,0,"",python,selection_command +599,1780033,"input_pipeline/generate_atari_dataset.py",3427,0,"",python,selection_command +600,1780039,"input_pipeline/generate_atari_dataset.py",3398,0,"",python,selection_command +601,1780073,"input_pipeline/generate_atari_dataset.py",3376,0,"",python,selection_command +602,1780111,"input_pipeline/generate_atari_dataset.py",3343,0,"",python,selection_command +603,1780144,"input_pipeline/generate_atari_dataset.py",3320,0,"",python,selection_command +604,1780245,"input_pipeline/generate_atari_dataset.py",3343,0,"",python,selection_command +605,1780522,"input_pipeline/generate_atari_dataset.py",3376,0,"",python,selection_command 
+606,1780529,"input_pipeline/generate_atari_dataset.py",3398,0,"",python,selection_command +607,1780561,"input_pipeline/generate_atari_dataset.py",3427,0,"",python,selection_command +608,1780588,"input_pipeline/generate_atari_dataset.py",3432,0,"",python,selection_command +609,1780750,"input_pipeline/generate_atari_dataset.py",3454,0,"",python,selection_command +610,1780930,"input_pipeline/generate_atari_dataset.py",3487,0,"",python,selection_command +611,1781257,"input_pipeline/generate_atari_dataset.py",3454,0,"",python,selection_command +612,1781761,"input_pipeline/generate_atari_dataset.py",3487,0,"",python,selection_command +613,1782196,"input_pipeline/generate_atari_dataset.py",3505,0,"",python,selection_command +614,1782450,"input_pipeline/generate_atari_dataset.py",3507,0,"",python,selection_command +615,1782456,"input_pipeline/generate_atari_dataset.py",3511,0,"",python,selection_command +616,1782788,"input_pipeline/generate_atari_dataset.py",3513,0,"",python,selection_command +617,1789678,"input_pipeline/generate_coinrun_dataset.py",0,0,"",python,tab +618,1790484,"input_pipeline/generate_coinrun_dataset.py",0,0,"",python,selection_command +619,1791480,"input_pipeline/generate_coinrun_dataset.py",350,0,"",python,selection_keyboard +620,1791704,"input_pipeline/generate_coinrun_dataset.py",338,0,"",python,selection_command +621,1791957,"input_pipeline/generate_coinrun_dataset.py",350,0,"",python,selection_command +622,1792447,"input_pipeline/generate_coinrun_dataset.py",368,0,"",python,selection_command +623,1792602,"input_pipeline/generate_coinrun_dataset.py",370,0,"",python,selection_command +624,1792852,"input_pipeline/generate_coinrun_dataset.py",374,0,"",python,selection_command +625,1793167,"input_pipeline/generate_coinrun_dataset.py",376,0,"",python,selection_command +626,1793764,"input_pipeline/generate_coinrun_dataset.py",377,0,"",python,selection_command +627,1794607,"input_pipeline/generate_coinrun_dataset.py",378,0,"",python,selection_command +628,1795819,"input_pipeline/generate_coinrun_dataset.py",379,0,"",python,selection_command +629,1796007,"input_pipeline/generate_coinrun_dataset.py",380,0,"",python,selection_command +630,1797149,"input_pipeline/generate_atari_dataset.py",0,0,"",python,tab +631,1798142,"input_pipeline/generate_atari_dataset.py",3514,0,"",python,selection_command +632,1798885,"input_pipeline/generate_atari_dataset.py",3515,0,"",python,selection_command +633,1799317,"input_pipeline/generate_atari_dataset.py",3516,0,"",python,selection_command +634,1800587,"input_pipeline/generate_atari_dataset.py",3515,0,"",python,selection_command +635,1800762,"input_pipeline/generate_atari_dataset.py",3514,0,"",python,selection_command +636,1800867,"input_pipeline/generate_atari_dataset.py",3513,0,"",python,selection_command +637,1801407,"input_pipeline/generate_atari_dataset.py",3514,0,"",python,selection_command +638,1802441,"input_pipeline/generate_coinrun_dataset.py",0,0,"",python,tab +639,1803099,"input_pipeline/generate_coinrun_dataset.py",376,0,"",python,selection_command +640,1803319,"input_pipeline/generate_coinrun_dataset.py",377,0,"",python,selection_command +641,1804493,"input_pipeline/generate_coinrun_dataset.py",378,0,"",python,selection_command +642,1804862,"input_pipeline/generate_coinrun_dataset.py",379,0,"",python,selection_command +643,1806146,"input_pipeline/generate_atari_dataset.py",0,0,"",python,tab +644,1807966,"input_pipeline/generate_coinrun_dataset.py",0,0,"",python,tab 
+645,1808585,"input_pipeline/generate_coinrun_dataset.py",412,0,"",python,selection_command +646,1808771,"input_pipeline/generate_coinrun_dataset.py",445,0,"",python,selection_command +647,1809448,"input_pipeline/generate_coinrun_dataset.py",480,0,"",python,selection_command +648,1810158,"input_pipeline/generate_atari_dataset.py",0,0,"",python,tab +649,1810672,"input_pipeline/generate_atari_dataset.py",3549,0,"",python,selection_command +650,1810809,"input_pipeline/generate_atari_dataset.py",3582,0,"",python,selection_command +651,1811011,"input_pipeline/generate_atari_dataset.py",3615,0,"",python,selection_command +652,1811166,"input_pipeline/generate_atari_dataset.py",3658,0,"",python,selection_command +653,1812447,"input_pipeline/generate_atari_dataset.py",3615,0,"",python,selection_command +654,1813412,"input_pipeline/generate_atari_dataset.py",3658,0,"",python,selection_command +655,1814313,"input_pipeline/generate_coinrun_dataset.py",0,0,"",python,tab +656,1815537,"input_pipeline/generate_coinrun_dataset.py",526,0,"",python,selection_command +657,1816510,"input_pipeline/generate_atari_dataset.py",0,0,"",python,tab +658,1818060,"input_pipeline/generate_coinrun_dataset.py",0,0,"",python,tab +659,1819922,"input_pipeline/generate_coinrun_dataset.py",561,0,"",python,selection_command +660,1820062,"input_pipeline/generate_coinrun_dataset.py",587,0,"",python,selection_command +661,1820337,"input_pipeline/generate_coinrun_dataset.py",618,0,"",python,selection_command +662,1821037,"input_pipeline/generate_coinrun_dataset.py",636,0,"",python,selection_command +663,1821644,"input_pipeline/generate_atari_dataset.py",0,0,"",python,tab +664,1826448,"TERMINAL",0,0,"-",,terminal_output +665,1826557,"TERMINAL",0,0,"-",,terminal_output +666,1826716,"TERMINAL",0,0,"o",,terminal_output +667,1826866,"TERMINAL",0,0,"ut",,terminal_output +668,1826942,"TERMINAL",0,0,"p",,terminal_output +669,1827042,"TERMINAL",0,0,"ut",,terminal_output +670,1827681,"TERMINAL",0,0,"-",,terminal_output +671,1827763,"TERMINAL",0,0,"d",,terminal_output +672,1827876,"TERMINAL",0,0,"i",,terminal_output +673,1827967,"TERMINAL",0,0,"r",,terminal_output +674,1828697,"TERMINAL",0,0,"=",,terminal_output +675,1829046,"TERMINAL",0,0,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout",,terminal_output +676,1829856,"TERMINAL",0,0,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout",,terminal_output +677,1830277,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +678,1836340,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/input_pipeline/generate_atari_dataset.py"", line 29, in \r\n from utils import save_chunks # type: ignore\r\n File ""/fast/home/franz.srambical/jafar/input_pipeline/utils.py"", line 4, in \r\n from array_record.python.array_record_module import ArrayRecordWriter\r\nModuleNotFoundError: No module named 'array_record'\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/input_pipeline/generate_atari_dataset.py"", line 31, in \r\n from input_pipeline.utils import save_chunks # type: ignore\r\nModuleNotFoundError: No module named 'input_pipeline'\r\n",,terminal_output +679,1836717,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar[?2004h(cleanrl) [franz.srambical@hai001.haicore.berlin:~/jafar] $ ",,terminal_output +680,1841134,"TERMINAL",0,0,"uv",,terminal_output +681,1841213,"TERMINAL",0,0," ",,terminal_output +682,1841271,"TERMINAL",0,0,"p",,terminal_output 
+683,1841330,"TERMINAL",0,0,"i",,terminal_output +684,1841477,"TERMINAL",0,0,"p",,terminal_output +685,1841632,"TERMINAL",0,0," ",,terminal_output +686,1841790,"TERMINAL",0,0,"i",,terminal_output +687,1841889,"TERMINAL",0,0,"ns",,terminal_output +688,1841980,"TERMINAL",0,0,"t",,terminal_output +689,1842020,"TERMINAL",0,0,"a",,terminal_output +690,1842107,"TERMINAL",0,0,"l",,terminal_output +691,1842277,"TERMINAL",0,0," l",,terminal_output +692,1843142,"TERMINAL",0,0,"",,terminal_output +693,1843278,"TERMINAL",0,0,"",,terminal_output +694,1843571,"TERMINAL",0,0,"l",,terminal_output +695,1843685,"TERMINAL",0,0," ",,terminal_output +696,1843797,"TERMINAL",0,0,"a",,terminal_output +697,1843872,"TERMINAL",0,0,"r",,terminal_output +698,1844016,"TERMINAL",0,0,"r",,terminal_output +699,1844081,"TERMINAL",0,0,"a",,terminal_output +700,1844182,"TERMINAL",0,0,"y",,terminal_output +701,1852673,"TERMINAL",0,0,"-",,terminal_output +702,1852724,"TERMINAL",0,0,"r",,terminal_output +703,1852944,"TERMINAL",0,0,"ec",,terminal_output +704,1852948,"TERMINAL",0,0,"o",,terminal_output +705,1853082,"TERMINAL",0,0,"rd",,terminal_output +706,1853872,"input_pipeline/generate_atari_dataset.py",0,0,"",python,selection_command +707,1862404,"input_pipeline/generate_atari_dataset.py",628,0,"",python,selection_command +708,1862794,"input_pipeline/utils.py",0,0,"import os\nimport pickle\nimport numpy as np\nfrom array_record.python.array_record_module import ArrayRecordWriter\n\ndef save_chunks(chunks, file_idx, chunks_per_file, output_dir):\n os.makedirs(output_dir, exist_ok=True)\n\n metadata = []\n while len(chunks) >= chunks_per_file:\n chunk_batch = chunks[:chunks_per_file]\n chunks = chunks[chunks_per_file:]\n episode_path = os.path.join(output_dir, f""data_{file_idx:04d}.array_record"") \n writer = ArrayRecordWriter(str(episode_path), ""group_size:1"")\n seq_lens = []\n for chunk in chunk_batch:\n seq_len = chunk.shape[0]\n seq_lens.append(seq_len)\n chunk_record = {\n ""raw_video"": chunk.tobytes(),\n ""sequence_length"": seq_len,\n }\n writer.write(pickle.dumps(chunk_record))\n writer.close()\n file_idx += 1\n metadata.append({""path"": episode_path, ""num_chunks"": len(chunk_batch), ""avg_seq_len"": np.mean(seq_lens)})\n print(f""Created {episode_path} with {len(chunk_batch)} video chunks"")\n\n return metadata, chunks, file_idx\n\n",python,tab +709,1862794,"input_pipeline/utils.py",118,0,"",python,selection_command +710,1866381,"TERMINAL",0,0," ",,terminal_output +711,1866447,"TERMINAL",0,0,"n",,terminal_output +712,1866627,"TERMINAL",0,0,"u",,terminal_output +713,1866765,"TERMINAL",0,0,"m",,terminal_output +714,1866836,"TERMINAL",0,0,"p",,terminal_output +715,1866919,"TERMINAL",0,0,"y",,terminal_output +716,1867074,"TERMINAL",0,0," ",,terminal_output +717,1869982,"TERMINAL",0,0,"pi",,terminal_output +718,1870064,"TERMINAL",0,0,"c",,terminal_output +719,1870254,"TERMINAL",0,0,"klk",,terminal_output +720,1871668,"TERMINAL",0,0,"",,terminal_output +721,1872944,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +722,1873254,"TERMINAL",0,0,"Using Python 3.10.18 environment at: /fast/home/franz.srambical/cleanrl/.venv\r\n⠋ Resolving dependencies... \r⠙ Resolving dependencies... \r⠋ Resolving dependencies... \r⠙ Resolving dependencies... 
",,terminal_output +723,1873517,"TERMINAL",0,0,"\r⠙ array-record==0.8.1 \r⠙ numpy==2.2.6 \r⠙ absl-py==2.3.1 \r⠹ absl-py==2.3.1 \r⠹ etils==1.13.0 \r⠹ etils==1.13.0 \r⠹ fsspec==2025.9.0 \r⠹ importlib-resources==6.5.2 \r⠹ typing-extensions==4.15.0 \r⠹ zipp==3.23.0 \r⠹  \rResolved 8 packages in 248ms\r\n░░░░░░░░░░░░░░░░░░░░ [0/0] Installing wheels... \r░░░░░░░░░░░░░░░░░░░░ [0/4] Installing wheels... ",,terminal_output +724,1873698,"TERMINAL",0,0,"\r░░░░░░░░░░░░░░░░░░░░ [0/4] zipp==3.23.0 \r█████░░░░░░░░░░░░░░░ [1/4] zipp==3.23.0 \r█████░░░░░░░░░░░░░░░ [1/4] array-record==0.8.1 \r██████████░░░░░░░░░░ [2/4] array-record==0.8.1 \r██████████░░░░░░░░░░ [2/4] importlib-resources==6.5.2 \r███████████████░░░░░ [3/4] importlib-resources==6.5.2 ",,terminal_output +725,1873905,"TERMINAL",0,0,"\r███████████████░░░░░ [3/4] etils==1.13.0 \r████████████████████ [4/4] etils==1.13.0 \rInstalled 4 packages in 384ms\r\n + array-record==0.8.1\r\n + etils==1.13.0\r\n + importlib-resources==6.5.2\r\n + zipp==3.23.0\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h(cleanrl) [franz.srambical@hai001.haicore.berlin:~/jafar] $ ",,terminal_output +726,1875216,"TERMINAL",0,0,"uv pip install array-record numpy ",,terminal_output +727,1875387,"TERMINAL",0,0,"python input_pipeline/generate_atari_dataset.py --output-dir=/fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout",,terminal_output +728,1876019,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +729,1876687,"input_pipeline/generate_atari_dataset.py",0,0,"",python,tab +730,1878937,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/input_pipeline/generate_atari_dataset.py"", line 449, in \r\n envs = gym.vector.SyncVectorEnv(\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/gymnasium/vector/sync_vector_env.py"", line 53, in __init__\r\n self.envs = [env_fn() for env_fn in env_fns]\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/gymnasium/vector/sync_vector_env.py"", line 53, in \r\n self.envs = [env_fn() for env_fn in env_fns]\r\n File ""/fast/home/franz.srambical/jafar/input_pipeline/generate_atari_dataset.py"", line 121, in thunk\r\n env = gym.make(env_id)\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 741, in make\r\n env_spec = _find_spec(id)\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 527, in _find_spec\r\n _check_version_exists(ns, name, version)\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 393, in _check_version_exists\r\n _check_name_exists(ns, name)\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 370, in _check_name_exists\r\n raise error.NameNotFound(\r\ngymnasium.error.NameNotFound: Environment `BreakoutNoFrameskip` doesn't exist.\r\n",,terminal_output +731,1879348,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar[?2004h(cleanrl) [franz.srambical@hai001.haicore.berlin:~/jafar] $ ",,terminal_output +732,1886502,"input_pipeline/utils.py",0,0,"",python,tab +733,1886918,"input_pipeline/generate_atari_dataset.py",0,0,"",python,tab +734,1888154,"input_pipeline/generate_atari_dataset.py",621,0,"",python,selection_command +735,1889667,"input_pipeline/generate_atari_dataset.py",1820,0,"",python,selection_command 
+736,1958367,"input_pipeline/generate_atari_dataset.py",1820,1,"B",python,selection_command +737,1958462,"input_pipeline/generate_atari_dataset.py",1820,19,"BreakoutNoFrameskip",python,selection_command +738,1958637,"input_pipeline/generate_atari_dataset.py",1820,20,"BreakoutNoFrameskip-",python,selection_command +739,1959034,"input_pipeline/generate_atari_dataset.py",1820,21,"BreakoutNoFrameskip-v",python,selection_command +740,1959205,"input_pipeline/generate_atari_dataset.py",1820,22,"BreakoutNoFrameskip-v4",python,selection_command +741,1959511,"input_pipeline/generate_atari_dataset.py",1820,22,"ALE/Breakout-v5",python,content +742,1959511,"input_pipeline/generate_atari_dataset.py",1835,0,"",python,selection_keyboard +743,1961095,"TERMINAL",0,0,"python input_pipeline/generate_atari_dataset.py --output-dir=/fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout",,terminal_output +744,1961252,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +745,1964067,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/input_pipeline/generate_atari_dataset.py"", line 449, in \r\n envs = gym.vector.SyncVectorEnv(\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/gymnasium/vector/sync_vector_env.py"", line 53, in __init__\r\n self.envs = [env_fn() for env_fn in env_fns]\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/gymnasium/vector/sync_vector_env.py"", line 53, in \r\n self.envs = [env_fn() for env_fn in env_fns]\r\n File ""/fast/home/franz.srambical/jafar/input_pipeline/generate_atari_dataset.py"", line 121, in thunk\r\n env = gym.make(env_id)\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 741, in make\r\n env_spec = _find_spec(id)\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 527, in _find_spec\r\n _check_version_exists(ns, name, version)\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 393, in _check_version_exists\r\n _check_name_exists(ns, name)\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 356, in _check_name_exists\r\n _check_namespace_exists(ns)\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 350, in _check_namespace_exists\r\n raise error.NamespaceNotFound(f""Namespace {ns} not found. {suggestion_msg}"")\r\ngymnasium.error.NamespaceNotFound: Namespace ALE not found. 
Have you installed the proper package for ALE?\r\n",,terminal_output +746,1964477,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar[?2004h(cleanrl) [franz.srambical@hai001.haicore.berlin:~/jafar] $ ",,terminal_output +747,1970349,"TERMINAL",0,0,"uv",,terminal_output +748,1970428,"TERMINAL",0,0," ",,terminal_output +749,1970572,"TERMINAL",0,0,"pi",,terminal_output +750,1970664,"TERMINAL",0,0,"p",,terminal_output +751,1970827,"TERMINAL",0,0," ",,terminal_output +752,1971112,"TERMINAL",0,0,"insta",,terminal_output +753,1971276,"TERMINAL",0,0,"l",,terminal_output +754,1971432,"TERMINAL",0,0,"l",,terminal_output +755,1971487,"TERMINAL",0,0," ",,terminal_output +756,1995995,"TERMINAL",0,0,".",,terminal_output +757,1996249,"TERMINAL",0,0,"",,terminal_output +758,1996466,"TERMINAL",0,0,"""",,terminal_output +759,1996861,"TERMINAL",0,0,"""",,terminal_output +760,1997121,"TERMINAL",0,0,"",,terminal_output +761,1997309,"TERMINAL",0,0,".""",,terminal_output +762,1997576,"TERMINAL",0,0,"[""",,terminal_output +763,1997864,"TERMINAL",0,0,"]""",,terminal_output +764,1998152,"TERMINAL",0,0,"",,terminal_output +765,1998350,"TERMINAL",0,0,"a]""",,terminal_output +766,1998414,"TERMINAL",0,0,"t]""",,terminal_output +767,1998515,"TERMINAL",0,0,"a]""",,terminal_output +768,1998592,"TERMINAL",0,0,"r]""",,terminal_output +769,1998692,"TERMINAL",0,0,"i]""",,terminal_output +770,2000885,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +771,2001120,"TERMINAL",0,0,"Using Python 3.10.18 environment at: /fast/home/franz.srambical/cleanrl/.venv\r\n⠋ Resolving dependencies... \r⠙ Resolving dependencies... \rerror: /fast/home/franz.srambical/jafar does not appear to be a Python project, as neither `pyproject.toml` nor `setup.py` are present in the directory\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h(cleanrl) [franz.srambical@hai001.haicore.berlin:~/jafar] $ ",,terminal_output +772,2006159,"TERMINAL",0,0,"cd ",,terminal_output +773,2006255,"TERMINAL",0,0,".",,terminal_output +774,2006439,"TERMINAL",0,0,"./",,terminal_output +775,2007270,"TERMINAL",0,0,"c",,terminal_output +776,2007409,"TERMINAL",0,0,"le",,terminal_output +777,2007476,"TERMINAL",0,0,"a",,terminal_output +778,2007581,"TERMINAL",0,0,"n",,terminal_output +779,2007687,"TERMINAL",0,0,"rl/",,terminal_output +780,2008015,"TERMINAL",0,0,"\r\n[?2004l\r]0;franz.srambical@hai-login2:~/cleanrl[?2004h(cleanrl) [franz.srambical@hai001.haicore.berlin:~/cleanrl] $ ",,terminal_output +781,2008353,"TERMINAL",0,0,"cd ../cleanrl/",,terminal_output +782,2008462,"TERMINAL",0,0,"uv pip install "".[atari]""",,terminal_output +783,2008698,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +784,2008902,"TERMINAL",0,0,"⠋ Resolving dependencies... \r⠙ Resolving dependencies... \r⠋ Resolving dependencies... \r⠙ Resolving dependencies... \r⠋ Resolving dependencies... \r⠙ Resolving dependencies... 
\r⠙ cleanrl==2.0.0b1 \r⠙ cleanrl==2.0.0b1 ",,terminal_output +785,2009064,"TERMINAL",0,0,"\r⠙ gym==0.23.1 \r⠙ gymnasium==0.29.1 \r⠙ torch==2.4.1 \r⠙ ale-py==0.8.1 ",,terminal_output +786,2009110,"TERMINAL",0,0,"\r⠹ ale-py==0.8.1 ",,terminal_output +787,2009305,"TERMINAL",0,0,"\r⠹ nvidia-cuda-nvrtc-cu12==12.1.105 \r⠹ nvidia-cuda-nvrtc-cu12==12.1.105 \r⠹ nvidia-cuda-runtime-cu12==12.1.105 \r⠹ nvidia-cuda-runtime-cu12==12.1.105 \r⠹ nvidia-cuda-cupti-cu12==12.1.105 \r⠹ nvidia-cuda-cupti-cu12==12.1.105 \r⠹ nvidia-cudnn-cu12==9.1.0.70 \r⠹ nvidia-cudnn-cu12==9.1.0.70 \r⠹ nvidia-cublas-cu12==12.1.3.1 \r⠹ nvidia-cublas-cu12==12.1.3.1 \r⠹ nvidia-cufft-cu12==11.0.2.54 \r⠹ nvidia-cufft-cu12==11.0.2.54 \r⠹ nvidia-curand-cu12==10.3.2.106 \r⠹ nvidia-curand-cu12==10.3.2.106 \r⠹ nvidia-cusolver-cu12==11.4.5.107 \r⠹ nvidia-cusolver-cu12==11.4.5.107 \r⠹ nvidia-cusparse-cu12==12.1.0.106 \r⠹ nvidia-cusparse-cu12==12.1.0.106 \r⠸ autorom==0.4.2 ",,terminal_output +788,2009525,"TERMINAL",0,0,"\r⠼ autorom==0.4.2 \r⠼ autorom==0.4.2 \r⠼ opencv-python==4.12.0.88 \r⠼ shimmy==2.0.0 ",,terminal_output +789,2009604,"TERMINAL",0,0,"\r⠼ pydantic-core==2.33.2 \r⠼ pyyaml==6.0.2 ",,terminal_output +790,2009709,"TERMINAL",0,0,"\r⠴ autorom-accept-rom-license==0.6.1 ",,terminal_output +791,2009915,"TERMINAL",0,0,"\r⠦ autorom-accept-rom-license==0.6.1 ",,terminal_output +792,2010105,"TERMINAL",0,0,"\r⠧ autorom-accept-rom-license==0.6.1 ",,terminal_output +793,2010310,"TERMINAL",0,0,"\r⠇ autorom-accept-rom-license==0.6.1 ",,terminal_output +794,2010500,"TERMINAL",0,0,"\r⠋ autorom-accept-rom-license==0.6.1 ",,terminal_output +795,2010703,"TERMINAL",0,0,"\r⠙ autorom-accept-rom-license==0.6.1 ",,terminal_output +796,2010909,"TERMINAL",0,0,"\r⠹ autorom-accept-rom-license==0.6.1 ",,terminal_output +797,2011115,"TERMINAL",0,0,"\r⠸ autorom-accept-rom-license==0.6.1 ",,terminal_output +798,2011310,"TERMINAL",0,0,"\r⠼ autorom-accept-rom-license==0.6.1 ",,terminal_output +799,2011515,"TERMINAL",0,0,"\r⠴ autorom-accept-rom-license==0.6.1 ",,terminal_output +800,2011717,"TERMINAL",0,0,"\r⠦ autorom-accept-rom-license==0.6.1 ",,terminal_output +801,2011915,"TERMINAL",0,0,"\r⠧ autorom-accept-rom-license==0.6.1 ",,terminal_output +802,2012105,"TERMINAL",0,0,"\r⠇ autorom-accept-rom-license==0.6.1 ",,terminal_output +803,2012310,"TERMINAL",0,0,"\r⠋ autorom-accept-rom-license==0.6.1 ",,terminal_output +804,2012514,"TERMINAL",0,0,"\r⠙ autorom-accept-rom-license==0.6.1 ",,terminal_output +805,2012719,"TERMINAL",0,0,"\r⠹ autorom-accept-rom-license==0.6.1 ",,terminal_output +806,2012905,"TERMINAL",0,0,"\r⠸ autorom-accept-rom-license==0.6.1 ",,terminal_output +807,2013397,"TERMINAL",0,0,"\r⠼ autorom-accept-rom-license==0.6.1 \r⠼ mpmath==1.3.0 \r⠼ markupsafe==3.0.2 \r⠼ gitdb==4.0.12 \r⠼ annotated-types==0.7.0 \r⠼ typing-inspection==0.4.1 \r⠼ charset-normalizer==3.4.3 \r⠼ idna==3.10 \r⠼ urllib3==2.5.0 \r⠼ certifi==2025.8.3 \r⠼ smmap==5.0.2 \r⠼  \rResolved 81 packages in 4.28s\r\n⠋ Preparing packages... (0/0) \r⠋ Preparing packages... (0/5) \r⠙ Preparing packages... (0/5) \r Building autorom-accept-rom-license==0.6.1\r\n⠙ Preparing packages... (0/5) \r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5) \r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... 
(0/5)\r\nautorom  ------------------------------ 0 B/16.12 KiB \r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5)\r\nautorom  ------------------------------ 16.00 KiB/16.12 KiB \r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5)\r\nautorom  ------------------------------ 16.00 KiB/16.12 KiB \r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5)\r\nautorom  ------------------------------ 16.00 KiB/16.12 KiB\r\nale-py  ------------------------------ 0 B/1.58 MiB \r\r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5)\r\nautorom  ------------------------------ 16.00 KiB/16.12 KiB\r\nale-py  ------------------------------ 16.00 KiB/1.58 MiB \r\r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5)\r\nautorom  ------------------------------ 16.00 KiB/16.12 KiB\r\nale-py  ------------------------------ 16.00 KiB/1.58 MiB \r\r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5)\r\nautorom  ------------------------------ 16.00 KiB/16.12 KiB\r\nale-py  ------------------------------ 16.00 KiB/1.58 MiB \r\r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5)\r\nautorom  ------------------------------ 16.00 KiB/16.12 KiB\r\nshimmy  ------------------------------ 0 B/36.72 KiB\r\nale-py  ------------------------------ 16.00 KiB/1.58 MiB \r\r\r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5)\r\nautorom  ------------------------------ 16.00 KiB/16.12 KiB\r\nshimmy  ------------------------------ 16.00 KiB/36.72 KiB\r\nale-py  ------------------------------ 16.00 KiB/1.58 MiB \r\r\r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5)\r\nautorom  ------------------------------ 16.00 KiB/16.12 KiB\r\nshimmy  ------------------------------ 32.00 KiB/36.72 KiB\r\nale-py  ------------------------------ 16.00 KiB/1.58 MiB \r\r\r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5)\r\nautorom  ------------------------------ 16.00 KiB/16.12 KiB\r\nshimmy  ------------------------------ 32.00 KiB/36.72 KiB\r\nale-py  ------------------------------ 32.00 KiB/1.58 MiB \r\r\r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5)\r\nautorom  ------------------------------ 16.00 KiB/16.12 KiB\r\nshimmy  ------------------------------ 32.00 KiB/36.72 KiB\r\nale-py  ------------------------------ 48.00 KiB/1.58 MiB \r\r\r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... 
(0/5)\r\nautorom  ------------------------------ 16.00 KiB/16.12 KiB\r\nshimmy  ------------------------------ 32.00 KiB/36.72 KiB\r\nale-py  ------------------------------ 62.71 KiB/1.58 MiB \r\r\r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5)\r\nautorom  ------------------------------ 16.00 KiB/16.12 KiB\r\nshimmy  ------------------------------ 32.00 KiB/36.72 KiB\r\nale-py  ------------------------------ 78.71 KiB/1.58 MiB \r\r\r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5)\r\nautorom  ------------------------------ 16.00 KiB/16.12 KiB\r\nshimmy  ------------------------------ 32.00 KiB/36.72 KiB\r\nale-py  ------------------------------ 94.71 KiB/1.58 MiB \r\r\r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5)\r\nautorom  ------------------------------ 16.00 KiB/16.12 KiB\r\nshimmy  ------------------------------ 32.00 KiB/36.72 KiB\r\nale-py  ------------------------------ 110.71 KiB/1.58 MiB \r\r\r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5)\r\nshimmy  ------------------------------ 32.00 KiB/36.72 KiB\r\nale-py  ------------------------------ 558.71 KiB/1.58 MiB \r\r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5)\r\nshimmy  ------------------------------ 36.72 KiB/36.72 KiB\r\nale-py  ------------------------------ 558.71 KiB/1.58 MiB \r\r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5)\r\nale-py  ------------------------------ 1.00 MiB/1.58 MiB \r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5)\r\nale-py  ------------------------------ 1.02 MiB/1.58 MiB \r\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (0/5) \r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠹ Preparing packages... (2/5) ",,terminal_output +808,2013605,"TERMINAL",0,0,"\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠸ Preparing packages... (3/5) ",,terminal_output +809,2013795,"TERMINAL",0,0,"\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠼ Preparing packages... (3/5) ",,terminal_output +810,2013992,"TERMINAL",0,0,"\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠴ Preparing packages... (3/5) ",,terminal_output +811,2014206,"TERMINAL",0,0,"\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠦ Preparing packages... (3/5) ",,terminal_output +812,2014385,"TERMINAL",0,0,"\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Building cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠧ Preparing packages... 
(3/5) ",,terminal_output +813,2014444,"TERMINAL",0,0,"\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Built cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠧ Preparing packages... (3/5) ",,terminal_output +814,2014600,"TERMINAL",0,0,"\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Built cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠇ Preparing packages... (4/5) ",,terminal_output +815,2014815,"TERMINAL",0,0,"\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Built cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠋ Preparing packages... (4/5) ",,terminal_output +816,2015004,"TERMINAL",0,0,"\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Built cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠙ Preparing packages... (4/5) ",,terminal_output +817,2015190,"TERMINAL",0,0,"\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Built cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠹ Preparing packages... (4/5) ",,terminal_output +818,2015402,"TERMINAL",0,0,"\r\r\r Building autorom-accept-rom-license==0.6.1\r\n Built cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠸ Preparing packages... (4/5) \r\r\r Built autorom-accept-rom-license==0.6.1\r\n Built cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠸ Preparing packages... (4/5) ",,terminal_output +819,2015500,"TERMINAL",0,0,"\r\r Built cleanrl @ file:///fast/home/franz.srambical/cleanrl\r\n⠸  (5/5) \rPrepared 5 packages in 2.30s\r\n",,terminal_output +820,2015817,"TERMINAL",0,0,"Uninstalled 1 package in 229ms\r\n░░░░░░░░░░░░░░░░░░░░ [0/0] Installing wheels... \r░░░░░░░░░░░░░░░░░░░░ [0/5] Installing wheels... \r░░░░░░░░░░░░░░░░░░░░ [0/5] shimmy==1.3.0 \r████░░░░░░░░░░░░░░░░ [1/5] shimmy==1.3.0 \r████░░░░░░░░░░░░░░░░ [1/5] autorom==0.4.2 \r████████░░░░░░░░░░░░ [2/5] autorom==0.4.2 \r████████░░░░░░░░░░░░ [2/5] ale-py==0.8.1 \r████████████░░░░░░░░ [3/5] ale-py==0.8.1 \r████████████░░░░░░░░ [3/5] cleanrl==2.0.0b1 (from file:///fast/home/franz.srambical/cleanrl) \r████████████████░░░░ [4/5] cleanrl==2.0.0b1 (from file:///fast/home/franz.srambical/cleanrl) \r████████████████░░░░ [4/5] autorom-accept-rom-license==0.6.1 \r████████████████████ [5/5] autorom-accept-rom-license==0.6.1 \rInstalled 5 packages in 85ms\r\n + ale-py==0.8.1\r\n + autorom==0.4.2\r\n + autorom-accept-rom-license==0.6.1\r\n ~ cleanrl==2.0.0b1 (from file:///fast/home/franz.srambical/cleanrl)\r\n + shimmy==1.3.0\r\n",,terminal_output +821,2017013,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/cleanrl[?2004h(cleanrl) [franz.srambical@hai001.haicore.berlin:~/cleanrl] $ ",,terminal_output +822,2017796,"TERMINAL",0,0,"c",,terminal_output +823,2017905,"TERMINAL",0,0,"d .",,terminal_output +824,2018063,"TERMINAL",0,0,".",,terminal_output +825,2018119,"TERMINAL",0,0,"/",,terminal_output +826,2019139,"TERMINAL",0,0,"ja",,terminal_output +827,2019228,"TERMINAL",0,0,"f",,terminal_output +828,2019285,"TERMINAL",0,0,"a",,terminal_output +829,2019507,"TERMINAL",0,0,"r/",,terminal_output +830,2019849,"TERMINAL",0,0,"\r\n[?2004l\r]0;franz.srambical@hai-login2:~/jafar[?2004h(cleanrl) [franz.srambical@hai001.haicore.berlin:~/jafar] $ ",,terminal_output +831,2020114,"TERMINAL",0,0,"cd ../jafar/",,terminal_output +832,2020250,"TERMINAL",0,0,"uv pip install "".[atari]""",,terminal_output +833,2020930,"TERMINAL",0,0,"cd ../cleanrl/",,terminal_output +834,2021515,"TERMINAL",0,0,"uv pip install "".[atari]""",,terminal_output +835,2022140,"TERMINAL",0,0,"python input_pipeline/generate_atari_dataset.py 
--output-dir=/fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout",,terminal_output +836,2022674,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +837,2023762,"TERMINAL",0,0,"Gym has been unmaintained since 2022 and does not support NumPy 2.0 amongst other critical functionality.\r\nPlease upgrade to Gymnasium, the maintained drop-in replacement of Gym, or contact the authors of your software and request that they upgrade.\r\nSee the migration guide at https://gymnasium.farama.org/introduction/migration_guide/ for additional information.\r\n",,terminal_output +838,2023902,"TERMINAL",0,0,"/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/pygame/pkgdata.py:25: UserWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html. The pkg_resources package is slated for removal as early as 2025-11-30. Refrain from using this package or pin to Setuptools<81.\r\n from pkg_resources import resource_stream, resource_exists\r\n",,terminal_output +839,2026480,"TERMINAL",0,0,"A.L.E: Arcade Learning Environment (version 0.8.1+53f58b7)\r\n[Powered by Stella]\r\n",,terminal_output +840,2030202,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/input_pipeline/generate_atari_dataset.py:355: DeprecationWarning: Conversion of an array with ndim > 0 to a scalar is deprecated, and will error in future. Ensure you extract a single element from your array before performing this operation. (Deprecated NumPy 1.25.)\r\n self.buffer_actions[idx] = action\r\n",,terminal_output +841,2030365,"TERMINAL",0,0,"global_step=38, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/input_pipeline/generate_atari_dataset.py"", line 581, in \r\n save_chunks(\r\nTypeError: save_chunks() takes 4 positional arguments but 5 were given\r\n",,terminal_output +842,2031335,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar[?2004h(cleanrl) [franz.srambical@hai001.haicore.berlin:~/jafar] $ ",,terminal_output +843,2050170,"input_pipeline/generate_atari_dataset.py",20611,0,"",python,selection_command +844,2052536,"input_pipeline/utils.py",0,0,"",python,tab +845,2052536,"input_pipeline/utils.py",118,0,"",python,selection_command +846,2070927,"input_pipeline/generate_atari_dataset.py",0,0,"",python,tab +847,2070962,"input_pipeline/generate_atari_dataset.py",1801,0,"",python,selection_command +848,2073757,"input_pipeline/generate_atari_dataset.py",1824,0,"",python,selection_mouse +849,2080211,"input_pipeline/generate_atari_dataset.py",0,0,"",python,tab +850,2081463,"TERMINAL",0,0,"gi",,terminal_output +851,2081608,"TERMINAL",0,0,"t ",,terminal_output +852,2081712,"TERMINAL",0,0,"s",,terminal_output +853,2081872,"TERMINAL",0,0,"ta",,terminal_output +854,2081952,"TERMINAL",0,0,"s",,terminal_output +855,2082073,"TERMINAL",0,0,"h",,terminal_output +856,2082257,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +857,2082434,"TERMINAL",0,0,"Saved working directory and index state WIP on generate-atari-dataset: 35e26ae feat: trajectory collection during rainbow training\r\n",,terminal_output +858,2082524,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar[?2004h(cleanrl) [franz.srambical@hai001.haicore.berlin:~/jafar] $ ",,terminal_output +859,2093646,"input_pipeline/generate_coinrun_dataset.py",0,0,"",python,tab 
+860,2095117,"input_pipeline/generate_atari_dataset.py",0,0,"",python,tab +861,2097921,"input_pipeline/generate_atari_dataset.py",1801,36," env_id: str = ""BreakoutNoFrameskip-v4""\n",python,content +862,2100795,"input_pipeline/generate_atari_dataset.py",0,0,"Switched from branch 'generate-atari-dataset' to 'gt-actions'",python,git_branch_checkout +863,2102322,"slurm/jobs/franz/berlin/coinrun/coinrun_dynamics/coinrun_dynamics_fp32_layernorm.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:4\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=train_dynamics_fp32_layernorm_coinrun\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nDYNATYPE=""maskgit""\ntags=""dynamics coinrun 38M ${DYNATYPE}""\n\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/train_tokenizer_1e-4_3414046""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/train_lam_model_size_scaling_38M_18742""\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=${DYNATYPE} \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --lam_checkpoint=""${lam_ckpt_dir}"" \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +864,2108780,"TERMINAL",0,0,"g",,terminal_output +865,2108880,"TERMINAL",0,0,"i",,terminal_output +866,2108978,"TERMINAL",0,0,"t ",,terminal_output +867,2109087,"TERMINAL",0,0,"c",,terminal_output +868,2109209,"TERMINAL",0,0,"h",,terminal_output +869,2109270,"TERMINAL",0,0,"e",,terminal_output +870,2109337,"TERMINAL",0,0,"r",,terminal_output +871,2109466,"TERMINAL",0,0,"r",,terminal_output +872,2109522,"TERMINAL",0,0,"y",,terminal_output +873,2109768,"TERMINAL",0,0,"-",,terminal_output +874,2109907,"TERMINAL",0,0,"p",,terminal_output +875,2109958,"TERMINAL",0,0,"i",,terminal_output +876,2110026,"TERMINAL",0,0,"c",,terminal_output 
+877,2110163,"TERMINAL",0,0,"k",,terminal_output +878,2110446,"TERMINAL",0,0," ",,terminal_output +879,2134635,"TERMINAL",0,0,"35e26ae50d5ac2a837e0a9670b257ca956b0ad48",,terminal_output +880,2134779,"TERMINAL",0,0,"35e26ae50d5ac2a837e0a9670b257ca956b0ad48\r\n[?2004l\r",,terminal_output +881,2134952,"TERMINAL",0,0,"[gt-actions 26beff7] feat: trajectory collection during rainbow training\r\n Date: Sat Sep 20 17:07:07 2025 +0200\r\n 1 file changed, 782 insertions(+)\r\n create mode 100644 input_pipeline/generate_atari_dataset.py\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h(cleanrl) [franz.srambical@hai001.haicore.berlin:~/jafar] $ ",,terminal_output +882,2136727,"TERMINAL",0,0,"g",,terminal_output +883,2136793,"TERMINAL",0,0,"i",,terminal_output +884,2136927,"TERMINAL",0,0,"t ",,terminal_output +885,2137032,"TERMINAL",0,0,"s",,terminal_output +886,2137166,"TERMINAL",0,0,"ta",,terminal_output +887,2138016,"TERMINAL",0,0,"sh",,terminal_output +888,2138112,"TERMINAL",0,0," ",,terminal_output +889,2138189,"TERMINAL",0,0,"p",,terminal_output +890,2138279,"TERMINAL",0,0,"o",,terminal_output +891,2138353,"TERMINAL",0,0,"p",,terminal_output +892,2138722,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +893,2138883,"TERMINAL",0,0,"On branch gt-actions\r\nYour branch is ahead of 'origin/gt-actions' by 1 commit.\r\n (use ""git push"" to publish your local commits)\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: .gitignore\r\n\tmodified: input_pipeline/generate_atari_dataset.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tfreeze.freeze\r\n\truns/\r\n\tslurm/\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\nDropped refs/stash@{0} (104962de4bc3c792be982238cf873448472a503e)\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h(cleanrl) [franz.srambical@hai001.haicore.berlin:~/jafar] $ ",,terminal_output +894,2140677,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +895,2140829,"TERMINAL",0,0,"p': git stash pop",,terminal_output +896,2140927,"TERMINAL",0,0,"y': python input_pipeline/generate_atari_dataset.py --output-dir=/fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout",,terminal_output +897,2140997,"TERMINAL",0,0,"\r[1@t': python input_pipeline/generate_atari_dataset.py\r",,terminal_output +898,2141074,"TERMINAL",0,0,"[1@h': pyth",,terminal_output +899,2141140,"TERMINAL",0,0,"[1@o': pytho",,terminal_output +900,2141241,"TERMINAL",0,0,"[1@n': python",,terminal_output +901,2141382,"TERMINAL",0,0,"[1@ ': python ",,terminal_output +902,2141868,"TERMINAL",0,0,"\rcleanrl) [franz.srambical@hai001.haicore.berlin:~/jafar] $ python input_pipeline/generate_atari_dataset.py --output-dir=/fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout\r\n\r\r\n[?2004l\r",,terminal_output +903,2142879,"TERMINAL",0,0,"Gym has been unmaintained since 2022 and does not support NumPy 2.0 amongst other critical functionality.\r\nPlease upgrade to Gymnasium, the maintained drop-in replacement of Gym, or contact the authors of your software and request that they upgrade.\r\nSee the migration guide at https://gymnasium.farama.org/introduction/migration_guide/ for additional information.\r\n/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/pygame/pkgdata.py:25: UserWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html. 
The pkg_resources package is slated for removal as early as 2025-11-30. Refrain from using this package or pin to Setuptools<81.\r\n from pkg_resources import resource_stream, resource_exists\r\n^CTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/input_pipeline/generate_atari_dataset.py"", line 10, in \r\n import gymnasium as gym\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/gymnasium/__init__.py"", line 12, in \r\n from gymnasium.envs.registration import (\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/gymnasium/envs/__init__.py"", line 387, in \r\n load_plugin_envs()\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 594, in load_plugin_envs\r\n fn()\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 304, in register_gymnasium_envs\r\n _register_atari_envs()\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 244, in _register_atari_envs\r\n _register_atari_configs(\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 191, in _register_atari_configs\r\n register(\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 691, in register\r\n _check_spec_register(new_spec)\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 447, in _check_spec_register\r\n unversioned_spec = next(\r\n File ""/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 452, in \r\n and env_spec.name == testing_spec.name\r\nKeyboardInterrupt\r\n\r\n]0;franz.srambical@hai-login2:~/jafar[?2004h(cleanrl) [franz.srambical@hai001.haicore.berlin:~/jafar] $ ",,terminal_output +904,2143486,"TERMINAL",0,0,"python input_pipeline/generate_atari_dataset.py --output-dir=/fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout",,terminal_output +905,2143716,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +906,2144368,"TERMINAL",0,0,"Gym has been unmaintained since 2022 and does not support NumPy 2.0 amongst other critical functionality.\r\nPlease upgrade to Gymnasium, the maintained drop-in replacement of Gym, or contact the authors of your software and request that they upgrade.\r\nSee the migration guide at https://gymnasium.farama.org/introduction/migration_guide/ for additional information.\r\n",,terminal_output +907,2144419,"TERMINAL",0,0,"/fast/home/franz.srambical/cleanrl/.venv/lib/python3.10/site-packages/pygame/pkgdata.py:25: UserWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html. The pkg_resources package is slated for removal as early as 2025-11-30. Refrain from using this package or pin to Setuptools<81.\r\n from pkg_resources import resource_stream, resource_exists\r\n",,terminal_output +908,2146928,"TERMINAL",0,0,"A.L.E: Arcade Learning Environment (version 0.8.1+53f58b7)\r\n[Powered by Stella]\r\n",,terminal_output +909,2148882,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/input_pipeline/generate_atari_dataset.py:355: DeprecationWarning: Conversion of an array with ndim > 0 to a scalar is deprecated, and will error in future. Ensure you extract a single element from your array before performing this operation. 
(Deprecated NumPy 1.25.)\r\n self.buffer_actions[idx] = action\r\n",,terminal_output +910,2149041,"TERMINAL",0,0,"global_step=38, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +911,2149387,"TERMINAL",0,0,"global_step=132, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +912,2149664,"TERMINAL",0,0,"global_step=205, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 73 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +913,2149846,"TERMINAL",0,0,"global_step=251, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +914,2150131,"TERMINAL",0,0,"global_step=330, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +915,2150372,"TERMINAL",0,0,"global_step=391, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +916,2150494,"TERMINAL",0,0,"global_step=423, episodic_return=[1.]\r\nWarning: Inconsistent chunk_sizes. Episode has 32 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +917,2150803,"TERMINAL",0,0,"global_step=502, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +918,2151206,"TERMINAL",0,0,"global_step=581, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +919,2151366,"TERMINAL",0,0,"global_step=660, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +920,2151722,"TERMINAL",0,0,"global_step=754, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +921,2151924,"TERMINAL",0,0,"global_step=808, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +922,2152139,"TERMINAL",0,0,"global_step=862, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +923,2152347,"TERMINAL",0,0,"global_step=916, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +924,2152605,"TERMINAL",0,0,"global_step=981, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 65 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +925,2152814,"TERMINAL",0,0,"global_step=1027, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +926,2153009,"TERMINAL",0,0,"global_step=1086, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +927,2153325,"TERMINAL",0,0,"global_step=1172, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +928,2153976,"TERMINAL",0,0,"global_step=1266, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\nglobal_step=1352, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +929,2154195,"TERMINAL",0,0,"global_step=1406, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +930,2154451,"TERMINAL",0,0,"global_step=1479, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 73 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +931,2155016,"TERMINAL",0,0,"global_step=1573, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\nglobal_step=1627, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +932,2155316,"TERMINAL",0,0,"global_step=1713, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +933,2155506,"TERMINAL",0,0,"global_step=1759, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +934,2155758,"TERMINAL",0,0,"global_step=1831, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. 
Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +935,2156102,"TERMINAL",0,0,"global_step=1917, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +936,2156393,"TERMINAL",0,0,"global_step=2003, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +937,2156732,"TERMINAL",0,0,"global_step=2089, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +938,2157058,"TERMINAL",0,0,"global_step=2183, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +939,2157396,"TERMINAL",0,0,"global_step=2277, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +940,2157568,"TERMINAL",0,0,"global_step=2316, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +941,2157909,"TERMINAL",0,0,"global_step=2410, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +942,2158135,"TERMINAL",0,0,"global_step=2471, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +943,2158346,"TERMINAL",0,0,"global_step=2524, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +944,2158555,"TERMINAL",0,0,"global_step=2578, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +945,2158709,"TERMINAL",0,0,"global_step=2617, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +946,2158887,"TERMINAL",0,0,"global_step=2663, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +947,2159163,"TERMINAL",0,0,"global_step=2742, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +948,2159408,"TERMINAL",0,0,"global_step=2796, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +949,2159737,"TERMINAL",0,0,"global_step=2890, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +950,2160036,"TERMINAL",0,0,"global_step=2969, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +951,2160300,"TERMINAL",0,0,"global_step=3039, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 70 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +952,2160525,"TERMINAL",0,0,"global_step=3085, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +953,2160702,"TERMINAL",0,0,"global_step=3139, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +954,2160919,"TERMINAL",0,0,"global_step=3200, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +955,2161266,"TERMINAL",0,0,"global_step=3294, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +956,2161497,"TERMINAL",0,0,"global_step=3355, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +957,2161749,"TERMINAL",0,0,"global_step=3409, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +958,2161880,"TERMINAL",0,0,"global_step=3448, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +959,2162217,"TERMINAL",0,0,"global_step=3542, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +960,2162405,"TERMINAL",0,0,"global_step=3588, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +961,2162575,"TERMINAL",0,0,"global_step=3634, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +962,2162813,"TERMINAL",0,0,"global_step=3699, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 65 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +963,2163039,"TERMINAL",0,0,"global_step=3757, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 58 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +964,2163215,"TERMINAL",0,0,"global_step=3803, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +965,2163415,"TERMINAL",0,0,"global_step=3842, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +966,2163635,"TERMINAL",0,0,"global_step=3907, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 65 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +967,2163876,"TERMINAL",0,0,"global_step=3966, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +968,2164091,"TERMINAL",0,0,"global_step=4020, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +969,2164293,"TERMINAL",0,0,"global_step=4074, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +970,2164506,"TERMINAL",0,0,"global_step=4127, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +971,2164682,"TERMINAL",0,0,"global_step=4173, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +972,2164875,"TERMINAL",0,0,"global_step=4219, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +973,2165099,"TERMINAL",0,0,"global_step=4278, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +974,2165374,"TERMINAL",0,0,"global_step=4349, episodic_return=[12.]\r\nWarning: Inconsistent chunk_sizes. Episode has 71 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +975,2165565,"TERMINAL",0,0,"global_step=4395, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +976,2165762,"TERMINAL",0,0,"global_step=4448, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +977,2165902,"TERMINAL",0,0,"global_step=4480, episodic_return=[1.]\r\nWarning: Inconsistent chunk_sizes. Episode has 32 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +978,2166138,"TERMINAL",0,0,"global_step=4541, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +979,2166376,"TERMINAL",0,0,"global_step=4600, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +980,2166725,"TERMINAL",0,0,"global_step=4694, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +981,2166917,"TERMINAL",0,0,"global_step=4740, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +982,2167095,"TERMINAL",0,0,"global_step=4786, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +983,2167293,"TERMINAL",0,0,"global_step=4840, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +984,2167637,"TERMINAL",0,0,"global_step=4901, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +985,2167989,"TERMINAL",0,0,"global_step=4955, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +986,2167989,"TERMINAL",0,0,"global_step=5009, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +987,2168121,"TERMINAL",0,0,"global_step=5048, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +988,2168360,"TERMINAL",0,0,"global_step=5102, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +989,2168527,"TERMINAL",0,0,"global_step=5148, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +990,2168708,"TERMINAL",0,0,"global_step=5194, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +991,2169048,"TERMINAL",0,0,"global_step=5288, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +992,2169217,"TERMINAL",0,0,"global_step=5334, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +993,2169465,"TERMINAL",0,0,"global_step=5395, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +994,2169980,"TERMINAL",0,0,"global_step=5489, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +995,2170028,"TERMINAL",0,0,"global_step=5543, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +996,2170328,"TERMINAL",0,0,"global_step=5629, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +997,2170598,"TERMINAL",0,0,"global_step=5700, episodic_return=[12.]\r\nWarning: Inconsistent chunk_sizes. Episode has 71 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +998,2170841,"TERMINAL",0,0,"global_step=5765, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 65 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +999,2171118,"TERMINAL",0,0,"global_step=5835, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 70 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1000,2171409,"TERMINAL",0,0,"global_step=5921, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1001,2171749,"TERMINAL",0,0,"global_step=6015, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1002,2172092,"TERMINAL",0,0,"global_step=6101, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1003,2172265,"TERMINAL",0,0,"global_step=6147, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1004,2172570,"TERMINAL",0,0,"global_step=6233, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1005,2173057,"TERMINAL",0,0,"global_step=6327, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1006,2173105,"TERMINAL",0,0,"global_step=6380, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1007,2173349,"TERMINAL",0,0,"global_step=6439, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1008,2173495,"TERMINAL",0,0,"Created /fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout/train/data_0000.array_record with 100 video chunks\r\n",,terminal_output +1009,2173678,"TERMINAL",0,0,"global_step=6493, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1010,2173909,"TERMINAL",0,0,"global_step=6554, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1011,2174113,"TERMINAL",0,0,"global_step=6608, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1012,2174374,"TERMINAL",0,0,"global_step=6680, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1013,2174712,"TERMINAL",0,0,"global_step=6766, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. 
Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1014,2174887,"TERMINAL",0,0,"global_step=6812, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1015,2175090,"TERMINAL",0,0,"global_step=6866, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1016,2175316,"TERMINAL",0,0,"global_step=6920, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1017,2175564,"TERMINAL",0,0,"global_step=6992, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1018,2175859,"TERMINAL",0,0,"global_step=7064, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1019,2176478,"TERMINAL",0,0,"global_step=7150, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\nglobal_step=7215, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 65 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1020,2176636,"TERMINAL",0,0,"global_step=7268, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1021,2176808,"TERMINAL",0,0,"global_step=7314, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1022,2177037,"TERMINAL",0,0,"global_step=7368, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1023,2177358,"TERMINAL",0,0,"global_step=7454, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1024,2177550,"TERMINAL",0,0,"global_step=7508, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1025,2177876,"TERMINAL",0,0,"global_step=7581, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 73 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1026,2178150,"TERMINAL",0,0,"global_step=7667, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1027,2178474,"TERMINAL",0,0,"global_step=7761, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1028,2178819,"TERMINAL",0,0,"global_step=7855, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1029,2179163,"TERMINAL",0,0,"global_step=7941, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1030,2179467,"TERMINAL",0,0,"global_step=8035, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1031,2179670,"TERMINAL",0,0,"global_step=8081, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1032,2179873,"TERMINAL",0,0,"global_step=8135, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1033,2180128,"TERMINAL",0,0,"global_step=8205, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 70 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1034,2180414,"TERMINAL",0,0,"global_step=8284, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1035,2180664,"TERMINAL",0,0,"global_step=8354, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 70 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1036,2180936,"TERMINAL",0,0,"global_step=8413, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1037,2181470,"TERMINAL",0,0,"global_step=8499, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1038,2181588,"TERMINAL",0,0,"global_step=8545, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1039,2181876,"TERMINAL",0,0,"global_step=8631, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1040,2181923,"TERMINAL",0,0,"global_step=8685, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1041,2182284,"TERMINAL",0,0,"global_step=8764, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1042,2182507,"TERMINAL",0,0,"global_step=8843, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1043,2182888,"TERMINAL",0,0,"global_step=8929, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1044,2183020,"TERMINAL",0,0,"global_step=8975, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1045,2183243,"TERMINAL",0,0,"global_step=9034, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1046,2183469,"TERMINAL",0,0,"global_step=9088, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1047,2183680,"TERMINAL",0,0,"global_step=9142, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1048,2183966,"TERMINAL",0,0,"global_step=9221, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1049,2184175,"TERMINAL",0,0,"global_step=9274, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1050,2184477,"TERMINAL",0,0,"global_step=9360, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1051,2184698,"TERMINAL",0,0,"global_step=9406, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1052,2184992,"TERMINAL",0,0,"global_step=9492, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1053,2185270,"TERMINAL",0,0,"global_step=9538, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1054,2185591,"TERMINAL",0,0,"global_step=9592, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\nglobal_step=9645, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1055,2185847,"TERMINAL",0,0,"global_step=9717, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1056,2186064,"TERMINAL",0,0,"global_step=9771, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1057,2186276,"TERMINAL",0,0,"global_step=9825, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1058,2186617,"TERMINAL",0,0,"global_step=9911, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1059,2186879,"TERMINAL",0,0,"global_step=9990, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1060,2187073,"TERMINAL",0,0,"global_step=10044, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1061,2187319,"TERMINAL",0,0,"global_step=10105, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1062,2187567,"TERMINAL",0,0,"global_step=10164, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1063,2187824,"TERMINAL",0,0,"global_step=10236, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1064,2188163,"TERMINAL",0,0,"global_step=10330, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1065,2188516,"TERMINAL",0,0,"global_step=10424, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1066,2188707,"TERMINAL",0,0,"global_step=10477, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1067,2188908,"TERMINAL",0,0,"global_step=10531, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1068,2189099,"TERMINAL",0,0,"global_step=10577, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1069,2189287,"TERMINAL",0,0,"global_step=10623, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1070,2189602,"TERMINAL",0,0,"global_step=10709, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1071,2189837,"TERMINAL",0,0,"global_step=10768, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1072,2190127,"TERMINAL",0,0,"global_step=10847, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1073,2190418,"TERMINAL",0,0,"global_step=10926, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1074,2190683,"TERMINAL",0,0,"global_step=11005, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1075,2190850,"TERMINAL",0,0,"global_step=11044, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1076,2191098,"TERMINAL",0,0,"global_step=11103, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1077,2191263,"TERMINAL",0,0,"global_step=11149, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1078,2191577,"TERMINAL",0,0,"global_step=11235, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1079,2191872,"TERMINAL",0,0,"global_step=11314, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1080,2192208,"TERMINAL",0,0,"global_step=11408, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1081,2192444,"TERMINAL",0,0,"global_step=11462, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1082,2193029,"TERMINAL",0,0,"global_step=11556, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\nglobal_step=11609, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1083,2193289,"TERMINAL",0,0,"global_step=11688, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1084,2193563,"TERMINAL",0,0,"global_step=11774, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1085,2193872,"TERMINAL",0,0,"global_step=11852, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 78 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1086,2194163,"TERMINAL",0,0,"global_step=11938, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1087,2194437,"TERMINAL",0,0,"global_step=12011, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 73 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1088,2194649,"TERMINAL",0,0,"global_step=12065, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1089,2194897,"TERMINAL",0,0,"global_step=12124, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1090,2195124,"TERMINAL",0,0,"global_step=12190, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 66 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1091,2195305,"TERMINAL",0,0,"global_step=12222, episodic_return=[1.]\r\nWarning: Inconsistent chunk_sizes. Episode has 32 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1092,2195595,"TERMINAL",0,0,"global_step=12308, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1093,2195892,"TERMINAL",0,0,"global_step=12394, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1094,2196141,"TERMINAL",0,0,"global_step=12448, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1095,2196336,"TERMINAL",0,0,"global_step=12501, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1096,2196623,"TERMINAL",0,0,"global_step=12580, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1097,2196817,"TERMINAL",0,0,"global_step=12633, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1098,2197067,"TERMINAL",0,0,"global_step=12694, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1099,2197312,"TERMINAL",0,0,"global_step=12764, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 70 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1100,2197501,"TERMINAL",0,0,"global_step=12810, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1101,2197742,"TERMINAL",0,0,"global_step=12871, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1102,2197997,"TERMINAL",0,0,"global_step=12944, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 73 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1103,2198232,"TERMINAL",0,0,"global_step=12998, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1104,2198388,"TERMINAL",0,0,"global_step=13044, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1105,2198592,"TERMINAL",0,0,"global_step=13090, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1106,2198692,"TERMINAL",0,0,"Created /fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout/train/data_0001.array_record with 100 video chunks\r\n",,terminal_output +1107,2198949,"TERMINAL",0,0,"global_step=13149, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1108,2199148,"TERMINAL",0,0,"global_step=13203, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1109,2199332,"TERMINAL",0,0,"global_step=13257, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1110,2199497,"TERMINAL",0,0,"global_step=13296, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1111,2199655,"TERMINAL",0,0,"global_step=13335, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1112,2199867,"TERMINAL",0,0,"global_step=13381, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1113,2200048,"TERMINAL",0,0,"global_step=13435, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1114,2200253,"TERMINAL",0,0,"global_step=13489, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1115,2200465,"TERMINAL",0,0,"global_step=13535, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. 
Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1116,2200662,"TERMINAL",0,0,"global_step=13589, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1117,2200893,"TERMINAL",0,0,"global_step=13650, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1118,2201079,"TERMINAL",0,0,"global_step=13696, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1119,2201399,"TERMINAL",0,0,"global_step=13782, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1120,2201675,"TERMINAL",0,0,"global_step=13854, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1121,2201994,"TERMINAL",0,0,"global_step=13940, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1122,2202192,"TERMINAL",0,0,"global_step=13994, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1123,2202409,"TERMINAL",0,0,"global_step=14053, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1124,2202643,"TERMINAL",0,0,"global_step=14106, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1125,2202792,"TERMINAL",0,0,"global_step=14145, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1126,2203107,"TERMINAL",0,0,"global_step=14231, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1127,2203365,"TERMINAL",0,0,"global_step=14285, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1128,2203506,"TERMINAL",0,0,"global_step=14331, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1129,2203950,"TERMINAL",0,0,"global_step=14425, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1130,2204134,"TERMINAL",0,0,"global_step=14504, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1131,2204405,"TERMINAL",0,0,"global_step=14574, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 70 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1132,2204589,"TERMINAL",0,0,"global_step=14620, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1133,2204755,"TERMINAL",0,0,"global_step=14659, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1134,2204956,"TERMINAL",0,0,"global_step=14713, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1135,2205137,"TERMINAL",0,0,"global_step=14759, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1136,2205470,"TERMINAL",0,0,"global_step=14845, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1137,2205659,"TERMINAL",0,0,"global_step=14899, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1138,2205878,"TERMINAL",0,0,"global_step=14952, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1139,2206069,"TERMINAL",0,0,"global_step=15005, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1140,2206424,"TERMINAL",0,0,"global_step=15099, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1141,2206554,"TERMINAL",0,0,"global_step=15131, episodic_return=[1.]\r\nWarning: Inconsistent chunk_sizes. Episode has 32 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1142,2206759,"TERMINAL",0,0,"global_step=15185, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1143,2207099,"TERMINAL",0,0,"global_step=15279, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1144,2207449,"TERMINAL",0,0,"global_step=15373, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1145,2207624,"TERMINAL",0,0,"global_step=15419, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1146,2207814,"TERMINAL",0,0,"global_step=15465, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1147,2208354,"TERMINAL",0,0,"global_step=15551, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1148,2208437,"TERMINAL",0,0,"global_step=15637, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1149,2208624,"TERMINAL",0,0,"global_step=15683, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1150,2208819,"TERMINAL",0,0,"global_step=15729, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1151,2209124,"TERMINAL",0,0,"global_step=15815, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1152,2209453,"TERMINAL",0,0,"global_step=15909, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1153,2209888,"TERMINAL",0,0,"global_step=16003, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1154,2209949,"TERMINAL",0,0,"global_step=16042, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1155,2210149,"TERMINAL",0,0,"global_step=16088, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1156,2210339,"TERMINAL",0,0,"global_step=16134, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1157,2210599,"TERMINAL",0,0,"global_step=16204, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 70 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1158,2210819,"TERMINAL",0,0,"global_step=16257, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1159,2211467,"TERMINAL",0,0,"global_step=16336, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\nglobal_step=16382, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\nglobal_step=16428, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1160,2211774,"TERMINAL",0,0,"global_step=16507, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1161,2211984,"TERMINAL",0,0,"global_step=16553, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1162,2212222,"TERMINAL",0,0,"global_step=16625, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1163,2212489,"TERMINAL",0,0,"global_step=16695, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 70 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1164,2212817,"TERMINAL",0,0,"global_step=16789, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1165,2212970,"TERMINAL",0,0,"global_step=16821, episodic_return=[1.]\r\nWarning: Inconsistent chunk_sizes. Episode has 32 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1166,2213575,"TERMINAL",0,0,"global_step=16907, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. 
Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\nglobal_step=16966, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1167,2213720,"TERMINAL",0,0,"global_step=17020, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1168,2213885,"TERMINAL",0,0,"global_step=17059, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1169,2214029,"TERMINAL",0,0,"global_step=17098, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1170,2214269,"TERMINAL",0,0,"global_step=17157, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1171,2214554,"TERMINAL",0,0,"global_step=17236, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1172,2214760,"TERMINAL",0,0,"global_step=17282, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1173,2214944,"TERMINAL",0,0,"global_step=17336, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1174,2215201,"TERMINAL",0,0,"global_step=17395, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1175,2215487,"TERMINAL",0,0,"global_step=17481, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1176,2215684,"TERMINAL",0,0,"global_step=17535, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1177,2216004,"TERMINAL",0,0,"global_step=17621, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1178,2216210,"TERMINAL",0,0,"global_step=17674, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1179,2216415,"TERMINAL",0,0,"global_step=17720, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1180,2216544,"TERMINAL",0,0,"global_step=17752, episodic_return=[1.]\r\nWarning: Inconsistent chunk_sizes. Episode has 32 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1181,2216844,"TERMINAL",0,0,"global_step=17831, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1182,2217044,"TERMINAL",0,0,"global_step=17877, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1183,2217248,"TERMINAL",0,0,"global_step=17936, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1184,2217460,"TERMINAL",0,0,"global_step=17989, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1185,2217759,"TERMINAL",0,0,"global_step=18068, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1186,2217948,"TERMINAL",0,0,"global_step=18114, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1187,2218269,"TERMINAL",0,0,"global_step=18200, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1188,2218485,"TERMINAL",0,0,"global_step=18259, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1189,2218714,"TERMINAL",0,0,"global_step=18313, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1190,2218909,"TERMINAL",0,0,"global_step=18366, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1191,2219239,"TERMINAL",0,0,"global_step=18452, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1192,2219583,"TERMINAL",0,0,"global_step=18546, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1193,2219804,"TERMINAL",0,0,"global_step=18605, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1194,2220112,"TERMINAL",0,0,"global_step=18691, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1195,2220324,"TERMINAL",0,0,"global_step=18744, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1196,2220605,"TERMINAL",0,0,"global_step=18823, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1197,2220855,"TERMINAL",0,0,"global_step=18877, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1198,2221012,"TERMINAL",0,0,"global_step=18923, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1199,2221409,"TERMINAL",0,0,"global_step=19017, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1200,2221659,"TERMINAL",0,0,"global_step=19096, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1201,2221977,"TERMINAL",0,0,"global_step=19182, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1202,2222179,"TERMINAL",0,0,"global_step=19236, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1203,2222374,"TERMINAL",0,0,"global_step=19275, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1204,2222443,"TERMINAL",0,0,"Created /fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout/train/data_0002.array_record with 100 video chunks\r\n",,terminal_output +1205,2222654,"TERMINAL",0,0,"global_step=19329, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. 
Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1206,2222869,"TERMINAL",0,0,"global_step=19375, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1207,2223035,"TERMINAL",0,0,"global_step=19429, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1208,2223370,"TERMINAL",0,0,"global_step=19515, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1209,2223700,"TERMINAL",0,0,"global_step=19609, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1210,2224023,"TERMINAL",0,0,"global_step=19703, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1211,2224334,"TERMINAL",0,0,"global_step=19789, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1212,2224699,"TERMINAL",0,0,"global_step=19883, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1213,2224914,"TERMINAL",0,0,"global_step=19936, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1214,2225082,"TERMINAL",0,0,"global_step=19982, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1215,2225289,"TERMINAL",0,0,"global_step=20036, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1216,2225594,"TERMINAL",0,0,"global_step=20122, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1217,2225864,"TERMINAL",0,0,"global_step=20168, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1218,2226074,"TERMINAL",0,0,"global_step=20247, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1219,2226317,"TERMINAL",0,0,"global_step=20317, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 70 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1220,2226519,"TERMINAL",0,0,"global_step=20371, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1221,2226675,"TERMINAL",0,0,"global_step=20410, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1222,2226884,"TERMINAL",0,0,"global_step=20464, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1223,2227189,"TERMINAL",0,0,"global_step=20543, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1224,2227364,"TERMINAL",0,0,"global_step=20589, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1225,2227726,"TERMINAL",0,0,"global_step=20675, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1226,2227911,"TERMINAL",0,0,"global_step=20733, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 58 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1227,2228234,"TERMINAL",0,0,"global_step=20819, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1228,2228469,"TERMINAL",0,0,"global_step=20884, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 65 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1229,2228767,"TERMINAL",0,0,"global_step=20970, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1230,2229114,"TERMINAL",0,0,"global_step=21064, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1231,2229340,"TERMINAL",0,0,"global_step=21123, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1232,2229649,"TERMINAL",0,0,"global_step=21209, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1233,2229847,"TERMINAL",0,0,"global_step=21255, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1234,2230052,"TERMINAL",0,0,"global_step=21309, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1235,2230187,"TERMINAL",0,0,"global_step=21341, episodic_return=[1.]\r\nWarning: Inconsistent chunk_sizes. Episode has 32 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1236,2230501,"TERMINAL",0,0,"global_step=21427, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1237,2230694,"TERMINAL",0,0,"global_step=21473, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1238,2230964,"TERMINAL",0,0,"global_step=21546, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 73 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1239,2231129,"TERMINAL",0,0,"global_step=21585, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1240,2231332,"TERMINAL",0,0,"global_step=21631, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1241,2231633,"TERMINAL",0,0,"global_step=21717, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1242,2231884,"TERMINAL",0,0,"global_step=21789, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1243,2232168,"TERMINAL",0,0,"global_step=21868, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1244,2232379,"TERMINAL",0,0,"global_step=21914, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1245,2232687,"TERMINAL",0,0,"global_step=22000, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1246,2232959,"TERMINAL",0,0,"global_step=22078, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 78 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1247,2233164,"TERMINAL",0,0,"global_step=22124, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1248,2233464,"TERMINAL",0,0,"global_step=22210, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1249,2233720,"TERMINAL",0,0,"global_step=22273, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 63 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1250,2234044,"TERMINAL",0,0,"global_step=22327, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1251,2234111,"TERMINAL",0,0,"global_step=22373, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1252,2234414,"TERMINAL",0,0,"global_step=22459, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1253,2234729,"TERMINAL",0,0,"global_step=22545, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1254,2235417,"TERMINAL",0,0,"global_step=22631, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\nglobal_step=22709, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 78 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1255,2235594,"TERMINAL",0,0,"global_step=22788, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1256,2235929,"TERMINAL",0,0,"global_step=22874, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1257,2236154,"TERMINAL",0,0,"global_step=22933, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1258,2236350,"TERMINAL",0,0,"global_step=22979, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1259,2236634,"TERMINAL",0,0,"global_step=23058, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1260,2236961,"TERMINAL",0,0,"global_step=23144, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1261,2237225,"TERMINAL",0,0,"global_step=23216, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1262,2237574,"TERMINAL",0,0,"global_step=23310, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1263,2237867,"TERMINAL",0,0,"global_step=23389, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1264,2238024,"TERMINAL",0,0,"global_step=23428, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1265,2238249,"TERMINAL",0,0,"global_step=23482, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1266,2238459,"TERMINAL",0,0,"global_step=23536, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1267,2238914,"TERMINAL",0,0,"global_step=23630, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1268,2239110,"TERMINAL",0,0,"global_step=23716, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1269,2239298,"TERMINAL",0,0,"global_step=23762, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1270,2239564,"TERMINAL",0,0,"global_step=23834, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1271,2239804,"TERMINAL",0,0,"global_step=23892, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 58 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1272,2240120,"TERMINAL",0,0,"global_step=23986, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1273,2240309,"TERMINAL",0,0,"global_step=24032, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1274,2240554,"TERMINAL",0,0,"global_step=24093, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1275,2240755,"TERMINAL",0,0,"global_step=24147, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1276,2241069,"TERMINAL",0,0,"global_step=24233, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1277,2241370,"TERMINAL",0,0,"global_step=24312, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1278,2241574,"TERMINAL",0,0,"global_step=24371, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1279,2241809,"TERMINAL",0,0,"global_step=24425, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1280,2242012,"TERMINAL",0,0,"global_step=24479, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1281,2242194,"TERMINAL",0,0,"global_step=24526, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 47 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1282,2242380,"TERMINAL",0,0,"global_step=24572, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1283,2242680,"TERMINAL",0,0,"global_step=24658, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1284,2242902,"TERMINAL",0,0,"global_step=24712, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1285,2243194,"TERMINAL",0,0,"global_step=24791, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1286,2243377,"TERMINAL",0,0,"global_step=24837, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1287,2243955,"TERMINAL",0,0,"global_step=24931, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1288,2244039,"TERMINAL",0,0,"global_step=25017, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1289,2244163,"TERMINAL",0,0,"global_step=25049, episodic_return=[1.]\r\nWarning: Inconsistent chunk_sizes. Episode has 32 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1290,2244490,"TERMINAL",0,0,"global_step=25135, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1291,2244704,"TERMINAL",0,0,"global_step=25189, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1292,2244949,"TERMINAL",0,0,"global_step=25248, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1293,2245124,"TERMINAL",0,0,"global_step=25302, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1294,2245324,"TERMINAL",0,0,"global_step=25356, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1295,2245670,"TERMINAL",0,0,"global_step=25450, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1296,2245899,"TERMINAL",0,0,"global_step=25509, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1297,2246290,"TERMINAL",0,0,"global_step=25603, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1298,2246469,"TERMINAL",0,0,"global_step=25657, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1299,2246619,"TERMINAL",0,0,"global_step=25703, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1300,2246954,"TERMINAL",0,0,"global_step=25797, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1301,2247179,"TERMINAL",0,0,"global_step=25851, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1302,2247357,"TERMINAL",0,0,"global_step=25890, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1303,2247579,"TERMINAL",0,0,"global_step=25948, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 58 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1304,2247703,"TERMINAL",0,0,"Created /fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout/train/data_0003.array_record with 100 video chunks\r\n",,terminal_output +1305,2248035,"TERMINAL",0,0,"global_step=26042, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1306,2248169,"TERMINAL",0,0,"global_step=26074, episodic_return=[1.]\r\nWarning: Inconsistent chunk_sizes. Episode has 32 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1307,2248619,"TERMINAL",0,0,"global_step=26168, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1308,2248745,"TERMINAL",0,0,"global_step=26227, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1309,2249086,"TERMINAL",0,0,"global_step=26321, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. 
Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1310,2249286,"TERMINAL",0,0,"global_step=26375, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1311,2249584,"TERMINAL",0,0,"global_step=26454, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1312,2250150,"TERMINAL",0,0,"global_step=26548, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\nglobal_step=26607, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1313,2250392,"TERMINAL",0,0,"global_step=26661, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1314,2250688,"TERMINAL",0,0,"global_step=26747, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1315,2251007,"TERMINAL",0,0,"global_step=26833, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1316,2251317,"TERMINAL",0,0,"global_step=26919, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1317,2251646,"TERMINAL",0,0,"global_step=26998, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1318,2251827,"TERMINAL",0,0,"global_step=27059, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1319,2252202,"TERMINAL",0,0,"global_step=27145, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1320,2252314,"TERMINAL",0,0,"global_step=27191, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1321,2252900,"TERMINAL",0,0,"global_step=27285, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1322,2252960,"TERMINAL",0,0,"global_step=27339, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1323,2253030,"TERMINAL",0,0,"global_step=27378, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1324,2253264,"TERMINAL",0,0,"global_step=27437, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1325,2253627,"TERMINAL",0,0,"global_step=27523, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1326,2253750,"TERMINAL",0,0,"global_step=27569, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1327,2253964,"TERMINAL",0,0,"global_step=27623, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1328,2254304,"TERMINAL",0,0,"global_step=27717, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1329,2254562,"TERMINAL",0,0,"global_step=27778, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1330,2254717,"TERMINAL",0,0,"global_step=27824, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1331,2255382,"TERMINAL",0,0,"global_step=27910, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\nglobal_step=27949, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\nglobal_step=28003, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1332,2255658,"TERMINAL",0,0,"global_step=28075, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1333,2255895,"TERMINAL",0,0,"global_step=28129, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. 
Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1334,2256057,"TERMINAL",0,0,"global_step=28175, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1335,2256211,"TERMINAL",0,0,"global_step=28214, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1336,2256749,"TERMINAL",0,0,"global_step=28300, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1337,2256815,"TERMINAL",0,0,"global_step=28379, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1338,2257205,"TERMINAL",0,0,"global_step=28458, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1339,2257416,"TERMINAL",0,0,"global_step=28544, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1340,2257588,"TERMINAL",0,0,"global_step=28590, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1341,2257807,"TERMINAL",0,0,"global_step=28636, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1342,2258058,"TERMINAL",0,0,"global_step=28709, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 73 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1343,2258367,"TERMINAL",0,0,"global_step=28795, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1344,2258645,"TERMINAL",0,0,"global_step=28873, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 78 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1345,2258981,"TERMINAL",0,0,"global_step=28959, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1346,2259151,"TERMINAL",0,0,"global_step=29005, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1347,2259362,"TERMINAL",0,0,"global_step=29051, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1348,2259535,"TERMINAL",0,0,"global_step=29105, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1349,2259787,"TERMINAL",0,0,"global_step=29164, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1350,2260112,"TERMINAL",0,0,"global_step=29243, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1351,2260256,"TERMINAL",0,0,"global_step=29289, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1352,2260485,"TERMINAL",0,0,"global_step=29348, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1353,2260767,"TERMINAL",0,0,"global_step=29427, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1354,2261313,"TERMINAL",0,0,"global_step=29521, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1355,2261479,"TERMINAL",0,0,"global_step=29607, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1356,2261589,"TERMINAL",0,0,"global_step=29653, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1357,2262464,"TERMINAL",0,0,"global_step=29747, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\nglobal_step=29841, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1358,2262593,"TERMINAL",0,0,"global_step=29895, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1359,2262803,"TERMINAL",0,0,"global_step=29981, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1360,2262955,"TERMINAL",0,0,"global_step=30020, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1361,2263157,"TERMINAL",0,0,"global_step=30074, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1362,2263463,"TERMINAL",0,0,"global_step=30153, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1363,2263964,"TERMINAL",0,0,"global_step=30247, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1364,2264092,"TERMINAL",0,0,"global_step=30326, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1365,2264377,"TERMINAL",0,0,"global_step=30405, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1366,2264683,"TERMINAL",0,0,"global_step=30484, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1367,2265000,"TERMINAL",0,0,"global_step=30578, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1368,2265202,"TERMINAL",0,0,"global_step=30632, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1369,2265413,"TERMINAL",0,0,"global_step=30686, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1370,2265772,"TERMINAL",0,0,"global_step=30780, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1371,2266427,"TERMINAL",0,0,"global_step=30874, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\nglobal_step=30968, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1372,2266625,"TERMINAL",0,0,"global_step=31022, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1373,2266945,"TERMINAL",0,0,"global_step=31108, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1374,2267140,"TERMINAL",0,0,"global_step=31154, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1375,2267455,"TERMINAL",0,0,"global_step=31240, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1376,2267643,"TERMINAL",0,0,"global_step=31286, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1377,2267962,"TERMINAL",0,0,"global_step=31365, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1378,2268121,"TERMINAL",0,0,"global_step=31411, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1379,2268302,"TERMINAL",0,0,"global_step=31457, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1380,2268502,"TERMINAL",0,0,"global_step=31503, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1381,2268956,"TERMINAL",0,0,"global_step=31589, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1382,2269102,"TERMINAL",0,0,"global_step=31668, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1383,2269359,"TERMINAL",0,0,"global_step=31740, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1384,2269673,"TERMINAL",0,0,"global_step=31819, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1385,2269852,"TERMINAL",0,0,"global_step=31865, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1386,2270032,"TERMINAL",0,0,"global_step=31911, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1387,2270335,"TERMINAL",0,0,"global_step=31997, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1388,2270509,"TERMINAL",0,0,"global_step=32043, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1389,2270862,"TERMINAL",0,0,"global_step=32129, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1390,2271173,"TERMINAL",0,0,"global_step=32215, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1391,2271371,"TERMINAL",0,0,"global_step=32261, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1392,2271642,"TERMINAL",0,0,"global_step=32347, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1393,2271830,"TERMINAL",0,0,"global_step=32393, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1394,2272063,"TERMINAL",0,0,"global_step=32447, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1395,2272362,"TERMINAL",0,0,"global_step=32533, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1396,2272515,"TERMINAL",0,0,"global_step=32565, episodic_return=[1.]\r\nWarning: Inconsistent chunk_sizes. Episode has 32 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1397,2272745,"TERMINAL",0,0,"global_step=32624, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1398,2273042,"TERMINAL",0,0,"global_step=32703, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1399,2273374,"TERMINAL",0,0,"global_step=32797, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1400,2273473,"TERMINAL",0,0,"Created /fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout/train/data_0004.array_record with 100 video chunks\r\n",,terminal_output +1401,2273675,"TERMINAL",0,0,"global_step=32850, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1402,2273975,"TERMINAL",0,0,"global_step=32936, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1403,2274205,"TERMINAL",0,0,"global_step=32995, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1404,2274433,"TERMINAL",0,0,"global_step=33049, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1405,2274716,"TERMINAL",0,0,"global_step=33135, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1406,2274919,"TERMINAL",0,0,"global_step=33181, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1407,2275080,"TERMINAL",0,0,"global_step=33220, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1408,2275269,"TERMINAL",0,0,"global_step=33266, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1409,2275491,"TERMINAL",0,0,"global_step=33325, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1410,2275794,"TERMINAL",0,0,"global_step=33411, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. 
Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1411,2276023,"TERMINAL",0,0,"global_step=33465, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1412,2276199,"TERMINAL",0,0,"global_step=33511, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1413,2276413,"TERMINAL",0,0,"global_step=33564, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1414,2276563,"TERMINAL",0,0,"global_step=33603, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1415,2276813,"TERMINAL",0,0,"global_step=33662, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1416,2277116,"TERMINAL",0,0,"global_step=33756, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1417,2277351,"TERMINAL",0,0,"global_step=33815, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1418,2277694,"TERMINAL",0,0,"global_step=33909, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1419,2278051,"TERMINAL",0,0,"global_step=34003, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1420,2278223,"TERMINAL",0,0,"global_step=34049, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1421,2278534,"TERMINAL",0,0,"global_step=34135, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1422,2278774,"TERMINAL",0,0,"global_step=34198, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 63 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1423,2278967,"TERMINAL",0,0,"global_step=34252, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1424,2279344,"TERMINAL",0,0,"global_step=34346, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1425,2279601,"TERMINAL",0,0,"global_step=34432, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1426,2279916,"TERMINAL",0,0,"global_step=34518, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1427,2280192,"TERMINAL",0,0,"global_step=34588, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 70 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1428,2280477,"TERMINAL",0,0,"global_step=34667, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1429,2280785,"TERMINAL",0,0,"global_step=34746, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1430,2281110,"TERMINAL",0,0,"global_step=34840, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1431,2281404,"TERMINAL",0,0,"global_step=34926, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1432,2281750,"TERMINAL",0,0,"global_step=35020, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1433,2281930,"TERMINAL",0,0,"global_step=35066, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1434,2282146,"TERMINAL",0,0,"global_step=35120, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1435,2282349,"TERMINAL",0,0,"global_step=35174, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1436,2282603,"TERMINAL",0,0,"global_step=35235, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1437,2282945,"TERMINAL",0,0,"global_step=35321, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1438,2283147,"TERMINAL",0,0,"global_step=35394, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 73 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1439,2283481,"TERMINAL",0,0,"global_step=35473, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1440,2283756,"TERMINAL",0,0,"global_step=35559, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1441,2284123,"TERMINAL",0,0,"global_step=35645, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1442,2284300,"TERMINAL",0,0,"global_step=35698, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1443,2284620,"TERMINAL",0,0,"global_step=35792, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1444,2284841,"TERMINAL",0,0,"global_step=35846, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1445,2285166,"TERMINAL",0,0,"global_step=35932, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1446,2285377,"TERMINAL",0,0,"global_step=35991, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1447,2285659,"TERMINAL",0,0,"global_step=36063, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1448,2285963,"TERMINAL",0,0,"global_step=36157, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1449,2286330,"TERMINAL",0,0,"global_step=36251, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1450,2286543,"TERMINAL",0,0,"global_step=36312, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1451,2286732,"TERMINAL",0,0,"global_step=36358, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1452,2286908,"TERMINAL",0,0,"global_step=36404, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1453,2287103,"TERMINAL",0,0,"global_step=36450, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1454,2287470,"TERMINAL",0,0,"global_step=36544, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1455,2287607,"TERMINAL",0,0,"global_step=36583, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1456,2287898,"TERMINAL",0,0,"global_step=36661, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 78 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1457,2288207,"TERMINAL",0,0,"global_step=36747, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1458,2288517,"TERMINAL",0,0,"global_step=36833, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1459,2288859,"TERMINAL",0,0,"global_step=36927, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1460,2289167,"TERMINAL",0,0,"global_step=37013, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1461,2289301,"TERMINAL",0,0,"global_step=37038, episodic_return=[0.]\r\nWarning: Inconsistent chunk_sizes. Episode has 25 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1462,2289483,"TERMINAL",0,0,"global_step=37077, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1463,2289666,"TERMINAL",0,0,"global_step=37123, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1464,2289799,"TERMINAL",0,0,"global_step=37162, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1465,2290076,"TERMINAL",0,0,"global_step=37234, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1466,2290248,"TERMINAL",0,0,"global_step=37280, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1467,2290606,"TERMINAL",0,0,"global_step=37374, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1468,2290905,"TERMINAL",0,0,"global_step=37453, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1469,2291074,"TERMINAL",0,0,"global_step=37499, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1470,2291352,"TERMINAL",0,0,"global_step=37571, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1471,2291538,"TERMINAL",0,0,"global_step=37625, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1472,2291812,"TERMINAL",0,0,"global_step=37698, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 73 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1473,2292047,"TERMINAL",0,0,"global_step=37752, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1474,2292254,"TERMINAL",0,0,"global_step=37806, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1475,2292551,"TERMINAL",0,0,"global_step=37869, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 63 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1476,2292694,"TERMINAL",0,0,"global_step=37923, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1477,2292863,"TERMINAL",0,0,"global_step=37962, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1478,2293083,"TERMINAL",0,0,"global_step=38020, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 58 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1479,2293411,"TERMINAL",0,0,"global_step=38106, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1480,2293689,"TERMINAL",0,0,"global_step=38185, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1481,2293898,"TERMINAL",0,0,"global_step=38238, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1482,2294211,"TERMINAL",0,0,"global_step=38324, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1483,2294522,"TERMINAL",0,0,"global_step=38410, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1484,2294713,"TERMINAL",0,0,"global_step=38456, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1485,2295043,"TERMINAL",0,0,"global_step=38542, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1486,2295240,"TERMINAL",0,0,"global_step=38595, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1487,2295432,"TERMINAL",0,0,"global_step=38641, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1488,2295757,"TERMINAL",0,0,"global_step=38727, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1489,2296014,"TERMINAL",0,0,"global_step=38797, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 70 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1490,2296170,"TERMINAL",0,0,"global_step=38836, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1491,2296474,"TERMINAL",0,0,"global_step=38922, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1492,2296848,"TERMINAL",0,0,"global_step=39016, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1493,2297082,"TERMINAL",0,0,"global_step=39081, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 65 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1494,2297293,"TERMINAL",0,0,"global_step=39140, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1495,2297491,"TERMINAL",0,0,"global_step=39186, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1496,2297671,"TERMINAL",0,0,"global_step=39239, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1497,2297883,"TERMINAL",0,0,"global_step=39285, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1498,2298139,"TERMINAL",0,0,"global_step=39346, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1499,2298406,"TERMINAL",0,0,"global_step=39425, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1500,2298738,"TERMINAL",0,0,"global_step=39519, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1501,2298871,"TERMINAL",0,0,"Created /fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout/train/data_0005.array_record with 100 video chunks\r\n",,terminal_output +1502,2299049,"TERMINAL",0,0,"global_step=39565, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. 
Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1503,2299318,"TERMINAL",0,0,"global_step=39638, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 73 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1504,2299644,"TERMINAL",0,0,"global_step=39732, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1505,2299825,"TERMINAL",0,0,"global_step=39778, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1506,2300101,"TERMINAL",0,0,"global_step=39851, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 73 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1507,2300298,"TERMINAL",0,0,"global_step=39905, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1508,2300609,"TERMINAL",0,0,"global_step=39991, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1509,2301165,"TERMINAL",0,0,"global_step=40077, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1510,2301267,"TERMINAL",0,0,"global_step=40171, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1511,2301616,"TERMINAL",0,0,"global_step=40257, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1512,2301822,"TERMINAL",0,0,"global_step=40311, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1513,2302154,"TERMINAL",0,0,"global_step=40405, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1514,2302415,"TERMINAL",0,0,"global_step=40484, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1515,2302817,"TERMINAL",0,0,"global_step=40570, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1516,2303075,"TERMINAL",0,0,"global_step=40664, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1517,2303282,"TERMINAL",0,0,"global_step=40718, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1518,2303605,"TERMINAL",0,0,"global_step=40804, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1519,2303967,"TERMINAL",0,0,"global_step=40898, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1520,2304362,"TERMINAL",0,0,"global_step=40984, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1521,2304450,"TERMINAL",0,0,"global_step=41030, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1522,2304641,"TERMINAL",0,0,"global_step=41076, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1523,2305012,"TERMINAL",0,0,"global_step=41170, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1524,2305175,"TERMINAL",0,0,"global_step=41224, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1525,2305401,"TERMINAL",0,0,"global_step=41285, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1526,2305566,"TERMINAL",0,0,"global_step=41324, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1527,2305975,"TERMINAL",0,0,"global_step=41410, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1528,2306120,"TERMINAL",0,0,"global_step=41463, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1529,2306325,"TERMINAL",0,0,"global_step=41509, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1530,2306545,"TERMINAL",0,0,"global_step=41555, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1531,2306703,"TERMINAL",0,0,"global_step=41601, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1532,2306949,"TERMINAL",0,0,"global_step=41647, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1533,2307155,"TERMINAL",0,0,"global_step=41733, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1534,2307363,"TERMINAL",0,0,"global_step=41786, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1535,2307555,"TERMINAL",0,0,"global_step=41840, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1536,2307905,"TERMINAL",0,0,"global_step=41934, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1537,2308149,"TERMINAL",0,0,"global_step=41993, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1538,2308310,"TERMINAL",0,0,"global_step=42039, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1539,2308683,"TERMINAL",0,0,"global_step=42125, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1540,2308925,"TERMINAL",0,0,"global_step=42204, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1541,2309099,"TERMINAL",0,0,"global_step=42243, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1542,2309592,"TERMINAL",0,0,"global_step=42302, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\nglobal_step=42356, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1543,2309804,"TERMINAL",0,0,"global_step=42435, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1544,2310127,"TERMINAL",0,0,"global_step=42514, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1545,2310464,"TERMINAL",0,0,"global_step=42600, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1546,2310612,"TERMINAL",0,0,"global_step=42654, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1547,2310813,"TERMINAL",0,0,"global_step=42700, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1548,2311158,"TERMINAL",0,0,"global_step=42794, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1549,2311412,"TERMINAL",0,0,"global_step=42866, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1550,2311604,"TERMINAL",0,0,"global_step=42919, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1551,2311845,"TERMINAL",0,0,"global_step=42980, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1552,2312168,"TERMINAL",0,0,"global_step=43059, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1553,2312440,"TERMINAL",0,0,"global_step=43145, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1554,2312752,"TERMINAL",0,0,"global_step=43231, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1555,2312933,"TERMINAL",0,0,"global_step=43277, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1556,2313222,"TERMINAL",0,0,"global_step=43356, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1557,2313423,"TERMINAL",0,0,"global_step=43410, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1558,2313671,"TERMINAL",0,0,"global_step=43471, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1559,2313872,"TERMINAL",0,0,"global_step=43527, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 56 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1560,2314164,"TERMINAL",0,0,"global_step=43606, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1561,2314469,"TERMINAL",0,0,"global_step=43692, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1562,2314632,"TERMINAL",0,0,"global_step=43724, episodic_return=[1.]\r\nWarning: Inconsistent chunk_sizes. Episode has 32 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1563,2314959,"TERMINAL",0,0,"global_step=43818, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1564,2315154,"TERMINAL",0,0,"global_step=43872, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1565,2315342,"TERMINAL",0,0,"global_step=43918, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1566,2315583,"TERMINAL",0,0,"global_step=43984, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 66 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1567,2315930,"TERMINAL",0,0,"global_step=44070, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1568,2316092,"TERMINAL",0,0,"global_step=44116, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1569,2316304,"TERMINAL",0,0,"global_step=44169, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1570,2316559,"TERMINAL",0,0,"global_step=44242, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 73 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1571,2316784,"TERMINAL",0,0,"global_step=44296, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1572,2317076,"TERMINAL",0,0,"global_step=44375, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1573,2317268,"TERMINAL",0,0,"global_step=44429, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1574,2317531,"TERMINAL",0,0,"global_step=44494, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 65 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1575,2317672,"TERMINAL",0,0,"global_step=44533, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1576,2317892,"TERMINAL",0,0,"global_step=44587, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1577,2318448,"TERMINAL",0,0,"global_step=44681, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1578,2318517,"TERMINAL",0,0,"global_step=44767, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1579,2318705,"TERMINAL",0,0,"global_step=44806, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1580,2318952,"TERMINAL",0,0,"global_step=44871, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 65 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1581,2319340,"TERMINAL",0,0,"global_step=44957, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1582,2319571,"TERMINAL",0,0,"global_step=45043, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1583,2319792,"TERMINAL",0,0,"global_step=45097, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1584,2319967,"TERMINAL",0,0,"global_step=45136, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1585,2320145,"TERMINAL",0,0,"global_step=45190, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1586,2320335,"TERMINAL",0,0,"global_step=45236, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1587,2320512,"TERMINAL",0,0,"global_step=45282, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1588,2320681,"TERMINAL",0,0,"global_step=45321, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1589,2321004,"TERMINAL",0,0,"global_step=45400, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1590,2321185,"TERMINAL",0,0,"global_step=45458, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 58 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1591,2321485,"TERMINAL",0,0,"global_step=45537, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1592,2321712,"TERMINAL",0,0,"global_step=45591, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1593,2322022,"TERMINAL",0,0,"global_step=45677, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1594,2322412,"TERMINAL",0,0,"global_step=45763, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1595,2322654,"TERMINAL",0,0,"global_step=45857, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1596,2322922,"TERMINAL",0,0,"global_step=45929, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1597,2323102,"TERMINAL",0,0,"global_step=45975, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1598,2323409,"TERMINAL",0,0,"global_step=46061, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1599,2323690,"TERMINAL",0,0,"global_step=46131, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 70 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1600,2323853,"TERMINAL",0,0,"global_step=46177, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1601,2323960,"TERMINAL",0,0,"Created /fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout/train/data_0006.array_record with 100 video chunks\r\n",,terminal_output +1602,2324275,"TERMINAL",0,0,"global_step=46263, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1603,2324603,"TERMINAL",0,0,"global_step=46349, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1604,2324763,"TERMINAL",0,0,"global_step=46381, episodic_return=[1.]\r\nWarning: Inconsistent chunk_sizes. Episode has 32 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1605,2325067,"TERMINAL",0,0,"global_step=46467, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1606,2325352,"TERMINAL",0,0,"global_step=46546, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. 
Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1607,2325489,"TERMINAL",0,0,"global_step=46585, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1608,2325883,"TERMINAL",0,0,"global_step=46671, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1609,2326130,"TERMINAL",0,0,"global_step=46724, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1610,2326285,"TERMINAL",0,0,"global_step=46802, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 78 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1611,2326694,"TERMINAL",0,0,"global_step=46848, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1612,2326872,"TERMINAL",0,0,"global_step=46907, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1613,2327059,"TERMINAL",0,0,"global_step=46979, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1614,2327583,"TERMINAL",0,0,"global_step=47065, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1615,2327742,"TERMINAL",0,0,"global_step=47123, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 58 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1616,2327964,"TERMINAL",0,0,"global_step=47202, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1617,2328201,"TERMINAL",0,0,"global_step=47248, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\nglobal_step=47294, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1618,2328353,"TERMINAL",0,0,"global_step=47333, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1619,2328576,"TERMINAL",0,0,"global_step=47392, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1620,2328783,"TERMINAL",0,0,"global_step=47451, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1621,2329046,"TERMINAL",0,0,"global_step=47523, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1622,2329254,"TERMINAL",0,0,"global_step=47577, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1623,2329436,"TERMINAL",0,0,"global_step=47623, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1624,2329644,"TERMINAL",0,0,"global_step=47669, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1625,2329952,"TERMINAL",0,0,"global_step=47755, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1626,2330178,"TERMINAL",0,0,"global_step=47814, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1627,2330367,"TERMINAL",0,0,"global_step=47868, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1628,2330673,"TERMINAL",0,0,"global_step=47947, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1629,2330832,"TERMINAL",0,0,"global_step=47986, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1630,2331155,"TERMINAL",0,0,"global_step=48072, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1631,2331311,"TERMINAL",0,0,"global_step=48118, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1632,2331533,"TERMINAL",0,0,"global_step=48172, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1633,2331674,"TERMINAL",0,0,"global_step=48211, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1634,2331994,"TERMINAL",0,0,"global_step=48297, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1635,2332415,"TERMINAL",0,0,"global_step=48391, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1636,2332518,"TERMINAL",0,0,"global_step=48437, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1637,2332753,"TERMINAL",0,0,"global_step=48491, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1638,2332952,"TERMINAL",0,0,"global_step=48549, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 58 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1639,2333175,"TERMINAL",0,0,"global_step=48610, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1640,2333474,"TERMINAL",0,0,"global_step=48689, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1641,2333692,"TERMINAL",0,0,"global_step=48742, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1642,2334024,"TERMINAL",0,0,"global_step=48836, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1643,2334283,"TERMINAL",0,0,"global_step=48908, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1644,2334623,"TERMINAL",0,0,"global_step=48987, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1645,2334895,"TERMINAL",0,0,"global_step=49073, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1646,2335073,"TERMINAL",0,0,"global_step=49119, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1647,2335245,"TERMINAL",0,0,"global_step=49158, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1648,2335568,"TERMINAL",0,0,"global_step=49252, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1649,2335822,"TERMINAL",0,0,"global_step=49313, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1650,2336044,"TERMINAL",0,0,"global_step=49374, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1651,2336319,"TERMINAL",0,0,"global_step=49453, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1652,2336632,"TERMINAL",0,0,"global_step=49539, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1653,2336815,"TERMINAL",0,0,"global_step=49585, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1654,2337045,"TERMINAL",0,0,"global_step=49639, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1655,2337202,"TERMINAL",0,0,"global_step=49685, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1656,2337519,"TERMINAL",0,0,"global_step=49771, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1657,2337699,"TERMINAL",0,0,"global_step=49817, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1658,2338010,"TERMINAL",0,0,"global_step=49903, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1659,2338248,"TERMINAL",0,0,"global_step=49962, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1660,2338559,"TERMINAL",0,0,"global_step=50048, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1661,2338802,"TERMINAL",0,0,"global_step=50109, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1662,2339032,"TERMINAL",0,0,"global_step=50168, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1663,2339222,"TERMINAL",0,0,"global_step=50221, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1664,2339438,"TERMINAL",0,0,"global_step=50275, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1665,2339677,"TERMINAL",0,0,"global_step=50340, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 65 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1666,2339914,"TERMINAL",0,0,"global_step=50393, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1667,2340199,"TERMINAL",0,0,"global_step=50479, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1668,2340421,"TERMINAL",0,0,"global_step=50532, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1669,2340748,"TERMINAL",0,0,"global_step=50626, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1670,2341029,"TERMINAL",0,0,"global_step=50704, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 78 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1671,2341239,"TERMINAL",0,0,"global_step=50758, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1672,2341594,"TERMINAL",0,0,"global_step=50852, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1673,2341834,"TERMINAL",0,0,"global_step=50905, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1674,2342144,"TERMINAL",0,0,"global_step=50963, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 58 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1675,2342207,"TERMINAL",0,0,"global_step=51009, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1676,2342515,"TERMINAL",0,0,"global_step=51095, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1677,2342820,"TERMINAL",0,0,"global_step=51181, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1678,2343044,"TERMINAL",0,0,"global_step=51240, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1679,2343364,"TERMINAL",0,0,"global_step=51326, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1680,2343568,"TERMINAL",0,0,"global_step=51379, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1681,2343845,"TERMINAL",0,0,"global_step=51451, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1682,2344177,"TERMINAL",0,0,"global_step=51545, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1683,2344354,"TERMINAL",0,0,"global_step=51591, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1684,2344582,"TERMINAL",0,0,"global_step=51645, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1685,2344730,"TERMINAL",0,0,"global_step=51684, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1686,2345042,"TERMINAL",0,0,"global_step=51770, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1687,2345331,"TERMINAL",0,0,"global_step=51848, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 78 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1688,2345522,"TERMINAL",0,0,"global_step=51894, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1689,2345722,"TERMINAL",0,0,"global_step=51940, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1690,2345934,"TERMINAL",0,0,"global_step=51994, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1691,2346135,"TERMINAL",0,0,"global_step=52047, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1692,2346445,"TERMINAL",0,0,"global_step=52133, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1693,2346742,"TERMINAL",0,0,"global_step=52212, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1694,2346945,"TERMINAL",0,0,"global_step=52265, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1695,2347134,"TERMINAL",0,0,"global_step=52319, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1696,2347372,"TERMINAL",0,0,"global_step=52378, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1697,2347572,"TERMINAL",0,0,"global_step=52424, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1698,2347745,"TERMINAL",0,0,"global_step=52470, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1699,2347930,"TERMINAL",0,0,"global_step=52516, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1700,2348269,"TERMINAL",0,0,"global_step=52610, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1701,2348399,"TERMINAL",0,0,"Created /fast/project/HFMI_SynergyUnit/jafar_ws/data/breakout/train/data_0007.array_record with 100 video chunks\r\n",,terminal_output +1702,2348835,"TERMINAL",0,0,"global_step=52704, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1703,2348998,"TERMINAL",0,0,"global_step=52776, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1704,2349345,"TERMINAL",0,0,"global_step=52855, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1705,2349461,"TERMINAL",0,0,"global_step=52901, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1706,2349622,"TERMINAL",0,0,"global_step=52940, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1707,2349965,"TERMINAL",0,0,"global_step=53034, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1708,2350143,"TERMINAL",0,0,"global_step=53073, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1709,2350271,"TERMINAL",0,0,"global_step=53119, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1710,2350485,"TERMINAL",0,0,"global_step=53173, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. 
Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1711,2350651,"TERMINAL",0,0,"global_step=53212, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1712,2350843,"TERMINAL",0,0,"global_step=53258, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1713,2351051,"TERMINAL",0,0,"global_step=53312, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1714,2351205,"TERMINAL",0,0,"global_step=53351, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1715,2351502,"TERMINAL",0,0,"global_step=53430, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1716,2351806,"TERMINAL",0,0,"global_step=53516, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1717,2351929,"TERMINAL",0,0,"global_step=53548, episodic_return=[1.]\r\nWarning: Inconsistent chunk_sizes. Episode has 32 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1718,2352362,"TERMINAL",0,0,"global_step=53642, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1719,2352552,"TERMINAL",0,0,"global_step=53728, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1720,2352769,"TERMINAL",0,0,"global_step=53781, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1721,2353092,"TERMINAL",0,0,"global_step=53875, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1722,2353401,"TERMINAL",0,0,"global_step=53961, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1723,2353555,"TERMINAL",0,0,"global_step=54007, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1724,2353705,"TERMINAL",0,0,"global_step=54046, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1725,2353918,"TERMINAL",0,0,"global_step=54107, episodic_return=[8.]\r\nWarning: Inconsistent chunk_sizes. Episode has 61 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1726,2354231,"TERMINAL",0,0,"global_step=54193, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1727,2354414,"TERMINAL",0,0,"global_step=54239, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1728,2354593,"TERMINAL",0,0,"global_step=54285, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1729,2354895,"TERMINAL",0,0,"global_step=54371, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1730,2355205,"TERMINAL",0,0,"global_step=54457, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1731,2355418,"TERMINAL",0,0,"global_step=54516, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1732,2355609,"TERMINAL",0,0,"global_step=54570, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1733,2355804,"TERMINAL",0,0,"global_step=54623, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 53 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1734,2355998,"TERMINAL",0,0,"global_step=54669, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1735,2356172,"TERMINAL",0,0,"global_step=54715, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1736,2356469,"TERMINAL",0,0,"global_step=54801, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1737,2356775,"TERMINAL",0,0,"global_step=54887, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1738,2357042,"TERMINAL",0,0,"global_step=54960, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 73 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1739,2357366,"TERMINAL",0,0,"global_step=55054, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1740,2357638,"TERMINAL",0,0,"global_step=55125, episodic_return=[12.]\r\nWarning: Inconsistent chunk_sizes. Episode has 71 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1741,2357831,"TERMINAL",0,0,"global_step=55171, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1742,2357995,"TERMINAL",0,0,"global_step=55217, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1743,2358291,"TERMINAL",0,0,"global_step=55303, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1744,2358491,"TERMINAL",0,0,"global_step=55357, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1745,2358804,"TERMINAL",0,0,"global_step=55443, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1746,2359133,"TERMINAL",0,0,"global_step=55537, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1747,2359391,"TERMINAL",0,0,"global_step=55616, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1748,2359787,"TERMINAL",0,0,"global_step=55710, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1749,2359904,"TERMINAL",0,0,"global_step=55756, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1750,2360095,"TERMINAL",0,0,"global_step=55810, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1751,2360366,"TERMINAL",0,0,"global_step=55883, episodic_return=[9.]\r\nWarning: Inconsistent chunk_sizes. Episode has 73 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1752,2360551,"TERMINAL",0,0,"global_step=55929, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1753,2360773,"TERMINAL",0,0,"global_step=55988, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1754,2360922,"TERMINAL",0,0,"global_step=56027, episodic_return=[2.]\r\nWarning: Inconsistent chunk_sizes. Episode has 39 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1755,2361149,"TERMINAL",0,0,"global_step=56086, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 59 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1756,2361398,"TERMINAL",0,0,"global_step=56156, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 70 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1757,2362884,"TERMINAL",0,0,"global_step=56210, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1758,2362884,"TERMINAL",0,0,"global_step=56256, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\nglobal_step=56335, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\nglobal_step=56429, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1759,2362884,"TERMINAL",0,0,"global_step=56523, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1760,2363034,"TERMINAL",0,0,"global_step=56617, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1761,2363309,"TERMINAL",0,0,"global_step=56696, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. 
Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1762,2363567,"TERMINAL",0,0,"global_step=56768, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 72 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1763,2363895,"TERMINAL",0,0,"global_step=56862, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1764,2364213,"TERMINAL",0,0,"global_step=56948, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1765,2364509,"TERMINAL",0,0,"global_step=57042, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1766,2364703,"TERMINAL",0,0,"global_step=57096, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 54 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1767,2365000,"TERMINAL",0,0,"global_step=57182, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1768,2365300,"TERMINAL",0,0,"global_step=57261, episodic_return=[6.]\r\nWarning: Inconsistent chunk_sizes. Episode has 79 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1769,2365583,"TERMINAL",0,0,"global_step=57347, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1770,2366355,"TERMINAL",0,0,"global_step=57441, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\nglobal_step=57527, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1771,2366441,"TERMINAL",0,0,"global_step=57592, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 65 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1772,2366732,"TERMINAL",0,0,"global_step=57678, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1773,2367040,"TERMINAL",0,0,"global_step=57764, episodic_return=[7.]\r\nWarning: Inconsistent chunk_sizes. Episode has 86 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +1774,2367316,"TERMINAL",0,0,"global_step=57834, episodic_return=[5.]\r\nWarning: Inconsistent chunk_sizes. Episode has 70 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1775,2367611,"TERMINAL",0,0,"global_step=57928, episodic_return=[11.]\r\nWarning: Inconsistent chunk_sizes. Episode has 94 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1776,2367842,"TERMINAL",0,0,"global_step=57986, episodic_return=[4.]\r\nWarning: Inconsistent chunk_sizes. Episode has 58 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +1777,2368012,"TERMINAL",0,0,"global_step=58032, episodic_return=[3.]\r\nWarning: Inconsistent chunk_sizes. Episode has 46 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output diff --git a/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-accd586c-9376-4507-a888-197a6c40bdf51757184416102-2025_09_06-20.47.03.130/source.csv b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-accd586c-9376-4507-a888-197a6c40bdf51757184416102-2025_09_06-20.47.03.130/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..78be28e2197007cb6247e4c48c1b5be0e1de71f4 --- /dev/null +++ b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-accd586c-9376-4507-a888-197a6c40bdf51757184416102-2025_09_06-20.47.03.130/source.csv @@ -0,0 +1,4 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,3,"utils/nn.py",0,0,"import math\nfrom typing import Tuple, Callable, List\n\nfrom flax import nnx\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\nclass SpatioTemporalPositionalEncoding(nnx.Module):\n """"""\n Applies separate sinusoidal positional encodings to the temporal and spatial dimensions.\n """"""\n\n def __init__(self, d_model: int, max_len: int = 5000):\n self.d_model = d_model\n self.max_len = max_len\n\n pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n pe = pe.at[:, 0::2].set(jnp.sin(position * div_term))\n pe = pe.at[:, 1::2].set(jnp.cos(position * div_term))\n self.pe = nnx.Variable(pe)\n\n def __call__(self, x: jax.Array) -> jax.Array:\n """"""\n Args:\n x: The input tensor of shape (Batch, Time, Space, Dimension).\n\n Returns:\n The input tensor with positional encodings added.\n """"""\n assert x.ndim == 4, f""Input must be 4-dimensional, but got shape {x.shape}""\n\n num_timesteps = x.shape[1]\n num_spatial_patches = x.shape[2]\n\n # Temporal positional encoding: (1, T, 1, D)\n temporal_pe = self.pe.value[None, :num_timesteps, None, :]\n x = x + temporal_pe\n\n # Spatial positional encoding: (1, 1, S, D)\n spatial_pe = self.pe.value[None, None, :num_spatial_patches, :]\n x = x + spatial_pe\n\n return x\n\n\nclass STBlock(nnx.Module):\n def __init__(\n self,\n dim: int,\n ffn_dim: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n sow_weights: bool,\n sow_activations: bool,\n ):\n self.dim = dim\n 
self.ffn_dim = ffn_dim\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.sow_weights = sow_weights\n self.sow_activations = sow_activations\n\n self.spatial_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.spatial_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=False\n ),\n rngs=rngs,\n decode=False,\n )\n\n self.temporal_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.temporal_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=False,\n )\n\n self.ffn_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.ffn_dense1 = nnx.Linear(\n in_features=self.dim,\n out_features=self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense2 = nnx.Linear(\n in_features=self.ffn_dim,\n out_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n @nnx.remat\n def __call__(self, x_BTNM: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z_BTNM = self.spatial_norm(x_BTNM)\n z_BTNM = self.spatial_attention(z_BTNM, sow_weights=self.sow_weights)\n x_BTNM = x_BTNM + z_BTNM\n\n # --- Temporal attention ---\n x_BNTM = x_BTNM.swapaxes(1, 2)\n z_BNTM = self.temporal_norm(x_BNTM)\n z_BNTM = self.temporal_attention(z_BNTM, sow_weights=self.sow_weights)\n x_BNTM = x_BNTM + z_BNTM\n x_BTNM = x_BNTM.swapaxes(1, 2)\n\n # --- Feedforward ---\n z_BTNM = self.ffn_norm(x_BTNM)\n z_BTND = self.ffn_dense1(z_BTNM)\n z_BTND = jax.nn.gelu(z_BTND)\n z_BTNM = self.ffn_dense2(z_BTND)\n x_BTNM = x_BTNM + z_BTNM\n if self.sow_activations:\n self.sow(nnx.Intermediate, ""activations"", x_BTNM)\n return x_BTNM\n\n\nclass STTransformer(nnx.Module):\n """"""\n Dimension keys:\n B: batch size\n T: number of frames\n N: number of patches per frame\n I: number of input features\n M: model dimension\n D: FFN dimension\n V: vocabulary size\n """"""\n\n def __init__(\n self,\n input_dim: int,\n model_dim: int,\n ffn_dim: int,\n out_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n sow_weights: bool = False,\n sow_activations: bool = False,\n sow_logits: bool = False,\n max_len: int = 5000,\n ):\n self.input_dim = input_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.out_dim = out_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.sow_logits = sow_logits\n self.sow_weights = sow_weights\n self.sow_activations = sow_activations\n\n self.input_norm1 = nnx.LayerNorm(\n 
num_features=self.input_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.input_dense = nnx.Linear(\n in_features=self.input_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_norm2 = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n\n self.pos_enc = SpatioTemporalPositionalEncoding(self.model_dim, max_len=max_len)\n\n self.blocks = []\n for _ in range(self.num_blocks):\n self.blocks.append(\n STBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n sow_weights=self.sow_weights,\n sow_activations=self.sow_activations,\n )\n )\n\n self.output_dense = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(self, x_BTNI: jax.Array) -> jax.Array:\n x_BTNI = self.input_norm1(x_BTNI)\n x_BTNM = self.input_dense(x_BTNI)\n x_BTNM = self.input_norm2(x_BTNM)\n x_BTNM = self.pos_enc(x_BTNM)\n for block in self.blocks:\n x_BTNM = block(x_BTNM)\n\n x_BTNV = self.output_dense(x_BTNM)\n if self.sow_logits:\n self.sow(nnx.Intermediate, ""logits"", x_BTNV)\n return x_BTNV\n\n\nclass TransformerBlock(nnx.Module):\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n sow_weights: bool,\n sow_activations: bool,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n self.sow_weights = sow_weights\n self.sow_activations = sow_activations\n\n self.temporal_norm = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.spatial_norm = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.ffn_norm = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.temporal_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.model_dim,\n qkv_features=self.model_dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=self.decode,\n )\n self.spatial_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.model_dim,\n qkv_features=self.model_dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=self.decode,\n )\n self.ffn_dense1 = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense2 = nnx.Linear(\n in_features=self.ffn_dim,\n out_features=self.model_dim,\n 
param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n @nnx.remat\n def __call__(\n self, x_BTNM: jax.Array, pos_index: Tuple[jax.Array, jax.Array] | None = None\n ) -> jax.Array:\n # --- Spatial attention ---\n B, T, N, M = x_BTNM.shape\n z_FNM = einops.rearrange(x_BTNM, ""b t n m -> (b t) n m"")\n z_FNM = self.spatial_norm(z_FNM)\n z_FNM = self.spatial_attention(z_FNM, sow_weights=self.sow_weights)\n z_BTNM = einops.rearrange(z_FNM, ""(b t) n m -> b t n m"", t=T)\n x_BTNM = x_BTNM + z_BTNM\n # --- Temporal attention ---\n z_PTM = einops.rearrange(x_BTNM, ""b t n m -> (b n) t m"")\n z_PTM = self.temporal_norm(z_PTM)\n z_PTM = self.temporal_attention(z_PTM, sow_weights=self.sow_weights)\n z_BTNM = einops.rearrange(z_PTM, ""(b n) t m -> b t n m"", n=N)\n x_BTNM = x_BTNM + z_BTNM\n # --- Feedforward ---\n z_BTNM = self.ffn_norm(x_BTNM)\n z_BTND = self.ffn_dense1(z_BTNM)\n z_BTND = jax.nn.gelu(z_BTND)\n z_BTNM = self.ffn_dense2(z_BTND)\n x_BTNM = x_BTNM + z_BTNM\n if self.sow_activations:\n self.sow(nnx.Intermediate, ""activations"", x_BTNM)\n\n return x_BTNM\n\n\nclass Transformer(nnx.Module):\n """"""\n Dimension keys:\n B: batch size\n T: number of frames\n N: number of patches per frame\n I: number of input features\n M: model dimension\n D: FFN dimension\n V: vocabulary size\n F: number of frames in batch\n P: number of patch positions in batch\n """"""\n\n def __init__(\n self,\n input_dim: int,\n model_dim: int,\n ffn_dim: int,\n out_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n sow_logits: bool = False,\n sow_weights: bool = False,\n sow_activations: bool = False,\n max_len: int = 5000,\n ):\n self.input_dim = input_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.out_dim = out_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.sow_logits = sow_logits\n self.sow_weights = sow_weights\n self.sow_activations = sow_activations\n\n self.input_norm1 = nnx.LayerNorm(\n num_features=self.input_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.input_dense = nnx.Linear(\n in_features=self.input_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_norm2 = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n\n self.pos_enc = SpatioTemporalPositionalEncoding(self.model_dim, max_len=max_len)\n\n self.blocks: List[TransformerBlock] = []\n for _ in range(self.num_blocks):\n self.blocks.append(\n TransformerBlock(\n model_dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=decode,\n sow_weights=self.sow_weights,\n sow_activations=self.sow_activations,\n rngs=rngs,\n )\n )\n self.output_dense = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(\n self, x_BTNI: jax.Array, pos_index: Tuple[jax.Array, jax.Array] | None = None\n ) -> jax.Array:\n x_BTNI = self.input_norm1(x_BTNI)\n x_BTNM = self.input_dense(x_BTNI)\n 
x_BTNM = self.input_norm2(x_BTNM)\n x_BTNM = self.pos_enc(x_BTNM)\n for block in self.blocks:\n x_BTNM = block(x_BTNM, pos_index)\n\n x_BTNV = self.output_dense(x_BTNM)\n if self.sow_logits:\n self.sow(nnx.Intermediate, ""logits"", x_BTNV)\n return x_BTNV\n\n\ndef normalize(x: jax.Array) -> jax.Array:\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nnx.Module):\n """"""\n Dimension keys:\n D: B * T * N\n K: number of latents\n L: latent dimension\n """"""\n\n def __init__(\n self,\n latent_dim: int,\n num_latents: int,\n dropout: float,\n dtype: jnp.dtype,\n rngs: nnx.Rngs,\n ):\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.dropout = dropout\n self.dtype = dtype\n\n self.codebook = nnx.Param(\n nnx.initializers.lecun_uniform()(\n rngs.params(), (self.num_latents, self.latent_dim)\n )\n )\n self.drop = nnx.Dropout(self.dropout, rngs=rngs)\n\n def __call__(\n self, x_DL: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x_DL = x_DL.astype(self.dtype)\n codebook = self.codebook.value.astype(self.dtype)\n\n x_normalized_DL = normalize(x_DL)\n normalized_codebook_KL = normalize(codebook)\n distance_DK = -jnp.matmul(x_normalized_DL, normalized_codebook_KL.T)\n if training:\n distance_DK = self.drop(distance_DK)\n\n # --- Get indices and embeddings ---\n indices_D = jnp.argmin(distance_DK, axis=-1)\n z_DL = codebook[indices_D]\n\n # --- Straight through estimator ---\n z_q_DL = x_normalized_DL + jax.lax.stop_gradient(z_DL - x_normalized_DL)\n return z_q_DL, z_DL, x_DL, indices_D\n\n def get_codes(self, indices_E: jax.Array) -> jax.Array:\n return self.codebook[indices_E]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool) -> Callable:\n """"""\n Create an attention function that uses flash attention if enabled.\n\n flax.nnx.MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim),\n but jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim). We reshape to\n ensure compatibility. cuDNN's flash attention additionally requires a sequence length that\n is a multiple of 4. We pad the sequence length to the nearest multiple of 4 and mask\n accordingly. Note that cuDNN requires the mask to be broadcast before calling the attention\n function due to strict shape checking.\n """"""\n\n def attention_fn(\n query_BTHD, key_BSHD, value_BSHD, bias=None, mask_B111=None, **kwargs\n ):\n implementation = ""cudnn"" if use_flash_attention else None\n\n def _merge_batch_dims(x):\n return einops.rearrange(x, ""... l h k -> (...) 
l h k"")\n\n def _pad(x, pad_size):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n original_shape = query_BTHD.shape\n T = query_BTHD.shape[-3]\n S = key_BSHD.shape[-3]\n\n # Pad to nearest multiple of 4\n Q = ((T + 3) // 4) * 4\n pad_size_Q = Q - T\n K = ((S + 3) // 4) * 4\n pad_size_K = K - S\n\n query_BQHD = _pad(_merge_batch_dims(query_BTHD), pad_size_Q)\n key_BKHD = _pad(_merge_batch_dims(key_BSHD), pad_size_K)\n value_BKHD = _pad(_merge_batch_dims(value_BSHD), pad_size_K)\n\n attention_mask = jnp.ones((Q, K), dtype=jnp.bool_)\n attention_mask = attention_mask.at[T:, :].set(False)\n attention_mask = attention_mask.at[:, S:].set(False)\n\n mask_11TS = attention_mask[jnp.newaxis, jnp.newaxis, :, :]\n\n bias_4d = (\n jnp.pad(\n _merge_batch_dims(bias),\n ((0, 0), (0, 0), (0, pad_size_Q), (0, pad_size_K)),\n )\n if bias is not None\n else None\n )\n\n # NOTE: jax.nn.dot_product_attention does not support dropout\n output_4d = jax.nn.dot_product_attention(\n query=query_BQHD,\n key=key_BKHD,\n value=value_BKHD,\n bias=bias_4d,\n mask=mask_11TS,\n implementation=implementation,\n is_causal=is_causal,\n )\n return output_4d[..., :T, :, :].reshape(original_shape)\n\n return attention_fn\n",python,tab +2,164,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"8:47:03 PM [info] Activating crowd-code\n8:47:03 PM [info] Recording started\n8:47:03 PM [info] Initializing git provider using file system watchers...\n8:47:03 PM [info] Git repository found\n8:47:03 PM [info] Git provider initialized successfully\n8:47:03 PM [info] Initial git state: [object Object]\n",Log,tab +3,3295,"utils/nn.py",0,0,"",python,tab diff --git a/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-dedac322-1282-4d89-8a49-f3a5624493ea1762171752270-2025_11_03-13.09.19.936/source.csv b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-dedac322-1282-4d89-8a49-f3a5624493ea1762171752270-2025_11_03-13.09.19.936/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..96906c345774c3ba611503014d8a3267b32bfc9e --- /dev/null +++ b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-dedac322-1282-4d89-8a49-f3a5624493ea1762171752270-2025_11_03-13.09.19.936/source.csv @@ -0,0 +1,255 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,366,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"1:09:19 PM [info] Activating crowd-code\n1:09:19 PM [info] Recording started\n1:09:19 PM [info] Initializing git provider using file system watchers...\n1:09:20 PM [error] Not a git repository: EntryNotFound (FileSystemError): Error: ENOENT: no such file or directory, stat '/home/franz.srambical/jafar/slurm/dev/franz/berlin/crowd-pilot/.git'\n",Log,tab +3,4284,"TERMINAL",0,0,"",,terminal_command +4,7859,"start_sglang",0,0,"",plaintext,tab +5,11243,"TERMINAL",0,0,"",,terminal_command +6,12735,"start_sglang_server.py",0,0,"",python,tab +7,27073,"start_sglang_server.sh",0,0,"",shellscript,tab +8,27890,"start_sglang_server.sh",0,0,"python3 -m sglang.launch_server --model-path qwen/qwen2.5-0.5b-instruct --host 0.0.0.0\n",shellscript,content +9,28380,"start_sglang_server.sh",86,0,"\n",shellscript,content +10,29134,"start_sglang_server.sh",87,1,"",shellscript,content +11,30095,"start_sglang_server.sh",0,0,"",shellscript,selection_command +12,30473,"start_sglang_server.sh",0,0,"\n",shellscript,content +13,31314,"start_sglang_server.sh",0,0,"s",shellscript,content 
+14,31314,"start_sglang_server.sh",1,0,"",shellscript,selection_keyboard +15,31358,"start_sglang_server.sh",1,0,"o",shellscript,content +16,31358,"start_sglang_server.sh",2,0,"",shellscript,selection_keyboard +17,31448,"start_sglang_server.sh",2,0,"u",shellscript,content +18,31449,"start_sglang_server.sh",3,0,"",shellscript,selection_keyboard +19,31476,"start_sglang_server.sh",3,0,"r",shellscript,content +20,31476,"start_sglang_server.sh",4,0,"",shellscript,selection_keyboard +21,31919,"start_sglang_server.sh",4,0,"c",shellscript,content +22,31919,"start_sglang_server.sh",5,0,"",shellscript,selection_keyboard +23,32140,"start_sglang_server.sh",5,0,"e",shellscript,content +24,32140,"start_sglang_server.sh",6,0,"",shellscript,selection_keyboard +25,32304,"start_sglang_server.sh",6,0," ",shellscript,content +26,32304,"start_sglang_server.sh",7,0,"",shellscript,selection_keyboard +27,32546,"start_sglang_server.sh",7,0,".venv/bin/activate",shellscript,content +28,32775,"start_sglang_server.sh",24,0,"",shellscript,selection_command +29,33216,"start_sglang_server.sh",25,0,"\n",shellscript,content +30,35383,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"",Log,tab +31,37385,"TERMINAL",0,0,"",,terminal_focus +32,37386,"start_sglang_server.sh",0,0,"",shellscript,tab +33,43060,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_command +34,49399,"TERMINAL",0,0,"ls",,terminal_command +35,52175,"TERMINAL",0,0,"cd",,terminal_command +36,56006,"TERMINAL",0,0,"cd crowd-pilot/",,terminal_command +37,57470,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_command +38,61752,"TERMINAL",0,0,"uv pip show sglang",,terminal_command +39,61798,"TERMINAL",0,0,"]633;C",,terminal_output +40,62576,"TERMINAL",0,0,"Name: sglang\r\nVersion: 0.5.4.post1\r\nLocation: /fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages\r\nRequires: aiohttp, anthropic, blobfile, build, compressed-tensors, cuda-python, datasets, decord2, einops, fastapi, flashinfer-python, gguf, grpcio, grpcio-health-checking, grpcio-reflection, grpcio-tools, hf-transfer, huggingface-hub, interegular, ipython, llguidance, modelscope, msgspec, ninja, numpy, nvidia-cutlass-dsl, nvidia-ml-py, openai, openai-harmony, orjson, outlines, packaging, partial-json-parser, pillow, prometheus-client, psutil, py-spy, pybase64, pydantic, python-multipart, pyzmq, requests, scipy, sentencepiece, setproctitle, sgl-kernel, soundfile, tiktoken, timm, torch, torch-memory-saver, torchao, torchaudio, torchvision, tqdm, transformers, uvicorn, uvloop, xgrammar\r\nRequired-by:\r\n]0;franz.srambical@hai-login1:~/crowd-pilot",,terminal_output +41,71102,"TERMINAL",0,0,"deactivate",,terminal_command +42,77402,"TERMINAL",0,0,"bash /home/franz.srambical/jafar/slurm/dev/franz/berlin/crowd-pilot/start_sglang_server.sh",,terminal_command +43,77468,"TERMINAL",0,0,"]633;C",,terminal_output +44,85588,"TERMINAL",0,0,"^CTraceback (most recent call last):\r\n File """", line 198, in _run_module_as_main\r\n File """", line 88, in _run_code\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/sglang/launch_server.py"", line 7, in \r\n from sglang.srt.server_args import prepare_server_args\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/sglang/srt/server_args.py"", line 29, in \r\n from sglang.srt.connector import ConnectorType\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/sglang/srt/connector/__init__.py"", line 6, in \r\n from 
sglang.srt.connector.base_connector import (\r\n ...<3 lines>...\r\n )\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/sglang/srt/connector/base_connector.py"", line 10, in \r\n import torch\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/torch/__init__.py"", line 2150, in \r\n from torch import _VF as _VF, functional as functional # usort: skip\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/torch/functional.py"", line 8, in \r\n import torch.nn.functional as F\r\n File """", line 1360, in _find_and_load\r\n File """", line 1331, in _find_and_load_unlocked\r\n File """", line 935, in _load_unlocked\r\n File """", line 1022, in exec_module\r\n File """", line 1118, in get_code\r\n File """", line 1217, in get_data\r\nKeyboardInterrupt\r\n",,terminal_output +45,85639,"TERMINAL",0,0,"^C\r\n]0;franz.srambical@hai-login1:~/crowd-pilot",,terminal_output +46,85825,"TERMINAL",0,0,"^C",,terminal_command +47,86834,"start_sglang_server.sh",27,0,"",shellscript,selection_command +48,87198,"start_sglang_server.sh",27,86,"python3 -m sglang.launch_server --model-path qwen/qwen2.5-0.5b-instruct --host 0.0.0.0",shellscript,selection_command +49,88964,"start_sglang_server.sh",27,0,"",shellscript,selection_command +50,89979,"TERMINAL",0,0,"/home/franz.srambical/jafar/slurm/dev/franz/berlin/crowd-pilot/start_sglang_server.sh",,terminal_command +51,93031,"start_sglang_server.sh",27,0,"/home/franz.srambical/jafar/slurm/dev/franz/berlin/crowd-pilot/start_sglang_server.sh",shellscript,content +52,93032,"start_sglang_server.sh",112,0,"",shellscript,selection_keyboard +53,93769,"start_sglang_server.sh",27,85,"",shellscript,content +54,93771,"start_sglang_server.sh",112,0,"",shellscript,selection_command +55,94569,"start_sglang_server.sh",114,0,"",shellscript,selection_command +56,95993,"start_sglang_server.sh",27,0,"",shellscript,selection_command +57,96402,"start_sglang_server.sh",27,86,"python3 -m sglang.launch_server --model-path qwen/qwen2.5-0.5b-instruct --host 0.0.0.0",shellscript,selection_command +58,96799,"start_sglang_server.sh",27,0,"",shellscript,selection_command +59,99144,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_command +60,102053,"TERMINAL",0,0,"python3 -m sglang.launch_server --model-path qwen/qwen2.5-0.5b-instruct --host 0.0.0.0",,terminal_command +61,102098,"TERMINAL",0,0,"]633;C",,terminal_output +62,118857,"TERMINAL",0,0,"",,terminal_command +63,123331,"TERMINAL",0,0,"2025-11-03 13:11:23.179652: I tensorflow/core/util/port.cc:153] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\r\n",,terminal_output +64,126934,"TERMINAL",0,0,"2025-11-03 13:11:26.785110: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\r\nTo enable the following instructions: AVX2 AVX512F AVX512_VNNI AVX512_BF16 AVX512_FP16 AVX_VNNI AMX_TILE AMX_INT8 AMX_BF16 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\r\n",,terminal_output +65,140724,"TERMINAL",0,0,"2025-11-03 13:11:40.573353: I tensorflow/core/util/port.cc:153] oneDNN custom operations are on. 
You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\r\n",,terminal_output +66,151901,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File """", line 198, in _run_module_as_main\r\n File """", line 88, in _run_code\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/sglang/launch_server.py"", line 11, in \r\n server_args = prepare_server_args(sys.argv[1:])\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/sglang/srt/server_args.py"", line 3850, in prepare_server_args\r\n return ServerArgs.from_cli_args(raw_args)\r\n ~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/sglang/srt/server_args.py"", line 3472, in from_cli_args\r\n return cls(**{attr: getattr(args, attr) for attr in attrs})\r\n File """", line 268, in __init__\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/sglang/srt/server_args.py"", line 538, in __post_init__\r\n self._handle_missing_default_values()\r\n ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/sglang/srt/server_args.py"", line 623, in _handle_missing_default_values\r\n self.device = get_device()\r\n ~~~~~~~~~~^^\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/sglang/srt/utils/common.py"", line 1781, in get_device\r\n raise RuntimeError(""No accelerator (CUDA, XPU, HPU) is available."")\r\nRuntimeError: No accelerator (CUDA, XPU, HPU) is available.\r\n",,terminal_output +67,155163,"TERMINAL",0,0,"]0;franz.srambical@hai-login1:~/crowd-pilot",,terminal_output +68,246640,"TERMINAL",0,0,"id",,terminal_command +69,246640,"TERMINAL",0,0,"]633;Cuid=961800067(franz.srambical) gid=961800067(franz.srambical) groups=961800067(franz.srambical),961800017(helmholtz-member),961800019(helmholtz-all),961800033(hmgu),961900525(hfmi_synergyunit)\r\n]0;franz.srambical@hai-login1:~/crowd-pilot",,terminal_output +70,278403,"TERMINAL",0,0,"squeue",,terminal_command +71,278419,"TERMINAL",0,0,"]633;C JOBID USER PARTITION NODES CPUS ST SUBMIT_TIME START_TIME TIME TIME_LIMIT NODELIST(REASON)\r\n 33317 xiao.liu interacti 1 128 R 2025-11-02T17:43:38 2025-11-02T17:43:38 19:30:20 23:59:00 hai006\r\n 33328 kalyan.nad standard 1 64 R 2025-11-03T11:56:23 2025-11-03T11:56:38 1:17:20 1-00:00:00 hai002\r\n 33320 kalyan.nad standard 1 64 R 2025-11-03T11:36:55 2025-11-03T11:36:55 1:37:03 1-00:00:00 hai001\r\n 33318 xiao.liu standard 1 128 R 2025-11-02T19:29:40 2025-11-02T19:30:38 17:43:20 23:59:00 hai004\r\n]0;franz.srambical@hai-login1:~/crowd-pilot",,terminal_output +72,281131,"TERMINAL",0,0,"salloc --gpus=1 --ntasks-per-node=1 --cpus-per-task=10 --mem=100G",,terminal_command +73,281187,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 33333\r\n",,terminal_output +74,281282,"TERMINAL",0,0,"salloc: Nodes hai003 are ready for job\r\n",,terminal_output +75,281642,"TERMINAL",0,0,"Running inside SLURM, Job ID 33333.\r\n",,terminal_output +76,281742,"TERMINAL",0,0,"]0;franz.srambical@hai-login1:~/crowd-pilot[?2004h[franz.srambical@hai003.haicore.berlin:~/crowd-pilot] $ ",,terminal_output +77,283584,"TERMINAL",0,0,"l",,terminal_output +78,283694,"TERMINAL",0,0,"s",,terminal_output +79,283773,"TERMINAL",0,0,"\r\n[?2004l\rLICENSE README.md crowd-pilot maxtext pyproject.toml slurm 
uv.lock\r\n]0;franz.srambical@hai-login1:~/crowd-pilot[?2004h[franz.srambical@hai003.haicore.berlin:~/crowd-pilot] $ ",,terminal_output +80,284758,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +81,284889,"TERMINAL",0,0,"s': lso': . ""/fast/home/franz.srambical/.cursor-server/bin/3ccce8f55d8cca49f6d28b491a844c699b8719a0/out/vs/workbench/contrib/terminal/common/scripts/shellIntegration-bash.sh""",,terminal_output +82,284952,"TERMINAL",0,0,"\ru': source .venv/bin/activate\r\n\r",,terminal_output +83,285009,"TERMINAL",0,0,"[1@r': sour",,terminal_output +84,285651,"TERMINAL",0,0,"\r[30@[franz.srambical@hai003.haicore.berlin:~/crowd-pilot] $ sour\r\n[?2004l\r]0;franz.srambical@hai-login1:~/crowd-pilot[?2004h(crowd-pilot) [franz.srambical@hai003.haicore.berlin:~/crowd-pilot] $ ",,terminal_output +85,287855,"start_sglang_server.sh",27,86,"python3 -m sglang.launch_server --model-path qwen/qwen2.5-0.5b-instruct --host 0.0.0.0",shellscript,selection_command +86,288194,"start_sglang_server.sh",27,0,"",shellscript,selection_command +87,289101,"TERMINAL",0,0,"python3 -m sglang.launch_server --model-path qwen/qwen2.5-0.5b-instruct --host 0.0.0.0",,terminal_output +88,289327,"TERMINAL",0,0,"\rpython3 -m sglang.launch_server --model-path qwen/qwen2.5-0.5b-instruct --host 0.0.0.0\r\n[?2004l\r",,terminal_output +89,306946,"TERMINAL",0,0,"2025-11-03 13:14:26.791489: I tensorflow/core/util/port.cc:153] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\r\n",,terminal_output +90,308085,"TERMINAL",0,0,"2025-11-03 13:14:27.923401: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\r\nTo enable the following instructions: AVX2 AVX512F AVX512_VNNI AVX512_BF16 AVX512_FP16 AVX_VNNI AMX_TILE AMX_INT8 AMX_BF16 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\r\n",,terminal_output +91,312755,"TERMINAL",0,0,"2025-11-03 13:14:32.604499: I tensorflow/core/util/port.cc:153] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\r\n",,terminal_output +92,326515,"TERMINAL",0,0,"[2025-11-03 13:14:46] WARNING server_args.py:1104: Attention backend not explicitly specified. 
Use fa3 backend by default.\r\n[2025-11-03 13:14:46] INFO trace.py:48: opentelemetry package is not installed, tracing disabled\r\n",,terminal_output +93,328112,"TERMINAL",0,0,"[2025-11-03 13:14:47] server_args=ServerArgs(model_path='qwen/qwen2.5-0.5b-instruct', tokenizer_path='qwen/qwen2.5-0.5b-instruct', tokenizer_mode='auto', tokenizer_worker_num=1, skip_tokenizer_init=False, load_format='auto', model_loader_extra_config='{}', trust_remote_code=False, context_length=None, is_embedding=False, enable_multimodal=None, revision=None, model_impl='auto', host='0.0.0.0', port=30000, grpc_mode=False, skip_server_warmup=False, warmups=None, nccl_port=None, checkpoint_engine_wait_weights_before_ready=False, dtype='auto', quantization=None, quantization_param_path=None, kv_cache_dtype='auto', enable_fp32_lm_head=False, modelopt_quant=None, modelopt_checkpoint_restore_path=None, modelopt_checkpoint_save_path=None, modelopt_export_path=None, quantize_and_serve=False, mem_fraction_static=0.835, max_running_requests=None, max_queued_requests=None, max_total_tokens=None, chunked_prefill_size=8192, max_prefill_tokens=16384, schedule_policy='fcfs', enable_priority_scheduling=False, abort_on_priority_when_disabled=False, schedule_low_priority_values_first=False, priority_scheduling_preemption_threshold=10, schedule_conservativeness=1.0, page_size=1, hybrid_kvcache_ratio=None, swa_full_tokens_ratio=0.8, disable_hybrid_swa_memory=False, radix_eviction_policy='lru', device='cuda', tp_size=1, pp_size=1, pp_max_micro_batch_size=None, stream_interval=1, stream_output=False, random_seed=541394942, constrained_json_whitespace_pattern=None, constrained_json_disable_any_whitespace=False, watchdog_timeout=300, dist_timeout=None, download_dir=None, base_gpu_id=0, gpu_id_step=1, sleep_on_idle=False, log_level='info', log_level_http=None, log_requests=False, log_requests_level=2, crash_dump_folder=None, show_time_cost=False, enable_metrics=False, enable_metrics_for_all_schedulers=False, tokenizer_metrics_custom_labels_header='x-custom-labels', tokenizer_metrics_allowed_custom_labels=None, bucket_time_to_first_token=None, bucket_inter_token_latency=None, bucket_e2e_request_latency=None, collect_tokens_histogram=False, prompt_tokens_buckets=None, generation_tokens_buckets=None, gc_warning_threshold_secs=0.0, decode_log_interval=40, enable_request_time_stats_logging=False, kv_events_config=None, enable_trace=False, oltp_traces_endpoint='localhost:4317', api_key=None, served_model_name='qwen/qwen2.5-0.5b-instruct', weight_version='default', chat_template=None, completion_template=None, file_storage_path='sglang_storage', enable_cache_report=False, reasoning_parser=None, tool_call_parser=None, tool_server=None, sampling_defaults='model', dp_size=1, load_balance_method='round_robin', load_watch_interval=0.1, prefill_round_robin_balance=False, dist_init_addr=None, nnodes=1, node_rank=0, json_model_override_args='{}', preferred_sampling_params=None, enable_lora=None, max_lora_rank=None, lora_target_modules=None, lora_paths=None, max_loaded_loras=None, max_loras_per_batch=8, lora_eviction_policy='lru', lora_backend='triton', max_lora_chunk_size=16, attention_backend='fa3', decode_attention_backend=None, prefill_attention_backend=None, sampling_backend='flashinfer', grammar_backend='xgrammar', mm_attention_backend=None, nsa_prefill_backend='flashmla_sparse', nsa_decode_backend='fa3', speculative_algorithm=None, speculative_draft_model_path=None, speculative_draft_model_revision=None, speculative_draft_load_format=None, 
speculative_num_steps=None, speculative_eagle_topk=None, speculative_num_draft_tokens=None, speculative_accept_threshold_single=1.0, speculative_accept_threshold_acc=1.0, speculative_token_map=None, speculative_attention_mode='prefill', speculative_ngram_min_match_window_size=1, speculative_ngram_max_match_window_size=12, speculative_ngram_min_bfs_breadth=1, speculative_ngram_max_bfs_breadth=10, speculative_ngram_match_type='BFS', speculative_ngram_branch_length=18, speculative_ngram_capacity=10000000, ep_size=1, moe_a2a_backend='none', moe_runner_backend='auto', flashinfer_mxfp4_moe_precision='default', enable_flashinfer_allreduce_fusion=False, deepep_mode='auto', ep_num_redundant_experts=0, ep_dispatch_algorithm='static', init_expert_location='trivial', enable_eplb=False, eplb_algorithm='auto', eplb_rebalance_num_iterations=1000, eplb_rebalance_layers_per_chunk=None, eplb_min_rebalancing_utilization_threshold=1.0, expert_distribution_recorder_mode=None, expert_distribution_recorder_buffer_size=1000, enable_expert_distribution_metrics=False, deepep_config=None, moe_dense_tp_size=None, elastic_ep_backend=None, mooncake_ib_device=None, max_mamba_cache_size=None, mamba_ssm_dtype='float32', mamba_full_memory_ratio=0.9, enable_hierarchical_cache=False, hicache_ratio=2.0, hicache_size=0, hicache_write_policy='write_through', hicache_io_backend='kernel', hicache_mem_layout='layer_first', hicache_storage_backend=None, hicache_storage_prefetch_policy='best_effort', hicache_storage_backend_extra_config=None, enable_lmcache=False, kt_amx_weight_path=None, kt_amx_method='AMXINT4', kt_cpuinfer=None, kt_threadpool_count=2, kt_num_gpu_experts=None, enable_double_sparsity=False, ds_channel_config_path=None, ds_heavy_channel_num=32, ds_heavy_token_num=256, ds_heavy_channel_type='qk', ds_sparse_decode_threshold=4096, cpu_offload_gb=0, offload_group_size=-1, offload_num_in_group=1, offload_prefetch_step=1, offload_mode='cpu', multi_item_scoring_delimiter=None, disable_radix_cache=False, cuda_graph_max_bs=256, cuda_graph_bs=[1, 2, 4, 8, 12, 16, 24, 32, 40, 48, 56, 64, 72, 80, 88, 96, 104, 112, 120, 128, 136, 144, 152, 160, 168, 176, 184, 192, 200, 208, 216, 224, 232, 240, 248, 256], disable_cuda_graph=False, disable_cuda_graph_padding=False, enable_profile_cuda_graph=False, enable_cudagraph_gc=False, enable_nccl_nvls=False, enable_symm_mem=False, disable_flashinfer_cutlass_moe_fp4_allgather=False, enable_tokenizer_batch_encode=False, disable_tokenizer_batch_decode=False, disable_outlines_disk_cache=False, disable_custom_all_reduce=False, enable_mscclpp=False, enable_torch_symm_mem=False, disable_overlap_schedule=False, enable_mixed_chunk=False, enable_dp_attention=False, enable_dp_lm_head=False, enable_two_batch_overlap=False, enable_single_batch_overlap=False, tbo_token_distribution_threshold=0.48, enable_torch_compile=False, enable_piecewise_cuda_graph=False, torch_compile_max_bs=32, piecewise_cuda_graph_max_tokens=4096, piecewise_cuda_graph_tokens=[4, 8, 12, 16, 20, 24, 28, 32, 48, 64, 80, 96, 112, 128, 144, 160, 176, 192, 208, 224, 240, 256, 288, 320, 352, 384, 416, 448, 480, 512, 640, 768, 896, 1024, 1152, 1280, 1408, 1536, 1664, 1792, 1920, 2048, 2176, 2304, 2432, 2560, 2688, 2816, 2944, 3072, 3200, 3328, 3456, 3584, 3712, 3840, 3968, 4096], piecewise_cuda_graph_compiler='eager', torchao_config='', enable_nan_detection=False, enable_p2p_check=False, triton_attention_reduce_in_fp32=False, triton_attention_num_kv_splits=8, triton_attention_split_tile_size=None, num_continuous_decode_steps=1, 
delete_ckpt_after_loading=False, enable_memory_saver=False, enable_weights_cpu_backup=False, allow_auto_truncate=False, enable_custom_logit_processor=False, flashinfer_mla_disable_ragged=False, disable_shared_experts_fusion=False, disable_chunked_prefix_cache=False, disable_fast_image_processor=False, keep_mm_feature_on_device=False, enable_return_hidden_states=False, scheduler_recv_interval=1, numa_node=None, enable_deterministic_inference=False, rl_on_policy_target=None, enable_dynamic_batch_tokenizer=False, dynamic_batch_tokenizer_batch_size=32, dynamic_batch_tokenizer_batch_timeout=0.002, debug_tensor_dump_output_folder=None, debug_tensor_dump_input_file=None, debug_tensor_dump_inject=False, disaggregation_mode='null', disaggregation_transfer_backend='mooncake', disaggregation_bootstrap_port=8998, disaggregation_decode_tp=None, disaggregation_decode_dp=None, disaggregation_prefill_pp=1, disaggregation_ib_device=None, disaggregation_decode_enable_offload_kvcache=False, num_reserved_decode_tokens=512, disaggregation_decode_polling_interval=1, custom_weight_loader=[], weight_loader_disable_mmap=False, remote_instance_weight_loader_seed_instance_ip=None, remote_instance_weight_loader_seed_instance_service_port=None, remote_instance_weight_loader_send_weights_group_ports=None, enable_pdmux=False, pdmux_config_path=None, sm_group_num=8)\r\n",,terminal_output +94,329705,"TERMINAL",0,0,"[2025-11-03 13:14:49] Using default HuggingFace chat template with detected content format: string\r\n",,terminal_output +95,346598,"TERMINAL",0,0,"[2025-11-03 13:15:06] INFO trace.py:48: opentelemetry package is not installed, tracing disabled\r\n",,terminal_output +96,348586,"TERMINAL",0,0,"[2025-11-03 13:15:08] Init torch distributed begin.\r\n",,terminal_output +97,348951,"TERMINAL",0,0,"[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n",,terminal_output +98,349007,"TERMINAL",0,0,"[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n[2025-11-03 13:15:08] Init torch distributed ends. mem usage=0.00 GB\r\n",,terminal_output +99,349159,"TERMINAL",0,0,"[2025-11-03 13:15:09] MOE_RUNNER_BACKEND is not initialized, the backend will be automatically selected\r\n",,terminal_output +100,349970,"TERMINAL",0,0,"[2025-11-03 13:15:09] INFO trace.py:48: opentelemetry package is not installed, tracing disabled\r\n",,terminal_output +101,354706,"TERMINAL",0,0,"[2025-11-03 13:15:14] Load weight begin. avail mem=78.68 GB\r\n",,terminal_output +102,355343,"TERMINAL",0,0,"[2025-11-03 13:15:15] TensorFlow version 2.20.0 available.\r\n",,terminal_output +103,360584,"TERMINAL",0,0,"[2025-11-03 13:15:20] Using model weights format ['*.safetensors']\r\n",,terminal_output +104,361111,"TERMINAL",0,0,"[2025-11-03 13:15:20] No model.safetensors.index.json found in remote.\r\n\rLoading safetensors checkpoint shards: 0% Completed | 0/1 [00:00>> [?12l[?25h",,terminal_output +125,953679,"TERMINAL",0,0,"[?25l\n>>> import requests... [?12l[?25h[?25l\n... [?12l[?25h[?25l... 
url = f""http://localhost:{port}/v1/chat/completions""[?12l[?25h",,terminal_output +126,954821,"TERMINAL",0,0,"",,terminal_output +127,954978,"TERMINAL",0,0,"",,terminal_output +128,955111,"TERMINAL",0,0,"",,terminal_output +129,955280,"TERMINAL",0,0,"",,terminal_output +130,956069,"TERMINAL",0,0,"",,terminal_output +131,956484,"TERMINAL",0,0,"[?25l... url = f""http://localhost:{port/v1/chat/completions""[?12l[?25h",,terminal_output +132,957029,"TERMINAL",0,0,"[?25l... url = f""http://localhost:{por/v1/chat/completions""[?12l[?25h[?25l... url = f""http://localhost:{po/v1/chat/completions""[?12l[?25h[?25l... url = f""http://localhost:{p/v1/chat/completions""[?12l[?25h[?25l... url = f""http://localhost:{/v1/chat/completions""[?12l[?25h[?25l... url = f""http://localhost:/v1/chat/completions""[?12l[?25h[?25l... url = f""http://localhost/v1/chat/completions""[?12l[?25h[?25l... url = f""http://localhos/v1/chat/completions""[?12l[?25h[?25l... url = f""http://localho/v1/chat/completions""[?12l[?25h[?25l... url = f""http://localh/v1/chat/completions""[?12l[?25h[?25l... url = f""http://local/v1/chat/completions""[?12l[?25h",,terminal_output +133,957146,"TERMINAL",0,0,"[?25l... url = f""http://loca/v1/chat/completions""[?12l[?25h",,terminal_output +134,957298,"TERMINAL",0,0,"[?25l... url = f""http://loc/v1/chat/completions""[?12l[?25h",,terminal_output +135,957432,"TERMINAL",0,0,"[?25l... url = f""http://lo/v1/chat/completions""[?12l[?25h",,terminal_output +136,957569,"TERMINAL",0,0,"[?25l... url = f""http://l/v1/chat/completions""[?12l[?25h",,terminal_output +137,958025,"TERMINAL",0,0,"[?25l... url = f""http:///v1/chat/completions""[?12l[?25h",,terminal_output +138,959244,"TERMINAL",0,0,"",,terminal_focus +139,960619,"TERMINAL",0,0,"squeue",,terminal_command +140,960623,"TERMINAL",0,0,"]633;C JOBID USER PARTITION NODES CPUS ST SUBMIT_TIME START_TIME TIME TIME_LIMIT NODELIST(REASON)\r\n 33333 franz.sram interacti 1 20 R 2025-11-03T13:14:01 2025-11-03T13:14:01 11:19 1-00:00:00 hai003\r\n 33317 xiao.liu interacti 1 128 R 2025-11-02T17:43:38 2025-11-02T17:43:38 19:41:42 23:59:00 hai006\r\n 33328 kalyan.nad standard 1 64 R 2025-11-03T11:56:23 2025-11-03T11:56:38 1:28:42 1-00:00:00 hai002\r\n 33318 xiao.liu standard 1 128 R 2025-11-02T19:29:40 2025-11-02T19:30:38 17:54:42 23:59:00 hai004\r\n]0;franz.srambical@hai-login1:~/jafar/slurm/dev/franz/berlin/crowd-pilot",,terminal_output +141,965159,"TERMINAL",0,0,"python",,terminal_focus +142,965738,"TERMINAL",0,0,"srun",,terminal_focus +143,966961,"TERMINAL",0,0,"python",,terminal_focus +144,968671,"TERMINAL",0,0,"[?25l... url = f""http://h/v1/chat/completions""[?12l[?25h",,terminal_output +145,968869,"TERMINAL",0,0,"[?25l... url = f""http://ha/v1/chat/completions""[?12l[?25h[?25l... url = f""http://hai/v1/chat/completions""[?12l[?25h",,terminal_output +146,969460,"TERMINAL",0,0,"[?25l... url = f""http://hai0/v1/chat/completions""[?12l[?25h",,terminal_output +147,969519,"TERMINAL",0,0,"[?25l... url = f""http://hai00/v1/chat/completions""[?12l[?25h",,terminal_output +148,969617,"TERMINAL",0,0,"[?25l... url = f""http://hai003/v1/chat/completions""[?12l[?25h",,terminal_output +149,970391,"TERMINAL",0,0,"[?25l... url = f""http://hai003:/v1/chat/completions""[?12l[?25h",,terminal_output +150,973152,"TERMINAL",0,0,"srun",,terminal_focus +151,975372,"TERMINAL",0,0,"python",,terminal_focus +152,977464,"TERMINAL",0,0,"[?25l... url = f""http://hai003:3/v1/chat/completions""[?12l[?25h",,terminal_output +153,977548,"TERMINAL",0,0,"[?25l... 
url = f""http://hai003:30/v1/chat/completions""[?12l[?25h",,terminal_output +154,978092,"TERMINAL",0,0,"[?25l... url = f""http://hai003:300/v1/chat/completions""[?12l[?25h",,terminal_output +155,978256,"TERMINAL",0,0,"[?25l... url = f""http://hai003:3000/v1/chat/completions""[?12l[?25h",,terminal_output +156,978414,"TERMINAL",0,0,"[?25l... url = f""http://hai003:30000/v1/chat/completions""[?12l[?25h",,terminal_output +157,978848,"TERMINAL",0,0,"",,terminal_output +158,979078,"TERMINAL",0,0,"[?25l\n... [?12l[?25h",,terminal_output +159,981853,"TERMINAL",0,0,"\n\r[?2004l[?1l>",,terminal_output +160,982027,"TERMINAL",0,0,"[?2004h[?1h=[?25l\n>>> [?12l[?25h",,terminal_output +161,986222,"TERMINAL",0,0,"[?25l\n>>> data = {... [?12l[?25h[?25l\n...  ""model"": ""qwen/qwen2.5-0.5b-instruct"",... [?12l[?25h[?25l\n...  ""messages"": [{""role"": ""user"", ""content"": ""What is the capital of France?""}],... [?12l[?25h[?25l... }[?12l[?25h",,terminal_output +162,986446,"TERMINAL",0,0,"\n\r[?2004l[?1l>[?2004h[?1h=[?25l\n>>> [?12l[?25h",,terminal_output +163,986621,"TERMINAL",0,0,"\n\r[?2004l[?1l>[?2004h[?1h=[?25l\n>>> [?12l[?25h",,terminal_output +164,990025,"TERMINAL",0,0,"[?25l>>> response = requests.post(url, json=data)[?12l[?25h",,terminal_output +165,990225,"TERMINAL",0,0,"\n\r[?2004l[?1l>",,terminal_output +166,990235,"TERMINAL",0,0,"[2025-11-03 13:25:50] Prefill batch, #new-seq: 1, #new-token: 36, #cached-token: 0, token usage: 0.00, #running-req: 0, #queue-req: 0, \r\n",,terminal_output +167,990372,"TERMINAL",0,0,"\r\n[?2004h[?1h=[?25l\n>>> [?12l[?25h\n\r[?2004l[?1l>[?2004h[?1h=[?25l\n>>> [?12l[?25h",,terminal_output +168,990374,"TERMINAL",0,0,"[2025-11-03 13:25:50] INFO: 10.86.2.251:49732 - ""POST /v1/chat/completions HTTP/1.1"" 200 OK\r\n",,terminal_output +169,992718,"TERMINAL",0,0,"[?25l>>> p[?12l[?25h",,terminal_output +170,992795,"TERMINAL",0,0,"[?25l>>> pr[?12l[?25h",,terminal_output +171,992936,"TERMINAL",0,0,"[?25l>>> pri[?12l[?25h[?25l>>> prin[?12l[?25h",,terminal_output +172,993015,"TERMINAL",0,0,"[?25l>>> print[?12l[?25h",,terminal_output +173,993292,"TERMINAL",0,0,"[?25l>>> print([?12l[?25h",,terminal_output +174,993402,"TERMINAL",0,0,"[?25l>>> print()[?12l[?25h",,terminal_output +175,993786,"TERMINAL",0,0,"",,terminal_output +176,994396,"TERMINAL",0,0,"[?25l>>> print(r)[?12l[?25h",,terminal_output +177,994490,"TERMINAL",0,0,"[?25l>>> print(re)[?12l[?25h",,terminal_output +178,994656,"TERMINAL",0,0,"[?25l>>> print(res)[?12l[?25h",,terminal_output +179,994799,"TERMINAL",0,0,"[?25l>>> print(response)[?12l[?25h",,terminal_output +180,996827,"TERMINAL",0,0,"[?25l>>> print(response.)[?12l[?25h",,terminal_output +181,996908,"TERMINAL",0,0,"[?25l>>> print(response.s)[?12l[?25h",,terminal_output +182,997008,"TERMINAL",0,0,"[?25l>>> print(response.sj)[?12l[?25h",,terminal_output +183,997447,"TERMINAL",0,0,"[?25l>>> print(response.s)[?12l[?25h",,terminal_output +184,997591,"TERMINAL",0,0,"[?25l>>> print(response.)[?12l[?25h",,terminal_output +185,997715,"TERMINAL",0,0,"[?25l>>> print(response.j)[?12l[?25h",,terminal_output +186,997801,"TERMINAL",0,0,"[?25l>>> print(response.js)[?12l[?25h",,terminal_output +187,997933,"TERMINAL",0,0,"[?25l>>> print(response.json()[?12l[?25h",,terminal_output +188,998729,"TERMINAL",0,0,"",,terminal_output +189,998898,"TERMINAL",0,0,"[?25l\n... [?12l[?25h",,terminal_output +190,1000184,"TERMINAL",0,0,"[?25l... _[?12l[?25h",,terminal_output +191,1000759,"TERMINAL",0,0,"[?25l... [?12l[?25h",,terminal_output +192,1001046,"TERMINAL",0,0,"[?25l... 
)[?12l[?25h",,terminal_output +193,1001598,"TERMINAL",0,0,"\n\r[?2004l[?1l>{'id': 'b76c04917df44d9aaf99082ae9769ded', 'object': 'chat.completion', 'created': 1762172750, 'model': 'qwen/qwen2.5-0.5b-instruct', 'choices': [{'index': 0, 'message': {'role': 'assistant', 'content': 'The capital of France is Paris.', 'reasoning_content': None, 'tool_calls': None}, 'logprobs': None, 'finish_reason': 'stop', 'matched_stop': 151645}], 'usage': {'prompt_tokens': 36, 'total_tokens': 44, 'completion_tokens': 8, 'prompt_tokens_details': None, 'reasoning_tokens': 0}, 'metadata': {'weight_version': 'default'}}\r\n[?2004h[?1h=[?25l\n>>> [?12l[?25h",,terminal_output +194,1024494,"TERMINAL",0,0,"srun",,terminal_focus +195,1055248,"TERMINAL",0,0,"python",,terminal_focus +196,1241705,"TERMINAL",0,0,"[2025-11-03 13:30:01] SIGTERM received. signum=None frame=None. Draining requests and shutting down...\r\n",,terminal_output +197,1243235,"TERMINAL",0,0,"[2025-11-03 13:30:03] Gracefully exiting... Remaining number of requests 0. Remaining requests remaining_rids=[].\r\n",,terminal_output +198,1243545,"TERMINAL",0,0,"Killed\r\n]0;franz.srambical@hai-login1:~/crowd-pilot[?2004h(crowd-pilot) [franz.srambical@hai003.haicore.berlin:~/crowd-pilot] $ ",,terminal_output +199,1381345,"TERMINAL",0,0,"[?25l\n>>> print(response.json()... )[?12l[?25h",,terminal_output +200,1381698,"TERMINAL",0,0,"[?25l>>> response = requests.post(url, json=data)[?12l[?25h",,terminal_output +201,1382817,"TERMINAL",0,0,"[?25l\n\n\n>>> data = {...  ""model"": ""qwen/qwen2.5-0.5b-instruct"",...  ""messages"": [{""role"": ""user"", ""content"": ""What is the capital of France?""}],... }[?12l[?25h",,terminal_output +202,1383740,"TERMINAL",0,0,"[?25l>>> import requests... ... url = f""http://hai003:30000/v1/chat/completions""[?12l[?25h",,terminal_output +203,1385966,"TERMINAL",0,0,"[?25l\n... [?12l[?25h",,terminal_output +204,1386716,"TERMINAL",0,0,"\n\r[?2004l[?1l>[?2004h[?1h=[?25l\n>>> [?12l[?25h",,terminal_output +205,1387796,"TERMINAL",0,0,"[?25l\n\n>>> import requests... ... url = f""http://hai003:30000/v1/chat/completions""[?12l[?25h",,terminal_output +206,1387869,"TERMINAL",0,0,"[?25l>>> print(response.json()... )[?12l[?25h",,terminal_output +207,1388047,"TERMINAL",0,0,"[?25l>>> response = requests.post(url, json=data)[?12l[?25h",,terminal_output +208,1388536,"TERMINAL",0,0,"[?25l\n\n\n>>> data = {...  ""model"": ""qwen/qwen2.5-0.5b-instruct"",...  ""messages"": [{""role"": ""user"", ""content"": ""What is the capital of France?""}],... }[?12l[?25h",,terminal_output +209,1390082,"TERMINAL",0,0,"[?25l>>> import requests... ... url = f""http://hai003:30000/v1/chat/completions""[?12l[?25h",,terminal_output +210,1390618,"TERMINAL",0,0,"[?25l\n>>> data = {...  ""model"": ""qwen/qwen2.5-0.5b-instruct"",...  ""messages"": [{""role"": ""user"", ""content"": ""What is the capital of France?""}],... }[?12l[?25h",,terminal_output +211,1391047,"TERMINAL",0,0,"\n\r[?2004l[?1l>[?2004h[?1h=[?25l\n>>> [?12l[?25h",,terminal_output +212,1391308,"TERMINAL",0,0,"\n\r[?2004l[?1l>[?2004h[?1h=[?25l\n>>> [?12l[?25h",,terminal_output +213,1391930,"TERMINAL",0,0,"[?25l\n\n\n>>> data = {...  ""model"": ""qwen/qwen2.5-0.5b-instruct"",...  ""messages"": [{""role"": ""user"", ""content"": ""What is the capital of France?""}],... }[?12l[?25h",,terminal_output +214,1392014,"TERMINAL",0,0,"[?25l>>> import requests... ... url = f""http://hai003:30000/v1/chat/completions""[?12l[?25h",,terminal_output +215,1392471,"TERMINAL",0,0,"[?25l>>> print(response.json()... 
)[?12l[?25h",,terminal_output +216,1392947,"TERMINAL",0,0,"[?25l>>> response = requests.post(url, json=data)[?12l[?25h",,terminal_output +217,1393731,"TERMINAL",0,0,"\n\r[?2004l[?1l>Traceback (most recent call last):\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/urllib3/connection.py"", line 198, in _new_conn\r\n sock = connection.create_connection(\r\n (self._dns_host, self.port),\r\n ...<2 lines>...\r\n socket_options=self.socket_options,\r\n )\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/urllib3/util/connection.py"", line 85, in create_connection\r\n raise err\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/urllib3/util/connection.py"", line 73, in create_connection\r\n sock.connect(sa)\r\n ~~~~~~~~~~~~^^^^\r\nConnectionRefusedError: [Errno 111] Connection refused\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/urllib3/connectionpool.py"", line 787, in urlopen\r\n response = self._make_request(\r\n conn,\r\n ...<10 lines>...\r\n **response_kw,\r\n )\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/urllib3/connectionpool.py"", line 493, in _make_request\r\n conn.request(\r\n ~~~~~~~~~~~~^\r\n method,\r\n ^^^^^^^\r\n ...<6 lines>...\r\n enforce_content_length=enforce_content_length,\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n )\r\n ^\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/urllib3/connection.py"", line 494, in request\r\n self.endheaders()\r\n ~~~~~~~~~~~~~~~^^\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.13.5-linux-x86_64-gnu/lib/python3.13/http/client.py"", line 1333, in endheaders\r\n self._send_output(message_body, encode_chunked=encode_chunked)\r\n ~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.13.5-linux-x86_64-gnu/lib/python3.13/http/client.py"", line 1093, in _send_output\r\n self.send(msg)\r\n ~~~~~~~~~^^^^^\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.13.5-linux-x86_64-gnu/lib/python3.13/http/client.py"", line 1037, in send\r\n self.connect()\r\n ~~~~~~~~~~~~^^\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/urllib3/connection.py"", line 325, in connect\r\n self.sock = self._new_conn()\r\n ~~~~~~~~~~~~~~^^\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/urllib3/connection.py"", line 213, in _new_conn\r\n raise NewConnectionError(\r\n self, f""Failed to establish a new connection: {e}""\r\n ) from e\r\nurllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/requests/adapters.py"", line 644, in send\r\n resp = conn.urlopen(\r\n method=request.method,\r\n ...<9 lines>...\r\n chunked=chunked,\r\n )\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/urllib3/connectionpool.py"", line 841, in urlopen\r\n retries = retries.increment(\r\n method, url, error=new_e, _pool=self, _stacktrace=sys.exc_info()[2]\r\n )\r\n File 
""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/urllib3/util/retry.py"", line 519, in increment\r\n raise MaxRetryError(_pool, url, reason) from reason # type: ignore[arg-type]\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nurllib3.exceptions.MaxRetryError: HTTPConnectionPool(host='hai003', port=30000): Max retries exceeded with url: /v1/chat/completions (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File """", line 1, in \r\n response = requests.post(url, json=data)\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/requests/api.py"", line 115, in post\r\n return request(""post"", url, data=data, json=json, **kwargs)\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/requests/api.py"", line 59, in request\r\n return session.request(method=method, url=url, **kwargs)\r\n ~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/requests/sessions.py"", line 589, in request\r\n resp = self.send(prep, **send_kwargs)\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/requests/sessions.py"", line 703, in send\r\n r = adapter.send(request, **kwargs)\r\n File ""/fast/home/franz.srambical/crowd-pilot/.venv/lib/python3.13/site-packages/requests/adapters.py"", line 677, in send\r\n raise ConnectionError(e, request=request)\r\nrequests.exceptions.ConnectionError: HTTPConnectionPool(host='hai003', port=30000): Max retries exceeded with url: /v1/chat/completions (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))\r\n[?2004h[?1h=[?25l\n>>> [?12l[?25h",,terminal_output +218,1398331,"TERMINAL",0,0,"srun",,terminal_focus +219,1403264,"TERMINAL",0,0,"python3 -m sglang.launch_server --model-path qwen/qwen2.5-0.5b-instruct --host 0.0.0.0",,terminal_output +220,1403498,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +221,1413006,"TERMINAL",0,0,"2025-11-03 13:32:52.861020: I tensorflow/core/util/port.cc:153] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\r\n",,terminal_output +222,1413064,"TERMINAL",0,0,"2025-11-03 13:32:52.916612: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\r\nTo enable the following instructions: AVX2 AVX512F AVX512_VNNI AVX512_BF16 AVX512_FP16 AVX_VNNI AMX_TILE AMX_INT8 AMX_BF16 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\r\n",,terminal_output +223,1415760,"TERMINAL",0,0,"2025-11-03 13:32:55.587656: I tensorflow/core/util/port.cc:153] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\r\n",,terminal_output +224,1422741,"TERMINAL",0,0,"[2025-11-03 13:33:02] WARNING server_args.py:1104: Attention backend not explicitly specified. 
Use fa3 backend by default.\r\n[2025-11-03 13:33:02] INFO trace.py:48: opentelemetry package is not installed, tracing disabled\r\n",,terminal_output +225,1423202,"TERMINAL",0,0,"[2025-11-03 13:33:03] server_args=ServerArgs(model_path='qwen/qwen2.5-0.5b-instruct', tokenizer_path='qwen/qwen2.5-0.5b-instruct', tokenizer_mode='auto', tokenizer_worker_num=1, skip_tokenizer_init=False, load_format='auto', model_loader_extra_config='{}', trust_remote_code=False, context_length=None, is_embedding=False, enable_multimodal=None, revision=None, model_impl='auto', host='0.0.0.0', port=30000, grpc_mode=False, skip_server_warmup=False, warmups=None, nccl_port=None, checkpoint_engine_wait_weights_before_ready=False, dtype='auto', quantization=None, quantization_param_path=None, kv_cache_dtype='auto', enable_fp32_lm_head=False, modelopt_quant=None, modelopt_checkpoint_restore_path=None, modelopt_checkpoint_save_path=None, modelopt_export_path=None, quantize_and_serve=False, mem_fraction_static=0.835, max_running_requests=None, max_queued_requests=None, max_total_tokens=None, chunked_prefill_size=8192, max_prefill_tokens=16384, schedule_policy='fcfs', enable_priority_scheduling=False, abort_on_priority_when_disabled=False, schedule_low_priority_values_first=False, priority_scheduling_preemption_threshold=10, schedule_conservativeness=1.0, page_size=1, hybrid_kvcache_ratio=None, swa_full_tokens_ratio=0.8, disable_hybrid_swa_memory=False, radix_eviction_policy='lru', device='cuda', tp_size=1, pp_size=1, pp_max_micro_batch_size=None, stream_interval=1, stream_output=False, random_seed=417737316, constrained_json_whitespace_pattern=None, constrained_json_disable_any_whitespace=False, watchdog_timeout=300, dist_timeout=None, download_dir=None, base_gpu_id=0, gpu_id_step=1, sleep_on_idle=False, log_level='info', log_level_http=None, log_requests=False, log_requests_level=2, crash_dump_folder=None, show_time_cost=False, enable_metrics=False, enable_metrics_for_all_schedulers=False, tokenizer_metrics_custom_labels_header='x-custom-labels', tokenizer_metrics_allowed_custom_labels=None, bucket_time_to_first_token=None, bucket_inter_token_latency=None, bucket_e2e_request_latency=None, collect_tokens_histogram=False, prompt_tokens_buckets=None, generation_tokens_buckets=None, gc_warning_threshold_secs=0.0, decode_log_interval=40, enable_request_time_stats_logging=False, kv_events_config=None, enable_trace=False, oltp_traces_endpoint='localhost:4317', api_key=None, served_model_name='qwen/qwen2.5-0.5b-instruct', weight_version='default', chat_template=None, completion_template=None, file_storage_path='sglang_storage', enable_cache_report=False, reasoning_parser=None, tool_call_parser=None, tool_server=None, sampling_defaults='model', dp_size=1, load_balance_method='round_robin', load_watch_interval=0.1, prefill_round_robin_balance=False, dist_init_addr=None, nnodes=1, node_rank=0, json_model_override_args='{}', preferred_sampling_params=None, enable_lora=None, max_lora_rank=None, lora_target_modules=None, lora_paths=None, max_loaded_loras=None, max_loras_per_batch=8, lora_eviction_policy='lru', lora_backend='triton', max_lora_chunk_size=16, attention_backend='fa3', decode_attention_backend=None, prefill_attention_backend=None, sampling_backend='flashinfer', grammar_backend='xgrammar', mm_attention_backend=None, nsa_prefill_backend='flashmla_sparse', nsa_decode_backend='fa3', speculative_algorithm=None, speculative_draft_model_path=None, speculative_draft_model_revision=None, speculative_draft_load_format=None, 
speculative_num_steps=None, speculative_eagle_topk=None, speculative_num_draft_tokens=None, speculative_accept_threshold_single=1.0, speculative_accept_threshold_acc=1.0, speculative_token_map=None, speculative_attention_mode='prefill', speculative_ngram_min_match_window_size=1, speculative_ngram_max_match_window_size=12, speculative_ngram_min_bfs_breadth=1, speculative_ngram_max_bfs_breadth=10, speculative_ngram_match_type='BFS', speculative_ngram_branch_length=18, speculative_ngram_capacity=10000000, ep_size=1, moe_a2a_backend='none', moe_runner_backend='auto', flashinfer_mxfp4_moe_precision='default', enable_flashinfer_allreduce_fusion=False, deepep_mode='auto', ep_num_redundant_experts=0, ep_dispatch_algorithm='static', init_expert_location='trivial', enable_eplb=False, eplb_algorithm='auto', eplb_rebalance_num_iterations=1000, eplb_rebalance_layers_per_chunk=None, eplb_min_rebalancing_utilization_threshold=1.0, expert_distribution_recorder_mode=None, expert_distribution_recorder_buffer_size=1000, enable_expert_distribution_metrics=False, deepep_config=None, moe_dense_tp_size=None, elastic_ep_backend=None, mooncake_ib_device=None, max_mamba_cache_size=None, mamba_ssm_dtype='float32', mamba_full_memory_ratio=0.9, enable_hierarchical_cache=False, hicache_ratio=2.0, hicache_size=0, hicache_write_policy='write_through', hicache_io_backend='kernel', hicache_mem_layout='layer_first', hicache_storage_backend=None, hicache_storage_prefetch_policy='best_effort', hicache_storage_backend_extra_config=None, enable_lmcache=False, kt_amx_weight_path=None, kt_amx_method='AMXINT4', kt_cpuinfer=None, kt_threadpool_count=2, kt_num_gpu_experts=None, enable_double_sparsity=False, ds_channel_config_path=None, ds_heavy_channel_num=32, ds_heavy_token_num=256, ds_heavy_channel_type='qk', ds_sparse_decode_threshold=4096, cpu_offload_gb=0, offload_group_size=-1, offload_num_in_group=1, offload_prefetch_step=1, offload_mode='cpu', multi_item_scoring_delimiter=None, disable_radix_cache=False, cuda_graph_max_bs=256, cuda_graph_bs=[1, 2, 4, 8, 12, 16, 24, 32, 40, 48, 56, 64, 72, 80, 88, 96, 104, 112, 120, 128, 136, 144, 152, 160, 168, 176, 184, 192, 200, 208, 216, 224, 232, 240, 248, 256], disable_cuda_graph=False, disable_cuda_graph_padding=False, enable_profile_cuda_graph=False, enable_cudagraph_gc=False, enable_nccl_nvls=False, enable_symm_mem=False, disable_flashinfer_cutlass_moe_fp4_allgather=False, enable_tokenizer_batch_encode=False, disable_tokenizer_batch_decode=False, disable_outlines_disk_cache=False, disable_custom_all_reduce=False, enable_mscclpp=False, enable_torch_symm_mem=False, disable_overlap_schedule=False, enable_mixed_chunk=False, enable_dp_attention=False, enable_dp_lm_head=False, enable_two_batch_overlap=False, enable_single_batch_overlap=False, tbo_token_distribution_threshold=0.48, enable_torch_compile=False, enable_piecewise_cuda_graph=False, torch_compile_max_bs=32, piecewise_cuda_graph_max_tokens=4096, piecewise_cuda_graph_tokens=[4, 8, 12, 16, 20, 24, 28, 32, 48, 64, 80, 96, 112, 128, 144, 160, 176, 192, 208, 224, 240, 256, 288, 320, 352, 384, 416, 448, 480, 512, 640, 768, 896, 1024, 1152, 1280, 1408, 1536, 1664, 1792, 1920, 2048, 2176, 2304, 2432, 2560, 2688, 2816, 2944, 3072, 3200, 3328, 3456, 3584, 3712, 3840, 3968, 4096], piecewise_cuda_graph_compiler='eager', torchao_config='', enable_nan_detection=False, enable_p2p_check=False, triton_attention_reduce_in_fp32=False, triton_attention_num_kv_splits=8, triton_attention_split_tile_size=None, num_continuous_decode_steps=1, 
delete_ckpt_after_loading=False, enable_memory_saver=False, enable_weights_cpu_backup=False, allow_auto_truncate=False, enable_custom_logit_processor=False, flashinfer_mla_disable_ragged=False, disable_shared_experts_fusion=False, disable_chunked_prefix_cache=False, disable_fast_image_processor=False, keep_mm_feature_on_device=False, enable_return_hidden_states=False, scheduler_recv_interval=1, numa_node=None, enable_deterministic_inference=False, rl_on_policy_target=None, enable_dynamic_batch_tokenizer=False, dynamic_batch_tokenizer_batch_size=32, dynamic_batch_tokenizer_batch_timeout=0.002, debug_tensor_dump_output_folder=None, debug_tensor_dump_input_file=None, debug_tensor_dump_inject=False, disaggregation_mode='null', disaggregation_transfer_backend='mooncake', disaggregation_bootstrap_port=8998, disaggregation_decode_tp=None, disaggregation_decode_dp=None, disaggregation_prefill_pp=1, disaggregation_ib_device=None, disaggregation_decode_enable_offload_kvcache=False, num_reserved_decode_tokens=512, disaggregation_decode_polling_interval=1, custom_weight_loader=[], weight_loader_disable_mmap=False, remote_instance_weight_loader_seed_instance_ip=None, remote_instance_weight_loader_seed_instance_service_port=None, remote_instance_weight_loader_send_weights_group_ports=None, enable_pdmux=False, pdmux_config_path=None, sm_group_num=8)\r\n",,terminal_output +226,1424166,"TERMINAL",0,0,"[2025-11-03 13:33:04] Using default HuggingFace chat template with detected content format: string\r\n",,terminal_output +227,1440312,"TERMINAL",0,0,"[2025-11-03 13:33:20] INFO trace.py:48: opentelemetry package is not installed, tracing disabled\r\n",,terminal_output +228,1445360,"TERMINAL",0,0,"[2025-11-03 13:33:25] INFO trace.py:48: opentelemetry package is not installed, tracing disabled\r\n",,terminal_output +229,1447450,"TERMINAL",0,0,"[2025-11-03 13:33:27] Init torch distributed begin.\r\n",,terminal_output +230,1447715,"TERMINAL",0,0,"[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n[2025-11-03 13:33:27] Init torch distributed ends. mem usage=0.00 GB\r\n",,terminal_output +231,1447771,"TERMINAL",0,0,"[2025-11-03 13:33:27] MOE_RUNNER_BACKEND is not initialized, the backend will be automatically selected\r\n",,terminal_output +232,1448948,"TERMINAL",0,0,"[2025-11-03 13:33:28] Load weight begin. 
avail mem=78.68 GB\r\n",,terminal_output +233,1449042,"TERMINAL",0,0,"[2025-11-03 13:33:28] TensorFlow version 2.20.0 available.\r\n",,terminal_output +234,1450092,"TERMINAL",0,0,"[2025-11-03 13:33:29] Using model weights format ['*.safetensors']\r\n",,terminal_output +235,1450641,"TERMINAL",0,0,"[2025-11-03 13:33:30] No model.safetensors.index.json found in remote.\r\n\rLoading safetensors checkpoint shards: 0% Completed | 0/1 [00:00 run.LocalExecutor:\n # Env vars for jobs are configured here\n env_vars = {\n ""TORCH_NCCL_AVOID_RECORD_STREAMS"": ""1"",\n ""NCCL_NVLS_ENABLE"": ""0"",\n ""NVTE_DP_AMAX_REDUCE_INTERVAL"": ""0"",\n ""NVTE_ASYNC_AMAX_REDUCTION"": ""1"",\n }\n\n executor = run.LocalExecutor(ntasks_per_node=devices, launcher=""torchrun"", env_vars=env_vars)\n\n return executor\n\ndef run_finetuning():\n recipe.resume.restore_config.path = ""/fast/project/HFMI_SynergyUnit/tab_model/data/checkpoints/nemo_converted_weights_qwen3-coder-30b-a3b-instruct/""\n recipe.data = run.Config(\n llm.ChatDataModule,\n dataset_root=DATASET_DIR,\n seq_length=16384,\n global_batch_size=32,\n micro_batch_size=1,\n num_workers=0,\n )\n\n recipe.trainer.strategy.tensor_model_parallel_size = 2\n recipe.trainer.strategy.expert_model_parallel_size = 2\n\n # Enable activation checkpointing to fit 30B model on single H100\n recipe.model.config.recompute_granularity = ""full""\n recipe.model.config.recompute_method = ""uniform""\n recipe.model.config.recompute_num_layers = 1\n\n executor = local_executor_torchrun(nodes=recipe.trainer.num_nodes, devices=recipe.trainer.devices)\n\n run.run(recipe, executor=executor, name=NAME)\n\n# This condition is necessary for the script to be compatible with Python's multiprocessing module.\nif __name__ == ""__main__"":\n run_finetuning()",python,tab +2,360,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"10:56:31 AM [info] Activating crowd-code\n10:56:31 AM [info] Recording started\n10:56:31 AM [info] Initializing git provider using file system watchers...\n",Log,tab +3,413,"TERMINAL",0,0,"bash",,terminal_focus +4,413,"slurm/dev/franz/berlin/crowd-pilot/nemo/test_lora_finetune_16k.py",0,0,"",python,tab +5,414,"TERMINAL",0,0,"bash",,terminal_focus +6,565,"TERMINAL",0,0,"source/home/franz.srambical/crowd-pilot/.nemo_venv/bin/activate",,terminal_command +7,566,"TERMINAL",0,0,"source /home/franz.srambical/crowd-pilot/.nemo_venv/bin/activate",,terminal_command +8,568,"TERMINAL",0,0,"]633;C]0;franz.srambical@hai-login2:~/crowd-pilot",,terminal_output +9,568,"TERMINAL",0,0,"]633;C]0;franz.srambical@hai-login2:~/crowd-pilot",,terminal_output +10,3228,"TERMINAL",0,0,"bash",,terminal_focus +11,3843,"TERMINAL",0,0,"bash",,terminal_focus +12,4449,"TERMINAL",0,0,"bash",,terminal_focus +13,17534,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",0,0,"#!/bin/bash\n\nset -uex\n\nOUTPUT_DIR=""/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_32k_tokens/""\nCSV_ROOT=""/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/""\n\n# rough estimate of characters per token\nTARGET_CHARS_PER_CONVERSATION=$((32768*3))\n\nuv run crowd_pilot/serialize_dataset_nemo_json.py --csv_root=$CSV_ROOT --output_dir=$OUTPUT_DIR --target_chars_per_conversation=$TARGET_CHARS_PER_CONVERSATION",shellscript,tab +14,18406,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",268,0,"",shellscript,selection_command +15,18501,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",225,0,"",shellscript,selection_command 
+16,18641,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",184,0,"",shellscript,selection_command +17,18790,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",183,0,"",shellscript,selection_command +18,18917,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",113,0,"",shellscript,selection_command +19,19070,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",23,0,"",shellscript,selection_command +20,19740,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",33,0,"",shellscript,selection_command +21,19904,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",36,0,"",shellscript,selection_command +22,20418,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,0,"",shellscript,selection_command +23,20503,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,1,"/",shellscript,selection_command +24,20557,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,2,"/f",shellscript,selection_command +25,20822,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,6,"/fast/",shellscript,selection_command +26,20851,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,7,"/fast/p",shellscript,selection_command +27,20883,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,14,"/fast/project/",shellscript,selection_command +28,20916,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,15,"/fast/project/H",shellscript,selection_command +29,20950,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,31,"/fast/project/HFMI_SynergyUnit/",shellscript,selection_command +30,20983,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,32,"/fast/project/HFMI_SynergyUnit/t",shellscript,selection_command +31,21016,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,41,"/fast/project/HFMI_SynergyUnit/tab_model/",shellscript,selection_command +32,21050,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,42,"/fast/project/HFMI_SynergyUnit/tab_model/d",shellscript,selection_command +33,21083,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,46,"/fast/project/HFMI_SynergyUnit/tab_model/data/",shellscript,selection_command +34,21136,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,47,"/fast/project/HFMI_SynergyUnit/tab_model/data/n",shellscript,selection_command +35,21154,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,76,"/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_32k_tokens/",shellscript,selection_command +36,21182,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,79,"/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_32k_tokens/""\nC",shellscript,selection_command +37,21215,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,87,"/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_32k_tokens/""\nCSV_ROOT=",shellscript,selection_command +38,21335,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,90,"/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_32k_tokens/""\nCSV_ROOT=""/f",shellscript,selection_command 
+39,21585,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,87,"/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_32k_tokens/""\nCSV_ROOT=",shellscript,selection_command +40,21742,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,79,"/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_32k_tokens/""\nC",shellscript,selection_command +41,22034,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",35,76,"/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_32k_tokens/",shellscript,selection_command +42,22848,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",110,0,"",shellscript,selection_command +43,24290,"TERMINAL",0,0,"ls /fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_32k_tokens/",,terminal_command +44,24297,"TERMINAL",0,0,"]633;Cmetadata.json training.jsonl training.jsonl.idx.info training.jsonl.idx.npy validation.jsonl validation.jsonl.idx.info validation.jsonl.idx.npy\r\n]0;franz.srambical@hai-login2:~/crowd-pilot",,terminal_output +45,34039,"TERMINAL",0,0,"ls /fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_128k_tokens/",,terminal_command +46,34046,"TERMINAL",0,0,"]633;Cmetadata.json training.jsonl training.jsonl.idx.info training.jsonl.idx.npy validation.jsonl validation.jsonl.idx.info validation.jsonl.idx.npy\r\n]0;franz.srambical@hai-login2:~/crowd-pilot",,terminal_output +47,41238,"TERMINAL",0,0,"cat /fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_128k_tokens/metadata.json",,terminal_command +48,41241,"TERMINAL",0,0,"]633;C{\r\n ""config"": {\r\n ""csv_root"": ""/fast/project/HFMI_SynergyUnit/jafar_ws/data/crowd-pilot/crowd-code-0.1/csv/"",\r\n ""output_dir"": ""/fast/project/HFMI_SynergyUnit/jafar_ws/data/crowd-pilot/crowd-code-0.1/nemo_jsonl/"",\r\n ""min_session_turns"": 10,\r\n ""max_docs"": null,\r\n ""val_ratio"": 0.1,\r\n ""target_chars_per_conversation"": 393216,\r\n ""target_chars_per_turn"": 24576\r\n },\r\n ""counts"": {\r\n ""total_sessions"": 349,\r\n ""sessions_kept"": 315,\r\n ""skipped_short_sessions"": 34,\r\n ""train_conversations"": 446,\r\n ""val_conversations"": 41,\r\n ""docs_written"": 487\r\n },\r\n ""chunk_turn_stats"": {\r\n ""all"": {\r\n ""count"": 521,\r\n ""total"": 117075,\r\n ""median"": 193.0,\r\n ""avg"": 224.71209213051824,\r\n ""min"": 1,\r\n ""max"": 993\r\n },\r\n ""kept"": {\r\n ""count"": 487,\r\n ""total"": 116875,\r\n ""median"": 218.0,\r\n ""avg"": 239.98973305954826,\r\n ""min"": 1,\r\n ""max"": 993\r\n }\r\n },\r\n ""chunk_char_stats"": {\r\n ""all"": {\r\n ""count"": 521,\r\n ""total"": 112197712,\r\n ""median"": 214523.0,\r\n ""avg"": 215350.69481765834,\r\n ""min"": 104,\r\n ""max"": 393204\r\n },\r\n ""kept"": {\r\n ""count"": 487,\r\n ""total"": 111779470,\r\n ""median"": 245474.0,\r\n ""avg"": 229526.63244353182,\r\n ""min"": 382,\r\n ""max"": 393204\r\n }\r\n },\r\n ""files"": {\r\n ""train_path"": ""/fast/project/HFMI_SynergyUnit/jafar_ws/data/crowd-pilot/crowd-code-0.1/nemo_jsonl/train.jsonl"",\r\n ""val_path"": ""/fast/project/HFMI_SynergyUnit/jafar_ws/data/crowd-pilot/crowd-code-0.1/nemo_jsonl/val.jsonl""\r\n }\r\n}]0;franz.srambical@hai-login2:~/crowd-pilot",,terminal_output +49,144772,"TERMINAL",0,0,"cat /fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_64k_tokens/metadata.json",,terminal_command +50,178003,"TERMINAL",0,0,"cat 
/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_32k_tokens/metadata.json",,terminal_command +51,225827,"TERMINAL",0,0,"cat /fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_16k_tokens/metadata.json",,terminal_command +52,225836,"TERMINAL",0,0,"]633;C{\r\n ""config"": {\r\n ""csv_root"": ""/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/"",\r\n ""output_dir"": ""/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_16k_tokens/"",\r\n ""min_session_turns"": 10,\r\n ""max_docs"": null,\r\n ""val_ratio"": 0.1,\r\n ""target_chars_per_conversation"": 49152,\r\n ""target_chars_per_turn"": 24576\r\n },\r\n ""counts"": {\r\n ""total_sessions"": 349,\r\n ""sessions_kept"": 315,\r\n ""skipped_short_sessions"": 34,\r\n ""train_conversations"": 3859,\r\n ""val_conversations"": 350,\r\n ""docs_written"": 4209\r\n },\r\n ""chunk_turn_stats"": {\r\n ""all"": {\r\n ""count"": 4243,\r\n ""total"": 128663,\r\n ""median"": 25.0,\r\n ""avg"": 30.32359179825595,\r\n ""min"": 1,\r\n ""max"": 253\r\n },\r\n ""kept"": {\r\n ""count"": 4209,\r\n ""total"": 128463,\r\n ""median"": 25.0,\r\n ""avg"": 30.521026372059872,\r\n ""min"": 1,\r\n ""max"": 253\r\n }\r\n },\r\n ""chunk_char_stats"": {\r\n ""all"": {\r\n ""count"": 4243,\r\n ""total"": 176785622,\r\n ""median"": 45949.0,\r\n ""avg"": 41665.24204572236,\r\n ""min"": 104,\r\n ""max"": 49152\r\n },\r\n ""kept"": {\r\n ""count"": 4209,\r\n ""total"": 176367380,\r\n ""median"": 46056.0,\r\n ""avg"": 41902.442385364695,\r\n ""min"": 209,\r\n ""max"": 49152\r\n }\r\n },\r\n ""files"": {\r\n ""train_path"": ""/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_16k_tokens/train.jsonl"",\r\n ""val_path"": ""/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_16k_tokens/val.jsonl""\r\n }\r\n}]0;franz.srambical@hai-login2:~/crowd-pilot",,terminal_output +53,441997,"slurm/dev/franz/berlin/crowd-pilot/nemo/test_lora_finetune_16k.py",0,0,"",python,tab +54,443769,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_32k.sh",0,0,"",shellscript,tab +55,1674191,"slurm/dev/franz/berlin/crowd-pilot/nemo/test_lora_finetune_16k.py",0,0,"",python,tab +56,1674195,"slurm/dev/franz/berlin/crowd-pilot/nemo/test_lora_finetune_16k.py",1435,0,"",python,selection_command +57,3080011,"TERMINAL",0,0,"bash",,terminal_focus +58,3081214,"TERMINAL",0,0,"bash",,terminal_focus +59,3082831,"TERMINAL",0,0,"squeue",,terminal_command +60,3082845,"TERMINAL",0,0,"]633;C JOBID USER PARTITION NODES CPUS ST SUBMIT_TIME START_TIME TIME TIME_LIMIT NODELIST(REASON)\r\n 34457 alfred.ngu interacti 1 8 R 2025-11-20T11:33:35 2025-11-20T11:33:35 14:19 2:00:00 hai003\r\n 34447 xiao.liu interacti 1 128 R 2025-11-20T10:45:36 2025-11-20T10:45:36 1:02:18 23:59:00 hai005\r\n 34438 franz.sram interacti 1 20 R 2025-11-19T19:14:17 2025-11-19T19:14:17 16:33:37 1-00:00:00 hai005\r\n 34409 xiao.liu interacti 1 128 R 2025-11-19T13:12:14 2025-11-19T13:12:17 22:35:37 23:59:00 hai006\r\n 34458 florian.mu standard 1 24 R 2025-11-20T11:47:34 2025-11-20T11:47:34 0:20 1:00:00 hai001\r\n 34449 xiao.liu standard 1 128 R 2025-11-20T10:55:02 2025-11-20T10:55:03 52:51 23:59:00 hai007\r\n 34448 xiao.liu standard 1 128 R 2025-11-20T10:55:00 2025-11-20T10:55:00 52:54 23:59:00 hai004\r\n 34442 nishant.ku standard 1 32 R 2025-11-19T23:33:30 2025-11-20T01:12:09 10:35:45 12:00:00 hai002\r\n]0;franz.srambical@hai-login2:~/crowd-pilot",,terminal_output +61,3088167,"TERMINAL",0,0,"scancel 34438",,terminal_command 
+62,3088176,"TERMINAL",0,0,"]633;C]0;franz.srambical@hai-login2:~/crowd-pilot",,terminal_output +63,3090860,"TERMINAL",0,0,"squeue",,terminal_command +64,3090863,"TERMINAL",0,0,"]633;C JOBID USER PARTITION NODES CPUS ST SUBMIT_TIME START_TIME TIME TIME_LIMIT NODELIST(REASON)\r\n 34438 franz.sram interacti 1 20 CG 2025-11-19T19:14:17 2025-11-19T19:14:17 16:33:43 1-00:00:00 hai005\r\n 34457 alfred.ngu interacti 1 8 R 2025-11-20T11:33:35 2025-11-20T11:33:35 14:27 2:00:00 hai003\r\n 34447 xiao.liu interacti 1 128 R 2025-11-20T10:45:36 2025-11-20T10:45:36 1:02:26 23:59:00 hai005\r\n 34409 xiao.liu interacti 1 128 R 2025-11-19T13:12:14 2025-11-19T13:12:17 22:35:45 23:59:00 hai006\r\n 34458 florian.mu standard 1 24 R 2025-11-20T11:47:34 2025-11-20T11:47:34 0:28 1:00:00 hai001\r\n 34449 xiao.liu standard 1 128 R 2025-11-20T10:55:02 2025-11-20T10:55:03 52:59 23:59:00 hai007\r\n 34448 xiao.liu standard 1 128 R 2025-11-20T10:55:00 2025-11-20T10:55:00 53:02 23:59:00 hai004\r\n 34442 nishant.ku standard 1 32 R 2025-11-19T23:33:30 2025-11-20T01:12:09 10:35:53 12:00:00 hai002\r\n]0;franz.srambical@hai-login2:~/crowd-pilot",,terminal_output +65,3102019,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_4k.sh",0,0,"#!/bin/bash\n\nset -uex\n\nOUTPUT_DIR=""/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_4k_tokens/""\nCSV_ROOT=""/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/""\n\n# rough estimate of characters per token\nTARGET_CHARS_PER_CONVERSATION=$((4096*3))\n\nuv run crowd_pilot/serialize_dataset_nemo_json.py --csv_root=$CSV_ROOT --output_dir=$OUTPUT_DIR --target_chars_per_conversation=$TARGET_CHARS_PER_CONVERSATION",shellscript,tab +66,3117987,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_4k copy.sh",0,0,"#!/bin/bash\n\nset -uex\n\nOUTPUT_DIR=""/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_4k_tokens/""\nCSV_ROOT=""/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/""\n\n# rough estimate of characters per token\nTARGET_CHARS_PER_CONVERSATION=$((4096*3))\n\nuv run crowd_pilot/serialize_dataset_nemo_json.py --csv_root=$CSV_ROOT --output_dir=$OUTPUT_DIR --target_chars_per_conversation=$TARGET_CHARS_PER_CONVERSATION",shellscript,tab +67,3125062,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",0,0,"#!/bin/bash\n\nset -uex\n\nOUTPUT_DIR=""/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_4k_tokens/""\nCSV_ROOT=""/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/""\n\n# rough estimate of characters per token\nTARGET_CHARS_PER_CONVERSATION=$((4096*3))\n\nuv run crowd_pilot/serialize_dataset_nemo_json.py --csv_root=$CSV_ROOT --output_dir=$OUTPUT_DIR --target_chars_per_conversation=$TARGET_CHARS_PER_CONVERSATION",shellscript,tab +68,3126292,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",425,0,"",shellscript,selection_mouse +69,3126299,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",424,0,"",shellscript,selection_command +70,3128655,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",266,0,"",shellscript,selection_command +71,3128744,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",264,0,"",shellscript,selection_command +72,3129349,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",224,0,"",shellscript,selection_command +73,3132994,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",0,0,"#!/usr/bin/env python3\n""""""\nCSV 
sessions -> JSONL (newline-delimited JSON) for NeMo 2.0 SFT function calling.\n\nEach session is converted to a multi-turn conversation where:\n- User role: terminal output (... content)\n- Assistant role: terminal commands (bash commands)\n""""""\n\nfrom __future__ import annotations\n\nimport argparse\nimport json\nimport os\nfrom pathlib import Path\nfrom typing import List, Tuple, cast\nimport random\n\nimport pandas as pd\n\nfrom serialization_utils import (\n SerializeConfig,\n session_to_nemo_conversation_chunks,\n _discover_local_sessions,\n)\n\n\ndef to_nemo_jsonl(cfg: SerializeConfig) -> None:\n """"""Convert CSV sessions to NeMo JSONL format.""""""\n os.makedirs(cfg.output_dir, exist_ok=True)\n\n required_cols = [""Sequence"", ""Time"", ""File"", ""RangeOffset"", ""RangeLength"", ""Text"", ""Language"", ""Type""]\n\n session_dataframes: List[Tuple[pd.DataFrame, str]] = []\n root = Path(cast(str, cfg.csv_root)).expanduser().resolve()\n csv_files = _discover_local_sessions(root)\n assert csv_files, f""No CSV files found under {root}""\n \n for csv_file in csv_files:\n df = pd.read_csv(csv_file)\n missing_local = [c for c in required_cols if c not in df.columns]\n assert not missing_local, f""Missing required CSV columns in {csv_file}: {missing_local}""\n session_dataframes.append((df, str(csv_file)))\n\n random.seed(42)\n random.shuffle(session_dataframes)\n \n total_sessions = len(session_dataframes)\n val_count = int(total_sessions * cfg.val_ratio)\n train_count = total_sessions - val_count\n\n train_conversations = []\n val_conversations = []\n \n chunk_turn_counts_all: List[int] = []\n chunk_turn_counts_kept: List[int] = []\n chunk_char_counts_all: List[int] = []\n chunk_char_counts_kept: List[int] = []\n skipped_short_sessions = 0\n sessions_kept = 0\n docs_written = 0\n\n for i, (session_df, session_path) in enumerate(session_dataframes):\n if cfg.max_docs and docs_written >= cfg.max_docs:\n break\n \n session_df = pd.DataFrame(session_df.copy())\n\n assert cfg.target_chars_per_conversation > 0, ""target_chars_per_conversation must be positive""\n assert cfg.target_chars_per_turn > 0, ""target_chars_per_turn must be positive""\n conversation_chunks = session_to_nemo_conversation_chunks(\n session_df,\n cfg.target_chars_per_conversation,\n max_chars_per_turn=cfg.target_chars_per_turn,\n )\n\n assert len(conversation_chunks) >= 1, ""At least one conversation chunk should be produced""\n\n # Per-chunk statistics (for reporting)\n per_chunk_turns = [len(chunk) for chunk in conversation_chunks]\n per_chunk_chars = [\n sum(len(turn.get(""value"", """")) for turn in chunk)\n for chunk in conversation_chunks\n ]\n chunk_turn_counts_all.extend(per_chunk_turns)\n chunk_char_counts_all.extend(per_chunk_chars)\n\n # Aggregate per-session turns for filtering\n total_turns = sum(per_chunk_turns)\n\n if total_turns < cfg.min_session_turns:\n print(f""[warning] Session {session_path} is too short ({total_turns} turns)"")\n skipped_short_sessions += 1\n continue\n \n chunk_turn_counts_kept.extend(per_chunk_turns)\n chunk_char_counts_kept.extend(per_chunk_chars)\n sessions_kept += 1\n\n for chunk in conversation_chunks:\n if cfg.max_docs and docs_written >= cfg.max_docs:\n break\n\n record = {\n ""mask"": ""User"",\n ""system"": ""You are a helpful assistant that can interact multiple times with a computer shell to solve programming tasks.\nYour response must contain exactly ONE bash code block with ONE command (or commands connected with && or ||).\n\nFormat your response as shown in 
.\n\n\n```bash\nyour_command_here\n```\n\n\nFailure to follow these rules will cause your response to be rejected."",\n ""conversations"": chunk,\n }\n\n if i < train_count:\n train_conversations.append(record)\n else:\n val_conversations.append(record)\n\n docs_written += 1\n\n if cfg.max_docs and docs_written >= cfg.max_docs:\n break\n\n train_path = Path(cfg.output_dir) / ""train.jsonl""\n with open(train_path, 'w', encoding='utf-8') as f:\n for record in train_conversations:\n f.write(json.dumps(record, ensure_ascii=False) + '\n')\n \n val_path = Path(cfg.output_dir) / ""val.jsonl""\n with open(val_path, 'w', encoding='utf-8') as f:\n for record in val_conversations:\n f.write(json.dumps(record, ensure_ascii=False) + '\n')\n\n def _compute_stats(values: List[int]) -> dict | None:\n if not values:\n return None\n count = len(values)\n total = sum(values)\n avg = total / count if count > 0 else 0.0\n sorted_vals = sorted(values)\n mid = count // 2\n if count % 2 == 1:\n median = float(sorted_vals[mid])\n else:\n median = 0.5 * (sorted_vals[mid - 1] + sorted_vals[mid])\n min_len = min(values)\n max_len = max(values)\n return {\n ""count"": count,\n ""total"": total,\n ""median"": median,\n ""avg"": avg,\n ""min"": min_len,\n ""max"": max_len,\n }\n\n def _print_turn_stats(label: str, values: List[int]) -> None:\n if not values:\n print(f""[debug] No {label} chunks for turn count stats."")\n return\n stats = _compute_stats(values)\n assert stats is not None\n print(\n f""[debug] {label.capitalize()} chunks turn stats: ""\n f""count={stats['count']}, median_turns={stats['median']:.1f}, avg_turns={stats['avg']:.1f}, ""\n f""min_turns={stats['min']}, max_turns={stats['max']}""\n )\n\n def _print_char_stats(label: str, values: List[int]) -> None:\n if not values:\n print(f""[debug] No {label} chunks for character stats."")\n return\n stats = _compute_stats(values)\n assert stats is not None\n print(\n f""[debug] {label.capitalize()} chunks char stats: ""\n f""count={stats['count']}, median_chars={stats['median']:.1f}, avg_chars={stats['avg']:.1f}, ""\n f""min_chars={stats['min']}, max_chars={stats['max']}""\n )\n\n _print_turn_stats(""all"", chunk_turn_counts_all)\n _print_turn_stats(""kept"", chunk_turn_counts_kept)\n _print_char_stats(""all"", chunk_char_counts_all)\n _print_char_stats(""kept"", chunk_char_counts_kept)\n\n print(f""\n[summary]"")\n print(f"" Total sessions processed: {total_sessions}"")\n print(f"" Sessions kept: {sessions_kept}"")\n print(f"" Skipped (too few turns): {skipped_short_sessions}"")\n print(f"" Train conversations: {len(train_conversations)}"")\n print(f"" Val conversations: {len(val_conversations)}"")\n print(f"" Output: {cfg.output_dir}/{{train,val}}.jsonl"")\n\n metadata = {\n ""config"": {\n ""csv_root"": cfg.csv_root,\n ""output_dir"": cfg.output_dir,\n ""min_session_turns"": cfg.min_session_turns,\n ""max_docs"": cfg.max_docs,\n ""val_ratio"": cfg.val_ratio,\n ""target_chars_per_conversation"": cfg.target_chars_per_conversation,\n ""target_chars_per_turn"": cfg.target_chars_per_turn,\n },\n ""counts"": {\n ""total_sessions"": total_sessions,\n ""sessions_kept"": sessions_kept,\n ""skipped_short_sessions"": skipped_short_sessions,\n ""train_conversations"": len(train_conversations),\n ""val_conversations"": len(val_conversations),\n ""docs_written"": docs_written,\n },\n ""chunk_turn_stats"": {\n ""all"": _compute_stats(chunk_turn_counts_all),\n ""kept"": _compute_stats(chunk_turn_counts_kept),\n },\n ""chunk_char_stats"": {\n ""all"": 
_compute_stats(chunk_char_counts_all),\n ""kept"": _compute_stats(chunk_char_counts_kept),\n },\n ""files"": {\n ""train_path"": str(train_path),\n ""val_path"": str(val_path),\n },\n }\n metadata_path = Path(cfg.output_dir) / ""metadata.json""\n with open(metadata_path, ""w"", encoding=""utf-8"") as mf:\n json.dump(metadata, mf, ensure_ascii=False, indent=2)\n print(f"" Metadata: {metadata_path}"")\n\n\ndef parse_args() -> SerializeConfig:\n p = argparse.ArgumentParser(\n description=""Serialize CSV sessions to JSONL for NeMo 2.0 SFT function calling""\n )\n p.add_argument(""--csv_root"", type=str, required=True, \n help=""Root directory containing per-session CSV files"")\n p.add_argument(""--output_dir"", type=str, required=True, \n help=""Output directory for JSONL files"")\n p.add_argument(""--min_session_turns"", type=int, default=10, \n help=""Minimum number of turns to keep a session"")\n p.add_argument(""--max_docs"", type=int, default=None, \n help=""Stop after writing this many unique docs"")\n p.add_argument(""--val_ratio"", type=float, default=0.10, \n help=""Fraction of sessions to route to validation [0,1)"")\n p.add_argument(\n ""--target_chars_per_conversation"",\n type=int,\n default=8192*16*3,\n help=""Target characters per conversation chunk"",\n )\n p.add_argument(""--target_chars_per_turn"", type=int, default=8192*3, help=""Target characters per turn"")\n args = p.parse_args()\n return SerializeConfig(\n output_dir=args.output_dir,\n target_chars_per_conversation=args.target_chars_per_conversation,\n target_chars_per_turn=args.target_chars_per_turn,\n min_session_turns=args.min_session_turns,\n max_docs=args.max_docs,\n csv_root=(args.csv_root if args.csv_root else None),\n val_ratio=args.val_ratio,\n )\n\n\ndef main() -> None:\n cfg = parse_args()\n to_nemo_jsonl(cfg)\n\n\nif __name__ == ""__main__"":\n main()\n\n",python,tab +74,3135158,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2122,0,"",python,selection_command +75,3138015,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2158,0,"",python,selection_command +76,3139115,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2157,0,"",python,selection_command +77,3139298,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2155,0,"",python,selection_command +78,3139633,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2154,0,"",python,selection_command +79,3140034,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2152,0,"",python,selection_command +80,3140497,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2122,0,"",python,selection_command +81,3142654,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2225,0,"",python,selection_command +82,3145830,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2253,0,"",python,selection_command +83,3146127,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2466,0,"",python,selection_command +84,3146319,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",7445,0,"",python,selection_command +85,3147271,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",7473,0,"",python,selection_command +86,3147463,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",9520,0,"",python,selection_command +87,3148109,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",9777,0,"",python,selection_command +88,3148388,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",9804,0,"",python,selection_command +89,3148701,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2225,0,"",python,selection_command 
+90,3149656,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2253,0,"",python,selection_command +91,3149802,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2466,0,"",python,selection_command +92,3149931,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",7445,0,"",python,selection_command +93,3150075,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",7473,0,"",python,selection_command +94,3150465,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",9520,0,"",python,selection_command +95,3175285,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",9777,0,"",python,selection_command +96,3175433,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",9804,0,"",python,selection_command +97,3175793,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2225,0,"",python,selection_command +98,3177251,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2122,0,"",python,selection_command +99,3177586,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2158,0,"",python,selection_command +100,3178063,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2400,0,"",python,selection_command +101,3178415,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",7365,0,"",python,selection_command +102,3179259,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",7401,0,"",python,selection_command +103,3179822,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",7365,0,"",python,selection_command +104,3179892,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2400,0,"",python,selection_command +105,3180954,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",7365,0,"",python,selection_command +106,3181116,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",7401,0,"",python,selection_command +107,3185208,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",9358,0,"",python,selection_command +108,3189927,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",9703,0,"",python,selection_command +109,3190052,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",9738,0,"",python,selection_command +110,3190960,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2122,0,"",python,selection_command +111,3191754,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2158,0,"",python,selection_command +112,3191912,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2400,0,"",python,selection_command +113,3192810,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2376,0,"",python,selection_command +114,3192913,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2309,0,"",python,selection_command +115,3193090,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2321,0,"",python,selection_command +116,3193228,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2323,0,"",python,selection_command +117,3193797,"crowd-pilot/crowd_pilot/serialization_utils.py",0,0,"#!/usr/bin/env python3\n""""""\nCommon utilities for dataset serialization scripts.\n""""""\n\nfrom __future__ import annotations\n\nfrom dataclasses import dataclass, field\nfrom pathlib import Path\nfrom typing import List, Optional, Tuple, Dict\n\nimport difflib\nimport re\nimport pandas as pd\nfrom datasets import Dataset, load_dataset\n\n\n_ANSI_CSI_RE = re.compile(r""\x1b\[[0-9;?]*[ -/]*[@-~]"")\n_ANSI_OSC_TERMINATED_RE = re.compile(r""\x1b\][\s\S]*?(?:\x07|\x1b\\)"")\n_ANSI_OSC_LINE_FALLBACK_RE = re.compile(r""\x1b\][^\n]*$"")\n_BRACKETED_PASTE_ENABLE = ""\x1b[?2004h""\n_BRACKETED_PASTE_DISABLE = ""\x1b[?2004l""\n_OSC_633 = ""\x1b]633;""\n_OSC_0 = 
""\x1b]0;""\n\n\n@dataclass\nclass SerializeConfig:\n output_dir: str\n target_chars_per_conversation: int\n target_chars_per_turn: int\n min_session_turns: int\n max_docs: Optional[int]\n csv_root: Optional[str]\n val_ratio: float\n\n\n@dataclass\nclass ChunkState:\n """"""\n Mutable state used while constructing conversation chunks.\n """"""\n chunks: List[List[Dict[str, str]]]\n max_chars_per_conversation: int\n max_chars_per_turn: int\n current_chunk: List[Dict[str, str]] = field(default_factory=list)\n current_chars: int = 0\n files_opened_in_chunk: set[str] = field(default_factory=set)\n\n def start_new_chunk(self) -> None:\n if self.current_chunk:\n self.chunks.append(self.current_chunk)\n self.current_chunk = []\n self.current_chars = 0\n self.files_opened_in_chunk.clear()\n\n def append_turn(self, turn: Dict[str, str]) -> None:\n value = turn.get(""value"", """")\n # Enforce a per-turn character budget to prevent pathological single\n # turns (e.g., massive file dumps) from creating multi-megabyte chunks.\n if len(value) > self.max_chars_per_turn:\n value = value[: self.max_chars_per_turn]\n turn[""value""] = value\n turn_len = len(value)\n if (\n self.current_chunk # only start a new chunk on non-empty current chunk\n and self.current_chars + turn_len > self.max_chars_per_conversation\n ):\n self.start_new_chunk()\n self.current_chunk.append(turn)\n self.current_chars += turn_len\n\n def maybe_capture_file_contents(\n self,\n file_path: str,\n content: str,\n ) -> None:\n """"""\n Capture the contents of the given file in the current chunk if it hasn't been opened yet.\n """"""\n if file_path in self.files_opened_in_chunk:\n return\n cmd = f""cat -n {file_path}""\n self.append_turn({\n ""from"": ""Assistant"",\n ""value"": _fenced_block(""bash"", _clean_text(cmd)),\n })\n output = _line_numbered_output(content)\n self.append_turn({\n ""from"": ""User"",\n ""value"": f""\n{output}\n"",\n })\n self.files_opened_in_chunk.add(file_path)\n\n\ndef _clean_text(text: str) -> str:\n # Normalize line endings and strip trailing spaces; preserve tabs/newlines.\n return text.replace(""\r\n"", ""\n"").replace(""\r"", ""\n"").rstrip()\n\n\ndef _fenced_block(language: Optional[str], content: str) -> str:\n lang = (language or """").lower()\n return f""```{lang}\n{content}\n```\n""\n\n\ndef _apply_change(content: str, offset: int, length: int, new_text: str) -> str:\n # Mirrors crowd_code_player.replay_file.apply_change\n base = str(content)\n text = str(new_text) if pd.notna(new_text) else """"\n text = text.replace(""\\n"", ""\n"").replace(""\\r"", ""\r"")\n if offset > len(base):\n base = base + ("" "" * (offset - len(base)))\n return base[:offset] + text + base[offset + length:]\n\n\ndef _apply_backspaces(text: str) -> str:\n out: List[str] = []\n for ch in text:\n if ch == ""\b"": # \x08\n if out:\n out.pop()\n else:\n out.append(ch)\n return """".join(out)\n\n\ndef _normalize_terminal_output(raw: str) -> str:\n """"""\n Normalize PTY/terminal output for training:\n - Apply backspaces (\x08)\n - Strip OSC (window title/shell integration) first, keeping BEL/ST terminators intact\n - Resolve carriage returns (\r) by keeping the last rewrite per line\n - Strip CSI (coloring etc.)\n - Finally drop any remaining BEL (\x07)\n """"""\n if not raw:\n return raw\n s = _apply_backspaces(raw)\n # Remove OSC sequences that are properly terminated (BEL or ST)\n s = _ANSI_OSC_TERMINATED_RE.sub("""", s)\n # Fallback: drop any unterminated OSC up to end-of-line only\n s = 
""\n"".join(_ANSI_OSC_LINE_FALLBACK_RE.sub("""", line) for line in s.split(""\n""))\n # Resolve carriage returns per line:\n # - If there are multiple rewrites, keep the last non-empty chunk\n # - If it's CRLF (ending with '\r' before '\n'), keep the content before '\r'\n resolved_lines: List[str] = []\n for seg in s.split(""\n""):\n parts = seg.split(""\r"")\n chosen = """"\n # pick last non-empty part if available; else last part\n for p in reversed(parts):\n if p != """":\n chosen = p\n break\n if chosen == """" and parts:\n chosen = parts[-1]\n resolved_lines.append(chosen)\n s = ""\n"".join(resolved_lines)\n # Strip ANSI escape sequences\n s = _ANSI_CSI_RE.sub("""", s)\n # Remove any remaining BEL beeps\n s = s.replace(""\x07"", """")\n return s\n\n\ndef _line_numbered_output(content: str, start_line: Optional[int] = None, end_line: Optional[int] = None) -> str:\n # FIXME (f.srambical): check whether this corresponds **exactly** to the output of cat -n {file_path} | sed -n '{vstart},{vend}p'\n lines = content.splitlines()\n total = len(lines)\n if total == 0:\n return """"\n s = 1 if start_line is None else max(1, min(start_line, total))\n e = total if end_line is None else max(1, min(end_line, total))\n assert e >= s, ""End line number cannot be less than start line number! Likely a bug in the line numbering computation.""\n buf: List[str] = []\n for idx in range(s, e + 1):\n buf.append(f""{idx:6}\t{lines[idx - 1]}"")\n return ""\n"".join(buf)\n\n\ndef _compute_viewport(total_lines: int, center_line: int, radius: int) -> Tuple[int, int]:\n if total_lines <= 0:\n return (1, 0)\n start = max(1, center_line - radius)\n end = min(total_lines, center_line + radius)\n assert end >= start, ""Viewport cannot have negative width! Likely a bug in the viewport computation.""\n return (start, end)\n\n\ndef _escape_single_quotes_for_sed(text: str) -> str:\n # Close quote, add an escaped single quote, reopen quote: '""'""'\n return text.replace(""'"", ""'\""'\""'"")\n\n\ndef _compute_changed_block_lines(\n before: str, after: str\n) -> Tuple[int, int, int, int, List[str]]:\n """"""\n Return 1-based start and end line numbers in 'before' that should be\n replaced, 1-based start and end line numbers in 'after' that contain\n the replacement, and the replacement lines from 'after'.\n\n For pure deletions, the replacement list may be empty.\n """"""\n before_lines = before.splitlines()\n after_lines = after.splitlines()\n sm = difflib.SequenceMatcher(a=before_lines, b=after_lines, autojunk=False)\n opcodes = [op for op in sm.get_opcodes() if op[0] != ""equal""]\n assert opcodes, ""Opcode list cannot be empty! 
Likely a bug in the diff computation.""\n\n first = opcodes[0]\n last = opcodes[-1]\n # i1/i2 refer to 'before' indices, j1/j2 to 'after'\n start_before = max(1, first[1] + 1)\n end_before = last[2] # no increment since we go from 'exclusive' to 'inclusive' indexing\n start_after = max(1, first[3] + 1)\n end_after = last[4]\n replacement_lines = after_lines[first[3] : last[4]]\n return (start_before, end_before, start_after, end_after, replacement_lines)\n\n\ndef session_to_nemo_conversation_chunks(\n df: pd.DataFrame,\n max_chars_per_conversation: int,\n max_chars_per_turn: int,\n viewport_radius: int = 10,\n normalize_terminal_output: bool = True,\n coalesce_radius: int = 5,\n) -> List[List[Dict[str, str]]]:\n """"""\n Convert a session DataFrame to one or more NeMo conversation chunks.\n\n - Chunks are created by approximately limiting the total characters\n across all `value` fields to `max_chars_per_conversation`.\n - When a new chunk starts (after the first), the first time a file is\n referenced in that chunk we re-log the full file contents with\n `cat -n ` and numbered output so that each chunk is self-contained.\n """"""\n file_states: Dict[str, str] = {}\n per_file_viewport: Dict[str, Optional[Tuple[int, int]]] = {}\n\n chunks: List[List[Dict[str, str]]] = []\n chunk_state = ChunkState(\n chunks=chunks,\n max_chars_per_conversation=max_chars_per_conversation,\n max_chars_per_turn=max_chars_per_turn,\n )\n\n terminal_output_buffer: List[str] = []\n pending_edits_before: Dict[str, Optional[str]] = {}\n pending_edit_regions: Dict[str, Optional[Tuple[int, int]]] = {}\n\n def _flush_terminal_output_buffer() -> None:\n if not terminal_output_buffer:\n return\n aggregated = """".join(terminal_output_buffer)\n out = aggregated\n if normalize_terminal_output:\n out = _normalize_terminal_output(out)\n cleaned = _clean_text(out)\n if cleaned.strip():\n chunk_state.append_turn({\n ""from"": ""User"",\n ""value"": f""\n{cleaned}\n"",\n })\n terminal_output_buffer.clear()\n\n def _flush_pending_edit_for_file(target_file: str) -> None:\n before_snapshot = pending_edits_before.get(target_file)\n if before_snapshot is None:\n return\n after_state = file_states.get(target_file, """")\n if before_snapshot.rstrip(""\n"") == after_state.rstrip(""\n""):\n pending_edits_before[target_file] = None\n pending_edit_regions[target_file] = None\n return\n (\n start_before,\n end_before,\n start_after,\n end_after,\n repl_lines,\n ) = _compute_changed_block_lines(before_snapshot, after_state)\n before_total_lines = len(before_snapshot.splitlines())\n if end_before < start_before:\n escaped_lines = [_escape_single_quotes_for_sed(line) for line in repl_lines]\n sed_payload = ""\n"".join(escaped_lines)\n if start_before <= max(1, before_total_lines):\n sed_cmd = f""sed -i '{start_before}i\\\n{sed_payload}' {target_file}""\n else:\n sed_cmd = f""sed -i '$a\\\n{sed_payload}' {target_file}""\n elif not repl_lines:\n sed_cmd = f""sed -i '{start_before},{end_before}d' {target_file}""\n else:\n escaped_lines = [_escape_single_quotes_for_sed(line) for line in repl_lines]\n sed_payload = ""\n"".join(escaped_lines)\n sed_cmd = f""sed -i '{start_before},{end_before}c\\\n{sed_payload}' {target_file}""\n total_lines = len(after_state.splitlines())\n center = (start_after + end_after) // 2\n vp = _compute_viewport(total_lines, center, viewport_radius)\n per_file_viewport[target_file] = vp\n vstart, vend = vp\n chunk_state.maybe_capture_file_contents(target_file, before_snapshot)\n chained_cmd = f""{sed_cmd} && cat -n 
{target_file} | sed -n '{vstart},{vend}p'""\n chunk_state.append_turn({\n ""from"": ""Assistant"",\n ""value"": _fenced_block(""bash"", _clean_text(chained_cmd)),\n })\n viewport_output = _line_numbered_output(after_state, vstart, vend)\n chunk_state.append_turn({\n ""from"": ""User"",\n ""value"": f""\n{viewport_output}\n"",\n })\n pending_edits_before[target_file] = None\n pending_edit_regions[target_file] = None\n\n def _flush_all_pending_edits() -> None:\n for fname in list(pending_edits_before.keys()):\n _flush_pending_edit_for_file(fname)\n\n for i in range(len(df)):\n row = df.iloc[i]\n file_path: str = row[""File""]\n event_type = row[""Type""]\n\n match event_type:\n case ""tab"":\n _flush_all_pending_edits()\n _flush_terminal_output_buffer()\n text = row[""Text""]\n if pd.notna(text):\n content = str(text).replace(""\\n"", ""\n"").replace(""\\r"", ""\r"")\n file_states[file_path] = content\n cmd = f""cat -n {file_path}""\n chunk_state.append_turn({\n ""from"": ""Assistant"",\n ""value"": _fenced_block(""bash"", _clean_text(cmd)),\n })\n output = _line_numbered_output(content)\n chunk_state.append_turn({\n ""from"": ""User"",\n ""value"": f""\n{output}\n"",\n })\n chunk_state.files_opened_in_chunk.add(file_path)\n else:\n # File switch without content snapshot: show current viewport only\n content = file_states.get(file_path, """")\n total_lines = len(content.splitlines())\n vp = per_file_viewport.get(file_path)\n if not vp or vp[1] == 0:\n vp = _compute_viewport(total_lines, 1, viewport_radius)\n per_file_viewport[file_path] = vp\n if vp:\n vstart, vend = vp\n chunk_state.maybe_capture_file_contents(file_path, content)\n cmd = f""cat -n {file_path} | sed -n '{vstart},{vend}p'""\n chunk_state.append_turn({\n ""from"": ""Assistant"",\n ""value"": _fenced_block(""bash"", _clean_text(cmd)),\n })\n viewport_output = _line_numbered_output(content, vstart, vend)\n chunk_state.append_turn({\n ""from"": ""User"",\n ""value"": f""\n{viewport_output}\n"",\n })\n\n case ""content"":\n _flush_terminal_output_buffer()\n offset = int(row[""RangeOffset""])\n length = int(row[""RangeLength""])\n new_text = row[""Text""]\n before = file_states.get(file_path, """")\n # Approximate current edit region in line space\n new_text_str = str(new_text) if pd.notna(new_text) else """"\n start_line_current = before[:offset].count(""\n"") + 1\n deleted_chunk = before[offset:offset + length]\n lines_added = new_text_str.count(""\n"")\n lines_deleted = deleted_chunk.count(""\n"")\n region_start = start_line_current\n region_end = start_line_current + max(lines_added, lines_deleted, 0)\n # Flush pending edits if this edit is far from the pending region\n current_region = pending_edit_regions.get(file_path)\n if current_region is not None:\n rstart, rend = current_region\n if region_start < (rstart - coalesce_radius) or region_start > (rend + coalesce_radius):\n _flush_pending_edit_for_file(file_path)\n current_region = None\n after = _apply_change(before, offset, length, new_text)\n if pending_edits_before.get(file_path) is None:\n pending_edits_before[file_path] = before\n # Update/initialize region union\n if current_region is None:\n pending_edit_regions[file_path] = (region_start, max(region_start, region_end))\n else:\n rstart, rend = current_region\n pending_edit_regions[file_path] = (min(rstart, region_start), max(rend, region_end))\n file_states[file_path] = after\n\n case ""selection_command"" | ""selection_mouse"" | ""selection_keyboard"":\n # During an edit burst (pending edits), suppress flush and 
viewport emissions\n if pending_edits_before.get(file_path) is None:\n _flush_terminal_output_buffer()\n else:\n # Skip emitting viewport while edits are pending to avoid per-keystroke sed/cat spam\n continue\n offset = int(row[""RangeOffset""])\n content = file_states.get(file_path, """")\n total_lines = len(content.splitlines())\n target_line = content[:offset].count(""\n"") + 1\n vp = per_file_viewport.get(file_path)\n should_emit = False\n if not vp or vp[1] == 0:\n vp = _compute_viewport(total_lines, target_line, viewport_radius)\n per_file_viewport[file_path] = vp\n should_emit = True\n else:\n vstart, vend = vp\n if target_line < vstart or target_line > vend:\n vp = _compute_viewport(total_lines, target_line, viewport_radius)\n per_file_viewport[file_path] = vp\n should_emit = True\n if should_emit and vp:\n vstart, vend = vp\n chunk_state.maybe_capture_file_contents(file_path, content)\n cmd = f""cat -n {file_path} | sed -n '{vstart},{vend}p'""\n chunk_state.append_turn({\n ""from"": ""Assistant"",\n ""value"": _fenced_block(""bash"", _clean_text(cmd)),\n })\n viewport_output = _line_numbered_output(content, vstart, vend)\n chunk_state.append_turn({\n ""from"": ""User"",\n ""value"": f""\n{viewport_output}\n"",\n })\n\n case ""terminal_command"":\n _flush_all_pending_edits()\n _flush_terminal_output_buffer()\n command = row[""Text""]\n command_str = str(command).replace(""\\n"", ""\n"").replace(""\\r"", ""\r"")\n chunk_state.append_turn({\n ""from"": ""Assistant"",\n ""value"": _fenced_block(""bash"", _clean_text(command_str)),\n })\n\n case ""terminal_output"":\n output = row[""Text""]\n raw_output = str(output).replace(""\\n"", ""\n"").replace(""\\r"", ""\r"")\n terminal_output_buffer.append(raw_output)\n\n case ""terminal_focus"":\n _flush_all_pending_edits()\n _flush_terminal_output_buffer()\n # No-op for bash transcript; focus changes don't emit commands/output\n pass\n\n case ""git_branch_checkout"":\n _flush_all_pending_edits()\n _flush_terminal_output_buffer()\n branch_info = row[""Text""]\n branch_str = str(branch_info).replace(""\\n"", ""\n"").replace(""\\r"", ""\r"")\n cleaned = _clean_text(branch_str)\n m = re.search(r""to '([^']+)'"", cleaned)\n if not m:\n raise ValueError(f""Could not extract branch name from git checkout message: {cleaned}"")\n branch_name = m.group(1).strip()\n # Safe-quote branch if it contains special characters\n if re.search(r""[^A-Za-z0-9._/\\-]"", branch_name):\n branch_name = ""'"" + branch_name.replace(""'"", ""'\""'\""'"") + ""'""\n cmd = f""git checkout {branch_name}""\n chunk_state.append_turn({\n ""from"": ""Assistant"",\n ""value"": _fenced_block(""bash"", _clean_text(cmd)),\n })\n\n case _:\n raise ValueError(f""Unknown event type: {event_type}"")\n\n _flush_all_pending_edits()\n _flush_terminal_output_buffer()\n if chunk_state.current_chunk:\n chunks.append(chunk_state.current_chunk)\n return chunks\n\n\n\ndef load_hf_csv(hf_path: str, split: str) -> Dataset:\n loaded = load_dataset(hf_path, split=split)\n\n assert isinstance(loaded, Dataset), ""Expected a Dataset from load_dataset""\n return loaded\n\n\ndef _discover_local_sessions(root: Path) -> List[Path]:\n # Recursively find all CSV files\n paths: List[Path] = []\n for p in root.rglob(""*.csv""):\n if p.is_file():\n paths.append(p)\n paths.sort()\n return paths",python,tab +118,3193798,"crowd-pilot/crowd_pilot/serialization_utils.py",7738,0,"",python,selection_command +119,3194571,"crowd-pilot/crowd_pilot/serialization_utils.py",7779,0,"",python,selection_command 
+120,3194672,"crowd-pilot/crowd_pilot/serialization_utils.py",7801,0,"",python,selection_command +121,3197414,"crowd-pilot/crowd_pilot/serialization_utils.py",8191,0,"",python,selection_command +122,3197843,"crowd-pilot/crowd_pilot/serialization_utils.py",8659,0,"",python,selection_command +123,3198225,"crowd-pilot/crowd_pilot/serialization_utils.py",8686,0,"",python,selection_command +124,3199560,"crowd-pilot/crowd_pilot/serialization_utils.py",1015,0,"",python,selection_command +125,3202068,"crowd-pilot/crowd_pilot/serialization_utils.py",2034,0,"",python,selection_command +126,3210437,"crowd-pilot/crowd_pilot/serialization_utils.py",7801,0,"",python,selection_command +127,3216947,"crowd-pilot/crowd_pilot/serialization_utils.py",8191,0,"",python,selection_command +128,3217086,"crowd-pilot/crowd_pilot/serialization_utils.py",8659,0,"",python,selection_command +129,3220238,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",0,0,"",python,tab +130,3222214,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",0,0,"",shellscript,tab +131,3222892,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",266,0,"",shellscript,selection_command +132,3223814,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",224,0,"",shellscript,selection_command +133,3224401,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",265,0,"",shellscript,selection_command +134,3228402,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",264,1,"",shellscript,content +135,3228529,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",263,1,"",shellscript,content +136,3228663,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",262,1,"",shellscript,content +137,3228810,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",261,1,"",shellscript,content +138,3228934,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",260,1,"",shellscript,content +139,3229063,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",259,1,"",shellscript,content +140,3229192,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",258,1,"",shellscript,content +141,3229325,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",257,1,"",shellscript,content +142,3229504,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",256,1,"",shellscript,content +143,3229678,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",255,1,"",shellscript,content +144,3229839,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",254,1,"",shellscript,content +145,3230980,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",254,0,"9",shellscript,content +146,3230981,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",255,0,"",shellscript,selection_keyboard +147,3231818,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",255,0,"9",shellscript,content +148,3231818,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",256,0,"",shellscript,selection_keyboard +149,3232292,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",256,0,"9",shellscript,content +150,3232292,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",257,0,"",shellscript,selection_keyboard 
+151,3232532,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",257,0,"9",shellscript,content +152,3232533,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",258,0,"",shellscript,selection_keyboard +153,3232774,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",258,0,"9",shellscript,content +154,3232775,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",259,0,"",shellscript,selection_keyboard +155,3233040,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",259,0,"9",shellscript,content +156,3233040,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",260,0,"",shellscript,selection_keyboard +157,3233440,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",260,0,"9",shellscript,content +158,3233440,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",261,0,"",shellscript,selection_keyboard +159,3233654,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",261,0,"9",shellscript,content +160,3233655,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",262,0,"",shellscript,selection_keyboard +161,3233843,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",262,0,"9",shellscript,content +162,3233843,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",263,0,"",shellscript,selection_keyboard +163,3234043,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",263,0,"9",shellscript,content +164,3234043,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",264,0,"",shellscript,selection_keyboard +165,3234688,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",263,0,"",shellscript,selection_command +166,3234869,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",254,0,"",shellscript,selection_command +167,3236877,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",255,0,"",shellscript,selection_command +168,3236993,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",256,0,"",shellscript,selection_command +169,3237447,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",264,0,"",shellscript,selection_command +170,3237854,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",264,0,"9",shellscript,content +171,3237855,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",265,0,"",shellscript,selection_keyboard +172,3237937,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",265,0,"9",shellscript,content +173,3237938,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",266,0,"",shellscript,selection_keyboard +174,3238099,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",266,0,"9",shellscript,content +175,3238100,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",267,0,"",shellscript,selection_keyboard +176,3238430,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",266,0,"",shellscript,selection_command +177,3238851,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",265,0,"",shellscript,selection_command 
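Editor's note: the keystrokes above grow `TARGET_CHARS_PER_CONVERSATION` to an effectively unlimited value, which disables chunk splitting in `ChunkState.append_turn` (a new chunk starts only when a non-empty chunk would exceed the budget). A minimal sketch of that budget check, with illustrative turn sizes:

```python
# Sketch of the conversation-level character budget from ChunkState.append_turn.
turns = [{"value": "x" * 60}, {"value": "y" * 60}, {"value": "z" * 60}]

def chunk(turns, max_chars):
    chunks, cur, used = [], [], 0
    for t in turns:
        n = len(t["value"])
        if cur and used + n > max_chars:  # only split a non-empty chunk
            chunks.append(cur)
            cur, used = [], 0
        cur.append(t)
        used += n
    if cur:
        chunks.append(cur)
    return chunks

print(len(chunk(turns, 100)))                # 3 chunks under a tight budget
print(len(chunk(turns, 9_999_999_999_999)))  # 1 chunk with the huge budget
```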
+178,3239142,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",264,0,"",shellscript,selection_command +179,3239303,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",263,0,"",shellscript,selection_command +180,3239449,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",262,0,"",shellscript,selection_command +181,3239598,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",254,0,"",shellscript,selection_command +182,3240488,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",224,43,"TARGET_CHARS_PER_CONVERSATION=9999999999999",shellscript,selection_command +183,3257488,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",385,0,"",shellscript,selection_mouse +184,3306472,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",257,0,"",shellscript,selection_mouse +185,3307272,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",268,0,"",shellscript,selection_command +186,3308304,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",257,0,"",shellscript,selection_command +187,3310335,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",224,43,"TARGET_CHARS_PER_CONVERSATION=99999999999999999",shellscript,content +188,3314505,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",270,0,"",shellscript,selection_command +189,3327754,"TERMINAL",0,0,"deactivate",,terminal_command +190,3338425,"TERMINAL",0,0,"source /home/franz.srambical/crowd-pilot/.venv/bin/activate",,terminal_command +191,3344511,"TERMINAL",0,0,"deactivate",,terminal_command +192,3349258,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",222,0,"",shellscript,selection_command +193,3349322,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",182,0,"",shellscript,selection_command +194,3349681,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",158,0,"",shellscript,selection_command +195,3349862,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",69,0,"",shellscript,selection_command +196,3351350,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",109,0,"full",shellscript,content +197,3351350,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",100,9,"",shellscript,content +198,3356332,"TERMINAL",0,0,"bash /home/franz.srambical/slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",,terminal_command +199,3356384,"TERMINAL",0,0,"]633;C+ OUTPUT_DIR=/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_full/\r\n+ CSV_ROOT=/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/\r\n+ TARGET_CHARS_PER_CONVERSATION=99999999999999999\r\n+ uv run crowd_pilot/serialize_dataset_nemo_json.py --csv_root=/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/ --output_dir=/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_full/ --target_chars_per_conversation=99999999999999999\r\n",,terminal_output +200,3364562,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-f1a23455-555b-44b7-b7f2-5fb8550d75021753722279799-2025_07_28-19.04.51.762/source.csv is too short (4 turns)\r\n",,terminal_output +201,3364733,"TERMINAL",0,0,"[warning] Session 
/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-f0382786-979c-4a6d-8e9b-f5977f18eb4f1753726151187-2025_08_02-06.58.58.573/source.csv is too short (4 turns)\r\n",,terminal_output +202,3365684,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-1ae480a2-bc7d-4b65-b456-3679a88992ae1752076515942-2025_07_09-17.56.33.471/source.csv is too short (6 turns)\r\n",,terminal_output +203,3366264,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-f2478f34-0247-46ae-b2bf-367520171d271754324711637-2025_08_04-18.25.17.542/source.csv is too short (6 turns)\r\n",,terminal_output +204,3366681,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-6614bc95-283e-4f09-b0e7-458aba09077f1754428379797-2025_08_05-23.14.08.79/source.csv is too short (8 turns)\r\n",,terminal_output +205,3369011,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-d413e23e-d7da-4c64-9e15-0b0c0e6031031751383188198-2025_07_01-17.19.54.522/source.csv is too short (8 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-faba6583-b2c9-4b94-9ba6-9f240428520a1750722089894-2025_06_23-23.49.28.299/source.csv is too short (4 turns)\r\n",,terminal_output +206,3369797,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-5342e4d6-3c20-40cb-9bcb-64bf1931df6e1753973941916-2025_07_31-16.59.20.943/source.csv is too short (8 turns)\r\n",,terminal_output +207,3369880,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-527e6f79-68be-4672-b584-2b15b1cf78281754060417734-2025_08_01-17.00.27.872/source.csv is too short (8 turns)\r\n",,terminal_output +208,3370087,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-cb4b3d1a-86b1-4ac1-90fd-85b7844bd05e1753184360536-2025_07_22-13.39.45.214/source.csv is too short (2 turns)\r\n",,terminal_output +209,3370515,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-e59497df-c03b-4c61-b74a-a1b1aa85d6971751043196509-2025_06_27-18.53.25.651/source.csv is too short (4 turns)\r\n",,terminal_output +210,3370920,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-6829bcbf-f7fb-4481-92ea-521e9af7eabb1754058671446-2025_08_01-16.31.17.606/source.csv is too short (6 turns)\r\n",,terminal_output +211,3371439,"TERMINAL",0,0,"[warning] Session 
/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-4719c5f9-1b15-4792-8afd-690761108bda1751617825355-2025_07_04-10.31.22.581/source.csv is too short (9 turns)\r\n",,terminal_output +212,3372405,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-8e5967ed-f938-46b6-90b8-104deb249c3c1753178239652-2025_07_22-11.57.53.728/source.csv is too short (5 turns)\r\n",,terminal_output +213,3372810,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/69a563db57051868fc3ecdda3a43f162385be48f5447fe691a10177ee4dc3a0e/crowd-code-89e3ea93-acec-4320-9c1b-eafc7c47155f1750747464127-2025_06_24-12.16.44.223/source.csv is too short (4 turns)\r\n",,terminal_output +214,3373826,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-268ba756-633d-460c-9a43-1e38b034568e1754428388262-2025_08_05-23.13.35.680/source.csv is too short (4 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-f514f25a-b687-4125-bb3e-747ec98309d21750930471988-2025_06_26-11.34.36.685/source.csv is too short (4 turns)\r\n",,terminal_output +215,3375636,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-2cce3a90-32a5-4d8b-8cb0-10445a2ee7a71754054463184-2025_08_01-15.21.32.127/source.csv is too short (5 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-4719c5f9-1b15-4792-8afd-690761108bda1751617825355-2025_07_04-10.31.22.581/source.csv is too short (9 turns)\r\n",,terminal_output +216,3376489,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-9995c396-b28a-4817-9971-5d6e293ae32a1753180233918-2025_07_22-12.30.43.584/source.csv is too short (2 turns)\r\n",,terminal_output +217,3376865,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/69a563db57051868fc3ecdda3a43f162385be48f5447fe691a10177ee4dc3a0e/crowd-code-03e2d1f0-34f2-48bc-9e6a-99708362c3301750977820647-2025_06_27-00.43.44.850/source.csv is too short (4 turns)\r\n",,terminal_output +218,3377377,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/4de8d861ed2563988d5f1871647ebc5fe70861b32d24a4b32f9363518653a328/crowd-code-d413e23e-d7da-4c64-9e15-0b0c0e6031031751383188198-2025_07_01-17.19.54.522/source.csv is too short (8 turns)\r\n",,terminal_output +219,3377449,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/05d9d5da933137c5402a176a469b618685c7e9142aa8972616ca5cdf0f6e53d1/crowd-code-9ff54a43-2a59-41a8-96bc-f7e46d5244651750887279734-2025_06_25-23.36.32.560/source.csv is too short (4 turns)\r\n",,terminal_output +220,3378914,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-f0382786-979c-4a6d-8e9b-f5977f18eb4f1753726151187-2025_08_02-06.58.58.573/source.csv is too 
short (4 turns)\r\n",,terminal_output +221,3379590,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/4de8d861ed2563988d5f1871647ebc5fe70861b32d24a4b32f9363518653a328/crowd-code-faba6583-b2c9-4b94-9ba6-9f240428520a1750722089894-2025_06_23-23.49.28.299/source.csv is too short (4 turns)\r\n",,terminal_output +222,3380303,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/69a563db57051868fc3ecdda3a43f162385be48f5447fe691a10177ee4dc3a0e/crowd-code-eb4593ba-8717-4311-aac2-0669058b8e141750152994551-2025_06_17-11.36.51.515/source.csv is too short (8 turns)\r\n",,terminal_output +223,3380813,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-48fbb415-6db9-4d35-b548-561e828791bf1751383187013-2025_07_01-17.19.57.60/source.csv is too short (8 turns)\r\n",,terminal_output +224,3380995,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-a775c5ce-801a-4b55-897a-6c0b6f3448081754127102402-2025_08_02-11.31.50.147/source.csv is too short (6 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-311b67df-2f46-4805-a16c-07606ec1fc001751032376516-2025_06_27-15.53.06.902/source.csv is too short (4 turns)\r\n",,terminal_output +225,3384593,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/4de8d861ed2563988d5f1871647ebc5fe70861b32d24a4b32f9363518653a328/crowd-code-48fbb415-6db9-4d35-b548-561e828791bf1751383187013-2025_07_01-17.19.57.60/source.csv is too short (8 turns)\r\n",,terminal_output +226,3385115,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/05d9d5da933137c5402a176a469b618685c7e9142aa8972616ca5cdf0f6e53d1/crowd-code-aa8be5f9-c447-4faf-b9c6-7142909b3c591750719092446-2025_06_24-00.51.37.15/source.csv is too short (9 turns)\r\n",,terminal_output +227,3385660,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-280dfe9f-0f96-4f1d-a304-44e1237157a81754289117685-2025_08_04-08.32.51.461/source.csv is too short (9 turns)\r\n",,terminal_output +228,3385848,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-623c548f-e16f-46a4-9ee1-6577a82e63e51754054052755-2025_08_01-15.14.20.520/source.csv is too short (8 turns)\r\n",,terminal_output +229,3386140,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-afb08496-b4ce-4efc-aec5-cc21ff6731861752228993278-2025_07_11-12.16.54.20/source.csv is too short (6 turns)\r\n",,terminal_output +230,3387411,"TERMINAL",0,0,"[debug] All chunks turn stats: count=349, median_turns=127.0, avg_turns=331.6, min_turns=2, max_turns=3553\r\n[debug] Kept chunks turn stats: count=315, median_turns=173.0, avg_turns=366.8, min_turns=10, max_turns=3553\r\n[debug] All chunks char stats: count=349, median_chars=119609.0, avg_chars=301732.0, min_chars=104, max_chars=2827282\r\n[debug] Kept chunks char stats: count=315, median_chars=151798.0, avg_chars=332972.1, 
min_chars=382, max_chars=2827282\r\n\r\n[summary]\r\n Total sessions processed: 349\r\n Sessions kept: 315\r\n Skipped (too few turns): 34\r\n Train conversations: 285\r\n Val conversations: 30\r\n Output: /fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_full//{train,val}.jsonl\r\n Metadata: /fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_full/metadata.json\r\n",,terminal_output +231,3387806,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/crowd-pilot",,terminal_output +232,3657831,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",153,0,"",shellscript,selection_command +233,3657901,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",177,0,"",shellscript,selection_command +234,3658043,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",217,0,"",shellscript,selection_command +235,3658174,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",265,0,"",shellscript,selection_command +236,3658295,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",267,0,"",shellscript,selection_command +237,3660240,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",267,0," ",shellscript,content +238,3660240,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",268,0,"",shellscript,selection_keyboard +239,3661175,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",267,1,"",shellscript,content +240,3661891,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",268,0,"",shellscript,selection_command +241,3662234,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",426,0,"",shellscript,selection_command +242,3662447,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",425,1,"",shellscript,content +243,3662617,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",425,0," ",shellscript,content +244,3662617,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",426,0,"",shellscript,selection_keyboard +245,3663106,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",425,1,"",shellscript,content +246,3663477,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",425,0,"N",shellscript,content +247,3663477,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",426,0,"",shellscript,selection_keyboard +248,3663677,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",426,0," ",shellscript,content +249,3663677,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",427,0,"",shellscript,selection_keyboard +250,3663902,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",427,0,"-",shellscript,content +251,3663902,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",428,0,"",shellscript,selection_keyboard +252,3663968,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",428,0,"-",shellscript,content +253,3663968,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",429,0,"",shellscript,selection_keyboard +254,3666475,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",429,0,"target_chars_per_turn=99999999999999999",shellscript,content 
+255,3666762,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",467,0,"",shellscript,selection_command +256,3667199,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",451,0,"",shellscript,selection_command +257,3667762,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",451,17,"",shellscript,content +258,3668187,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",451,0,"5",shellscript,content +259,3668187,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",452,0,"",shellscript,selection_keyboard +260,3668192,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",452,0,"0",shellscript,content +261,3668192,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",453,0,"",shellscript,selection_keyboard +262,3668649,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",452,0,"",shellscript,selection_command +263,3672973,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",0,0,"",python,tab +264,3677800,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2236,0,"",python,selection_command +265,3680523,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2253,0,"",python,selection_command +266,3681210,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",2466,0,"",python,selection_command +267,3682237,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",0,0,"",shellscript,tab +268,3684647,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",0,0,"",python,tab +269,3685221,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",7445,0,"",python,selection_command +270,3685840,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",7473,0,"",python,selection_command +271,3685984,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",9520,0,"",python,selection_command +272,3690138,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",0,0,"",shellscript,tab +273,3691257,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",451,0,"",shellscript,selection_command +274,3691390,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",450,0,"",shellscript,selection_command +275,3691563,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",429,0,"",shellscript,selection_command +276,3691719,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",427,0,"",shellscript,selection_command +277,3692759,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",426,0,"",shellscript,selection_command +278,3693197,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",426,27,"",shellscript,content +279,3693207,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",425,0,"",shellscript,selection_command +280,3694225,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",0,0,"",python,tab +281,3697304,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",0,0,"",python,selection_command +282,3698021,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",0,0,"",shellscript,tab +283,3699402,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",0,0,"",python,tab +284,3699668,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",10119,0,"",python,selection_command +285,3700725,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",8509,0,"",python,selection_keyboard 
+286,3702112,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",8510,0,"",python,selection_command +287,3702362,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",8547,0,"",python,selection_command +288,3702393,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",8580,0,"",python,selection_command +289,3702425,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",8668,0,"",python,selection_command +290,3702453,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",8674,0,"",python,selection_command +291,3702488,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",8733,0,"",python,selection_command +292,3709624,"crowd-pilot/crowd_pilot/serialization_utils.py",0,0,"",python,tab +293,3714904,"crowd-pilot/crowd_pilot/serialization_utils.py",7890,0,"",python,selection_command +294,3721969,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",0,0,"",python,tab +295,3722133,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",8989,0,"",python,selection_command +296,3722490,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",9245,0,"",python,selection_command +297,3723140,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",8989,0,"",python,selection_command +298,3723235,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",9245,0,"",python,selection_command +299,3723754,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",8989,0,"",python,selection_command +300,3726760,"crowd-pilot/crowd_pilot/serialization_utils.py",0,0,"",python,tab +301,3727864,"crowd-pilot/crowd_pilot/serialize_dataset_nemo_json.py",0,0,"",python,tab +302,3728544,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",0,0,"",shellscript,tab +303,3729156,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",426,0,"",shellscript,selection_command +304,3729731,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",426,0," ",shellscript,content +305,3729732,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",427,0,"",shellscript,selection_keyboard +306,3729962,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",427,0,"-",shellscript,content +307,3729962,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",428,0,"",shellscript,selection_keyboard +308,3730040,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",428,0,"-",shellscript,content +309,3730040,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",429,0,"",shellscript,selection_keyboard +310,3730425,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",429,0,"m",shellscript,content +311,3730425,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",430,0,"",shellscript,selection_keyboard +312,3730560,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",430,0,"i",shellscript,content +313,3730560,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",431,0,"",shellscript,selection_keyboard +314,3730635,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",431,0,"n",shellscript,content +315,3730635,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",432,0,"",shellscript,selection_keyboard +316,3731576,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",432,0,"_session_turns=10",shellscript,content 
+317,3731785,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",448,0,"",shellscript,selection_command +318,3732178,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",447,0,"",shellscript,selection_command +319,3733223,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",447,1,"5",shellscript,content +320,3737118,"TERMINAL",0,0,"bash /home/franz.srambical/slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",,terminal_command +321,3737171,"TERMINAL",0,0,"]633;C+ OUTPUT_DIR=/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_full/\r\n+ CSV_ROOT=/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/\r\n+ TARGET_CHARS_PER_CONVERSATION=99999999999999999\r\n+ uv run crowd_pilot/serialize_dataset_nemo_json.py --csv_root=/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/ --output_dir=/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_full/ --target_chars_per_conversation=99999999999999999 --min_session_turns=50\r\n",,terminal_output +322,3743550,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-036b3473-9f9d-44a0-a906-2567459f706c1754119162409-2025_08_02-09.19.29.910/source.csv is too short (39 turns)\r\n",,terminal_output +323,3743812,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-08d1caae-8fb5-40b9-88ed-5072c2f48ca81754110755341-2025_08_02-06.59.28.742/source.csv is too short (12 turns)\r\n",,terminal_output +324,3744076,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-207fe3e3-7fd2-432d-a410-a7a943195e5f1753557295596-2025_07_26-21.15.03.812/source.csv is too short (24 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-264a0e70-8280-4ed9-a47c-e76bfae594cd1754128841382-2025_08_02-12.01.32.618/source.csv is too short (10 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-e88943ac-329c-42c9-ab5c-3679434104c51751028592580-2025_06_27-14.50.02.815/source.csv is too short (12 turns)\r\n",,terminal_output +325,3744336,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-fe3e4a6d-15dd-460a-96b3-1f7a60db202a1753178306285-2025_07_22-12.23.53.192/source.csv is too short (21 turns)\r\n",,terminal_output +326,3744569,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-f1a23455-555b-44b7-b7f2-5fb8550d75021753722279799-2025_07_28-19.04.51.762/source.csv is too short (4 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-e7d20f74-415c-47d0-ad95-3f6da31696d51753194904459-2025_07_22-16.35.52.74/source.csv is too short (20 turns)\r\n",,terminal_output +327,3744722,"TERMINAL",0,0,"[warning] Session 
/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-f0382786-979c-4a6d-8e9b-f5977f18eb4f1753726151187-2025_08_02-06.58.58.573/source.csv is too short (4 turns)\r\n",,terminal_output +328,3745678,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-1ae480a2-bc7d-4b65-b456-3679a88992ae1752076515942-2025_07_09-17.56.33.471/source.csv is too short (6 turns)\r\n",,terminal_output +329,3746266,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-f2478f34-0247-46ae-b2bf-367520171d271754324711637-2025_08_04-18.25.17.542/source.csv is too short (6 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-803c0497-2cfb-4591-a2cb-bac49e1c774c1751564777714-2025_07_03-19.47.11.372/source.csv is too short (21 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-68bb0852-886e-4fc0-9144-b85c1ade71281752827220294-2025_07_18-10.27.35.446/source.csv is too short (12 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-28f4aa5c-0534-40eb-ae05-51501d68e4871752860706222-2025_07_18-19.45.48.539/source.csv is too short (42 turns)\r\n",,terminal_output +330,3746615,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-372ef62d-2075-43ff-ac1a-e2025fd873c41751612450082-2025_07_04-09.01.47.125/source.csv is too short (22 turns)\r\n",,terminal_output +331,3746675,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-6614bc95-283e-4f09-b0e7-458aba09077f1754428379797-2025_08_05-23.14.08.79/source.csv is too short (8 turns)\r\n",,terminal_output +332,3748035,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-b69885bf-32d4-47c3-9c98-056c07697d2d1754060276680-2025_08_01-16.58.39.604/source.csv is too short (22 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/69a563db57051868fc3ecdda3a43f162385be48f5447fe691a10177ee4dc3a0e/crowd-code-bc8678ed-7352-41d3-8ab3-bf70f7958a0b1750745114983-2025_06_24-08.05.29.186/source.csv is too short (20 turns)\r\n",,terminal_output +333,3748183,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/4de8d861ed2563988d5f1871647ebc5fe70861b32d24a4b32f9363518653a328/crowd-code-978ed4eb-d9c9-4380-b981-e501087459181750623968304-2025_06_22-13.26.11.394/source.csv is too short (16 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-97725341-21b1-49e4-a29b-2639a6d35d101753197054575-2025_07_22-17.11.28.789/source.csv is too short (23 turns)\r\n",,terminal_output +334,3748982,"TERMINAL",0,0,"[warning] Session 
/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-d413e23e-d7da-4c64-9e15-0b0c0e6031031751383188198-2025_07_01-17.19.54.522/source.csv is too short (8 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-55c207a6-ee6d-464d-8f77-e1220855a4f41754396844541-2025_08_05-14.27.33.594/source.csv is too short (31 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-faba6583-b2c9-4b94-9ba6-9f240428520a1750722089894-2025_06_23-23.49.28.299/source.csv is too short (4 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/4de8d861ed2563988d5f1871647ebc5fe70861b32d24a4b32f9363518653a328/crowd-code-222fe98e-29ac-4b20-9a65-fe2e31f8eb701751128122769-2025_06_28-09.28.47.536/source.csv is too short (21 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-222fe98e-29ac-4b20-9a65-fe2e31f8eb701751128122769-2025_06_28-09.28.47.536/source.csv is too short (21 turns)\r\n",,terminal_output +335,3749131,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/4de8d861ed2563988d5f1871647ebc5fe70861b32d24a4b32f9363518653a328/crowd-code-84bc9952-c4b0-4456-bdc2-984faf53684f1751163593750-2025_06_28-19.19.55.196/source.csv is too short (18 turns)\r\n",,terminal_output +336,3749288,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-fb95ac9f-f57c-47dc-8ad1-be718344c5271753291337656-2025_07_23-19.22.47.144/source.csv is too short (12 turns)\r\n",,terminal_output +337,3749843,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-5342e4d6-3c20-40cb-9bcb-64bf1931df6e1753973941916-2025_07_31-16.59.20.943/source.csv is too short (8 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-94fb4d7e-812c-4d36-984a-6626015fa6fd1750854950642-2025_06_25-14.36.16.983/source.csv is too short (27 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-59175e55-ecae-446f-be12-8861032d4f481751613426266-2025_07_04-09.17.44.620/source.csv is too short (15 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-527e6f79-68be-4672-b584-2b15b1cf78281754060417734-2025_08_01-17.00.27.872/source.csv is too short (8 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-066da5ef-4ba0-4726-8184-5dc4ef5e75811751465994612-2025_07_02-16.20.42.177/source.csv is too short (32 turns)\r\n",,terminal_output +338,3750033,"TERMINAL",0,0,"[warning] Session 
/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-6ec3c786-4e15-4759-b299-d48a9e4650101752698884767-2025_07_16-22.48.49.473/source.csv is too short (36 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-44279a75-3a8c-442c-8d1c-098416d72d641750776465409-2025_06_24-16.48.04.187/source.csv is too short (29 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-cb4b3d1a-86b1-4ac1-90fd-85b7844bd05e1753184360536-2025_07_22-13.39.45.214/source.csv is too short (2 turns)\r\n",,terminal_output +339,3750523,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-e59497df-c03b-4c61-b74a-a1b1aa85d6971751043196509-2025_06_27-18.53.25.651/source.csv is too short (4 turns)\r\n",,terminal_output +340,3750865,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-6829bcbf-f7fb-4481-92ea-521e9af7eabb1754058671446-2025_08_01-16.31.17.606/source.csv is too short (6 turns)\r\n",,terminal_output +341,3751165,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-7c8743a1-f55a-4e45-b7aa-0b3df3c9f3c91752835699286-2025_07_18-12.49.02.294/source.csv is too short (39 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-ae61aa9e-3c01-45fb-b595-af4191213c4d1752828764609-2025_07_18-10.53.17.363/source.csv is too short (31 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/69a563db57051868fc3ecdda3a43f162385be48f5447fe691a10177ee4dc3a0e/crowd-code-ecd6bcde-7cf4-4819-b2c7-c5b474828daa1750689105661-2025_06_23-16.31.57.981/source.csv is too short (12 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-4457e5d2-f5e8-4b15-95aa-bafa247369991751528947759-2025_07_03-09.50.10.663/source.csv is too short (30 turns)\r\n",,terminal_output +342,3751376,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-4719c5f9-1b15-4792-8afd-690761108bda1751617825355-2025_07_04-10.31.22.581/source.csv is too short (9 turns)\r\n",,terminal_output +343,3751873,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/4de8d861ed2563988d5f1871647ebc5fe70861b32d24a4b32f9363518653a328/crowd-code-faba6583-b2c9-4b94-9ba6-9f240428520a1750722089894-2025_06_23-22.50.32.930/source.csv is too short (16 turns)\r\n",,terminal_output +344,3751992,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-cc97a3cc-fb9b-4e64-a5b2-4f31bca6d6d71751911458918-2025_07_07-20.04.30.836/source.csv is too short (22 turns)\r\n",,terminal_output +345,3752167,"TERMINAL",0,0,"[warning] Session 
/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-0847f6e2-9383-4ddd-96df-fc93316e98df1751584003509-2025_07_04-01.07.31.817/source.csv is too short (46 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-73ddfe20-a667-477d-9924-94f7208128f81752186339186-2025_07_11-00.25.58.835/source.csv is too short (15 turns)\r\n",,terminal_output +346,3752270,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-bf53d422-1719-4f12-b27c-bb9a8a564cc71752934282390-2025_07_19-16.12.25.314/source.csv is too short (19 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-1abe6561-37fa-44c4-a02e-35deedf040521754322372590-2025_08_04-17.46.19.699/source.csv is too short (44 turns)\r\n",,terminal_output +347,3752360,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-8e5967ed-f938-46b6-90b8-104deb249c3c1753178239652-2025_07_22-11.57.53.728/source.csv is too short (5 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-3d995f1f-4c62-4f98-bebc-a37f170682391753352025448-2025_07_24-12.14.24.225/source.csv is too short (15 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/05d9d5da933137c5402a176a469b618685c7e9142aa8972616ca5cdf0f6e53d1/crowd-code-966f6435-b7a4-4869-9e11-2847af1349b81750947034236-2025_06_26-16.10.57.310/source.csv is too short (25 turns)\r\n",,terminal_output +348,3752506,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-210e88c5-c80c-4b42-a393-717923a05daf1751602893995-2025_07_04-06.22.30.835/source.csv is too short (25 turns)\r\n",,terminal_output +349,3752594,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-d33d9128-8aa8-4382-a7f1-61cc99198a8e1750839147762-2025_06_25-10.21.30.519/source.csv is too short (41 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-5612308a-14dd-4f05-ae65-c6cd496f68351752499707410-2025_07_14-15.29.02.547/source.csv is too short (36 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-4c99cdc3-e81e-4e53-b728-37b9467466201751794872067-2025_07_06-11.41.50.869/source.csv is too short (20 turns)\r\n",,terminal_output +350,3752746,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/69a563db57051868fc3ecdda3a43f162385be48f5447fe691a10177ee4dc3a0e/crowd-code-89e3ea93-acec-4320-9c1b-eafc7c47155f1750747464127-2025_06_24-12.16.44.223/source.csv is too short (4 turns)\r\n",,terminal_output +351,3752887,"TERMINAL",0,0,"[warning] Session 
/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/4de8d861ed2563988d5f1871647ebc5fe70861b32d24a4b32f9363518653a328/crowd-code-b08d92a3-9c0a-4526-b12f-c973e9c3c43f1752071802867-2025_07_09-16.36.43.962/source.csv is too short (20 turns)\r\n",,terminal_output +352,3753493,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/69a563db57051868fc3ecdda3a43f162385be48f5447fe691a10177ee4dc3a0e/crowd-code-89e3ea93-acec-4320-9c1b-eafc7c47155f1750747464127-2025_06_24-08.44.49.850/source.csv is too short (11 turns)\r\n",,terminal_output +353,3753555,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-dd3ebc08-bf50-4107-8e45-eccf16876d251751885451968-2025_07_07-12.51.28.329/source.csv is too short (13 turns)\r\n",,terminal_output +354,3753660,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-268ba756-633d-460c-9a43-1e38b034568e1754428388262-2025_08_05-23.13.35.680/source.csv is too short (4 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-33e2e492-00ca-4b0f-8523-da20187ea5301751307587112-2025_06_30-20.21.41.519/source.csv is too short (20 turns)\r\n",,terminal_output +355,3753761,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-f514f25a-b687-4125-bb3e-747ec98309d21750930471988-2025_06_26-11.34.36.685/source.csv is too short (4 turns)\r\n",,terminal_output +356,3754618,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-7c1bdcf0-d594-4018-8499-7d2ed33930611752094287328-2025_07_09-22.51.39.315/source.csv is too short (13 turns)\r\n",,terminal_output +357,3754687,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-3acc90e9-90ce-4c91-8dc5-7fa36ee6eae81754056616784-2025_08_01-15.57.02.654/source.csv is too short (12 turns)\r\n",,terminal_output +358,3755061,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-e7125223-30c4-4c14-9e38-785bcef125641752082599283-2025_07_09-19.37.39.323/source.csv is too short (47 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-f493ee6f-fdfb-4a84-bc4c-0358523e54001754136495510-2025_08_02-14.08.22.904/source.csv is too short (12 turns)\r\n",,terminal_output +359,3755534,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-2cce3a90-32a5-4d8b-8cb0-10445a2ee7a71754054463184-2025_08_01-15.21.32.127/source.csv is too short (5 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-4719c5f9-1b15-4792-8afd-690761108bda1751617825355-2025_07_04-10.31.22.581/source.csv is too short (9 turns)\r\n",,terminal_output 
+360,3755769,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/69a563db57051868fc3ecdda3a43f162385be48f5447fe691a10177ee4dc3a0e/crowd-code-27a851dc-3a84-4cfb-867a-eef6b63ee7ef1750746742858-2025_06_24-08.32.35.909/source.csv is too short (18 turns)\r\n",,terminal_output +361,3756062,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-258375eb-a7da-4a0f-9f0a-c9cb095e5b831750680349626-2025_06_23-14.06.32.108/source.csv is too short (41 turns)\r\n",,terminal_output +362,3756212,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-69be90d4-787f-4563-89d6-b05adc11797c1750785084721-2025_06_24-19.11.34.94/source.csv is too short (31 turns)\r\n",,terminal_output +363,3756401,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-9995c396-b28a-4817-9971-5d6e293ae32a1753180233918-2025_07_22-12.30.43.584/source.csv is too short (2 turns)\r\n",,terminal_output +364,3756759,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/69a563db57051868fc3ecdda3a43f162385be48f5447fe691a10177ee4dc3a0e/crowd-code-03e2d1f0-34f2-48bc-9e6a-99708362c3301750977820647-2025_06_27-00.43.44.850/source.csv is too short (4 turns)\r\n",,terminal_output +365,3757263,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-4925dddc-46dd-4a2d-8d37-761ea748b28d1753197051515-2025_07_22-17.10.58.842/source.csv is too short (10 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/4de8d861ed2563988d5f1871647ebc5fe70861b32d24a4b32f9363518653a328/crowd-code-d413e23e-d7da-4c64-9e15-0b0c0e6031031751383188198-2025_07_01-17.19.54.522/source.csv is too short (8 turns)\r\n",,terminal_output +366,3757354,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/05d9d5da933137c5402a176a469b618685c7e9142aa8972616ca5cdf0f6e53d1/crowd-code-9ff54a43-2a59-41a8-96bc-f7e46d5244651750887279734-2025_06_25-23.36.32.560/source.csv is too short (4 turns)\r\n",,terminal_output +367,3758030,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-25569aaa-6e77-4ce2-b9b6-8ae8c33420051753180192494-2025_07_22-12.30.11.399/source.csv is too short (14 turns)\r\n",,terminal_output +368,3758814,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-f0382786-979c-4a6d-8e9b-f5977f18eb4f1753726151187-2025_08_02-06.58.58.573/source.csv is too short (4 turns)\r\n",,terminal_output +369,3759222,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-d33d9128-8aa8-4382-a7f1-61cc99198a8e1750839147762-2025_06_25-10.12.51.683/source.csv is too short (11 turns)\r\n[warning] Session 
/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-cd782281-c91c-41ad-b846-159ca9c28e9b1751798382635-2025_07_06-12.40.04.647/source.csv is too short (47 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-8e7b7877-c553-4d5c-a7c5-433adcd8112b1754287948136-2025_08_04-08.12.35.154/source.csv is too short (11 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-b07db19d-68eb-49f3-ac94-3d4c9ee495c61751056974607-2025_06_27-13.42.58.489/source.csv is too short (14 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-2f484e29-43ea-48d0-8c50-df135d6c967a1753171043773-2025_07_22-09.57.31.372/source.csv is too short (24 turns)\r\n",,terminal_output +370,3759352,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-50d32311-0df8-4235-b597-7d69f06b72151752666637262-2025_07_16-13.50.55.636/source.csv is too short (27 turns)\r\n",,terminal_output +371,3759413,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/4de8d861ed2563988d5f1871647ebc5fe70861b32d24a4b32f9363518653a328/crowd-code-fcaaf520-6a1e-40c5-9a72-85ae7ad4ab0b1750621325310-2025_06_22-12.42.08.659/source.csv is too short (10 turns)\r\n",,terminal_output +372,3759473,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-b2ff9c32-0980-46be-b0f1-51dff76665011752660866284-2025_07_16-12.14.34.814/source.csv is too short (14 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/4de8d861ed2563988d5f1871647ebc5fe70861b32d24a4b32f9363518653a328/crowd-code-faba6583-b2c9-4b94-9ba6-9f240428520a1750722089894-2025_06_23-23.49.28.299/source.csv is too short (4 turns)\r\n",,terminal_output +373,3760187,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/69a563db57051868fc3ecdda3a43f162385be48f5447fe691a10177ee4dc3a0e/crowd-code-eb4593ba-8717-4311-aac2-0669058b8e141750152994551-2025_06_17-11.36.51.515/source.csv is too short (8 turns)\r\n",,terminal_output +374,3760408,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-6aec90c8-8d95-4bad-afcb-92c28c6ff5241753889052956-2025_07_30-17.24.41.914/source.csv is too short (28 turns)\r\n",,terminal_output +375,3760496,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-a5280753-495e-4391-8448-c3c5679e94b01753346266343-2025_07_24-10.37.53.687/source.csv is too short (46 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-005af936-919b-4c44-b3af-2ed3459192121752831699789-2025_07_18-11.43.38.119/source.csv is too short (45 turns)\r\n",,terminal_output +376,3760693,"TERMINAL",0,0,"[warning] Session 
/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-6eacf655-5590-4c9d-ad09-856f09c6e0121751568373129-2025_07_03-20.47.02.778/source.csv is too short (49 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-48fbb415-6db9-4d35-b548-561e828791bf1751383187013-2025_07_01-17.19.57.60/source.csv is too short (8 turns)\r\n",,terminal_output +377,3760858,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-a775c5ce-801a-4b55-897a-6c0b6f3448081754127102402-2025_08_02-11.31.50.147/source.csv is too short (6 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-311b67df-2f46-4805-a16c-07606ec1fc001751032376516-2025_06_27-15.53.06.902/source.csv is too short (4 turns)\r\n",,terminal_output +378,3761199,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-59af9530-ed37-4620-9980-6c646b3d58821751599132847-2025_07_04-05.19.59.945/source.csv is too short (23 turns)\r\n",,terminal_output +379,3761687,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/4de8d861ed2563988d5f1871647ebc5fe70861b32d24a4b32f9363518653a328/crowd-code-faba6583-b2c9-4b94-9ba6-9f240428520a1750722089894-2025_06_23-23.44.53.64/source.csv is too short (12 turns)\r\n",,terminal_output +380,3762320,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-df35ccd5-7324-4848-a616-148858575cf41752663626151-2025_07_16-13.00.58.432/source.csv is too short (32 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-c66923f3-2a2a-4b19-880b-c1a8bfe1bf981753195775955-2025_07_22-16.49.43.384/source.csv is too short (22 turns)\r\n",,terminal_output +381,3763689,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/69a563db57051868fc3ecdda3a43f162385be48f5447fe691a10177ee4dc3a0e/crowd-code-12bded65-cc97-4d6e-a5a9-8eb203cdf5b21750746850455-2025_06_24-08.34.24.180/source.csv is too short (15 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-e7d20f74-415c-47d0-ad95-3f6da31696d51753194904459-2025_07_22-16.35.52.74/source.csv is too short (20 turns)\r\n",,terminal_output +382,3763785,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-40a031fc-4dc5-4408-a821-8344d9146d941752833855790-2025_07_18-12.18.25.998/source.csv is too short (38 turns)\r\n",,terminal_output +383,3764417,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-7e709d19-b316-43c3-9dab-8ab2f9d260041750153932857-2025_06_17-12.00.01.867/source.csv is too short (12 turns)\r\n[warning] Session 
/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-c458bc44-8c11-4b8a-a6af-3080f7b9216e1753197637866-2025_07_22-17.21.45.288/source.csv is too short (46 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/4de8d861ed2563988d5f1871647ebc5fe70861b32d24a4b32f9363518653a328/crowd-code-48fbb415-6db9-4d35-b548-561e828791bf1751383187013-2025_07_01-17.19.57.60/source.csv is too short (8 turns)\r\n",,terminal_output +384,3764469,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-08316456-a7e4-42de-8648-d23dded165eb1750174511987-2025_06_17-17.42.37.822/source.csv is too short (13 turns)\r\n",,terminal_output +385,3764964,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/05d9d5da933137c5402a176a469b618685c7e9142aa8972616ca5cdf0f6e53d1/crowd-code-aa8be5f9-c447-4faf-b9c6-7142909b3c591750719092446-2025_06_24-00.51.37.15/source.csv is too short (9 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-b08d92a3-9c0a-4526-b12f-c973e9c3c43f1752071802867-2025_07_09-16.36.43.962/source.csv is too short (20 turns)\r\n",,terminal_output +386,3765221,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-0f5513f7-8bc9-4c5d-856d-79d92f75113d1751284706913-2025_06_30-13.59.01.459/source.csv is too short (37 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-be624715-d320-42af-9d2e-a14ae6d9a3021750779349385-2025_06_24-17.36.05.736/source.csv is too short (49 turns)\r\n",,terminal_output +387,3765449,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-280dfe9f-0f96-4f1d-a304-44e1237157a81754289117685-2025_08_04-08.32.51.461/source.csv is too short (9 turns)\r\n",,terminal_output +388,3765667,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-7737eb1f-d280-4335-8b81-3697e0d16cc61754428290161-2025_08_05-23.11.37.668/source.csv is too short (10 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-623c548f-e16f-46a4-9ee1-6577a82e63e51754054052755-2025_08_01-15.14.20.520/source.csv is too short (8 turns)\r\n",,terminal_output +389,3765861,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-12b522dd-8518-4c62-b207-ca1ed4ce90571752782954186-2025_07_17-22.10.14.626/source.csv is too short (16 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-e55a361e-680d-4531-a14a-8106315972f21751460136949-2025_07_02-14.42.58.307/source.csv is too short (19 turns)\r\n[warning] Session 
/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-84bc9952-c4b0-4456-bdc2-984faf53684f1751163593750-2025_06_28-19.19.55.196/source.csv is too short (20 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-a5280753-495e-4391-8448-c3c5679e94b01753346266343-2025_07_24-10.37.53.687/source.csv is too short (46 turns)\r\n",,terminal_output +390,3765988,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-6a683910-8d55-4299-9066-894bbed6c97c1754399347661-2025_08_05-15.09.15.958/source.csv is too short (18 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-847ad052-cbff-45db-a343-a2c6b2d212411753383716687-2025_07_24-21.02.30.722/source.csv is too short (26 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-afb08496-b4ce-4efc-aec5-cc21ff6731861752228993278-2025_07_11-12.16.54.20/source.csv is too short (6 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-26402469-72d0-4ef2-8121-ec0cf7ef56091752689884853-2025_07_16-20.18.54.768/source.csv is too short (33 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-345f0cdf-fa74-4f5f-8eca-394e8f79b3ba1750698220178-2025_06_23-19.04.24.596/source.csv is too short (33 turns)\r\n",,terminal_output +391,3767116,"TERMINAL",0,0,"[debug] All chunks turn stats: count=349, median_turns=127.0, avg_turns=331.6, min_turns=2, max_turns=3553\r\n[debug] Kept chunks turn stats: count=227, median_turns=327.0, avg_turns=499.5, min_turns=51, max_turns=3553\r\n[debug] All chunks char stats: count=349, median_chars=119609.0, avg_chars=301732.0, min_chars=104, max_chars=2827282\r\n[debug] Kept chunks char stats: count=227, median_chars=302580.0, avg_chars=450097.0, min_chars=8424, max_chars=2827282\r\n\r\n[summary]\r\n Total sessions processed: 349\r\n Sessions kept: 227\r\n Skipped (too few turns): 122\r\n Train conversations: 210\r\n Val conversations: 17\r\n Output: /fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_full//{train,val}.jsonl\r\n Metadata: /fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_full/metadata.json\r\n",,terminal_output +392,3767586,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/crowd-pilot",,terminal_output +393,3892998,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",267,0,"",shellscript,selection_command +394,3893055,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",265,0,"",shellscript,selection_command +395,3893175,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",217,0,"",shellscript,selection_command +396,3893403,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",177,0,"",shellscript,selection_command +397,3893714,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",175,0,"",shellscript,selection_command 
+398,3894370,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",105,0,"",shellscript,selection_command +399,3894848,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",104,0,"",shellscript,selection_command +400,3894948,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",104,1,"/",shellscript,selection_command +401,3895138,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",81,24,"nemo_hf_part_jsonl_full/",shellscript,selection_command +402,3895385,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",80,25,"/nemo_hf_part_jsonl_full/",shellscript,selection_command +403,3895416,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",76,29,"data/nemo_hf_part_jsonl_full/",shellscript,selection_command +404,3895450,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",75,30,"/data/nemo_hf_part_jsonl_full/",shellscript,selection_command +405,3895483,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",66,39,"tab_model/data/nemo_hf_part_jsonl_full/",shellscript,selection_command +406,3895516,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",65,40,"/tab_model/data/nemo_hf_part_jsonl_full/",shellscript,selection_command +407,3895550,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",49,56,"HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_full/",shellscript,selection_command +408,3895582,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",48,57,"/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_full/",shellscript,selection_command +409,3895718,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",41,64,"project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_full/",shellscript,selection_command +410,3895886,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",40,65,"/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_full/",shellscript,selection_command +411,3896081,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",36,69,"fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_full/",shellscript,selection_command +412,3896399,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",35,70,"/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_full/",shellscript,selection_command +413,3896797,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",35,0,"",shellscript,selection_command +414,3898898,"TERMINAL",0,0,"rm -r /fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_full/",,terminal_command +415,3898903,"TERMINAL",0,0,"]633;C]0;franz.srambical@hai-login2:~/crowd-pilot",,terminal_output +416,3911320,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",119,0,"",shellscript,selection_command +417,3911397,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",177,0,"",shellscript,selection_command +418,3911543,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",190,0,"",shellscript,selection_command +419,3911673,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",231,0,"",shellscript,selection_command +420,3911811,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",267,0,"",shellscript,selection_command 
+421,3911977,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",280,0,"",shellscript,selection_command +422,3912558,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",449,0,"",shellscript,selection_command +423,3912890,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",448,1,"",shellscript,content +424,3913160,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",448,0,"1",shellscript,content +425,3913160,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",449,0,"",shellscript,selection_keyboard +426,3913200,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",449,0,"0",shellscript,content +427,3913200,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",450,0,"",shellscript,selection_keyboard +428,3913559,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",449,1,"",shellscript,content +429,3913717,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",448,1,"",shellscript,content +430,3913846,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",447,1,"",shellscript,content +431,3914069,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",447,0,"1",shellscript,content +432,3914069,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",448,0,"",shellscript,selection_keyboard +433,3914074,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",448,0,"0",shellscript,content +434,3914074,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",449,0,"",shellscript,selection_keyboard +435,3914410,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",448,0,"",shellscript,selection_command +436,3956349,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",447,0,"",shellscript,selection_command +437,3956499,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",446,0,"",shellscript,selection_command +438,3956650,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",429,0,"",shellscript,selection_command +439,3956791,"slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",427,0,"",shellscript,selection_command +440,3958304,"TERMINAL",0,0,"bash /home/franz.srambical/slurm/dev/franz/berlin/crowd-pilot/nemo/generate_jsonl_nemo_dataset_full.sh",,terminal_command +441,3958357,"TERMINAL",0,0,"]633;C+ OUTPUT_DIR=/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_full/\r\n+ CSV_ROOT=/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/\r\n+ TARGET_CHARS_PER_CONVERSATION=99999999999999999\r\n+ uv run crowd_pilot/serialize_dataset_nemo_json.py --csv_root=/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/ --output_dir=/fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_full/ --target_chars_per_conversation=99999999999999999 --min_session_turns=10\r\n",,terminal_output +442,3965807,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-f1a23455-555b-44b7-b7f2-5fb8550d75021753722279799-2025_07_28-19.04.51.762/source.csv is too short (4 turns)\r\n",,terminal_output +443,3965971,"TERMINAL",0,0,"[warning] Session 
/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-f0382786-979c-4a6d-8e9b-f5977f18eb4f1753726151187-2025_08_02-06.58.58.573/source.csv is too short (4 turns)\r\n",,terminal_output +444,3966901,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-1ae480a2-bc7d-4b65-b456-3679a88992ae1752076515942-2025_07_09-17.56.33.471/source.csv is too short (6 turns)\r\n",,terminal_output +445,3967478,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-f2478f34-0247-46ae-b2bf-367520171d271754324711637-2025_08_04-18.25.17.542/source.csv is too short (6 turns)\r\n",,terminal_output +446,3967918,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-6614bc95-283e-4f09-b0e7-458aba09077f1754428379797-2025_08_05-23.14.08.79/source.csv is too short (8 turns)\r\n",,terminal_output +447,3970289,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-d413e23e-d7da-4c64-9e15-0b0c0e6031031751383188198-2025_07_01-17.19.54.522/source.csv is too short (8 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-faba6583-b2c9-4b94-9ba6-9f240428520a1750722089894-2025_06_23-23.49.28.299/source.csv is too short (4 turns)\r\n",,terminal_output +448,3970940,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-5342e4d6-3c20-40cb-9bcb-64bf1931df6e1753973941916-2025_07_31-16.59.20.943/source.csv is too short (8 turns)\r\n",,terminal_output +449,3971034,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-527e6f79-68be-4672-b584-2b15b1cf78281754060417734-2025_08_01-17.00.27.872/source.csv is too short (8 turns)\r\n",,terminal_output +450,3971235,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-cb4b3d1a-86b1-4ac1-90fd-85b7844bd05e1753184360536-2025_07_22-13.39.45.214/source.csv is too short (2 turns)\r\n",,terminal_output +451,3971616,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-e59497df-c03b-4c61-b74a-a1b1aa85d6971751043196509-2025_06_27-18.53.25.651/source.csv is too short (4 turns)\r\n",,terminal_output +452,3972058,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-6829bcbf-f7fb-4481-92ea-521e9af7eabb1754058671446-2025_08_01-16.31.17.606/source.csv is too short (6 turns)\r\n",,terminal_output +453,3972561,"TERMINAL",0,0,"[warning] Session 
/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-4719c5f9-1b15-4792-8afd-690761108bda1751617825355-2025_07_04-10.31.22.581/source.csv is too short (9 turns)\r\n",,terminal_output +454,3973521,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-8e5967ed-f938-46b6-90b8-104deb249c3c1753178239652-2025_07_22-11.57.53.728/source.csv is too short (5 turns)\r\n",,terminal_output +455,3973927,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/69a563db57051868fc3ecdda3a43f162385be48f5447fe691a10177ee4dc3a0e/crowd-code-89e3ea93-acec-4320-9c1b-eafc7c47155f1750747464127-2025_06_24-12.16.44.223/source.csv is too short (4 turns)\r\n",,terminal_output +456,3974823,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/e8b08c312d88206805b92191af1ee2a660f8f0e59d3990233d6a3f81cdab43f4/crowd-code-268ba756-633d-460c-9a43-1e38b034568e1754428388262-2025_08_05-23.13.35.680/source.csv is too short (4 turns)\r\n",,terminal_output +457,3974917,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-f514f25a-b687-4125-bb3e-747ec98309d21750930471988-2025_06_26-11.34.36.685/source.csv is too short (4 turns)\r\n",,terminal_output +458,3976733,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-2cce3a90-32a5-4d8b-8cb0-10445a2ee7a71754054463184-2025_08_01-15.21.32.127/source.csv is too short (5 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-4719c5f9-1b15-4792-8afd-690761108bda1751617825355-2025_07_04-10.31.22.581/source.csv is too short (9 turns)\r\n",,terminal_output +459,3977545,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-9995c396-b28a-4817-9971-5d6e293ae32a1753180233918-2025_07_22-12.30.43.584/source.csv is too short (2 turns)\r\n",,terminal_output +460,3977957,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/69a563db57051868fc3ecdda3a43f162385be48f5447fe691a10177ee4dc3a0e/crowd-code-03e2d1f0-34f2-48bc-9e6a-99708362c3301750977820647-2025_06_27-00.43.44.850/source.csv is too short (4 turns)\r\n",,terminal_output +461,3978395,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/4de8d861ed2563988d5f1871647ebc5fe70861b32d24a4b32f9363518653a328/crowd-code-d413e23e-d7da-4c64-9e15-0b0c0e6031031751383188198-2025_07_01-17.19.54.522/source.csv is too short (8 turns)\r\n",,terminal_output +462,3978483,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/05d9d5da933137c5402a176a469b618685c7e9142aa8972616ca5cdf0f6e53d1/crowd-code-9ff54a43-2a59-41a8-96bc-f7e46d5244651750887279734-2025_06_25-23.36.32.560/source.csv is too short (4 turns)\r\n",,terminal_output +463,3979929,"TERMINAL",0,0,"[warning] Session 
/fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-f0382786-979c-4a6d-8e9b-f5977f18eb4f1753726151187-2025_08_02-06.58.58.573/source.csv is too short (4 turns)\r\n",,terminal_output +464,3980604,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/4de8d861ed2563988d5f1871647ebc5fe70861b32d24a4b32f9363518653a328/crowd-code-faba6583-b2c9-4b94-9ba6-9f240428520a1750722089894-2025_06_23-23.49.28.299/source.csv is too short (4 turns)\r\n",,terminal_output +465,3981312,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/69a563db57051868fc3ecdda3a43f162385be48f5447fe691a10177ee4dc3a0e/crowd-code-eb4593ba-8717-4311-aac2-0669058b8e141750152994551-2025_06_17-11.36.51.515/source.csv is too short (8 turns)\r\n",,terminal_output +466,3981835,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-48fbb415-6db9-4d35-b548-561e828791bf1751383187013-2025_07_01-17.19.57.60/source.csv is too short (8 turns)\r\n",,terminal_output +467,3981995,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-a775c5ce-801a-4b55-897a-6c0b6f3448081754127102402-2025_08_02-11.31.50.147/source.csv is too short (6 turns)\r\n[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-311b67df-2f46-4805-a16c-07606ec1fc001751032376516-2025_06_27-15.53.06.902/source.csv is too short (4 turns)\r\n",,terminal_output +468,3985631,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/4de8d861ed2563988d5f1871647ebc5fe70861b32d24a4b32f9363518653a328/crowd-code-48fbb415-6db9-4d35-b548-561e828791bf1751383187013-2025_07_01-17.19.57.60/source.csv is too short (8 turns)\r\n",,terminal_output +469,3986059,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/05d9d5da933137c5402a176a469b618685c7e9142aa8972616ca5cdf0f6e53d1/crowd-code-aa8be5f9-c447-4faf-b9c6-7142909b3c591750719092446-2025_06_24-00.51.37.15/source.csv is too short (9 turns)\r\n",,terminal_output +470,3986657,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1de052c516cab686515c107385aaf7c3a7e3e5c23c9bc3c0be0cff3df28cd64d/crowd-code-280dfe9f-0f96-4f1d-a304-44e1237157a81754289117685-2025_08_04-08.32.51.461/source.csv is too short (9 turns)\r\n",,terminal_output +471,3986793,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-623c548f-e16f-46a4-9ee1-6577a82e63e51754054052755-2025_08_01-15.14.20.520/source.csv is too short (8 turns)\r\n",,terminal_output +472,3987081,"TERMINAL",0,0,"[warning] Session /fast/project/HFMI_SynergyUnit/tab_model/data/hf_part_csv/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-afb08496-b4ce-4efc-aec5-cc21ff6731861752228993278-2025_07_11-12.16.54.20/source.csv is too short (6 turns)\r\n",,terminal_output +473,3988346,"TERMINAL",0,0,"[debug] All chunks turn stats: count=349, median_turns=127.0, avg_turns=331.6, min_turns=2, max_turns=3553\r\n[debug] Kept chunks turn stats: count=315, median_turns=173.0, avg_turns=366.8, min_turns=10, 
max_turns=3553\r\n[debug] All chunks char stats: count=349, median_chars=119609.0, avg_chars=301732.0, min_chars=104, max_chars=2827282\r\n[debug] Kept chunks char stats: count=315, median_chars=151798.0, avg_chars=332972.1, min_chars=382, max_chars=2827282\r\n\r\n[summary]\r\n Total sessions processed: 349\r\n Sessions kept: 315\r\n Skipped (too few turns): 34\r\n Train conversations: 285\r\n Val conversations: 30\r\n Output: /fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_full//{train,val}.jsonl\r\n Metadata: /fast/project/HFMI_SynergyUnit/tab_model/data/nemo_hf_part_jsonl_full/metadata.json\r\n",,terminal_output +474,3988865,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/crowd-pilot",,terminal_output diff --git a/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-f508ed97-76c1-4935-95ed-d4393099e6361753128212083-2025_07_21-22.03.39.166/source.csv b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-f508ed97-76c1-4935-95ed-d4393099e6361753128212083-2025_07_21-22.03.39.166/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..e8a8913f517040bd25733f03f2faa8af30f69bb5 --- /dev/null +++ b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-f508ed97-76c1-4935-95ed-d4393099e6361753128212083-2025_07_21-22.03.39.166/source.csv @@ -0,0 +1,282 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,3,"experiments/sample.sh",0,0,"source .venv/bin/activate\n\ndata_dir=""$PWD/data_arrayrecord/dummy""\nckpt_dir=""$PWD/checkpoints/dynamics_openai_grain_tok_lam_restore""\n\nexport XLA_FLAGS=--xla_gpu_autotune_level=0\nsrun python sample.py \\n --batch_size 1 \\n --seq_len 16 \\n --start_frame 15 \\n --checkpoint $ckpt_dir \\n --data_dir $data_dir",shellscript,tab +2,87,"anysphere.remote-ssh.Remote - SSH",0,0,"2025-07-15 16:25:19.283 [info] Resolving ssh remote authority 'login.haicore.berlin' (Unparsed 'ssh-remote+7b22686f73744e616d65223a226c6f67696e2e686169636f72652e6265726c696e227d') (attempt #1)\n2025-07-15 16:25:19.293 [info] SSH askpass server listening on /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-ZyKUP5/socket.sock\n2025-07-15 16:25:19.294 [info] Using configured platform linux for remote host login.haicore.berlin\n2025-07-15 16:25:19.296 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-ZyKUP5/socket.sock\n2025-07-15 16:25:19.298 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_70811.sh"" | ssh -v -T -D 58069 login.haicore.berlin bash --login -c bash\n2025-07-15 16:25:19.298 [info] Started installation script. 
Waiting for it to finish...\n2025-07-15 16:25:19.298 [info] Waiting for server to install via process(51201)...\n2025-07-15 16:25:19.303 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-15 16:25:19.303 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-15 16:25:19.304 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-15 16:25:19.304 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-15 16:25:19.304 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-15 16:25:19.363 [info] (ssh_tunnel) stderr: debug1: Connection established.\n\n2025-07-15 16:25:19.363 [info] (ssh_tunnel) stderr: debug1: identity file /Users/franzsrambical/.ssh/id_ed25519 type 3\n\n2025-07-15 16:25:19.363 [info] (ssh_tunnel) stderr: debug1: identity file /Users/franzsrambical/.ssh/id_ed25519-cert type -1\ndebug1: Local version string SSH-2.0-OpenSSH_9.9\n\n2025-07-15 16:25:19.442 [info] (ssh_tunnel) stderr: debug1: Remote protocol version 2.0, remote software version OpenSSH_8.7\ndebug1: compat_banner: match: OpenSSH_8.7 pat OpenSSH* compat 0x04000000\ndebug1: Authenticating to login.haicore.berlin:22 as 'franz.srambical'\n\n2025-07-15 16:25:19.444 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /Users/franzsrambical/.ssh/known_hosts2: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory\n\n2025-07-15 16:25:19.444 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEXINIT sent\n\n2025-07-15 16:25:19.467 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEXINIT received\ndebug1: kex: algorithm: ecdh-sha2-nistp256\ndebug1: kex: host key algorithm: ssh-ed25519\n\n2025-07-15 16:25:19.467 [info] (ssh_tunnel) stderr: debug1: kex: server->client cipher: aes128-gcm@openssh.com MAC: compression: none\ndebug1: kex: client->server cipher: aes128-gcm@openssh.com MAC: compression: none\n\n2025-07-15 16:25:19.468 [info] (ssh_tunnel) stderr: debug1: expecting SSH2_MSG_KEX_ECDH_REPLY\n\n2025-07-15 16:25:19.498 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEX_ECDH_REPLY received\n\n2025-07-15 16:25:19.498 [info] (ssh_tunnel) stderr: debug1: Server host key: ssh-ed25519 SHA256:3/BGZ1UNXR9SufKdsZVtx4Yd+kZTnZzSvRH0l6rtbvo\n\n2025-07-15 16:25:19.499 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /Users/franzsrambical/.ssh/known_hosts2: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory\n\n2025-07-15 16:25:19.499 [info] (ssh_tunnel) stderr: debug1: Host 'login.haicore.berlin' is known and matches the ED25519 host key.\ndebug1: Found key in /Users/franzsrambical/.ssh/known_hosts:17\n\n2025-07-15 16:25:19.504 [info] (ssh_tunnel) stderr: debug1: ssh_packet_send2_wrapped: resetting send seqnr 3\ndebug1: rekey out after 4294967296 blocks\n\n2025-07-15 16:25:19.504 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_NEWKEYS sent\ndebug1: expecting 
SSH2_MSG_NEWKEYS\ndebug1: ssh_packet_read_poll2: resetting read seqnr 3\ndebug1: SSH2_MSG_NEWKEYS received\ndebug1: rekey in after 4294967296 blocks\n\n2025-07-15 16:25:19.504 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_EXT_INFO received\ndebug1: kex_ext_info_client_parse: server-sig-algs=\n\n2025-07-15 16:25:19.596 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_SERVICE_ACCEPT received\n\n2025-07-15 16:25:19.634 [info] (ssh_tunnel) stderr: debug1: Authentications that can continue: publickey,gssapi-keyex,gssapi-with-mic,password,keyboard-interactive\ndebug1: Next authentication method: publickey\n\n2025-07-15 16:25:19.638 [info] (ssh_tunnel) stderr: debug1: get_agent_identities: bound agent to hostkey\n\n2025-07-15 16:25:19.638 [info] (ssh_tunnel) stderr: debug1: get_agent_identities: ssh_fetch_identitylist: agent contains no identities\ndebug1: Will attempt key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\ndebug1: Offering public key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\n\n2025-07-15 16:25:19.703 [info] (ssh_tunnel) stderr: debug1: Authentications that can continue: publickey,gssapi-keyex,gssapi-with-mic,password,keyboard-interactive\ndebug1: Next authentication method: keyboard-interactive\n\n2025-07-15 16:25:19.770 [info] (ssh_tunnel) stderr: debug1: read_passphrase: requested to askpass\n\n2025-07-15 16:25:19.850 [info] Askpass server received request: POST /\n2025-07-15 16:25:19.850 [info] Askpass server received request body: {""request"":""(franz.srambical@login.haicore.berlin) Password: ""}\n2025-07-15 16:25:19.850 [info] Received SSH askpass request: (franz.srambical@login.haicore.berlin) Password: \n2025-07-15 16:25:27.995 [error] Password authentication cancelled\n2025-07-15 16:25:27.998 [info] (ssh_tunnel) stderr: Server returned status code: 500\n\n2025-07-15 16:25:30.345 [info] Resolving ssh remote authority 'login.haicore.berlin' (Unparsed 'ssh-remote+7b22686f73744e616d65223a226c6f67696e2e686169636f72652e6265726c696e227d') (attempt #1)\n2025-07-15 16:25:30.355 [info] SSH askpass server listening on /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-XEb2su/socket.sock\n2025-07-15 16:25:30.356 [info] Using configured platform linux for remote host login.haicore.berlin\n2025-07-15 16:25:30.358 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-XEb2su/socket.sock\n2025-07-15 16:25:30.360 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_36628.sh"" | ssh -v -T -D 58080 login.haicore.berlin bash --login -c bash\n2025-07-15 16:25:30.360 [info] Started installation script. 
Waiting for it to finish...\n2025-07-15 16:25:30.360 [info] Waiting for server to install via process(51223)...\n2025-07-15 16:25:30.366 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\n\n2025-07-15 16:25:30.366 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-15 16:25:30.366 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-15 16:25:30.366 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-15 16:25:30.366 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-15 16:25:30.390 [info] (ssh_tunnel) stderr: debug1: Connection established.\n\n2025-07-15 16:25:30.390 [info] (ssh_tunnel) stderr: debug1: identity file /Users/franzsrambical/.ssh/id_ed25519 type 3\n\n2025-07-15 16:25:30.390 [info] (ssh_tunnel) stderr: debug1: identity file /Users/franzsrambical/.ssh/id_ed25519-cert type -1\ndebug1: Local version string SSH-2.0-OpenSSH_9.9\n\n2025-07-15 16:25:30.533 [info] (ssh_tunnel) stderr: debug1: Remote protocol version 2.0, remote software version OpenSSH_8.7\ndebug1: compat_banner: match: OpenSSH_8.7 pat OpenSSH* compat 0x04000000\ndebug1: Authenticating to login.haicore.berlin:22 as 'franz.srambical'\n\n2025-07-15 16:25:30.534 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /Users/franzsrambical/.ssh/known_hosts2: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory\n\n2025-07-15 16:25:30.534 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory\ndebug1: SSH2_MSG_KEXINIT sent\n\n2025-07-15 16:25:30.558 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEXINIT received\ndebug1: kex: algorithm: ecdh-sha2-nistp256\ndebug1: kex: host key algorithm: ssh-ed25519\n\n2025-07-15 16:25:30.558 [info] (ssh_tunnel) stderr: debug1: kex: server->client cipher: aes128-gcm@openssh.com MAC: compression: none\ndebug1: kex: client->server cipher: aes128-gcm@openssh.com MAC: compression: none\n\n2025-07-15 16:25:30.559 [info] (ssh_tunnel) stderr: debug1: expecting SSH2_MSG_KEX_ECDH_REPLY\n\n2025-07-15 16:25:30.608 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEX_ECDH_REPLY received\ndebug1: Server host key: ssh-ed25519 SHA256:3/BGZ1UNXR9SufKdsZVtx4Yd+kZTnZzSvRH0l6rtbvo\n\n2025-07-15 16:25:30.609 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /Users/franzsrambical/.ssh/known_hosts2: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory\n\n2025-07-15 16:25:30.610 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory\ndebug1: Host 'login.haicore.berlin' is known and matches the ED25519 host key.\ndebug1: Found key in /Users/franzsrambical/.ssh/known_hosts:17\n\n2025-07-15 16:25:30.615 [info] (ssh_tunnel) stderr: debug1: ssh_packet_send2_wrapped: resetting send seqnr 3\ndebug1: rekey out after 4294967296 blocks\ndebug1: SSH2_MSG_NEWKEYS sent\ndebug1: expecting SSH2_MSG_NEWKEYS\n\n2025-07-15 16:25:30.615 [info] (ssh_tunnel) stderr: debug1: ssh_packet_read_poll2: resetting read seqnr 
3\ndebug1: SSH2_MSG_NEWKEYS received\ndebug1: rekey in after 4294967296 blocks\n\n2025-07-15 16:25:30.615 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_EXT_INFO received\ndebug1: kex_ext_info_client_parse: server-sig-algs=\n\n2025-07-15 16:25:30.848 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_SERVICE_ACCEPT received\n\n2025-07-15 16:25:30.913 [info] (ssh_tunnel) stderr: debug1: Authentications that can continue: publickey,gssapi-keyex,gssapi-with-mic,password,keyboard-interactive\ndebug1: Next authentication method: publickey\n\n2025-07-15 16:25:30.920 [info] (ssh_tunnel) stderr: debug1: get_agent_identities: bound agent to hostkey\ndebug1: get_agent_identities: ssh_fetch_identitylist: agent contains no identities\n\n2025-07-15 16:25:30.920 [info] (ssh_tunnel) stderr: debug1: Will attempt key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\ndebug1: Offering public key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\n\n2025-07-15 16:25:30.996 [info] (ssh_tunnel) stderr: debug1: Server accepts key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\n\n2025-07-15 16:25:31.069 [info] (ssh_tunnel) stderr: Authenticated to login.haicore.berlin ([141.80.150.4]:22) using ""publickey"".\ndebug1: Local connections to LOCALHOST:58080 forwarded to remote address socks:0\n\n2025-07-15 16:25:31.069 [info] (ssh_tunnel) stderr: debug1: Local forwarding listening on ::1 port 58080.\ndebug1: channel 0: new port-listener [port listener] (inactive timeout: 0)\ndebug1: Local forwarding listening on 127.0.0.1 port 58080.\ndebug1: channel 1: new port-listener [port listener] (inactive timeout: 0)\n\n2025-07-15 16:25:31.069 [info] (ssh_tunnel) stderr: debug1: channel 2: new session [client-session] (inactive timeout: 0)\ndebug1: Requesting no-more-sessions@openssh.com\n\n2025-07-15 16:25:31.070 [info] (ssh_tunnel) stderr: debug1: Entering interactive session.\ndebug1: pledge: filesystem\n\n2025-07-15 16:25:31.251 [info] (ssh_tunnel) stderr: debug1: client_input_global_request: rtype hostkeys-00@openssh.com want_reply 0\n\n2025-07-15 16:25:31.252 [info] (ssh_tunnel) stderr: debug1: client_input_hostkeys: searching /Users/franzsrambical/.ssh/known_hosts for login.haicore.berlin / (none)\n\n2025-07-15 16:25:31.261 [info] (ssh_tunnel) stderr: debug1: client_input_hostkeys: searching /Users/franzsrambical/.ssh/known_hosts2 for login.haicore.berlin / (none)\ndebug1: client_input_hostkeys: hostkeys file /Users/franzsrambical/.ssh/known_hosts2 does not exist\ndebug1: client_input_hostkeys: no new or deprecated keys from server\n\n2025-07-15 16:25:31.261 [info] (ssh_tunnel) stderr: debug1: Remote: /home/franz.srambical/.ssh/authorized_keys:1: key options: agent-forwarding port-forwarding pty user-rc x11-forwarding\n\n2025-07-15 16:25:31.273 [info] (ssh_tunnel) stderr: debug1: Remote: /home/franz.srambical/.ssh/authorized_keys:1: key options: agent-forwarding port-forwarding pty user-rc x11-forwarding\ndebug1: Sending environment.\ndebug1: Sending command: bash --login -c bash\n\n2025-07-15 16:25:31.274 [info] (ssh_tunnel) stderr: debug1: pledge: network\n\n2025-07-15 16:25:31.673 [info] (ssh_tunnel) stdout: Using TMP_DIR: /run/user/961800067\n\n2025-07-15 16:25:31.709 [info] (ssh_tunnel) stdout: Locking /run/user/961800067/cursor-remote-lock.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-15 16:25:31.712 [info] (ssh_tunnel) stdout: Server script already 
installed in /home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/bin/cursor-server\n\n2025-07-15 16:25:31.713 [info] (ssh_tunnel) stdout: Checking node executable\n\n2025-07-15 16:25:31.718 [info] (ssh_tunnel) stdout: v20.18.2\n\n2025-07-15 16:25:31.721 [info] (ssh_tunnel) stdout: Checking for running multiplex server: /home/franz.srambical/.cursor-server/bin/multiplex-server/45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15.js\n\n2025-07-15 16:25:31.740 [info] (ssh_tunnel) stdout: Running multiplex server: \n\n2025-07-15 16:25:31.741 [info] (ssh_tunnel) stdout: Creating multiplex server token file /run/user/961800067/cursor-remote-multiplex.token.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\n\n2025-07-15 16:25:31.744 [info] (ssh_tunnel) stdout: Creating directory for multiplex server: /home/franz.srambical/.cursor-server/bin/multiplex-server\n\n2025-07-15 16:25:31.746 [info] (ssh_tunnel) stdout: Writing multiplex server script to /home/franz.srambical/.cursor-server/bin/multiplex-server/45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15.js\n\n2025-07-15 16:25:31.758 [info] (ssh_tunnel) stdout: Starting multiplex server: /home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/node /home/franz.srambical/.cursor-server/bin/multiplex-server/45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15.js 9c7afd2b-b7f2-421b-8a62-3e0f8de9f546\n\n2025-07-15 16:25:31.758 [info] (ssh_tunnel) stdout: Multiplex server started with PID 667915 and wrote pid to file /run/user/961800067/cursor-remote-multiplex.pid.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\nReading multiplex server token file /run/user/961800067/cursor-remote-multiplex.token.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\nMultiplex server token file found\nReading multiplex server log file /run/user/961800067/cursor-remote-multiplex.log.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\n\n2025-07-15 16:25:32.262 [info] (ssh_tunnel) stdout: Checking for code servers\n\n2025-07-15 16:25:32.278 [info] (ssh_tunnel) stdout: Code server script is not running\nCreating code server token file /run/user/961800067/cursor-remote-code.token.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-15 16:25:32.279 [info] (ssh_tunnel) stdout: Starting code server script /home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/bin/cursor-server --start-server --host=127.0.0.1 --port 0 --connection-token-file /run/user/961800067/cursor-remote-code.token.c403edc4db82e26fa41a0903d75ac6d0 --telemetry-level off --enable-remote-auto-shutdown --accept-server-license-terms &> /run/user/961800067/cursor-remote-code.log.c403edc4db82e26fa41a0903d75ac6d0 &\n\n2025-07-15 16:25:32.280 [info] (ssh_tunnel) stdout: Code server started with PID 667941 and wrote pid to file /run/user/961800067/cursor-remote-code.pid.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-15 16:25:32.281 [info] (ssh_tunnel) stdout: Code server log file is /run/user/961800067/cursor-remote-code.log.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-15 16:25:32.797 [info] (ssh_tunnel) stdout: 8aa66e9a064ed2b82f742096: 
start\nexitCode==0==\nnodeExecutable==/home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/node==\nerrorMessage====\nisFatalError==false==\nmultiplexListeningOn==37279==\nmultiplexConnectionToken==9c7afd2b-b7f2-421b-8a62-3e0f8de9f546==\ncodeListeningOn==33373==\ncodeConnectionToken==2bbfa45f-fd71-45de-9de1-d8d14af645d1==\ndetectedPlatform==linux==\narch==x64==\nSSH_AUTH_SOCK====\n8aa66e9a064ed2b82f742096: end\n\n2025-07-15 16:25:32.799 [info] Server install command exit code: 0\n2025-07-15 16:25:32.800 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_36628.sh\n2025-07-15 16:25:32.802 [info] [forwarding][code] creating new forwarding server\n2025-07-15 16:25:32.803 [info] [forwarding][code] server listening on 58082\n2025-07-15 16:25:32.803 [info] [forwarding][code] Set up server\n2025-07-15 16:25:32.804 [info] [remote-ssh] codeListeningOn (remote=33373; local=58082) codeConnectionToken: 2bbfa45f-fd71-45de-9de1-d8d14af645d1\n2025-07-15 16:25:32.804 [info] [forwarding][multiplex] creating new forwarding server\n2025-07-15 16:25:32.804 [info] [forwarding][multiplex] server listening on 58083\n2025-07-15 16:25:32.804 [info] [forwarding][multiplex] Set up server\n2025-07-15 16:25:32.806 [info] [remote-ssh] multiplexListeningOn (remote=37279; local=58083) multiplexConnectionToken: 9c7afd2b-b7f2-421b-8a62-3e0f8de9f546\n2025-07-15 16:25:32.806 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:25:32.810 [info] (ssh_tunnel) stdout: Unlocking /run/user/961800067/cursor-remote-lock.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-15 16:25:32.812 [info] [command][40336851-30e3-431c-bed0-b4c403b53e8b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""40336851-30e3-431c-bed0-b4c403b53e8b""}\n2025-07-15 16:25:32.813 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][bacfd19a-926e-4256-97de-cabb6a6c525b] received connection request\n2025-07-15 16:25:32.813 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 3: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:25:32.824 [info] [forwarding][code][127.0.0.1:58082 -> 127.0.0.1:33373][d51b8696-8d77-437b-be38-92d9b9b96091] received connection request\n2025-07-15 16:25:32.824 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:25:32.839 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][bacfd19a-926e-4256-97de-cabb6a6c525b] socks forwarding established\n2025-07-15 16:25:32.847 [info] [forwarding][code][127.0.0.1:58082 -> 127.0.0.1:58080 -> 127.0.0.1:33373][d51b8696-8d77-437b-be38-92d9b9b96091] socks forwarding established\n2025-07-15 16:25:32.867 [info] [command][40336851-30e3-431c-bed0-b4c403b53e8b] Process exited with code 0\n2025-07-15 16:25:32.867 [info] [command][40336851-30e3-431c-bed0-b4c403b53e8b] Socket close event received\n2025-07-15 16:25:32.868 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][bacfd19a-926e-4256-97de-cabb6a6c525b] socks connection closed\n2025-07-15 16:25:32.890 [info] Successfully connected to Cursor server at http://127.0.0.1:58082/version\n2025-07-15 16:25:32.890 [info] [execServer][spawn] command: echo, args: 1, options: {}\n2025-07-15 
16:25:32.890 [info] [command][b0107133-66ce-4053-997a-af0eb21ff1f8] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""b0107133-66ce-4053-997a-af0eb21ff1f8""}\n2025-07-15 16:25:32.890 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][8f23ccfe-bd3e-4cf1-9270-29ffbe8cc97b] received connection request\n2025-07-15 16:25:32.891 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 5: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:25:32.895 [info] (ssh_tunnel) stderr: debug1: channel 3: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58085 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:25:32.914 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][8f23ccfe-bd3e-4cf1-9270-29ffbe8cc97b] socks forwarding established\n2025-07-15 16:25:32.945 [info] [command][b0107133-66ce-4053-997a-af0eb21ff1f8] Process exited with code 0\n2025-07-15 16:25:32.945 [info] Successfully ran 'echo 1' against the multiplex server\n2025-07-15 16:25:32.945 [info] [remote-ssh] Resolved exec server. Socks port: 58080\n2025-07-15 16:25:32.945 [info] [remote-ssh] Resolved authority: {""host"":""127.0.0.1"",""port"":58082,""connectionToken"":""2bbfa45f-fd71-45de-9de1-d8d14af645d1"",""extensionHostEnv"":{}}. Socks port: 58080\n2025-07-15 16:25:32.945 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][8f23ccfe-bd3e-4cf1-9270-29ffbe8cc97b] socks connection closed\n2025-07-15 16:25:32.945 [info] [command][b0107133-66ce-4053-997a-af0eb21ff1f8] Socket close event received\n2025-07-15 16:25:32.969 [info] (ssh_tunnel) stderr: debug1: channel 5: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58089 to 127.0.0.1 port 58080, nchannels 5\n\n2025-07-15 16:25:32.970 [info] [forwarding][code][127.0.0.1:58082 -> 127.0.0.1:33373][aa80c3b4-4bb6-4c78-bbd1-5b24883d7017] received connection request\n2025-07-15 16:25:32.970 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 3: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:25:32.993 [info] [forwarding][code][127.0.0.1:58082 -> 127.0.0.1:58080 -> 127.0.0.1:33373][aa80c3b4-4bb6-4c78-bbd1-5b24883d7017] socks forwarding established\n2025-07-15 16:25:33.030 [info] [forwarding][code][127.0.0.1:58082 -> 127.0.0.1:33373][588813ae-6e64-458d-b1a5-35c8f9f3d47b] received connection request\n2025-07-15 16:25:33.030 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 5: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:25:33.055 [info] [forwarding][code][127.0.0.1:58082 -> 127.0.0.1:58080 -> 127.0.0.1:33373][588813ae-6e64-458d-b1a5-35c8f9f3d47b] socks forwarding established\n2025-07-15 16:25:33.150 [info] Saved platform linux for remote host login.haicore.berlin\n2025-07-15 16:25:35.919 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 33373, connect from 127.0.0.1 port 58087 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:25:35.919 [info] [forwarding][code][127.0.0.1:58082 -> 127.0.0.1:58080 -> 127.0.0.1:33373][d51b8696-8d77-437b-be38-92d9b9b96091] socks connection closed\n2025-07-15 16:26:32.872 [info] [remote-ssh] Pinging 
remote server on port 58083\n2025-07-15 16:26:32.874 [info] [command][359b5356-be39-4e9d-b5a9-30393cb990c0] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""359b5356-be39-4e9d-b5a9-30393cb990c0""}\n2025-07-15 16:26:32.876 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][ddb2bb0f-b04d-4873-9196-b3911b625268] received connection request\n2025-07-15 16:26:32.876 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:26:32.929 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ddb2bb0f-b04d-4873-9196-b3911b625268] socks forwarding established\n2025-07-15 16:26:32.971 [info] [command][359b5356-be39-4e9d-b5a9-30393cb990c0] Process exited with code 0\n2025-07-15 16:26:32.971 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ddb2bb0f-b04d-4873-9196-b3911b625268] socks connection closed\n2025-07-15 16:26:32.971 [info] [command][359b5356-be39-4e9d-b5a9-30393cb990c0] Socket close event received\n2025-07-15 16:26:33.078 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58183 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:27:32.974 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:27:32.977 [info] [command][0d3aefd2-8a22-4ef9-8d61-3f86c810952f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""0d3aefd2-8a22-4ef9-8d61-3f86c810952f""}\n2025-07-15 16:27:32.978 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][351b7906-af4e-4bd9-a9fe-9ec6f5f1d844] received connection request\n2025-07-15 16:27:32.978 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:27:33.046 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][351b7906-af4e-4bd9-a9fe-9ec6f5f1d844] socks forwarding established\n2025-07-15 16:27:33.205 [info] [command][0d3aefd2-8a22-4ef9-8d61-3f86c810952f] Process exited with code 0\n2025-07-15 16:27:33.206 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][351b7906-af4e-4bd9-a9fe-9ec6f5f1d844] socks connection closed\n2025-07-15 16:27:33.207 [info] [command][0d3aefd2-8a22-4ef9-8d61-3f86c810952f] Socket close event received\n2025-07-15 16:27:33.241 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58208 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:28:33.209 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:28:33.211 [info] [command][02fc6e92-c38b-46b0-972d-59dc9d75ee6e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""02fc6e92-c38b-46b0-972d-59dc9d75ee6e""}\n2025-07-15 16:28:33.212 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][cc1b0fd7-cf74-4ee7-b59e-ba04310b3360] received connection request\n2025-07-15 16:28:33.213 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip 
[dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:28:33.250 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][cc1b0fd7-cf74-4ee7-b59e-ba04310b3360] socks forwarding established\n2025-07-15 16:28:33.276 [info] [command][02fc6e92-c38b-46b0-972d-59dc9d75ee6e] Process exited with code 0\n2025-07-15 16:28:33.276 [info] [command][02fc6e92-c38b-46b0-972d-59dc9d75ee6e] Socket close event received\n2025-07-15 16:28:33.277 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][cc1b0fd7-cf74-4ee7-b59e-ba04310b3360] socks connection closed\n2025-07-15 16:28:33.302 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58229 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:29:33.281 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:29:33.282 [info] [command][7d9177d3-56dd-4c81-a59e-a3afc936d531] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""7d9177d3-56dd-4c81-a59e-a3afc936d531""}\n2025-07-15 16:29:33.282 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][22b33c17-df6c-42de-85b1-6ccd7035ffa1] received connection request\n2025-07-15 16:29:33.283 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:29:33.378 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][22b33c17-df6c-42de-85b1-6ccd7035ffa1] socks forwarding established\n2025-07-15 16:29:33.407 [info] [command][7d9177d3-56dd-4c81-a59e-a3afc936d531] Process exited with code 0\n2025-07-15 16:29:33.408 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][22b33c17-df6c-42de-85b1-6ccd7035ffa1] socks connection closed\n2025-07-15 16:29:33.408 [info] [command][7d9177d3-56dd-4c81-a59e-a3afc936d531] Socket close event received\n2025-07-15 16:29:33.554 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58269 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:30:33.413 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:30:33.415 [info] [command][55a29428-325f-4a0a-a032-99a1cfd7e844] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""55a29428-325f-4a0a-a032-99a1cfd7e844""}\n2025-07-15 16:30:33.416 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][9e7f679c-54cc-4eee-b399-5873688e0b50] received connection request\n2025-07-15 16:30:33.416 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:30:33.475 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][9e7f679c-54cc-4eee-b399-5873688e0b50] socks forwarding established\n2025-07-15 16:30:33.645 [info] [command][55a29428-325f-4a0a-a032-99a1cfd7e844] Process exited with code 0\n2025-07-15 16:30:33.645 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][9e7f679c-54cc-4eee-b399-5873688e0b50] socks connection closed\n2025-07-15 16:30:33.646 [info] [command][55a29428-325f-4a0a-a032-99a1cfd7e844] Socket close event 
received\n2025-07-15 16:30:33.829 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58302 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:31:33.648 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:31:33.651 [info] [command][78b779d3-ecef-497d-9af8-4c02c1ce8203] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""78b779d3-ecef-497d-9af8-4c02c1ce8203""}\n2025-07-15 16:31:33.652 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][a3160c3d-6f20-4f92-bda5-c03bf80fc071] received connection request\n2025-07-15 16:31:33.653 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:31:33.702 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][a3160c3d-6f20-4f92-bda5-c03bf80fc071] socks forwarding established\n2025-07-15 16:31:33.863 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][a3160c3d-6f20-4f92-bda5-c03bf80fc071] socks connection closed\n2025-07-15 16:31:33.863 [info] [command][78b779d3-ecef-497d-9af8-4c02c1ce8203] Process exited with code 0\n2025-07-15 16:31:33.864 [info] [command][78b779d3-ecef-497d-9af8-4c02c1ce8203] Socket close event received\n2025-07-15 16:31:33.889 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58365 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:32:33.869 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:32:33.871 [info] [command][fb6d0f40-39d6-45c7-8ac1-42b6e1fb12a1] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""fb6d0f40-39d6-45c7-8ac1-42b6e1fb12a1""}\n2025-07-15 16:32:33.871 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][f27a4281-daaf-4b3f-90e9-c475c69451ce] received connection request\n2025-07-15 16:32:33.872 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\n\n2025-07-15 16:32:33.872 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:32:33.899 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][f27a4281-daaf-4b3f-90e9-c475c69451ce] socks forwarding established\n2025-07-15 16:32:33.933 [info] [command][fb6d0f40-39d6-45c7-8ac1-42b6e1fb12a1] Process exited with code 0\n2025-07-15 16:32:33.933 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][f27a4281-daaf-4b3f-90e9-c475c69451ce] socks connection closed\n2025-07-15 16:32:33.934 [info] [command][fb6d0f40-39d6-45c7-8ac1-42b6e1fb12a1] Socket close event received\n2025-07-15 16:32:33.967 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58394 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:33:33.934 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:33:33.936 [info] [command][06c52913-a7fb-4be3-b77c-996e4f179635] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""06c52913-a7fb-4be3-b77c-996e4f179635""}\n2025-07-15 16:33:33.937 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][d37bea94-a829-48df-999e-ba29053c4383] received connection request\n2025-07-15 16:33:33.938 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\n\n2025-07-15 16:33:33.939 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:33:33.985 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][d37bea94-a829-48df-999e-ba29053c4383] socks forwarding established\n2025-07-15 16:33:34.014 [info] [command][06c52913-a7fb-4be3-b77c-996e4f179635] Process exited with code 0\n2025-07-15 16:33:34.014 [info] [command][06c52913-a7fb-4be3-b77c-996e4f179635] Socket close event received\n2025-07-15 16:33:34.018 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][d37bea94-a829-48df-999e-ba29053c4383] socks connection closed\n2025-07-15 16:33:34.143 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58426 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:34:34.020 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:34:34.022 [info] [command][a3eedbe3-8f09-40de-af3b-b0efa33fe2b4] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""a3eedbe3-8f09-40de-af3b-b0efa33fe2b4""}\n2025-07-15 16:34:34.023 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][a6394058-4227-451c-a239-7e0ac6388fbd] received connection request\n2025-07-15 16:34:34.023 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:34:34.049 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][a6394058-4227-451c-a239-7e0ac6388fbd] socks forwarding established\n2025-07-15 16:34:34.081 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][a6394058-4227-451c-a239-7e0ac6388fbd] socks connection closed\n2025-07-15 16:34:34.082 [info] [command][a3eedbe3-8f09-40de-af3b-b0efa33fe2b4] Process exited with code 0\n2025-07-15 16:34:34.082 [info] [command][a3eedbe3-8f09-40de-af3b-b0efa33fe2b4] Socket close event received\n2025-07-15 16:34:34.105 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58467 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:35:34.085 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:35:34.087 [info] [command][eafd68e8-52eb-447e-af13-47a5a474a0f9] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""eafd68e8-52eb-447e-af13-47a5a474a0f9""}\n2025-07-15 16:35:34.088 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][f107a4d4-892c-4009-9f41-c1cd7b71363d] received connection request\n2025-07-15 16:35:34.089 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:35:34.212 [info] 
[forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][f107a4d4-892c-4009-9f41-c1cd7b71363d] socks forwarding established\n2025-07-15 16:35:34.245 [info] [command][eafd68e8-52eb-447e-af13-47a5a474a0f9] Process exited with code 0\n2025-07-15 16:35:34.245 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][f107a4d4-892c-4009-9f41-c1cd7b71363d] socks connection closed\n2025-07-15 16:35:34.245 [info] [command][eafd68e8-52eb-447e-af13-47a5a474a0f9] Socket close event received\n2025-07-15 16:35:34.278 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58516 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:36:34.247 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:36:34.249 [info] [command][0ea44767-9381-4f45-8ce3-f0746dc4bc08] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""0ea44767-9381-4f45-8ce3-f0746dc4bc08""}\n2025-07-15 16:36:34.250 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][89902fa3-c796-4aae-824e-71d858079ab8] received connection request\n2025-07-15 16:36:34.251 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:36:34.363 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][89902fa3-c796-4aae-824e-71d858079ab8] socks forwarding established\n2025-07-15 16:36:34.392 [info] [command][0ea44767-9381-4f45-8ce3-f0746dc4bc08] Process exited with code 0\n2025-07-15 16:36:34.393 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][89902fa3-c796-4aae-824e-71d858079ab8] socks connection closed\n2025-07-15 16:36:34.393 [info] [command][0ea44767-9381-4f45-8ce3-f0746dc4bc08] Socket close event received\n2025-07-15 16:36:34.417 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58564 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:37:34.394 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:37:34.395 [info] [command][9bc13d98-a758-458c-ad8c-87225f11b462] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""9bc13d98-a758-458c-ad8c-87225f11b462""}\n2025-07-15 16:37:34.396 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][1c310c9c-d488-4ed9-9026-c73a8b9188f0] received connection request\n2025-07-15 16:37:34.396 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\n\n2025-07-15 16:37:34.396 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:37:34.424 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][1c310c9c-d488-4ed9-9026-c73a8b9188f0] socks forwarding established\n2025-07-15 16:37:34.449 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][1c310c9c-d488-4ed9-9026-c73a8b9188f0] socks connection closed\n2025-07-15 16:37:34.450 [info] [command][9bc13d98-a758-458c-ad8c-87225f11b462] Process exited with code 0\n2025-07-15 16:37:34.450 [info] [command][9bc13d98-a758-458c-ad8c-87225f11b462] Socket close event received\n2025-07-15 
16:37:34.472 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58603 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:38:34.451 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:38:34.453 [info] [command][2e692ed9-f404-4216-b262-67081747a07d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""2e692ed9-f404-4216-b262-67081747a07d""}\n2025-07-15 16:38:34.453 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][821a8dad-1278-45c6-a68d-03d86d0101f1] received connection request\n2025-07-15 16:38:34.453 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:38:34.482 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][821a8dad-1278-45c6-a68d-03d86d0101f1] socks forwarding established\n2025-07-15 16:38:34.509 [info] [command][2e692ed9-f404-4216-b262-67081747a07d] Process exited with code 0\n2025-07-15 16:38:34.509 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][821a8dad-1278-45c6-a68d-03d86d0101f1] socks connection closed\n2025-07-15 16:38:34.509 [info] [command][2e692ed9-f404-4216-b262-67081747a07d] Socket close event received\n2025-07-15 16:38:34.533 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58628 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:39:34.514 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:39:34.516 [info] [command][b1abfa48-6b1d-4e36-bed8-bed155b79624] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""b1abfa48-6b1d-4e36-bed8-bed155b79624""}\n2025-07-15 16:39:34.517 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][f00660dd-c993-4aae-8f07-9d367ea6a011] received connection request\n2025-07-15 16:39:34.517 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\n\n2025-07-15 16:39:34.517 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:39:34.624 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][f00660dd-c993-4aae-8f07-9d367ea6a011] socks forwarding established\n2025-07-15 16:39:34.685 [info] [command][b1abfa48-6b1d-4e36-bed8-bed155b79624] Process exited with code 0\n2025-07-15 16:39:34.685 [info] [command][b1abfa48-6b1d-4e36-bed8-bed155b79624] Socket close event received\n2025-07-15 16:39:34.685 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][f00660dd-c993-4aae-8f07-9d367ea6a011] socks connection closed\n2025-07-15 16:39:34.842 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58663 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:40:34.687 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:40:34.689 [info] [command][56c318f8-3a76-4ed2-a361-50aa865e0181] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""56c318f8-3a76-4ed2-a361-50aa865e0181""}\n2025-07-15 
16:40:34.689 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][5f83ff0e-aa4f-4506-9510-30163fe8de82] received connection request\n2025-07-15 16:40:34.689 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\n\n2025-07-15 16:40:34.689 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:40:34.723 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][5f83ff0e-aa4f-4506-9510-30163fe8de82] socks forwarding established\n2025-07-15 16:40:34.749 [info] [command][56c318f8-3a76-4ed2-a361-50aa865e0181] Process exited with code 0\n2025-07-15 16:40:34.749 [info] [command][56c318f8-3a76-4ed2-a361-50aa865e0181] Socket close event received\n2025-07-15 16:40:34.749 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][5f83ff0e-aa4f-4506-9510-30163fe8de82] socks connection closed\n2025-07-15 16:40:34.776 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58707 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:41:34.754 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:41:34.756 [info] [command][443863c7-77c8-4ae1-a492-48df0f0aae07] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""443863c7-77c8-4ae1-a492-48df0f0aae07""}\n2025-07-15 16:41:34.757 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][79e93f4f-4ebc-4d6e-b6a1-2861f2e40e52] received connection request\n2025-07-15 16:41:34.757 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\n\n2025-07-15 16:41:34.757 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:41:34.783 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][79e93f4f-4ebc-4d6e-b6a1-2861f2e40e52] socks forwarding established\n2025-07-15 16:41:34.815 [info] [command][443863c7-77c8-4ae1-a492-48df0f0aae07] Process exited with code 0\n2025-07-15 16:41:34.815 [info] [command][443863c7-77c8-4ae1-a492-48df0f0aae07] Socket close event received\n2025-07-15 16:41:34.842 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][79e93f4f-4ebc-4d6e-b6a1-2861f2e40e52] socks connection closed\n2025-07-15 16:41:34.844 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58753 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:42:34.821 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:42:34.823 [info] [command][62e14d4d-e659-4c52-804c-9c43a0d1a954] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""62e14d4d-e659-4c52-804c-9c43a0d1a954""}\n2025-07-15 16:42:34.823 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][ad3a0982-cbf1-40af-8224-b82ec707e37e] received connection request\n2025-07-15 16:42:34.824 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:42:34.863 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ad3a0982-cbf1-40af-8224-b82ec707e37e] 
socks forwarding established\n2025-07-15 16:42:34.921 [info] [command][62e14d4d-e659-4c52-804c-9c43a0d1a954] Process exited with code 0\n2025-07-15 16:42:34.922 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ad3a0982-cbf1-40af-8224-b82ec707e37e] socks connection closed\n2025-07-15 16:42:34.922 [info] [command][62e14d4d-e659-4c52-804c-9c43a0d1a954] Socket close event received\n2025-07-15 16:42:35.079 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58789 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:43:34.925 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:43:34.927 [info] [command][5c855aa5-d80d-4148-9b0b-dbf5b443f5c9] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""5c855aa5-d80d-4148-9b0b-dbf5b443f5c9""}\n2025-07-15 16:43:34.928 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][1686843a-fc59-42d1-bb24-b3754561ca91] received connection request\n2025-07-15 16:43:34.928 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:43:34.956 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][1686843a-fc59-42d1-bb24-b3754561ca91] socks forwarding established\n2025-07-15 16:43:34.983 [info] [command][5c855aa5-d80d-4148-9b0b-dbf5b443f5c9] Process exited with code 0\n2025-07-15 16:43:34.984 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][1686843a-fc59-42d1-bb24-b3754561ca91] socks connection closed\n2025-07-15 16:43:34.984 [info] [command][5c855aa5-d80d-4148-9b0b-dbf5b443f5c9] Socket close event received\n2025-07-15 16:43:35.007 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58814 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:44:34.988 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:44:34.990 [info] [command][2f5c557c-f60b-4726-8279-bac3965c1c61] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""2f5c557c-f60b-4726-8279-bac3965c1c61""}\n2025-07-15 16:44:34.991 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][747a934a-27b4-4019-b063-d51eb87df0e0] received connection request\n2025-07-15 16:44:34.992 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:44:35.026 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][747a934a-27b4-4019-b063-d51eb87df0e0] socks forwarding established\n2025-07-15 16:44:35.065 [info] [command][2f5c557c-f60b-4726-8279-bac3965c1c61] Process exited with code 0\n2025-07-15 16:44:35.065 [info] [command][2f5c557c-f60b-4726-8279-bac3965c1c61] Socket close event received\n2025-07-15 16:44:35.068 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][747a934a-27b4-4019-b063-d51eb87df0e0] socks connection closed\n2025-07-15 16:44:35.088 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58851 to 127.0.0.1 
port 58080, nchannels 6\n\n2025-07-15 16:45:35.066 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:45:35.068 [info] [command][c251e208-288a-4e30-80ba-7002ac255568] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""c251e208-288a-4e30-80ba-7002ac255568""}\n2025-07-15 16:45:35.069 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][8690adc6-b4d7-49b5-9121-db8e66f29e64] received connection request\n2025-07-15 16:45:35.070 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:45:35.139 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][8690adc6-b4d7-49b5-9121-db8e66f29e64] socks forwarding established\n2025-07-15 16:45:35.251 [info] [command][c251e208-288a-4e30-80ba-7002ac255568] Process exited with code 0\n2025-07-15 16:45:35.252 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][8690adc6-b4d7-49b5-9121-db8e66f29e64] socks connection closed\n2025-07-15 16:45:35.252 [info] [command][c251e208-288a-4e30-80ba-7002ac255568] Socket close event received\n2025-07-15 16:45:35.276 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58902 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:46:35.256 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:46:35.257 [info] [command][df355300-9b38-4f4c-8432-25fa44d97d64] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""df355300-9b38-4f4c-8432-25fa44d97d64""}\n2025-07-15 16:46:35.258 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][4bd791b3-5ff9-4108-8bf6-de5e97c54976] received connection request\n2025-07-15 16:46:35.258 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:46:35.330 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][4bd791b3-5ff9-4108-8bf6-de5e97c54976] socks forwarding established\n2025-07-15 16:46:35.491 [info] [command][df355300-9b38-4f4c-8432-25fa44d97d64] Process exited with code 0\n2025-07-15 16:46:35.492 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][4bd791b3-5ff9-4108-8bf6-de5e97c54976] socks connection closed\n2025-07-15 16:46:35.492 [info] [command][df355300-9b38-4f4c-8432-25fa44d97d64] Socket close event received\n2025-07-15 16:46:35.520 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 58967 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:47:35.497 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:47:35.500 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][8d82e996-2bfd-452e-b60c-cc7295c3a993] received connection request\n2025-07-15 16:47:35.500 [info] [command][c79700e5-5f62-4687-91c4-6e1e098a2739] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""c79700e5-5f62-4687-91c4-6e1e098a2739""}\n2025-07-15 16:47:35.500 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 
forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:47:35.618 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][8d82e996-2bfd-452e-b60c-cc7295c3a993] socks forwarding established\n2025-07-15 16:47:35.686 [info] [command][c79700e5-5f62-4687-91c4-6e1e098a2739] Process exited with code 0\n2025-07-15 16:47:35.686 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][8d82e996-2bfd-452e-b60c-cc7295c3a993] socks connection closed\n2025-07-15 16:47:35.687 [info] [command][c79700e5-5f62-4687-91c4-6e1e098a2739] Socket close event received\n2025-07-15 16:47:35.724 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59005 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:48:35.688 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:48:35.690 [info] [command][17e7aff4-5da0-4c6c-bc95-fdad47041291] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""17e7aff4-5da0-4c6c-bc95-fdad47041291""}\n2025-07-15 16:48:35.690 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][25ad7b62-85f6-4cc6-b1aa-a493ebcad6ac] received connection request\n2025-07-15 16:48:35.691 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:48:35.731 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][25ad7b62-85f6-4cc6-b1aa-a493ebcad6ac] socks forwarding established\n2025-07-15 16:48:35.787 [info] [command][17e7aff4-5da0-4c6c-bc95-fdad47041291] Process exited with code 0\n2025-07-15 16:48:35.787 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][25ad7b62-85f6-4cc6-b1aa-a493ebcad6ac] socks connection closed\n2025-07-15 16:48:35.787 [info] [command][17e7aff4-5da0-4c6c-bc95-fdad47041291] Socket close event received\n2025-07-15 16:48:35.815 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59035 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:49:35.788 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:49:35.789 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][8c6d0ba2-ea23-4f3d-8479-2e69603eafe8] received connection request\n2025-07-15 16:49:35.789 [info] [command][ade612a4-4428-456c-b719-2d8853a1ab35] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""ade612a4-4428-456c-b719-2d8853a1ab35""}\n2025-07-15 16:49:35.790 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:49:35.818 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][8c6d0ba2-ea23-4f3d-8479-2e69603eafe8] socks forwarding established\n2025-07-15 16:49:35.868 [info] [command][ade612a4-4428-456c-b719-2d8853a1ab35] Process exited with code 0\n2025-07-15 16:49:35.868 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][8c6d0ba2-ea23-4f3d-8479-2e69603eafe8] socks connection closed\n2025-07-15 16:49:35.868 [info] 
[command][ade612a4-4428-456c-b719-2d8853a1ab35] Socket close event received\n2025-07-15 16:49:35.892 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59091 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:50:35.870 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:50:35.872 [info] [command][2bd2cd02-55cc-4510-b6d4-d6509fdc227b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""2bd2cd02-55cc-4510-b6d4-d6509fdc227b""}\n2025-07-15 16:50:35.873 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][8e787f7f-72ad-4ff2-90fc-9a53edbc20f4] received connection request\n2025-07-15 16:50:35.874 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:50:35.899 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][8e787f7f-72ad-4ff2-90fc-9a53edbc20f4] socks forwarding established\n2025-07-15 16:50:35.931 [info] [command][2bd2cd02-55cc-4510-b6d4-d6509fdc227b] Process exited with code 0\n2025-07-15 16:50:35.932 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][8e787f7f-72ad-4ff2-90fc-9a53edbc20f4] socks connection closed\n2025-07-15 16:50:35.934 [info] [command][2bd2cd02-55cc-4510-b6d4-d6509fdc227b] Socket close event received\n2025-07-15 16:50:35.959 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59136 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:51:35.935 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:51:35.938 [info] [command][6cffe04a-839e-4aa1-957b-2a65ea3e0663] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""6cffe04a-839e-4aa1-957b-2a65ea3e0663""}\n2025-07-15 16:51:35.938 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][8ec6aa7b-d475-4b7b-bf04-d1b584035ab1] received connection request\n2025-07-15 16:51:35.938 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\n\n2025-07-15 16:51:35.938 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:51:35.964 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][8ec6aa7b-d475-4b7b-bf04-d1b584035ab1] socks forwarding established\n2025-07-15 16:51:35.990 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][8ec6aa7b-d475-4b7b-bf04-d1b584035ab1] socks connection closed\n2025-07-15 16:51:35.990 [info] [command][6cffe04a-839e-4aa1-957b-2a65ea3e0663] Process exited with code 0\n2025-07-15 16:51:35.990 [info] [command][6cffe04a-839e-4aa1-957b-2a65ea3e0663] Socket close event received\n2025-07-15 16:51:36.014 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59183 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:52:35.994 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:52:35.997 [info] [command][3250eb48-9cdd-4484-bdb8-aa495cd38e9c] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""3250eb48-9cdd-4484-bdb8-aa495cd38e9c""}\n2025-07-15 16:52:35.997 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][e56b8781-0f6c-43c7-869c-a441e8f64e7e] received connection request\n2025-07-15 16:52:35.997 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\n\n2025-07-15 16:52:35.997 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:52:36.027 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][e56b8781-0f6c-43c7-869c-a441e8f64e7e] socks forwarding established\n2025-07-15 16:52:36.071 [info] [command][3250eb48-9cdd-4484-bdb8-aa495cd38e9c] Process exited with code 0\n2025-07-15 16:52:36.071 [info] [command][3250eb48-9cdd-4484-bdb8-aa495cd38e9c] Socket close event received\n2025-07-15 16:52:36.072 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][e56b8781-0f6c-43c7-869c-a441e8f64e7e] socks connection closed\n2025-07-15 16:52:36.110 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59232 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:53:36.077 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:53:36.079 [info] [command][96322d48-7358-47c9-9dec-b36e029199cb] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""96322d48-7358-47c9-9dec-b36e029199cb""}\n2025-07-15 16:53:36.080 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][d700e43c-ac07-422d-babd-f040e077d1d3] received connection request\n2025-07-15 16:53:36.080 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:53:36.107 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][d700e43c-ac07-422d-babd-f040e077d1d3] socks forwarding established\n2025-07-15 16:53:36.135 [info] [command][96322d48-7358-47c9-9dec-b36e029199cb] Process exited with code 0\n2025-07-15 16:53:36.135 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][d700e43c-ac07-422d-babd-f040e077d1d3] socks connection closed\n2025-07-15 16:53:36.135 [info] [command][96322d48-7358-47c9-9dec-b36e029199cb] Socket close event received\n2025-07-15 16:53:36.244 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59293 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:54:36.140 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:54:36.142 [info] [command][96c0cea2-4344-4123-b710-daf658aae92c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""96c0cea2-4344-4123-b710-daf658aae92c""}\n2025-07-15 16:54:36.143 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][1c9ec2a1-50d4-46bb-ad9e-e01f6331f2ea] received connection request\n2025-07-15 16:54:36.143 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:54:36.177 [info] 
[forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][1c9ec2a1-50d4-46bb-ad9e-e01f6331f2ea] socks forwarding established\n2025-07-15 16:54:36.205 [info] [command][96c0cea2-4344-4123-b710-daf658aae92c] Process exited with code 0\n2025-07-15 16:54:36.205 [info] [command][96c0cea2-4344-4123-b710-daf658aae92c] Socket close event received\n2025-07-15 16:54:36.205 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][1c9ec2a1-50d4-46bb-ad9e-e01f6331f2ea] socks connection closed\n2025-07-15 16:54:36.233 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59344 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:55:36.211 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:55:36.213 [info] [command][fa6118dc-221c-43a6-9d92-355a2027b219] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""fa6118dc-221c-43a6-9d92-355a2027b219""}\n2025-07-15 16:55:36.214 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][59c5b294-d653-4d0e-bec3-92cccb5e218c] received connection request\n2025-07-15 16:55:36.214 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:55:36.240 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][59c5b294-d653-4d0e-bec3-92cccb5e218c] socks forwarding established\n2025-07-15 16:55:36.268 [info] [command][fa6118dc-221c-43a6-9d92-355a2027b219] Process exited with code 0\n2025-07-15 16:55:36.268 [info] [command][fa6118dc-221c-43a6-9d92-355a2027b219] Socket close event received\n2025-07-15 16:55:36.269 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][59c5b294-d653-4d0e-bec3-92cccb5e218c] socks connection closed\n2025-07-15 16:55:36.293 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59383 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:56:36.270 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:56:36.272 [info] [command][d78d7e4d-efc4-44b9-baaf-cf07ab7d14fd] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""d78d7e4d-efc4-44b9-baaf-cf07ab7d14fd""}\n2025-07-15 16:56:36.273 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][09d3a371-fa6a-439b-a08b-a04df86166c7] received connection request\n2025-07-15 16:56:36.274 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:56:36.364 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][09d3a371-fa6a-439b-a08b-a04df86166c7] socks forwarding established\n2025-07-15 16:56:36.391 [info] [command][d78d7e4d-efc4-44b9-baaf-cf07ab7d14fd] Process exited with code 0\n2025-07-15 16:56:36.391 [info] [command][d78d7e4d-efc4-44b9-baaf-cf07ab7d14fd] Socket close event received\n2025-07-15 16:56:36.393 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][09d3a371-fa6a-439b-a08b-a04df86166c7] socks connection closed\n2025-07-15 16:56:36.420 [info] (ssh_tunnel) stderr: debug1: channel 4: 
free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59445 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:57:36.396 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:57:36.398 [info] [command][fac65925-9bf1-4e9f-b78e-3f17284423fd] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""fac65925-9bf1-4e9f-b78e-3f17284423fd""}\n2025-07-15 16:57:36.398 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][aefb99c2-df0f-431c-91de-e6ef11939eba] received connection request\n2025-07-15 16:57:36.399 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:57:36.423 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][aefb99c2-df0f-431c-91de-e6ef11939eba] socks forwarding established\n2025-07-15 16:57:36.450 [info] [command][fac65925-9bf1-4e9f-b78e-3f17284423fd] Process exited with code 0\n2025-07-15 16:57:36.450 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][aefb99c2-df0f-431c-91de-e6ef11939eba] socks connection closed\n2025-07-15 16:57:36.450 [info] [command][fac65925-9bf1-4e9f-b78e-3f17284423fd] Socket close event received\n2025-07-15 16:57:36.474 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59488 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:58:36.455 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:58:36.457 [info] [command][7ab881ba-2384-4589-bc31-3ecffde0756a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""7ab881ba-2384-4589-bc31-3ecffde0756a""}\n2025-07-15 16:58:36.457 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][ebd719ab-52fd-4b4e-9270-74afb0e5cb6d] received connection request\n2025-07-15 16:58:36.458 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\n\n2025-07-15 16:58:36.458 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:58:36.485 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ebd719ab-52fd-4b4e-9270-74afb0e5cb6d] socks forwarding established\n2025-07-15 16:58:36.514 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ebd719ab-52fd-4b4e-9270-74afb0e5cb6d] socks connection closed\n2025-07-15 16:58:36.514 [info] [command][7ab881ba-2384-4589-bc31-3ecffde0756a] Process exited with code 0\n2025-07-15 16:58:36.514 [info] [command][7ab881ba-2384-4589-bc31-3ecffde0756a] Socket close event received\n2025-07-15 16:58:36.538 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59545 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 16:59:36.516 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 16:59:36.518 [info] [command][0e5f51b2-5bce-476c-98c7-4af3a7526749] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""0e5f51b2-5bce-476c-98c7-4af3a7526749""}\n2025-07-15 16:59:36.518 [info] [forwarding][multiplex][127.0.0.1:58083 -> 
127.0.0.1:37279][7e903845-7060-403f-8d22-741566bf73fd] received connection request\n2025-07-15 16:59:36.518 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\n\n2025-07-15 16:59:36.518 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 16:59:36.541 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][7e903845-7060-403f-8d22-741566bf73fd] socks forwarding established\n2025-07-15 16:59:36.570 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][7e903845-7060-403f-8d22-741566bf73fd] socks connection closed\n2025-07-15 16:59:36.571 [info] [command][0e5f51b2-5bce-476c-98c7-4af3a7526749] Process exited with code 0\n2025-07-15 16:59:36.571 [info] [command][0e5f51b2-5bce-476c-98c7-4af3a7526749] Socket close event received\n2025-07-15 16:59:36.594 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59597 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:00:36.576 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:00:36.578 [info] [command][5ca68d99-725a-45e2-8dca-ca4eca238364] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""5ca68d99-725a-45e2-8dca-ca4eca238364""}\n2025-07-15 17:00:36.578 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][1634a743-eb94-4f12-8b61-fe30c2485830] received connection request\n2025-07-15 17:00:36.579 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:00:36.606 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][1634a743-eb94-4f12-8b61-fe30c2485830] socks forwarding established\n2025-07-15 17:00:36.633 [info] [command][5ca68d99-725a-45e2-8dca-ca4eca238364] Process exited with code 0\n2025-07-15 17:00:36.633 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][1634a743-eb94-4f12-8b61-fe30c2485830] socks connection closed\n2025-07-15 17:00:36.633 [info] [command][5ca68d99-725a-45e2-8dca-ca4eca238364] Socket close event received\n2025-07-15 17:00:36.657 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59648 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:01:36.634 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:01:36.636 [info] [command][85c390c5-ec2a-44e1-aad3-571e665fb657] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""85c390c5-ec2a-44e1-aad3-571e665fb657""}\n2025-07-15 17:01:36.636 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][b6470df1-e10a-42d0-99cd-e28a656fe1ce] received connection request\n2025-07-15 17:01:36.636 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:01:36.660 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][b6470df1-e10a-42d0-99cd-e28a656fe1ce] socks forwarding established\n2025-07-15 17:01:36.686 [info] [forwarding][multiplex][127.0.0.1:58083 -> 
127.0.0.1:58080 -> 127.0.0.1:37279][b6470df1-e10a-42d0-99cd-e28a656fe1ce] socks connection closed\n2025-07-15 17:01:36.686 [info] [command][85c390c5-ec2a-44e1-aad3-571e665fb657] Process exited with code 0\n2025-07-15 17:01:36.686 [info] [command][85c390c5-ec2a-44e1-aad3-571e665fb657] Socket close event received\n2025-07-15 17:01:36.711 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59712 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:02:36.690 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:02:36.692 [info] [command][028bc1d7-a053-4a6f-a8d2-03e3be5a0562] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""028bc1d7-a053-4a6f-a8d2-03e3be5a0562""}\n2025-07-15 17:02:36.692 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][97419984-10c4-441f-ae3f-ac49d38d6cc2] received connection request\n2025-07-15 17:02:36.693 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:02:36.717 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][97419984-10c4-441f-ae3f-ac49d38d6cc2] socks forwarding established\n2025-07-15 17:02:36.750 [info] [command][028bc1d7-a053-4a6f-a8d2-03e3be5a0562] Process exited with code 0\n2025-07-15 17:02:36.751 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][97419984-10c4-441f-ae3f-ac49d38d6cc2] socks connection closed\n2025-07-15 17:02:36.751 [info] [command][028bc1d7-a053-4a6f-a8d2-03e3be5a0562] Socket close event received\n2025-07-15 17:02:36.774 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59775 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:03:36.751 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:03:36.754 [info] [command][6ae3df07-f22e-49e5-8806-6cacd3f73799] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""6ae3df07-f22e-49e5-8806-6cacd3f73799""}\n2025-07-15 17:03:36.755 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][0ebb1641-52aa-45b6-915e-678743a10b7c] received connection request\n2025-07-15 17:03:36.755 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:03:36.854 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][0ebb1641-52aa-45b6-915e-678743a10b7c] socks forwarding established\n2025-07-15 17:03:36.963 [info] [command][6ae3df07-f22e-49e5-8806-6cacd3f73799] Process exited with code 0\n2025-07-15 17:03:36.964 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][0ebb1641-52aa-45b6-915e-678743a10b7c] socks connection closed\n2025-07-15 17:03:36.964 [info] [command][6ae3df07-f22e-49e5-8806-6cacd3f73799] Socket close event received\n2025-07-15 17:03:36.993 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59810 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:04:36.965 [info] [remote-ssh] Pinging remote server on port 
58083\n2025-07-15 17:04:36.968 [info] [command][5214c74c-b518-4118-94a3-cd2b4e5da8f0] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""5214c74c-b518-4118-94a3-cd2b4e5da8f0""}\n2025-07-15 17:04:36.968 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][d6c03ecf-26bb-4bec-9cb2-658ff98751e3] received connection request\n2025-07-15 17:04:36.969 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:04:36.993 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][d6c03ecf-26bb-4bec-9cb2-658ff98751e3] socks forwarding established\n2025-07-15 17:04:37.019 [info] [command][5214c74c-b518-4118-94a3-cd2b4e5da8f0] Process exited with code 0\n2025-07-15 17:04:37.019 [info] [command][5214c74c-b518-4118-94a3-cd2b4e5da8f0] Socket close event received\n2025-07-15 17:04:37.020 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][d6c03ecf-26bb-4bec-9cb2-658ff98751e3] socks connection closed\n2025-07-15 17:04:37.047 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59849 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:05:37.022 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:05:37.025 [info] [command][a61c1f32-48c6-4d89-add8-f0f78386075f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""a61c1f32-48c6-4d89-add8-f0f78386075f""}\n2025-07-15 17:05:37.026 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][fe3ae214-7695-4613-94d7-c9902db87e19] received connection request\n2025-07-15 17:05:37.027 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:05:37.055 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][fe3ae214-7695-4613-94d7-c9902db87e19] socks forwarding established\n2025-07-15 17:05:37.081 [info] [command][a61c1f32-48c6-4d89-add8-f0f78386075f] Process exited with code 0\n2025-07-15 17:05:37.081 [info] [command][a61c1f32-48c6-4d89-add8-f0f78386075f] Socket close event received\n2025-07-15 17:05:37.081 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][fe3ae214-7695-4613-94d7-c9902db87e19] socks connection closed\n2025-07-15 17:05:37.104 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59898 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:06:37.087 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:06:37.089 [info] [command][4128371d-5237-4d69-9997-5d2688f509ef] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""4128371d-5237-4d69-9997-5d2688f509ef""}\n2025-07-15 17:06:37.090 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][f3dc60ef-22db-43e6-aa46-c9945eeddd6d] received connection request\n2025-07-15 17:06:37.091 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive 
timeout: 0)\n\n2025-07-15 17:06:37.116 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][f3dc60ef-22db-43e6-aa46-c9945eeddd6d] socks forwarding established\n2025-07-15 17:06:37.143 [info] [command][4128371d-5237-4d69-9997-5d2688f509ef] Process exited with code 0\n2025-07-15 17:06:37.144 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][f3dc60ef-22db-43e6-aa46-c9945eeddd6d] socks connection closed\n2025-07-15 17:06:37.144 [info] [command][4128371d-5237-4d69-9997-5d2688f509ef] Socket close event received\n2025-07-15 17:06:37.168 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59937 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:07:37.149 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:07:37.151 [info] [command][ff911148-c57a-4d8c-b9c1-d0cd7809e227] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""ff911148-c57a-4d8c-b9c1-d0cd7809e227""}\n2025-07-15 17:07:37.152 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][06816e1b-a0b1-4938-854d-9ebe5e32ecae] received connection request\n2025-07-15 17:07:37.153 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:07:37.177 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][06816e1b-a0b1-4938-854d-9ebe5e32ecae] socks forwarding established\n2025-07-15 17:07:37.205 [info] [command][ff911148-c57a-4d8c-b9c1-d0cd7809e227] Process exited with code 0\n2025-07-15 17:07:37.205 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][06816e1b-a0b1-4938-854d-9ebe5e32ecae] socks connection closed\n2025-07-15 17:07:37.206 [info] [command][ff911148-c57a-4d8c-b9c1-d0cd7809e227] Socket close event received\n2025-07-15 17:07:37.229 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59960 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:08:37.210 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:08:37.211 [info] [command][e05dd0f9-812e-4729-9893-f870cd17bcae] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""e05dd0f9-812e-4729-9893-f870cd17bcae""}\n2025-07-15 17:08:37.211 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][6fa82d6b-b9f6-4792-a386-4896c841d22b] received connection request\n2025-07-15 17:08:37.212 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\n\n2025-07-15 17:08:37.212 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:08:37.235 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][6fa82d6b-b9f6-4792-a386-4896c841d22b] socks forwarding established\n2025-07-15 17:08:37.261 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][6fa82d6b-b9f6-4792-a386-4896c841d22b] socks connection closed\n2025-07-15 17:08:37.261 [info] [command][e05dd0f9-812e-4729-9893-f870cd17bcae] Process exited with code 0\n2025-07-15 17:08:37.261 [info] [command][e05dd0f9-812e-4729-9893-f870cd17bcae] 
Socket close event received\n2025-07-15 17:08:37.285 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 59985 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:09:37.266 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:09:37.270 [info] [command][9bd6da24-e30d-4fb9-b32e-8e818cfbc4cf] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""9bd6da24-e30d-4fb9-b32e-8e818cfbc4cf""}\n2025-07-15 17:09:37.271 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][aae96b82-643b-48f5-9058-197b56931b52] received connection request\n2025-07-15 17:09:37.271 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:09:37.298 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][aae96b82-643b-48f5-9058-197b56931b52] socks forwarding established\n2025-07-15 17:09:37.325 [info] [command][9bd6da24-e30d-4fb9-b32e-8e818cfbc4cf] Process exited with code 0\n2025-07-15 17:09:37.326 [info] [command][9bd6da24-e30d-4fb9-b32e-8e818cfbc4cf] Socket close event received\n2025-07-15 17:09:37.329 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][aae96b82-643b-48f5-9058-197b56931b52] socks connection closed\n2025-07-15 17:09:37.359 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60022 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:10:37.330 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:10:37.332 [info] [command][0ebd2122-3833-4282-b379-09414f3d806d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""0ebd2122-3833-4282-b379-09414f3d806d""}\n2025-07-15 17:10:37.332 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][a340c8ac-632c-46d6-a33a-fbfad9dcfc55] received connection request\n2025-07-15 17:10:37.332 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\n\n2025-07-15 17:10:37.332 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:10:37.356 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][a340c8ac-632c-46d6-a33a-fbfad9dcfc55] socks forwarding established\n2025-07-15 17:10:37.382 [info] [command][0ebd2122-3833-4282-b379-09414f3d806d] Process exited with code 0\n2025-07-15 17:10:37.382 [info] [command][0ebd2122-3833-4282-b379-09414f3d806d] Socket close event received\n2025-07-15 17:10:37.382 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][a340c8ac-632c-46d6-a33a-fbfad9dcfc55] socks connection closed\n2025-07-15 17:10:37.406 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60066 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:11:37.384 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:11:37.386 [info] [command][21a4c17b-12a5-49e3-973a-96222cc9ed4c] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""21a4c17b-12a5-49e3-973a-96222cc9ed4c""}\n2025-07-15 17:11:37.387 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][c5fd0f6c-a1e1-48c1-90c2-2f1311d40f23] received connection request\n2025-07-15 17:11:37.387 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:11:37.412 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][c5fd0f6c-a1e1-48c1-90c2-2f1311d40f23] socks forwarding established\n2025-07-15 17:11:37.439 [info] [command][21a4c17b-12a5-49e3-973a-96222cc9ed4c] Process exited with code 0\n2025-07-15 17:11:37.440 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][c5fd0f6c-a1e1-48c1-90c2-2f1311d40f23] socks connection closed\n2025-07-15 17:11:37.440 [info] [command][21a4c17b-12a5-49e3-973a-96222cc9ed4c] Socket close event received\n2025-07-15 17:11:37.463 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60127 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:12:37.444 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:12:37.446 [info] [command][a16fe95e-23a6-44eb-a9a8-074dcba1bf43] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""a16fe95e-23a6-44eb-a9a8-074dcba1bf43""}\n2025-07-15 17:12:37.447 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][95cd8d7b-f3c5-4c69-8a2b-4f746a4710d8] received connection request\n2025-07-15 17:12:37.448 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:12:37.492 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][95cd8d7b-f3c5-4c69-8a2b-4f746a4710d8] socks forwarding established\n2025-07-15 17:12:37.609 [info] [command][a16fe95e-23a6-44eb-a9a8-074dcba1bf43] Process exited with code 0\n2025-07-15 17:12:37.609 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][95cd8d7b-f3c5-4c69-8a2b-4f746a4710d8] socks connection closed\n2025-07-15 17:12:37.609 [info] [command][a16fe95e-23a6-44eb-a9a8-074dcba1bf43] Socket close event received\n2025-07-15 17:12:37.633 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60158 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:13:37.614 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:13:37.617 [info] [command][d299a7ed-1456-4801-a602-29c5fba78411] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""d299a7ed-1456-4801-a602-29c5fba78411""}\n2025-07-15 17:13:37.618 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][2554db49-2667-4432-96f3-5e69e4903dfc] received connection request\n2025-07-15 17:13:37.618 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:13:37.643 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 
127.0.0.1:37279][2554db49-2667-4432-96f3-5e69e4903dfc] socks forwarding established\n2025-07-15 17:13:37.670 [info] [command][d299a7ed-1456-4801-a602-29c5fba78411] Process exited with code 0\n2025-07-15 17:13:37.670 [info] [command][d299a7ed-1456-4801-a602-29c5fba78411] Socket close event received\n2025-07-15 17:13:37.671 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][2554db49-2667-4432-96f3-5e69e4903dfc] socks connection closed\n2025-07-15 17:13:37.695 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60190 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:14:37.672 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:14:37.675 [info] [command][b6f66f6a-5c1a-4541-89c3-c688d7bc58f7] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""b6f66f6a-5c1a-4541-89c3-c688d7bc58f7""}\n2025-07-15 17:14:37.675 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][0c9a7a96-5563-4934-87c2-e832c9e5db5a] received connection request\n2025-07-15 17:14:37.676 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:14:37.782 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][0c9a7a96-5563-4934-87c2-e832c9e5db5a] socks forwarding established\n2025-07-15 17:14:37.810 [info] [command][b6f66f6a-5c1a-4541-89c3-c688d7bc58f7] Process exited with code 0\n2025-07-15 17:14:37.810 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][0c9a7a96-5563-4934-87c2-e832c9e5db5a] socks connection closed\n2025-07-15 17:14:37.810 [info] [command][b6f66f6a-5c1a-4541-89c3-c688d7bc58f7] Socket close event received\n2025-07-15 17:14:37.906 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60239 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:15:37.811 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:15:37.813 [info] [command][30234a6f-8c66-4dbf-9c8a-cfefdb6b6c8c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""30234a6f-8c66-4dbf-9c8a-cfefdb6b6c8c""}\n2025-07-15 17:15:37.814 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][44d8455c-8ab5-4b6d-b4ce-dfca813438ac] received connection request\n2025-07-15 17:15:37.815 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:15:37.842 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][44d8455c-8ab5-4b6d-b4ce-dfca813438ac] socks forwarding established\n2025-07-15 17:15:37.870 [info] [command][30234a6f-8c66-4dbf-9c8a-cfefdb6b6c8c] Process exited with code 0\n2025-07-15 17:15:37.871 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][44d8455c-8ab5-4b6d-b4ce-dfca813438ac] socks connection closed\n2025-07-15 17:15:37.871 [info] [command][30234a6f-8c66-4dbf-9c8a-cfefdb6b6c8c] Socket close event received\n2025-07-15 17:15:37.895 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 
37279, connect from 127.0.0.1 port 60284 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:16:37.871 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:16:37.873 [info] [command][ce615636-a104-4842-803a-e20d628016ce] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""ce615636-a104-4842-803a-e20d628016ce""}\n2025-07-15 17:16:37.873 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][86512f38-c69c-4fec-9033-5c52a02dc29d] received connection request\n2025-07-15 17:16:37.874 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:16:37.898 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][86512f38-c69c-4fec-9033-5c52a02dc29d] socks forwarding established\n2025-07-15 17:16:37.925 [info] [command][ce615636-a104-4842-803a-e20d628016ce] Process exited with code 0\n2025-07-15 17:16:37.925 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][86512f38-c69c-4fec-9033-5c52a02dc29d] socks connection closed\n2025-07-15 17:16:37.925 [info] [command][ce615636-a104-4842-803a-e20d628016ce] Socket close event received\n2025-07-15 17:16:37.951 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60322 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:17:37.930 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:17:37.932 [info] [command][5c330a6d-8fc3-4598-ae37-082602e3b688] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""5c330a6d-8fc3-4598-ae37-082602e3b688""}\n2025-07-15 17:17:37.933 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][1beaf19f-24d7-41a6-8c74-f6d722631e08] received connection request\n2025-07-15 17:17:37.933 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:17:38.003 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][1beaf19f-24d7-41a6-8c74-f6d722631e08] socks forwarding established\n2025-07-15 17:17:38.037 [info] [command][5c330a6d-8fc3-4598-ae37-082602e3b688] Process exited with code 0\n2025-07-15 17:17:38.037 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][1beaf19f-24d7-41a6-8c74-f6d722631e08] socks connection closed\n2025-07-15 17:17:38.037 [info] [command][5c330a6d-8fc3-4598-ae37-082602e3b688] Socket close event received\n2025-07-15 17:17:38.066 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60342 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:18:38.043 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:18:38.044 [info] [command][73d62b6a-149f-406f-9b66-d38602e8374e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""73d62b6a-149f-406f-9b66-d38602e8374e""}\n2025-07-15 17:18:38.045 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][339da165-1582-4efe-9188-facdb50f66c9] received connection request\n2025-07-15 17:18:38.045 [info] 
(ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:18:38.140 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][339da165-1582-4efe-9188-facdb50f66c9] socks forwarding established\n2025-07-15 17:18:38.244 [info] [command][73d62b6a-149f-406f-9b66-d38602e8374e] Process exited with code 0\n2025-07-15 17:18:38.244 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][339da165-1582-4efe-9188-facdb50f66c9] socks connection closed\n2025-07-15 17:18:38.244 [info] [command][73d62b6a-149f-406f-9b66-d38602e8374e] Socket close event received\n2025-07-15 17:18:38.269 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60367 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:19:38.246 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:19:38.248 [info] [command][e1a5c6e5-74dd-42fe-b5be-c430f446bb6f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""e1a5c6e5-74dd-42fe-b5be-c430f446bb6f""}\n2025-07-15 17:19:38.248 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][79939fb6-09a1-4a32-a907-22b923ddca4c] received connection request\n2025-07-15 17:19:38.249 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:19:39.023 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][79939fb6-09a1-4a32-a907-22b923ddca4c] socks forwarding established\n2025-07-15 17:19:39.056 [info] [command][e1a5c6e5-74dd-42fe-b5be-c430f446bb6f] Process exited with code 0\n2025-07-15 17:19:39.056 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][79939fb6-09a1-4a32-a907-22b923ddca4c] socks connection closed\n2025-07-15 17:19:39.056 [info] [command][e1a5c6e5-74dd-42fe-b5be-c430f446bb6f] Socket close event received\n2025-07-15 17:19:39.219 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60407 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:20:39.061 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:20:39.064 [info] [command][5bc5c606-6628-44d1-861f-0a8d184a3a2f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""5bc5c606-6628-44d1-861f-0a8d184a3a2f""}\n2025-07-15 17:20:39.065 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][3301a3a2-bc51-4d67-9bde-07720bc598ec] received connection request\n2025-07-15 17:20:39.065 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:20:39.090 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][3301a3a2-bc51-4d67-9bde-07720bc598ec] socks forwarding established\n2025-07-15 17:20:39.117 [info] [command][5bc5c606-6628-44d1-861f-0a8d184a3a2f] Process exited with code 0\n2025-07-15 17:20:39.118 [info] [command][5bc5c606-6628-44d1-861f-0a8d184a3a2f] Socket close event received\n2025-07-15 17:20:39.119 [info] 
[forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][3301a3a2-bc51-4d67-9bde-07720bc598ec] socks connection closed\n2025-07-15 17:20:39.143 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60455 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:21:39.122 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:21:39.125 [info] [command][0d4cc294-b673-403f-beef-0655b0644e56] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""0d4cc294-b673-403f-beef-0655b0644e56""}\n2025-07-15 17:21:39.125 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][befeee45-27f1-4da4-9e9c-5b1835e58767] received connection request\n2025-07-15 17:21:39.126 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\n\n2025-07-15 17:21:39.127 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:21:39.156 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][befeee45-27f1-4da4-9e9c-5b1835e58767] socks forwarding established\n2025-07-15 17:21:39.273 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][befeee45-27f1-4da4-9e9c-5b1835e58767] socks connection closed\n2025-07-15 17:21:39.273 [info] [command][0d4cc294-b673-403f-beef-0655b0644e56] Process exited with code 0\n2025-07-15 17:21:39.273 [info] [command][0d4cc294-b673-403f-beef-0655b0644e56] Socket close event received\n2025-07-15 17:21:39.299 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60526 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:22:39.274 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:22:39.276 [info] [command][f1e0a670-7ed7-46b6-b387-0c1e06552bb2] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""f1e0a670-7ed7-46b6-b387-0c1e06552bb2""}\n2025-07-15 17:22:39.276 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][06bf7561-6e65-49a8-aef6-8ade8e8ddf06] received connection request\n2025-07-15 17:22:39.277 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\n\n2025-07-15 17:22:39.277 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:22:39.388 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][06bf7561-6e65-49a8-aef6-8ade8e8ddf06] socks forwarding established\n2025-07-15 17:22:39.418 [info] [command][f1e0a670-7ed7-46b6-b387-0c1e06552bb2] Process exited with code 0\n2025-07-15 17:22:39.419 [info] [command][f1e0a670-7ed7-46b6-b387-0c1e06552bb2] Socket close event received\n2025-07-15 17:22:39.425 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][06bf7561-6e65-49a8-aef6-8ade8e8ddf06] socks connection closed\n2025-07-15 17:22:39.452 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60551 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:23:39.423 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:23:39.427 [info] 
[forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][f5fae915-b66e-4837-8664-e82be6e5e7c8] received connection request\n2025-07-15 17:23:39.428 [info] [command][62fd0a1e-0d22-472a-8ad4-db2827f98797] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""62fd0a1e-0d22-472a-8ad4-db2827f98797""}\n2025-07-15 17:23:39.431 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:23:39.463 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][f5fae915-b66e-4837-8664-e82be6e5e7c8] socks forwarding established\n2025-07-15 17:23:39.503 [info] [command][62fd0a1e-0d22-472a-8ad4-db2827f98797] Process exited with code 0\n2025-07-15 17:23:39.503 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][f5fae915-b66e-4837-8664-e82be6e5e7c8] socks connection closed\n2025-07-15 17:23:39.503 [info] [command][62fd0a1e-0d22-472a-8ad4-db2827f98797] Socket close event received\n2025-07-15 17:23:39.545 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60590 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:24:39.507 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:24:39.508 [info] [command][2096bcaf-45b6-49b0-ae10-81ef2ebeb363] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""2096bcaf-45b6-49b0-ae10-81ef2ebeb363""}\n2025-07-15 17:24:39.509 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][390eef8c-8b4d-4a26-aae0-fff85f6bfa2c] received connection request\n2025-07-15 17:24:39.509 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:24:39.561 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][390eef8c-8b4d-4a26-aae0-fff85f6bfa2c] socks forwarding established\n2025-07-15 17:24:39.722 [info] [command][2096bcaf-45b6-49b0-ae10-81ef2ebeb363] Process exited with code 0\n2025-07-15 17:24:39.722 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][390eef8c-8b4d-4a26-aae0-fff85f6bfa2c] socks connection closed\n2025-07-15 17:24:39.722 [info] [command][2096bcaf-45b6-49b0-ae10-81ef2ebeb363] Socket close event received\n2025-07-15 17:24:39.750 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60675 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:25:39.724 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:25:39.726 [info] [command][b6143009-95e2-4efb-ac03-1394dba8ace5] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""b6143009-95e2-4efb-ac03-1394dba8ace5""}\n2025-07-15 17:25:39.727 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][a1431309-ccff-40b0-9c6e-63d773e5567f] received connection request\n2025-07-15 17:25:39.728 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\n\n2025-07-15 17:25:39.728 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip 
[dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:25:39.758 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][a1431309-ccff-40b0-9c6e-63d773e5567f] socks forwarding established\n2025-07-15 17:25:39.787 [info] [command][b6143009-95e2-4efb-ac03-1394dba8ace5] Process exited with code 0\n2025-07-15 17:25:39.787 [info] [command][b6143009-95e2-4efb-ac03-1394dba8ace5] Socket close event received\n2025-07-15 17:25:39.809 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][a1431309-ccff-40b0-9c6e-63d773e5567f] socks connection closed\n2025-07-15 17:25:39.810 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60712 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:26:39.787 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:26:39.788 [info] [command][85fb2c7f-6b6c-4bf3-a3c0-eb6c25b048bc] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""85fb2c7f-6b6c-4bf3-a3c0-eb6c25b048bc""}\n2025-07-15 17:26:39.789 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][0e6822e8-f783-4c57-ad1f-b2b29a7046b4] received connection request\n2025-07-15 17:26:39.789 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:26:39.813 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][0e6822e8-f783-4c57-ad1f-b2b29a7046b4] socks forwarding established\n2025-07-15 17:26:39.842 [info] [command][85fb2c7f-6b6c-4bf3-a3c0-eb6c25b048bc] Process exited with code 0\n2025-07-15 17:26:39.842 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][0e6822e8-f783-4c57-ad1f-b2b29a7046b4] socks connection closed\n2025-07-15 17:26:39.842 [info] [command][85fb2c7f-6b6c-4bf3-a3c0-eb6c25b048bc] Socket close event received\n2025-07-15 17:26:39.867 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60768 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:27:39.847 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:27:39.849 [info] [command][905f7693-ef1f-4246-9c18-2caae7277592] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""905f7693-ef1f-4246-9c18-2caae7277592""}\n2025-07-15 17:27:39.850 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][2b76ca1a-e4c2-42e4-8bb2-2c7f39372941] received connection request\n2025-07-15 17:27:39.850 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:27:39.953 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][2b76ca1a-e4c2-42e4-8bb2-2c7f39372941] socks forwarding established\n2025-07-15 17:27:40.101 [info] [command][905f7693-ef1f-4246-9c18-2caae7277592] Process exited with code 0\n2025-07-15 17:27:40.101 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][2b76ca1a-e4c2-42e4-8bb2-2c7f39372941] socks connection closed\n2025-07-15 17:27:40.101 [info] [command][905f7693-ef1f-4246-9c18-2caae7277592] Socket close event 
received\n2025-07-15 17:27:40.127 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60790 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:28:40.102 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:28:40.105 [info] [command][5e2e4675-506c-46c7-bf5b-a48b1fa81d99] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""5e2e4675-506c-46c7-bf5b-a48b1fa81d99""}\n2025-07-15 17:28:40.105 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][bbecb3e3-6afa-4ea0-9221-2c0a27c27e03] received connection request\n2025-07-15 17:28:40.106 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:28:40.145 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][bbecb3e3-6afa-4ea0-9221-2c0a27c27e03] socks forwarding established\n2025-07-15 17:28:40.173 [info] [command][5e2e4675-506c-46c7-bf5b-a48b1fa81d99] Process exited with code 0\n2025-07-15 17:28:40.173 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][bbecb3e3-6afa-4ea0-9221-2c0a27c27e03] socks connection closed\n2025-07-15 17:28:40.173 [info] [command][5e2e4675-506c-46c7-bf5b-a48b1fa81d99] Socket close event received\n2025-07-15 17:28:40.201 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60824 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:29:40.179 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:29:40.181 [info] [command][aea09ecb-2696-4264-9aca-d0ed39bc6217] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""aea09ecb-2696-4264-9aca-d0ed39bc6217""}\n2025-07-15 17:29:40.182 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][370e76a3-2fe5-4681-971b-d670404c2b25] received connection request\n2025-07-15 17:29:40.183 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:29:40.245 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][370e76a3-2fe5-4681-971b-d670404c2b25] socks forwarding established\n2025-07-15 17:29:40.338 [info] [command][aea09ecb-2696-4264-9aca-d0ed39bc6217] Process exited with code 0\n2025-07-15 17:29:40.338 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][370e76a3-2fe5-4681-971b-d670404c2b25] socks connection closed\n2025-07-15 17:29:40.338 [info] [command][aea09ecb-2696-4264-9aca-d0ed39bc6217] Socket close event received\n2025-07-15 17:29:40.569 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60865 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:30:40.343 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:30:40.345 [info] [command][76d16aed-d6db-4e4d-8c9c-097a74d186e6] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""76d16aed-d6db-4e4d-8c9c-097a74d186e6""}\n2025-07-15 17:30:40.346 [info] 
[forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][829497b3-ebfa-40aa-9c1d-399fb3038410] received connection request\n2025-07-15 17:30:40.346 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\n\n2025-07-15 17:30:40.346 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:30:40.490 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][829497b3-ebfa-40aa-9c1d-399fb3038410] socks forwarding established\n2025-07-15 17:30:40.667 [info] [command][76d16aed-d6db-4e4d-8c9c-097a74d186e6] Process exited with code 0\n2025-07-15 17:30:40.668 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][829497b3-ebfa-40aa-9c1d-399fb3038410] socks connection closed\n2025-07-15 17:30:40.668 [info] [command][76d16aed-d6db-4e4d-8c9c-097a74d186e6] Socket close event received\n2025-07-15 17:30:40.692 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60906 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:31:40.668 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:31:40.669 [info] [command][26e646f7-0d03-4fe3-a31b-f3663d4e32d2] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""26e646f7-0d03-4fe3-a31b-f3663d4e32d2""}\n2025-07-15 17:31:40.670 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][ff29677f-32d9-49b8-a096-86546272221a] received connection request\n2025-07-15 17:31:40.670 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:31:40.693 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ff29677f-32d9-49b8-a096-86546272221a] socks forwarding established\n2025-07-15 17:31:40.718 [info] [command][26e646f7-0d03-4fe3-a31b-f3663d4e32d2] Process exited with code 0\n2025-07-15 17:31:40.718 [info] [command][26e646f7-0d03-4fe3-a31b-f3663d4e32d2] Socket close event received\n2025-07-15 17:31:40.741 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ff29677f-32d9-49b8-a096-86546272221a] socks connection closed\n2025-07-15 17:31:40.743 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60947 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:32:40.719 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:32:40.722 [info] [command][fe6f9683-52bd-4677-a1b8-76bd5ddbc50e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""fe6f9683-52bd-4677-a1b8-76bd5ddbc50e""}\n2025-07-15 17:32:40.723 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][b028cb57-967f-4ac1-920b-e4f459bb96ea] received connection request\n2025-07-15 17:32:40.723 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:32:40.806 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][b028cb57-967f-4ac1-920b-e4f459bb96ea] socks forwarding established\n2025-07-15 17:32:40.847 [info] 
[command][fe6f9683-52bd-4677-a1b8-76bd5ddbc50e] Process exited with code 0\n2025-07-15 17:32:40.847 [info] [command][fe6f9683-52bd-4677-a1b8-76bd5ddbc50e] Socket close event received\n2025-07-15 17:32:40.899 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][b028cb57-967f-4ac1-920b-e4f459bb96ea] socks connection closed\n2025-07-15 17:32:40.918 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 60973 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:33:40.848 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:33:40.851 [info] [command][49af796d-fb3f-4ebf-ab80-01661b790ae0] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""49af796d-fb3f-4ebf-ab80-01661b790ae0""}\n2025-07-15 17:33:40.852 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][bb9c62ff-d17e-4803-af19-6b87de005ceb] received connection request\n2025-07-15 17:33:40.852 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:33:40.938 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][bb9c62ff-d17e-4803-af19-6b87de005ceb] socks forwarding established\n2025-07-15 17:33:41.101 [info] [command][49af796d-fb3f-4ebf-ab80-01661b790ae0] Process exited with code 0\n2025-07-15 17:33:41.102 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][bb9c62ff-d17e-4803-af19-6b87de005ceb] socks connection closed\n2025-07-15 17:33:41.102 [info] [command][49af796d-fb3f-4ebf-ab80-01661b790ae0] Socket close event received\n2025-07-15 17:33:41.125 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 61004 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:34:41.107 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 17:34:41.110 [info] [command][1409be99-1caa-4bd5-a77b-b6d3b8e4df3f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""1409be99-1caa-4bd5-a77b-b6d3b8e4df3f""}\n2025-07-15 17:34:41.111 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][5f110074-1a93-435b-b90d-06555b126401] received connection request\n2025-07-15 17:34:41.112 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 17:34:41.137 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][5f110074-1a93-435b-b90d-06555b126401] socks forwarding established\n2025-07-15 17:34:41.165 [info] [command][1409be99-1caa-4bd5-a77b-b6d3b8e4df3f] Process exited with code 0\n2025-07-15 17:34:41.165 [info] [command][1409be99-1caa-4bd5-a77b-b6d3b8e4df3f] Socket close event received\n2025-07-15 17:34:41.190 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 61039 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 17:34:41.190 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][5f110074-1a93-435b-b90d-06555b126401] socks connection closed\n2025-07-15 17:35:41.167 [info] 
[remote-ssh] Pinging remote server on port 58083\n[... identical Remote-SSH keepalive cycles repeat once per minute from 17:35:41 through 18:22:46, each with the same sequence: ping on port 58083, echo command request, SOCKS forwarding established over 127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279, process exited with code 0, socket close, channel freed ...]\n2025-07-15 18:22:46.273 [info] (ssh_tunnel) stderr: debug1: channel 4: free: 
direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 62966 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:23:46.254 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:23:46.256 [info] [command][dfcd48aa-6874-436c-92ba-7d6983b0f89a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""dfcd48aa-6874-436c-92ba-7d6983b0f89a""}\n2025-07-15 18:23:46.256 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][888e9dc5-f9be-4453-ad51-f65ca546c18f] received connection request\n2025-07-15 18:23:46.256 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:23:46.356 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][888e9dc5-f9be-4453-ad51-f65ca546c18f] socks forwarding established\n2025-07-15 18:23:46.384 [info] [command][dfcd48aa-6874-436c-92ba-7d6983b0f89a] Process exited with code 0\n2025-07-15 18:23:46.385 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][888e9dc5-f9be-4453-ad51-f65ca546c18f] socks connection closed\n2025-07-15 18:23:46.385 [info] [command][dfcd48aa-6874-436c-92ba-7d6983b0f89a] Socket close event received\n2025-07-15 18:23:46.454 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63013 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:24:46.390 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:24:46.392 [info] [command][ca150766-b654-40d6-91af-99f88f53b64f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""ca150766-b654-40d6-91af-99f88f53b64f""}\n2025-07-15 18:24:46.393 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][273a97b3-97c7-4a3f-b69f-908c52cfd675] received connection request\n2025-07-15 18:24:46.394 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:24:46.420 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][273a97b3-97c7-4a3f-b69f-908c52cfd675] socks forwarding established\n2025-07-15 18:24:46.457 [info] [command][ca150766-b654-40d6-91af-99f88f53b64f] Process exited with code 0\n2025-07-15 18:24:46.457 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][273a97b3-97c7-4a3f-b69f-908c52cfd675] socks connection closed\n2025-07-15 18:24:46.458 [info] [command][ca150766-b654-40d6-91af-99f88f53b64f] Socket close event received\n2025-07-15 18:24:46.484 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63037 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:25:46.463 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:25:46.465 [info] [command][06e8a926-bb0f-4b65-992d-03e0bc5ae884] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""06e8a926-bb0f-4b65-992d-03e0bc5ae884""}\n2025-07-15 18:25:46.467 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][8e51dd92-e765-438a-8847-9a0e8fa63e97] 
received connection request\n2025-07-15 18:25:46.467 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:25:46.492 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][8e51dd92-e765-438a-8847-9a0e8fa63e97] socks forwarding established\n2025-07-15 18:25:46.521 [info] [command][06e8a926-bb0f-4b65-992d-03e0bc5ae884] Process exited with code 0\n2025-07-15 18:25:46.522 [info] [command][06e8a926-bb0f-4b65-992d-03e0bc5ae884] Socket close event received\n2025-07-15 18:25:46.522 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][8e51dd92-e765-438a-8847-9a0e8fa63e97] socks connection closed\n2025-07-15 18:25:46.552 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63110 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:26:46.527 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:26:46.530 [info] [command][2f3512f3-48d4-4e7e-ad6c-6cfcac6ba983] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""2f3512f3-48d4-4e7e-ad6c-6cfcac6ba983""}\n2025-07-15 18:26:46.531 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][79520f0d-58c5-4aac-8d4d-97bcaad60d2b] received connection request\n2025-07-15 18:26:46.531 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:26:46.737 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][79520f0d-58c5-4aac-8d4d-97bcaad60d2b] socks forwarding established\n2025-07-15 18:26:46.764 [info] [command][2f3512f3-48d4-4e7e-ad6c-6cfcac6ba983] Process exited with code 0\n2025-07-15 18:26:46.764 [info] [command][2f3512f3-48d4-4e7e-ad6c-6cfcac6ba983] Socket close event received\n2025-07-15 18:26:46.864 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63157 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:26:46.865 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][79520f0d-58c5-4aac-8d4d-97bcaad60d2b] socks connection closed\n2025-07-15 18:27:46.769 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:27:46.771 [info] [command][3f7e8f37-1cb4-4c5e-b6df-0dfb707e0664] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""3f7e8f37-1cb4-4c5e-b6df-0dfb707e0664""}\n2025-07-15 18:27:46.772 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][d2e93b18-4225-4834-9874-36d229ccca52] received connection request\n2025-07-15 18:27:46.772 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:27:46.797 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][d2e93b18-4225-4834-9874-36d229ccca52] socks forwarding established\n2025-07-15 18:27:46.825 [info] [command][3f7e8f37-1cb4-4c5e-b6df-0dfb707e0664] Process exited with code 0\n2025-07-15 18:27:46.826 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 
-> 127.0.0.1:37279][d2e93b18-4225-4834-9874-36d229ccca52] socks connection closed\n2025-07-15 18:27:46.826 [info] [command][3f7e8f37-1cb4-4c5e-b6df-0dfb707e0664] Socket close event received\n2025-07-15 18:27:46.849 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63190 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:28:46.830 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:28:46.833 [info] [command][ce64e8e3-5095-47e4-bf70-9935112ddc2e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""ce64e8e3-5095-47e4-bf70-9935112ddc2e""}\n2025-07-15 18:28:46.834 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][0fdf080f-f293-49fc-aae9-cff5571e54e2] received connection request\n2025-07-15 18:28:46.834 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:28:46.860 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][0fdf080f-f293-49fc-aae9-cff5571e54e2] socks forwarding established\n2025-07-15 18:28:46.893 [info] [command][ce64e8e3-5095-47e4-bf70-9935112ddc2e] Process exited with code 0\n2025-07-15 18:28:46.894 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][0fdf080f-f293-49fc-aae9-cff5571e54e2] socks connection closed\n2025-07-15 18:28:46.894 [info] [command][ce64e8e3-5095-47e4-bf70-9935112ddc2e] Socket close event received\n2025-07-15 18:28:46.918 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63235 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:29:46.896 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:29:46.897 [info] [command][d24cc33e-01d6-49aa-b4ba-63a6b5d4f762] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""d24cc33e-01d6-49aa-b4ba-63a6b5d4f762""}\n2025-07-15 18:29:46.897 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][02e2ecc6-96a8-4877-bb66-332b6b5ff067] received connection request\n2025-07-15 18:29:46.897 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\n\n2025-07-15 18:29:46.897 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:29:47.021 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][02e2ecc6-96a8-4877-bb66-332b6b5ff067] socks forwarding established\n2025-07-15 18:29:47.078 [info] [command][d24cc33e-01d6-49aa-b4ba-63a6b5d4f762] Process exited with code 0\n2025-07-15 18:29:47.079 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][02e2ecc6-96a8-4877-bb66-332b6b5ff067] socks connection closed\n2025-07-15 18:29:47.079 [info] [command][d24cc33e-01d6-49aa-b4ba-63a6b5d4f762] Socket close event received\n2025-07-15 18:29:47.234 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63264 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:30:47.081 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:30:47.083 [info] 
[command][1e00b76c-fc59-4b68-842b-3148188bf191] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""1e00b76c-fc59-4b68-842b-3148188bf191""}\n2025-07-15 18:30:47.083 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][ef6eb10a-19fd-490d-8bd9-1b3ae77e0d47] received connection request\n2025-07-15 18:30:47.083 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\n\n2025-07-15 18:30:47.084 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:30:47.126 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ef6eb10a-19fd-490d-8bd9-1b3ae77e0d47] socks forwarding established\n2025-07-15 18:30:47.257 [info] [command][1e00b76c-fc59-4b68-842b-3148188bf191] Process exited with code 0\n2025-07-15 18:30:47.258 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ef6eb10a-19fd-490d-8bd9-1b3ae77e0d47] socks connection closed\n2025-07-15 18:30:47.258 [info] [command][1e00b76c-fc59-4b68-842b-3148188bf191] Socket close event received\n2025-07-15 18:30:47.291 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63302 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:31:47.263 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:31:47.265 [info] [command][e2853dbb-5dd3-47e6-b18e-32f4ed97e416] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""e2853dbb-5dd3-47e6-b18e-32f4ed97e416""}\n2025-07-15 18:31:47.266 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][ecd80bd8-e25d-4733-9717-98b346caa592] received connection request\n2025-07-15 18:31:47.267 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:31:47.347 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ecd80bd8-e25d-4733-9717-98b346caa592] socks forwarding established\n2025-07-15 18:31:47.393 [info] [command][e2853dbb-5dd3-47e6-b18e-32f4ed97e416] Process exited with code 0\n2025-07-15 18:31:47.393 [info] [command][e2853dbb-5dd3-47e6-b18e-32f4ed97e416] Socket close event received\n2025-07-15 18:31:47.424 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ecd80bd8-e25d-4733-9717-98b346caa592] socks connection closed\n2025-07-15 18:31:47.429 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63360 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:32:47.398 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:32:47.400 [info] [command][d7404e05-ada1-4a10-980d-e2c8f64c13ff] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""d7404e05-ada1-4a10-980d-e2c8f64c13ff""}\n2025-07-15 18:32:47.401 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][f380594b-c6a7-437f-bc13-eab5392a4842] received connection request\n2025-07-15 18:32:47.401 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip 
[dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:32:47.445 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][f380594b-c6a7-437f-bc13-eab5392a4842] socks forwarding established\n2025-07-15 18:32:47.492 [info] [command][d7404e05-ada1-4a10-980d-e2c8f64c13ff] Process exited with code 0\n2025-07-15 18:32:47.492 [info] [command][d7404e05-ada1-4a10-980d-e2c8f64c13ff] Socket close event received\n2025-07-15 18:32:47.498 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][f380594b-c6a7-437f-bc13-eab5392a4842] socks connection closed\n2025-07-15 18:32:47.641 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63397 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:33:47.497 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:33:47.499 [info] [command][b5948153-e19f-461e-935f-3f4149fb2fca] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""b5948153-e19f-461e-935f-3f4149fb2fca""}\n2025-07-15 18:33:47.499 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][ec6ddc14-8e5e-4ad7-8c7c-1c97c7177ced] received connection request\n2025-07-15 18:33:47.500 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:33:47.537 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ec6ddc14-8e5e-4ad7-8c7c-1c97c7177ced] socks forwarding established\n2025-07-15 18:33:47.568 [info] [command][b5948153-e19f-461e-935f-3f4149fb2fca] Process exited with code 0\n2025-07-15 18:33:47.568 [info] [command][b5948153-e19f-461e-935f-3f4149fb2fca] Socket close event received\n2025-07-15 18:33:47.575 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ec6ddc14-8e5e-4ad7-8c7c-1c97c7177ced] socks connection closed\n2025-07-15 18:33:47.605 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63435 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:34:47.572 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:34:47.574 [info] [command][508ff2b3-9714-47e1-8023-abd3aa6be0c7] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""508ff2b3-9714-47e1-8023-abd3aa6be0c7""}\n2025-07-15 18:34:47.575 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][e0c15ea4-8734-4897-81bd-10fc32de5724] received connection request\n2025-07-15 18:34:47.576 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:34:47.622 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][e0c15ea4-8734-4897-81bd-10fc32de5724] socks forwarding established\n2025-07-15 18:34:47.655 [info] [command][508ff2b3-9714-47e1-8023-abd3aa6be0c7] Process exited with code 0\n2025-07-15 18:34:47.655 [info] [command][508ff2b3-9714-47e1-8023-abd3aa6be0c7] Socket close event received\n2025-07-15 18:34:47.685 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][e0c15ea4-8734-4897-81bd-10fc32de5724] socks connection 
closed\n2025-07-15 18:34:47.690 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63472 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:35:47.660 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:35:47.662 [info] [command][2a5c14a8-c15a-4852-9a83-82769b547707] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""2a5c14a8-c15a-4852-9a83-82769b547707""}\n2025-07-15 18:35:47.663 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][acf92aaa-80b6-4785-b19f-ca82a4bd6676] received connection request\n2025-07-15 18:35:47.664 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:35:47.703 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][acf92aaa-80b6-4785-b19f-ca82a4bd6676] socks forwarding established\n2025-07-15 18:35:47.741 [info] [command][2a5c14a8-c15a-4852-9a83-82769b547707] Process exited with code 0\n2025-07-15 18:35:47.741 [info] [command][2a5c14a8-c15a-4852-9a83-82769b547707] Socket close event received\n2025-07-15 18:35:47.744 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][acf92aaa-80b6-4785-b19f-ca82a4bd6676] socks connection closed\n2025-07-15 18:35:47.770 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63514 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:36:47.746 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:36:47.747 [info] [command][a8ce98ad-6018-4173-9c8e-7f7fa99ccfca] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""a8ce98ad-6018-4173-9c8e-7f7fa99ccfca""}\n2025-07-15 18:36:47.747 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][6c4063c6-8681-426e-b9f9-956406de133c] received connection request\n2025-07-15 18:36:47.747 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:36:47.777 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][6c4063c6-8681-426e-b9f9-956406de133c] socks forwarding established\n2025-07-15 18:36:47.811 [info] [command][a8ce98ad-6018-4173-9c8e-7f7fa99ccfca] Process exited with code 0\n2025-07-15 18:36:47.811 [info] [command][a8ce98ad-6018-4173-9c8e-7f7fa99ccfca] Socket close event received\n2025-07-15 18:36:47.811 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][6c4063c6-8681-426e-b9f9-956406de133c] socks connection closed\n2025-07-15 18:36:47.838 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63554 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:37:47.816 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:37:47.818 [info] [command][8057c881-3f61-4b76-945c-62b33bbe2b73] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""8057c881-3f61-4b76-945c-62b33bbe2b73""}\n2025-07-15 18:37:47.819 [info] 
[forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][56076d5b-5b71-4706-b986-d44d5576989d] received connection request\n2025-07-15 18:37:47.819 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:37:47.935 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][56076d5b-5b71-4706-b986-d44d5576989d] socks forwarding established\n2025-07-15 18:37:48.039 [info] [command][8057c881-3f61-4b76-945c-62b33bbe2b73] Process exited with code 0\n2025-07-15 18:37:48.039 [info] [command][8057c881-3f61-4b76-945c-62b33bbe2b73] Socket close event received\n2025-07-15 18:37:48.040 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][56076d5b-5b71-4706-b986-d44d5576989d] socks connection closed\n2025-07-15 18:37:48.068 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63579 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:38:48.044 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:38:48.045 [info] [command][69ff7554-a3b2-4dc4-9354-862517b3a20f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""69ff7554-a3b2-4dc4-9354-862517b3a20f""}\n2025-07-15 18:38:48.045 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][ba4f6408-7912-4023-a3e2-733e675382be] received connection request\n2025-07-15 18:38:48.045 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:38:48.095 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ba4f6408-7912-4023-a3e2-733e675382be] socks forwarding established\n2025-07-15 18:38:48.250 [info] [command][69ff7554-a3b2-4dc4-9354-862517b3a20f] Process exited with code 0\n2025-07-15 18:38:48.250 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ba4f6408-7912-4023-a3e2-733e675382be] socks connection closed\n2025-07-15 18:38:48.251 [info] [command][69ff7554-a3b2-4dc4-9354-862517b3a20f] Socket close event received\n2025-07-15 18:38:48.275 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63620 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:39:48.253 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:39:48.256 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][f8d437c4-df20-46fc-ae54-d172cda08f63] received connection request\n2025-07-15 18:39:48.256 [info] [command][66706987-7352-44c2-a20d-25a481841e8b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""66706987-7352-44c2-a20d-25a481841e8b""}\n2025-07-15 18:39:48.257 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:39:48.300 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][f8d437c4-df20-46fc-ae54-d172cda08f63] socks forwarding established\n2025-07-15 18:39:48.331 [info] [command][66706987-7352-44c2-a20d-25a481841e8b] Process exited with 
code 0\n2025-07-15 18:39:48.332 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][f8d437c4-df20-46fc-ae54-d172cda08f63] socks connection closed\n2025-07-15 18:39:48.332 [info] [command][66706987-7352-44c2-a20d-25a481841e8b] Socket close event received\n2025-07-15 18:39:48.358 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63657 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:40:48.337 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:40:48.339 [info] [command][e6c05dd5-6042-4c71-a503-052432db54a4] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""e6c05dd5-6042-4c71-a503-052432db54a4""}\n2025-07-15 18:40:48.339 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][7fc0b78c-0864-4ecb-9841-9460831b46ab] received connection request\n2025-07-15 18:40:48.339 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:40:48.449 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][7fc0b78c-0864-4ecb-9841-9460831b46ab] socks forwarding established\n2025-07-15 18:40:48.607 [info] [command][e6c05dd5-6042-4c71-a503-052432db54a4] Process exited with code 0\n2025-07-15 18:40:48.607 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][7fc0b78c-0864-4ecb-9841-9460831b46ab] socks connection closed\n2025-07-15 18:40:48.607 [info] [command][e6c05dd5-6042-4c71-a503-052432db54a4] Socket close event received\n2025-07-15 18:40:48.631 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63701 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:41:48.609 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:41:48.611 [info] [command][277da42a-1df3-457c-b9af-7129c101e5ae] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""277da42a-1df3-457c-b9af-7129c101e5ae""}\n2025-07-15 18:41:48.612 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][6e926646-d707-4947-8e10-68622c322f26] received connection request\n2025-07-15 18:41:48.613 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:41:48.716 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][6e926646-d707-4947-8e10-68622c322f26] socks forwarding established\n2025-07-15 18:41:48.831 [info] [command][277da42a-1df3-457c-b9af-7129c101e5ae] Process exited with code 0\n2025-07-15 18:41:48.831 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][6e926646-d707-4947-8e10-68622c322f26] socks connection closed\n2025-07-15 18:41:48.831 [info] [command][277da42a-1df3-457c-b9af-7129c101e5ae] Socket close event received\n2025-07-15 18:41:48.854 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63753 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:42:48.833 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:42:48.836 
[info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][932af781-bec6-4dc9-8d3f-999ab948aac6] received connection request\n2025-07-15 18:42:48.837 [info] [command][7db6bebb-1c9b-4f30-9c5d-f87218db975f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""7db6bebb-1c9b-4f30-9c5d-f87218db975f""}\n2025-07-15 18:42:48.837 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:42:48.863 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][932af781-bec6-4dc9-8d3f-999ab948aac6] socks forwarding established\n2025-07-15 18:42:48.930 [info] [command][7db6bebb-1c9b-4f30-9c5d-f87218db975f] Process exited with code 0\n2025-07-15 18:42:48.930 [info] [command][7db6bebb-1c9b-4f30-9c5d-f87218db975f] Socket close event received\n2025-07-15 18:42:48.930 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][932af781-bec6-4dc9-8d3f-999ab948aac6] socks connection closed\n2025-07-15 18:42:49.002 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63785 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:43:48.930 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:43:48.931 [info] [command][da2c35ca-2bdc-488c-b625-2a7a39b7f7df] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""da2c35ca-2bdc-488c-b625-2a7a39b7f7df""}\n2025-07-15 18:43:48.932 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][04351061-b68d-4f74-a4c1-a240bb7f31f6] received connection request\n2025-07-15 18:43:48.932 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:43:48.957 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][04351061-b68d-4f74-a4c1-a240bb7f31f6] socks forwarding established\n2025-07-15 18:43:48.987 [info] [command][da2c35ca-2bdc-488c-b625-2a7a39b7f7df] Process exited with code 0\n2025-07-15 18:43:48.987 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][04351061-b68d-4f74-a4c1-a240bb7f31f6] socks connection closed\n2025-07-15 18:43:48.987 [info] [command][da2c35ca-2bdc-488c-b625-2a7a39b7f7df] Socket close event received\n2025-07-15 18:43:49.011 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63832 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:44:48.992 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:44:48.994 [info] [command][ed257bfe-10b8-49f2-ae53-9832ce7fdfbe] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""ed257bfe-10b8-49f2-ae53-9832ce7fdfbe""}\n2025-07-15 18:44:48.996 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][88892767-68f8-4230-9a29-5bb50bf9ff99] received connection request\n2025-07-15 18:44:48.996 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 
18:44:49.078 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][88892767-68f8-4230-9a29-5bb50bf9ff99] socks forwarding established\n2025-07-15 18:44:49.164 [info] [command][ed257bfe-10b8-49f2-ae53-9832ce7fdfbe] Process exited with code 0\n2025-07-15 18:44:49.164 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][88892767-68f8-4230-9a29-5bb50bf9ff99] socks connection closed\n2025-07-15 18:44:49.164 [info] [command][ed257bfe-10b8-49f2-ae53-9832ce7fdfbe] Socket close event received\n2025-07-15 18:44:49.189 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63861 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:45:49.167 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:45:49.168 [info] [command][2da25318-257a-47d6-a335-dd3dd520c673] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""2da25318-257a-47d6-a335-dd3dd520c673""}\n2025-07-15 18:45:49.169 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][d6c62a13-794c-430d-bb3f-0a80c9121685] received connection request\n2025-07-15 18:45:49.169 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:45:49.192 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][d6c62a13-794c-430d-bb3f-0a80c9121685] socks forwarding established\n2025-07-15 18:45:49.219 [info] [command][2da25318-257a-47d6-a335-dd3dd520c673] Process exited with code 0\n2025-07-15 18:45:49.219 [info] [command][2da25318-257a-47d6-a335-dd3dd520c673] Socket close event received\n2025-07-15 18:45:49.219 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][d6c62a13-794c-430d-bb3f-0a80c9121685] socks connection closed\n2025-07-15 18:45:49.244 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63912 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:46:49.222 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:46:49.224 [info] [command][4e842c8d-4a67-49a9-90da-9af81a35da48] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""4e842c8d-4a67-49a9-90da-9af81a35da48""}\n2025-07-15 18:46:49.224 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][e9131080-be11-4f53-a7ce-9ecc217b8414] received connection request\n2025-07-15 18:46:49.225 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:46:49.286 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][e9131080-be11-4f53-a7ce-9ecc217b8414] socks forwarding established\n2025-07-15 18:46:49.397 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][e9131080-be11-4f53-a7ce-9ecc217b8414] socks connection closed\n2025-07-15 18:46:49.397 [info] [command][4e842c8d-4a67-49a9-90da-9af81a35da48] Process exited with code 0\n2025-07-15 18:46:49.397 [info] [command][4e842c8d-4a67-49a9-90da-9af81a35da48] Socket close event received\n2025-07-15 18:46:49.420 [info] (ssh_tunnel) stderr: 
debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63953 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:47:49.401 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:47:49.403 [info] [command][b1cc8010-6d4c-45af-83ef-ea4919f89ae4] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""b1cc8010-6d4c-45af-83ef-ea4919f89ae4""}\n2025-07-15 18:47:49.403 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][44ca205b-fc52-4b83-8c8d-642857be9431] received connection request\n2025-07-15 18:47:49.404 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:47:49.435 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][44ca205b-fc52-4b83-8c8d-642857be9431] socks forwarding established\n2025-07-15 18:47:49.463 [info] [command][b1cc8010-6d4c-45af-83ef-ea4919f89ae4] Process exited with code 0\n2025-07-15 18:47:49.463 [info] [command][b1cc8010-6d4c-45af-83ef-ea4919f89ae4] Socket close event received\n2025-07-15 18:47:49.463 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][44ca205b-fc52-4b83-8c8d-642857be9431] socks connection closed\n2025-07-15 18:47:49.487 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 63980 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:48:49.466 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:48:49.469 [info] [command][82c47244-b173-4cac-9d83-8d016ccadce9] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""82c47244-b173-4cac-9d83-8d016ccadce9""}\n2025-07-15 18:48:49.470 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][4ed758db-4129-4d42-8899-f0d2df259351] received connection request\n2025-07-15 18:48:49.470 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:48:49.495 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][4ed758db-4129-4d42-8899-f0d2df259351] socks forwarding established\n2025-07-15 18:48:49.522 [info] [command][82c47244-b173-4cac-9d83-8d016ccadce9] Process exited with code 0\n2025-07-15 18:48:49.523 [info] [command][82c47244-b173-4cac-9d83-8d016ccadce9] Socket close event received\n2025-07-15 18:48:49.523 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][4ed758db-4129-4d42-8899-f0d2df259351] socks connection closed\n2025-07-15 18:48:49.547 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64017 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:49:49.527 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:49:49.530 [info] [command][f3671420-c14f-429a-b2df-887276b7edf2] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""f3671420-c14f-429a-b2df-887276b7edf2""}\n2025-07-15 18:49:49.531 [info] [forwarding][multiplex][127.0.0.1:58083 -> 
127.0.0.1:37279][ffdd8d88-4fdb-4768-b433-d71dcb661eb1] received connection request\n2025-07-15 18:49:49.532 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:49:49.638 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ffdd8d88-4fdb-4768-b433-d71dcb661eb1] socks forwarding established\n2025-07-15 18:49:49.673 [info] [command][f3671420-c14f-429a-b2df-887276b7edf2] Process exited with code 0\n2025-07-15 18:49:49.674 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ffdd8d88-4fdb-4768-b433-d71dcb661eb1] socks connection closed\n2025-07-15 18:49:49.674 [info] [command][f3671420-c14f-429a-b2df-887276b7edf2] Socket close event received\n2025-07-15 18:49:49.699 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64055 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:50:49.675 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:50:49.677 [info] [command][e5c2dcc3-ceb5-4bd7-b11c-a2c568953b44] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""e5c2dcc3-ceb5-4bd7-b11c-a2c568953b44""}\n2025-07-15 18:50:49.678 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][8fe70255-f71c-4197-8905-dac09979d18b] received connection request\n2025-07-15 18:50:49.678 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:50:49.703 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][8fe70255-f71c-4197-8905-dac09979d18b] socks forwarding established\n2025-07-15 18:50:49.729 [info] [command][e5c2dcc3-ceb5-4bd7-b11c-a2c568953b44] Process exited with code 0\n2025-07-15 18:50:49.729 [info] [command][e5c2dcc3-ceb5-4bd7-b11c-a2c568953b44] Socket close event received\n2025-07-15 18:50:49.729 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][8fe70255-f71c-4197-8905-dac09979d18b] socks connection closed\n2025-07-15 18:50:49.753 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64094 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:51:49.734 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:51:49.735 [info] [command][ba1cfafc-c6b1-4c9e-80d6-87ec761bde7a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""ba1cfafc-c6b1-4c9e-80d6-87ec761bde7a""}\n2025-07-15 18:51:49.736 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][c777a373-e289-4648-97e0-24c2bc2c2985] received connection request\n2025-07-15 18:51:49.736 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:51:49.761 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][c777a373-e289-4648-97e0-24c2bc2c2985] socks forwarding established\n2025-07-15 18:51:49.788 [info] [command][ba1cfafc-c6b1-4c9e-80d6-87ec761bde7a] Process exited with code 0\n2025-07-15 18:51:49.789 [info] 
[forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][c777a373-e289-4648-97e0-24c2bc2c2985] socks connection closed\n2025-07-15 18:51:49.789 [info] [command][ba1cfafc-c6b1-4c9e-80d6-87ec761bde7a] Socket close event received\n2025-07-15 18:51:49.813 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64134 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:52:49.793 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:52:49.796 [info] [command][69d2c48b-908d-4a6e-9687-b25fd57f7b71] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""69d2c48b-908d-4a6e-9687-b25fd57f7b71""}\n2025-07-15 18:52:49.797 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][62fdc184-153e-40f6-b49d-9c9425e39c6a] received connection request\n2025-07-15 18:52:49.798 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:52:49.961 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][62fdc184-153e-40f6-b49d-9c9425e39c6a] socks forwarding established\n2025-07-15 18:52:50.132 [info] [command][69d2c48b-908d-4a6e-9687-b25fd57f7b71] Process exited with code 0\n2025-07-15 18:52:50.132 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][62fdc184-153e-40f6-b49d-9c9425e39c6a] socks connection closed\n2025-07-15 18:52:50.132 [info] [command][69d2c48b-908d-4a6e-9687-b25fd57f7b71] Socket close event received\n2025-07-15 18:52:50.157 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64165 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:53:50.138 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:53:50.140 [info] [command][9016053b-9e39-45b4-9af0-c61f32ebafbc] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""9016053b-9e39-45b4-9af0-c61f32ebafbc""}\n2025-07-15 18:53:50.141 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][1e710bb4-c414-4335-a7f5-ad11845f1489] received connection request\n2025-07-15 18:53:50.142 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:53:50.224 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][1e710bb4-c414-4335-a7f5-ad11845f1489] socks forwarding established\n2025-07-15 18:53:50.384 [info] [command][9016053b-9e39-45b4-9af0-c61f32ebafbc] Process exited with code 0\n2025-07-15 18:53:50.385 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][1e710bb4-c414-4335-a7f5-ad11845f1489] socks connection closed\n2025-07-15 18:53:50.385 [info] [command][9016053b-9e39-45b4-9af0-c61f32ebafbc] Socket close event received\n2025-07-15 18:53:50.410 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64201 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:54:50.388 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:54:50.391 [info] 
[command][15170b53-dd36-49bb-a79f-b0ae0adc678a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""15170b53-dd36-49bb-a79f-b0ae0adc678a""}\n2025-07-15 18:54:50.391 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][44684b89-ba24-478b-b339-f97b59959974] received connection request\n2025-07-15 18:54:50.392 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:54:50.550 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][44684b89-ba24-478b-b339-f97b59959974] socks forwarding established\n2025-07-15 18:54:50.582 [info] [command][15170b53-dd36-49bb-a79f-b0ae0adc678a] Process exited with code 0\n2025-07-15 18:54:50.582 [info] [command][15170b53-dd36-49bb-a79f-b0ae0adc678a] Socket close event received\n2025-07-15 18:54:50.727 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64227 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:54:50.728 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][44684b89-ba24-478b-b339-f97b59959974] socks connection closed\n2025-07-15 18:55:50.585 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:55:50.588 [info] [command][abb936be-6561-41ae-9f35-2b8f779d7e36] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""abb936be-6561-41ae-9f35-2b8f779d7e36""}\n2025-07-15 18:55:50.589 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][796ae8b6-4884-49ee-ae81-b4d7200421d2] received connection request\n2025-07-15 18:55:50.590 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:55:50.634 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][796ae8b6-4884-49ee-ae81-b4d7200421d2] socks forwarding established\n2025-07-15 18:55:50.796 [info] [command][abb936be-6561-41ae-9f35-2b8f779d7e36] Process exited with code 0\n2025-07-15 18:55:50.796 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][796ae8b6-4884-49ee-ae81-b4d7200421d2] socks connection closed\n2025-07-15 18:55:50.796 [info] [command][abb936be-6561-41ae-9f35-2b8f779d7e36] Socket close event received\n2025-07-15 18:55:50.823 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64267 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:56:50.796 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:56:50.798 [info] [command][08fb5453-7b73-4f61-b02f-f8bc873e9d91] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""08fb5453-7b73-4f61-b02f-f8bc873e9d91""}\n2025-07-15 18:56:50.799 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][9ad2147b-b735-44cb-9e5a-435291fddda8] received connection request\n2025-07-15 18:56:50.800 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 
18:56:50.854 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][9ad2147b-b735-44cb-9e5a-435291fddda8] socks forwarding established\n2025-07-15 18:56:50.968 [info] [command][08fb5453-7b73-4f61-b02f-f8bc873e9d91] Process exited with code 0\n2025-07-15 18:56:50.969 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][9ad2147b-b735-44cb-9e5a-435291fddda8] socks connection closed\n2025-07-15 18:56:50.969 [info] [command][08fb5453-7b73-4f61-b02f-f8bc873e9d91] Socket close event received\n2025-07-15 18:56:50.992 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64309 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:57:50.973 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:57:50.975 [info] [command][1b140e5a-f8cc-4358-9c54-2eddb5438a21] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""1b140e5a-f8cc-4358-9c54-2eddb5438a21""}\n2025-07-15 18:57:50.975 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][11b6a48d-42c5-4948-95b1-251a980c9991] received connection request\n2025-07-15 18:57:50.976 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:57:51.123 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][11b6a48d-42c5-4948-95b1-251a980c9991] socks forwarding established\n2025-07-15 18:57:51.278 [info] [command][1b140e5a-f8cc-4358-9c54-2eddb5438a21] Process exited with code 0\n2025-07-15 18:57:51.278 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][11b6a48d-42c5-4948-95b1-251a980c9991] socks connection closed\n2025-07-15 18:57:51.278 [info] [command][1b140e5a-f8cc-4358-9c54-2eddb5438a21] Socket close event received\n2025-07-15 18:57:51.301 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64346 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:58:51.284 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:58:51.287 [info] [command][70cf9d23-1473-4aa6-925e-29cb627a2db7] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""70cf9d23-1473-4aa6-925e-29cb627a2db7""}\n2025-07-15 18:58:51.287 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][5b28ee9b-88b2-4c2e-bb91-6a31672b9e97] received connection request\n2025-07-15 18:58:51.288 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:58:51.313 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][5b28ee9b-88b2-4c2e-bb91-6a31672b9e97] socks forwarding established\n2025-07-15 18:58:51.473 [info] [command][70cf9d23-1473-4aa6-925e-29cb627a2db7] Process exited with code 0\n2025-07-15 18:58:51.473 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][5b28ee9b-88b2-4c2e-bb91-6a31672b9e97] socks connection closed\n2025-07-15 18:58:51.473 [info] [command][70cf9d23-1473-4aa6-925e-29cb627a2db7] Socket close event received\n2025-07-15 18:58:51.497 [info] (ssh_tunnel) stderr: 
debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64382 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 18:59:51.475 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 18:59:51.477 [info] [command][a4da8ead-f2cd-415d-89ec-c61305e68adc] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""a4da8ead-f2cd-415d-89ec-c61305e68adc""}\n2025-07-15 18:59:51.478 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][f09bfd3e-e559-486a-b789-24f015cda081] received connection request\n2025-07-15 18:59:51.479 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 18:59:51.520 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][f09bfd3e-e559-486a-b789-24f015cda081] socks forwarding established\n2025-07-15 18:59:51.597 [info] [command][a4da8ead-f2cd-415d-89ec-c61305e68adc] Process exited with code 0\n2025-07-15 18:59:51.597 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][f09bfd3e-e559-486a-b789-24f015cda081] socks connection closed\n2025-07-15 18:59:51.598 [info] [command][a4da8ead-f2cd-415d-89ec-c61305e68adc] Socket close event received\n2025-07-15 18:59:51.672 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64409 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 19:00:51.603 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 19:00:51.606 [info] [command][18a586a0-39fd-4a3b-af0d-d387ec42a216] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""18a586a0-39fd-4a3b-af0d-d387ec42a216""}\n2025-07-15 19:00:51.606 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][33358b2a-094c-44e5-b0a4-ac2c04a91f74] received connection request\n2025-07-15 19:00:51.607 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 19:00:51.687 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][33358b2a-094c-44e5-b0a4-ac2c04a91f74] socks forwarding established\n2025-07-15 19:00:51.840 [info] [command][18a586a0-39fd-4a3b-af0d-d387ec42a216] Process exited with code 0\n2025-07-15 19:00:51.840 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][33358b2a-094c-44e5-b0a4-ac2c04a91f74] socks connection closed\n2025-07-15 19:00:51.840 [info] [command][18a586a0-39fd-4a3b-af0d-d387ec42a216] Socket close event received\n2025-07-15 19:00:51.864 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64446 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 19:01:51.845 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 19:01:51.849 [info] [command][b1967140-5cea-462d-8757-5c241da992e1] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""b1967140-5cea-462d-8757-5c241da992e1""}\n2025-07-15 19:01:51.850 [info] [forwarding][multiplex][127.0.0.1:58083 -> 
127.0.0.1:37279][ae0a2292-4e9a-4811-b236-0f16c87b1185] received connection request\n2025-07-15 19:01:51.851 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 19:01:51.875 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ae0a2292-4e9a-4811-b236-0f16c87b1185] socks forwarding established\n2025-07-15 19:01:51.905 [info] [command][b1967140-5cea-462d-8757-5c241da992e1] Process exited with code 0\n2025-07-15 19:01:51.905 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][ae0a2292-4e9a-4811-b236-0f16c87b1185] socks connection closed\n2025-07-15 19:01:51.905 [info] [command][b1967140-5cea-462d-8757-5c241da992e1] Socket close event received\n2025-07-15 19:01:51.929 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64489 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 19:02:51.907 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 19:02:51.909 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][921ef411-a41b-4a51-a10d-cefd6fd89bb4] received connection request\n2025-07-15 19:02:51.909 [info] [command][d19bb3c2-9856-417d-b249-be1ac67d6cfc] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""d19bb3c2-9856-417d-b249-be1ac67d6cfc""}\n2025-07-15 19:02:51.910 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 19:02:51.934 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][921ef411-a41b-4a51-a10d-cefd6fd89bb4] socks forwarding established\n2025-07-15 19:02:51.963 [info] [command][d19bb3c2-9856-417d-b249-be1ac67d6cfc] Process exited with code 0\n2025-07-15 19:02:51.963 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][921ef411-a41b-4a51-a10d-cefd6fd89bb4] socks connection closed\n2025-07-15 19:02:51.963 [info] [command][d19bb3c2-9856-417d-b249-be1ac67d6cfc] Socket close event received\n2025-07-15 19:02:51.988 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64531 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 19:03:51.968 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 19:03:51.969 [info] [command][3580d530-ac16-47a6-a528-7414f7df3300] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""3580d530-ac16-47a6-a528-7414f7df3300""}\n2025-07-15 19:03:51.970 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][21245d21-fb4f-4c79-b1af-b52f5f4f436c] received connection request\n2025-07-15 19:03:51.971 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 19:03:51.997 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][21245d21-fb4f-4c79-b1af-b52f5f4f436c] socks forwarding established\n2025-07-15 19:03:52.023 [info] [command][3580d530-ac16-47a6-a528-7414f7df3300] Process exited with code 0\n2025-07-15 19:03:52.023 [info] 
[forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][21245d21-fb4f-4c79-b1af-b52f5f4f436c] socks connection closed\n2025-07-15 19:03:52.023 [info] [command][3580d530-ac16-47a6-a528-7414f7df3300] Socket close event received\n2025-07-15 19:03:52.048 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64571 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 19:04:52.028 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 19:04:52.030 [info] [command][99515d7c-e8cf-4806-884d-be6e600bd431] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""99515d7c-e8cf-4806-884d-be6e600bd431""}\n2025-07-15 19:04:52.031 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][1cc24d59-f240-4016-a17a-de163c502555] received connection request\n2025-07-15 19:04:52.032 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 19:04:52.082 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][1cc24d59-f240-4016-a17a-de163c502555] socks forwarding established\n2025-07-15 19:04:52.111 [info] [command][99515d7c-e8cf-4806-884d-be6e600bd431] Process exited with code 0\n2025-07-15 19:04:52.112 [info] [command][99515d7c-e8cf-4806-884d-be6e600bd431] Socket close event received\n2025-07-15 19:04:52.112 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][1cc24d59-f240-4016-a17a-de163c502555] socks connection closed\n2025-07-15 19:04:52.136 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64608 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 19:05:52.113 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 19:05:52.115 [info] [command][a1c1286a-8569-4e52-8746-ae314152501b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""a1c1286a-8569-4e52-8746-ae314152501b""}\n2025-07-15 19:05:52.116 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][4d20a6d0-0f24-4a26-8457-66d34c23c3dc] received connection request\n2025-07-15 19:05:52.117 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 19:05:52.142 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][4d20a6d0-0f24-4a26-8457-66d34c23c3dc] socks forwarding established\n2025-07-15 19:05:52.170 [info] [command][a1c1286a-8569-4e52-8746-ae314152501b] Process exited with code 0\n2025-07-15 19:05:52.170 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][4d20a6d0-0f24-4a26-8457-66d34c23c3dc] socks connection closed\n2025-07-15 19:05:52.170 [info] [command][a1c1286a-8569-4e52-8746-ae314152501b] Socket close event received\n2025-07-15 19:05:52.197 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64653 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 19:06:52.175 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 19:06:52.177 [info] 
[command][0065735c-9292-4b67-8ca3-7ad9e10350c7] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""0065735c-9292-4b67-8ca3-7ad9e10350c7""}\n2025-07-15 19:06:52.177 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][219b6bcc-9ae0-4061-968d-cbcd68d6e6b5] received connection request\n2025-07-15 19:06:52.177 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 19:06:52.277 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][219b6bcc-9ae0-4061-968d-cbcd68d6e6b5] socks forwarding established\n2025-07-15 19:06:52.305 [info] [command][0065735c-9292-4b67-8ca3-7ad9e10350c7] Process exited with code 0\n2025-07-15 19:06:52.305 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][219b6bcc-9ae0-4061-968d-cbcd68d6e6b5] socks connection closed\n2025-07-15 19:06:52.305 [info] [command][0065735c-9292-4b67-8ca3-7ad9e10350c7] Socket close event received\n2025-07-15 19:06:52.336 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64717 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 19:07:52.310 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 19:07:52.310 [info] [command][e8e848f8-407e-4267-ba6c-bda8457ae00d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""e8e848f8-407e-4267-ba6c-bda8457ae00d""}\n2025-07-15 19:07:52.311 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][3cc8f9a2-4874-4efa-b342-563cd5557e3b] received connection request\n2025-07-15 19:07:52.311 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 19:07:52.334 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][3cc8f9a2-4874-4efa-b342-563cd5557e3b] socks forwarding established\n2025-07-15 19:07:52.360 [info] [command][e8e848f8-407e-4267-ba6c-bda8457ae00d] Process exited with code 0\n2025-07-15 19:07:52.360 [info] [command][e8e848f8-407e-4267-ba6c-bda8457ae00d] Socket close event received\n2025-07-15 19:07:52.360 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][3cc8f9a2-4874-4efa-b342-563cd5557e3b] socks connection closed\n2025-07-15 19:07:52.383 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64747 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 19:08:52.362 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 19:08:52.362 [info] [command][7c08ae9f-e8e7-4ace-8ad6-77cec95e2245] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""7c08ae9f-e8e7-4ace-8ad6-77cec95e2245""}\n2025-07-15 19:08:52.363 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][597644b3-9951-45a7-8081-3fcc0c8e19fb] received connection request\n2025-07-15 19:08:52.363 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 
19:08:52.414 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][597644b3-9951-45a7-8081-3fcc0c8e19fb] socks forwarding established\n2025-07-15 19:08:52.443 [info] [command][7c08ae9f-e8e7-4ace-8ad6-77cec95e2245] Process exited with code 0\n2025-07-15 19:08:52.443 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][597644b3-9951-45a7-8081-3fcc0c8e19fb] socks connection closed\n2025-07-15 19:08:52.443 [info] [command][7c08ae9f-e8e7-4ace-8ad6-77cec95e2245] Socket close event received\n2025-07-15 19:08:52.468 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64782 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 19:09:52.444 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 19:09:52.447 [info] [command][68d2ac22-b6f7-4dbd-802a-d5ee5f5e45f1] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""68d2ac22-b6f7-4dbd-802a-d5ee5f5e45f1""}\n2025-07-15 19:09:52.448 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][189fc69a-94ac-42ac-b1ca-419eb0286c54] received connection request\n2025-07-15 19:09:52.448 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 19:09:52.510 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][189fc69a-94ac-42ac-b1ca-419eb0286c54] socks forwarding established\n2025-07-15 19:09:52.670 [info] [command][68d2ac22-b6f7-4dbd-802a-d5ee5f5e45f1] Process exited with code 0\n2025-07-15 19:09:52.670 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][189fc69a-94ac-42ac-b1ca-419eb0286c54] socks connection closed\n2025-07-15 19:09:52.671 [info] [command][68d2ac22-b6f7-4dbd-802a-d5ee5f5e45f1] Socket close event received\n2025-07-15 19:09:52.694 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64804 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 19:10:52.676 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 19:10:52.679 [info] [command][6b26140a-f89d-4688-b4ab-53d79b56c65c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""6b26140a-f89d-4688-b4ab-53d79b56c65c""}\n2025-07-15 19:10:52.679 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][5917bd4a-bf20-4f9a-8800-e46dfcfa5808] received connection request\n2025-07-15 19:10:52.680 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 19:10:52.811 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][5917bd4a-bf20-4f9a-8800-e46dfcfa5808] socks forwarding established\n2025-07-15 19:10:52.839 [info] [command][6b26140a-f89d-4688-b4ab-53d79b56c65c] Process exited with code 0\n2025-07-15 19:10:52.839 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][5917bd4a-bf20-4f9a-8800-e46dfcfa5808] socks connection closed\n2025-07-15 19:10:52.839 [info] [command][6b26140a-f89d-4688-b4ab-53d79b56c65c] Socket close event received\n2025-07-15 19:10:52.862 [info] (ssh_tunnel) stderr: 
debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64840 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 19:11:52.845 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 19:11:52.848 [info] [command][3d991ea6-1a29-437e-97b3-471b2c7c50c9] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""3d991ea6-1a29-437e-97b3-471b2c7c50c9""}\n2025-07-15 19:11:52.849 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][f6ea104e-f522-43d4-9b19-a5ec59f568cf] received connection request\n2025-07-15 19:11:52.851 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 19:11:52.943 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][f6ea104e-f522-43d4-9b19-a5ec59f568cf] socks forwarding established\n2025-07-15 19:11:53.012 [info] [command][3d991ea6-1a29-437e-97b3-471b2c7c50c9] Process exited with code 0\n2025-07-15 19:11:53.012 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][f6ea104e-f522-43d4-9b19-a5ec59f568cf] socks connection closed\n2025-07-15 19:11:53.012 [info] [command][3d991ea6-1a29-437e-97b3-471b2c7c50c9] Socket close event received\n2025-07-15 19:11:53.082 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64901 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 19:12:53.016 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 19:12:53.018 [info] [command][11647f75-7c95-4951-9e5c-a8a178f6a927] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""11647f75-7c95-4951-9e5c-a8a178f6a927""}\n2025-07-15 19:12:53.019 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:37279][31ff6df4-8baf-431e-a4a8-e342c5c6b546] received connection request\n2025-07-15 19:12:53.019 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 19:12:53.094 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][31ff6df4-8baf-431e-a4a8-e342c5c6b546] socks forwarding established\n2025-07-15 19:12:53.122 [info] [command][11647f75-7c95-4951-9e5c-a8a178f6a927] Process exited with code 0\n2025-07-15 19:12:53.122 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][31ff6df4-8baf-431e-a4a8-e342c5c6b546] socks connection closed\n2025-07-15 19:12:53.122 [info] [command][11647f75-7c95-4951-9e5c-a8a178f6a927] Socket close event received\n2025-07-15 19:12:53.146 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 64957 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 19:13:53.128 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 19:13:53.131 [info] [command][ec53ed45-21fc-48a3-8508-552ddf02274e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""ec53ed45-21fc-48a3-8508-552ddf02274e""}\n2025-07-15 19:13:53.132 [info] [forwarding][multiplex][127.0.0.1:58083 -> 
127.0.0.1:37279][3d02b387-1dd3-4560-b36e-99fe2b7581a1] received connection request\n2025-07-15 19:13:53.132 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 19:13:53.156 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][3d02b387-1dd3-4560-b36e-99fe2b7581a1] socks forwarding established\n2025-07-15 19:13:53.183 [info] [command][ec53ed45-21fc-48a3-8508-552ddf02274e] Process exited with code 0\n2025-07-15 19:13:53.183 [info] [command][ec53ed45-21fc-48a3-8508-552ddf02274e] Socket close event received\n2025-07-15 19:13:53.184 [info] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:58080 -> 127.0.0.1:37279][3d02b387-1dd3-4560-b36e-99fe2b7581a1] socks connection closed\n2025-07-15 19:13:53.207 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58080 for 127.0.0.1 port 37279, connect from 127.0.0.1 port 65000 to 127.0.0.1 port 58080, nchannels 6\n\n2025-07-15 19:13:57.573 [info] Resolving ssh remote authority 'login.haicore.berlin' (Unparsed 'ssh-remote+7b22686f73744e616d65223a226c6f67696e2e686169636f72652e6265726c696e227d') (attempt #2)\n2025-07-15 19:13:57.573 [info] Received re-connection request; checking to see if existing connection is still valid\n2025-07-15 19:13:57.746 [info] [forwarding][code][127.0.0.1:58082 -> 127.0.0.1:33373][4f65fccf-3612-4ccf-b0ff-f9b2a58693db] received connection request\n2025-07-15 19:13:57.851 [info] [forwarding][code][127.0.0.1:58082 -> 127.0.0.1:33373][7ddb875e-e89e-4377-becf-13dd51ffec58] received connection request\n2025-07-15 19:13:57.851 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\n\n2025-07-15 19:13:57.851 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\ndebug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 6: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 19:14:00.580 [error] Unexpected error while checking if existing connection is still valid Timeout while checking if existing connection is still valid\n2025-07-15 19:14:00.581 [error] Failed to connect to Cursor server at http://127.0.0.1:58082, attempt 1 of 3 This operation was aborted\n2025-07-15 19:14:00.584 [info] [forwarding][code][127.0.0.1:58082 -> 127.0.0.1:33373][c2246823-ad1f-4c04-b1e4-90e08afac9a0] received connection request\n2025-07-15 19:14:00.585 [info] (ssh_tunnel) stderr: debug1: Connection to port 58080 forwarding to socks port 0 requested.\ndebug1: channel 7: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-15 19:14:00.687 [info] Terminating existing SSH process with pid: 51223\n2025-07-15 19:14:00.687 [info] Using configured platform linux for remote host login.haicore.berlin\n2025-07-15 19:14:00.688 [info] (ssh_tunnel): exit: code=null signal=SIGKILL\n2025-07-15 19:14:00.689 [error] [forwarding][code][127.0.0.1:58082 -> 127.0.0.1:58080 -> 127.0.0.1:33373][4f65fccf-3612-4ccf-b0ff-f9b2a58693db] error while creating socks forwarding Socket closed\n2025-07-15 19:14:00.689 [error] [forwarding][code][127.0.0.1:58082 -> 127.0.0.1:58080 -> 127.0.0.1:33373][7ddb875e-e89e-4377-becf-13dd51ffec58] error while creating socks forwarding Socket closed\n2025-07-15 19:14:00.689 [error] [forwarding][code][127.0.0.1:58082 -> 127.0.0.1:58080 -> 127.0.0.1:33373][c2246823-ad1f-4c04-b1e4-90e08afac9a0] error while 
creating socks forwarding Socket closed\n2025-07-15 19:14:00.689 [info] [forwarding][code][127.0.0.1:58082 -> 127.0.0.1:58080 -> 127.0.0.1:33373][aa80c3b4-4bb6-4c78-bbd1-5b24883d7017] socks connection closed\n2025-07-15 19:14:00.689 [info] [forwarding][code][127.0.0.1:58082 -> 127.0.0.1:58080 -> 127.0.0.1:33373][588813ae-6e64-458d-b1a5-35c8f9f3d47b] socks connection closed\n2025-07-15 19:14:00.689 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-XEb2su/socket.sock\n2025-07-15 19:14:00.692 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_69360.sh"" | ssh -v -T -D 65015 login.haicore.berlin bash --login -c bash\n2025-07-15 19:14:00.692 [info] Started installation script. Waiting for it to finish...\n2025-07-15 19:14:00.692 [info] Waiting for server to install via process(56270)...\n2025-07-15 19:14:00.699 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-15 19:14:00.699 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-15 19:14:00.700 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\ndebug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-15 19:14:00.701 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-15 19:14:00.703 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-15 19:14:00.703 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-15 19:14:00.704 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-15 19:14:00.704 [info] Retrying connection in 5 seconds...\n2025-07-15 19:14:01.586 [error] Failed to connect to Cursor server at http://127.0.0.1:58082, attempt 2 of 3 This operation was aborted\n2025-07-15 19:14:02.594 [error] Failed to connect to Cursor server at http://127.0.0.1:58082, attempt 3 of 3 This operation was aborted\n2025-07-15 19:14:02.595 [error] Could not re-use existing SOCKS connection; attempting to re-establish SOCKS forwarding Failed to connect to Cursor code server. Ensure that your remote host ssh config has 'AllowTcpForwarding yes' in '/etc/ssh/sshd_config'. Please check the logs and try reinstalling the server.\n2025-07-15 19:14:02.595 [error] Could not re-establish SOCKS forwarding; re-establishing entire SSH connection Remote server is not set\n2025-07-15 19:14:21.570 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_69360.sh\n2025-07-15 19:14:21.577 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. 
Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-XEb2su/socket.sock\n2025-07-15 19:14:21.584 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_75339.sh"" | ssh -v -T -D 65016 login.haicore.berlin bash --login -c bash\n2025-07-15 19:14:21.584 [info] Started installation script. Waiting for it to finish...\n2025-07-15 19:14:21.584 [info] Waiting for server to install via process(56279)...\n2025-07-15 19:14:21.604 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-15 19:14:21.604 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-15 19:14:21.604 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-15 19:14:21.604 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-15 19:14:21.605 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-15 19:14:21.607 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-15 19:14:21.608 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-15 19:14:21.608 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-15 19:14:21.608 [info] Retrying connection in 5 seconds...\n2025-07-15 19:14:26.612 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_75339.sh\n2025-07-15 19:14:26.613 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-XEb2su/socket.sock\n2025-07-15 19:14:26.619 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_3133.sh"" | ssh -v -T -D 65018 login.haicore.berlin bash --login -c bash\n2025-07-15 19:14:26.620 [info] Started installation script. 
Waiting for it to finish...\n2025-07-15 19:14:26.620 [info] Waiting for server to install via process(56287)...\n2025-07-15 19:14:26.642 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-15 19:14:26.642 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-15 19:14:26.642 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-15 19:14:26.642 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-15 19:14:26.643 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-15 19:14:26.644 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-15 19:14:26.644 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-15 19:14:26.644 [info] Retrying connection in 5 seconds...\n2025-07-15 19:14:31.650 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_3133.sh\n2025-07-15 19:14:31.653 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-XEb2su/socket.sock\n2025-07-15 19:14:31.656 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_82005.sh"" | ssh -v -T -D 65021 login.haicore.berlin bash --login -c bash\n2025-07-15 19:14:31.657 [info] Started installation script. Waiting for it to finish...\n2025-07-15 19:14:31.657 [info] Waiting for server to install via process(56296)...\n2025-07-15 19:14:31.681 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\n\n2025-07-15 19:14:31.681 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-15 19:14:31.682 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-15 19:14:31.682 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-15 19:14:31.682 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-15 19:14:31.685 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-15 19:14:31.685 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-15 19:14:31.686 [error] Error installing server: Failed to install the Cursor Server. 
Please check the logs for more details.\n2025-07-15 19:14:31.686 [info] Retrying connection in 5 seconds...\n2025-07-15 19:14:39.401 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_82005.sh\n2025-07-15 19:14:39.402 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-XEb2su/socket.sock\n2025-07-15 19:14:39.407 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_43308.sh"" | ssh -v -T -D 65023 login.haicore.berlin bash --login -c bash\n2025-07-15 19:14:39.407 [info] Started installation script. Waiting for it to finish...\n2025-07-15 19:14:39.407 [info] Waiting for server to install via process(56303)...\n2025-07-15 19:14:39.430 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-15 19:14:39.430 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-15 19:14:39.430 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-15 19:14:39.430 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-15 19:14:39.431 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-15 19:14:39.433 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-15 19:14:39.434 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-15 19:14:39.434 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-15 19:14:39.434 [info] Retrying connection in 5 seconds...\n2025-07-15 19:14:44.444 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_43308.sh\n2025-07-15 19:14:44.446 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-XEb2su/socket.sock\n2025-07-15 19:14:44.453 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_69322.sh"" | ssh -v -T -D 65028 login.haicore.berlin bash --login -c bash\n2025-07-15 19:14:44.453 [info] Started installation script. 
Waiting for it to finish...\n2025-07-15 19:14:44.453 [info] Waiting for server to install via process(56311)...\n2025-07-15 19:14:44.469 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\n\n2025-07-15 19:14:44.469 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-15 19:14:44.470 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-15 19:14:44.470 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-15 19:14:44.470 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-15 19:14:44.472 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-15 19:14:44.473 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-15 19:14:44.473 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-15 19:14:44.473 [info] Retrying connection in 5 seconds...\n2025-07-15 19:14:49.483 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_69322.sh\n2025-07-15 19:14:49.488 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-XEb2su/socket.sock\n2025-07-15 19:14:49.492 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_44608.sh"" | ssh -v -T -D 65030 login.haicore.berlin bash --login -c bash\n2025-07-15 19:14:49.492 [info] Started installation script. Waiting for it to finish...\n2025-07-15 19:14:49.492 [info] Waiting for server to install via process(56318)...\n2025-07-15 19:14:49.507 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\n\n2025-07-15 19:14:49.507 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-15 19:14:49.507 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-15 19:14:49.507 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-15 19:14:49.508 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-15 19:14:49.509 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-15 19:14:49.510 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-15 19:14:49.510 [error] Error installing server: Failed to install the Cursor Server. 
Please check the logs for more details.\n2025-07-15 19:14:49.510 [info] Retrying connection in 5 seconds...\n2025-07-15 19:15:37.922 [info] [remote-ssh] Pinging remote server on port 58083\n2025-07-15 19:15:37.922 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_44608.sh\n2025-07-15 19:15:37.923 [error] [forwarding][multiplex][127.0.0.1:58083 -> 127.0.0.1:undefined][e987a2a7-ca35-49e0-b9a2-3c8a68fc4b7b] remote server not configured\n2025-07-15 19:15:37.923 [info] [command][0b74d6ee-2c2a-4fff-b92e-1a20b4702ffc] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""9c7afd2b-b7f2-421b-8a62-3e0f8de9f546"",""id"":""0b74d6ee-2c2a-4fff-b92e-1a20b4702ffc""}\n2025-07-15 19:15:37.926 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-XEb2su/socket.sock\n2025-07-15 19:15:37.927 [error] [command][0b74d6ee-2c2a-4fff-b92e-1a20b4702ffc] Socket error: Error: read ECONNRESET\n2025-07-15 19:15:37.927 [info] [command][0b74d6ee-2c2a-4fff-b92e-1a20b4702ffc] Socket close event received\n2025-07-15 19:15:37.931 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_59894.sh"" | ssh -v -T -D 65034 login.haicore.berlin bash --login -c bash\n2025-07-15 19:15:37.931 [info] Started installation script. Waiting for it to finish...\n2025-07-15 19:15:37.931 [info] Waiting for server to install via process(56328)...\n2025-07-15 19:15:37.941 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\n\n2025-07-15 19:15:37.941 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\n\n2025-07-15 19:15:37.942 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-15 19:15:37.942 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-15 19:15:37.943 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-15 19:15:37.943 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-15 19:15:37.946 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-15 19:15:37.946 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-15 19:15:37.947 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-15 19:15:37.947 [info] Retrying connection in 5 seconds...\n2025-07-15 19:15:42.957 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_59894.sh\n2025-07-15 19:15:42.961 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. 
Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-XEb2su/socket.sock\n2025-07-15 19:15:42.971 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_94948.sh"" | ssh -v -T -D 65035 login.haicore.berlin bash --login -c bash\n2025-07-15 19:15:42.971 [info] Started installation script. Waiting for it to finish...\n2025-07-15 19:15:42.971 [info] Waiting for server to install via process(56336)...\n2025-07-15 19:15:42.982 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\n\n2025-07-15 19:15:42.982 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-15 19:15:42.982 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-15 19:15:42.982 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-15 19:15:42.982 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-15 19:15:42.984 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-15 19:15:42.985 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-15 19:15:42.985 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-15 19:15:42.985 [info] Retrying connection in 5 seconds...\n2025-07-15 19:25:48.696 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_94948.sh\n2025-07-15 19:25:48.697 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-XEb2su/socket.sock\n2025-07-15 19:25:48.699 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_96651.sh"" | ssh -v -T -D 65038 login.haicore.berlin bash --login -c bash\n2025-07-15 19:25:48.699 [info] Started installation script. 
Waiting for it to finish...\n2025-07-15 19:25:48.699 [info] Waiting for server to install via process(56349)...\n2025-07-15 19:25:48.708 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-15 19:25:48.708 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-15 19:25:48.708 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-15 19:25:48.708 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-15 19:25:48.708 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-15 19:25:48.709 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-15 19:25:48.709 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-15 19:25:48.710 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-15 19:25:48.710 [error] Failed to connect after 10 attempts: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-15 19:25:48.710 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_96651.sh\n2025-07-15 19:25:48.710 [error] Error resolving SSH authority Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-16 12:13:59.201 [info] Resolving ssh remote authority 'login.haicore.berlin' (Unparsed 'ssh-remote+7b22686f73744e616d65223a226c6f67696e2e686169636f72652e6265726c696e227d') (attempt #1)\n2025-07-16 12:13:59.211 [info] SSH askpass server listening on /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-POjzgS/socket.sock\n2025-07-16 12:13:59.212 [info] Using configured platform linux for remote host login.haicore.berlin\n2025-07-16 12:13:59.214 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-POjzgS/socket.sock\n2025-07-16 12:13:59.216 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_59363.sh"" | ssh -v -T -D 57955 login.haicore.berlin bash --login -c bash\n2025-07-16 12:13:59.216 [info] Started installation script. 
Waiting for it to finish...\n2025-07-16 12:13:59.216 [info] Waiting for server to install via process(61892)...\n2025-07-16 12:13:59.221 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-16 12:13:59.221 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-16 12:13:59.221 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-16 12:13:59.221 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-16 12:13:59.222 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-16 12:13:59.247 [info] (ssh_tunnel) stderr: debug1: Connection established.\n\n2025-07-16 12:13:59.247 [info] (ssh_tunnel) stderr: debug1: identity file /Users/franzsrambical/.ssh/id_ed25519 type 3\n\n2025-07-16 12:13:59.247 [info] (ssh_tunnel) stderr: debug1: identity file /Users/franzsrambical/.ssh/id_ed25519-cert type -1\ndebug1: Local version string SSH-2.0-OpenSSH_9.9\n\n2025-07-16 12:13:59.276 [info] (ssh_tunnel) stderr: debug1: Remote protocol version 2.0, remote software version OpenSSH_8.7\ndebug1: compat_banner: match: OpenSSH_8.7 pat OpenSSH* compat 0x04000000\ndebug1: Authenticating to login.haicore.berlin:22 as 'franz.srambical'\n\n2025-07-16 12:13:59.276 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /Users/franzsrambical/.ssh/known_hosts2: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory\n\n2025-07-16 12:13:59.277 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEXINIT sent\n\n2025-07-16 12:13:59.300 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEXINIT received\ndebug1: kex: algorithm: ecdh-sha2-nistp256\ndebug1: kex: host key algorithm: ssh-ed25519\ndebug1: kex: server->client cipher: aes128-gcm@openssh.com MAC: compression: none\n\n2025-07-16 12:13:59.300 [info] (ssh_tunnel) stderr: debug1: kex: client->server cipher: aes128-gcm@openssh.com MAC: compression: none\n\n2025-07-16 12:13:59.300 [info] (ssh_tunnel) stderr: debug1: expecting SSH2_MSG_KEX_ECDH_REPLY\n\n2025-07-16 12:13:59.324 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEX_ECDH_REPLY received\ndebug1: Server host key: ssh-ed25519 SHA256:3/BGZ1UNXR9SufKdsZVtx4Yd+kZTnZzSvRH0l6rtbvo\n\n2025-07-16 12:13:59.325 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /Users/franzsrambical/.ssh/known_hosts2: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory\n\n2025-07-16 12:13:59.325 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory\ndebug1: Host 'login.haicore.berlin' is known and matches the ED25519 host key.\ndebug1: Found key in /Users/franzsrambical/.ssh/known_hosts:17\n\n2025-07-16 12:13:59.328 [info] (ssh_tunnel) stderr: debug1: ssh_packet_send2_wrapped: resetting send seqnr 3\ndebug1: rekey out after 4294967296 blocks\ndebug1: SSH2_MSG_NEWKEYS sent\n\n2025-07-16 12:13:59.328 [info] (ssh_tunnel) stderr: debug1: expecting SSH2_MSG_NEWKEYS\ndebug1: ssh_packet_read_poll2: resetting read seqnr 
3\ndebug1: SSH2_MSG_NEWKEYS received\ndebug1: rekey in after 4294967296 blocks\ndebug1: SSH2_MSG_EXT_INFO received\n\n2025-07-16 12:13:59.328 [info] (ssh_tunnel) stderr: debug1: kex_ext_info_client_parse: server-sig-algs=\n\n2025-07-16 12:13:59.416 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_SERVICE_ACCEPT received\n\n2025-07-16 12:13:59.481 [info] (ssh_tunnel) stderr: debug1: Authentications that can continue: publickey,gssapi-keyex,gssapi-with-mic,password,keyboard-interactive\ndebug1: Next authentication method: publickey\n\n2025-07-16 12:13:59.484 [info] (ssh_tunnel) stderr: debug1: get_agent_identities: bound agent to hostkey\n\n2025-07-16 12:13:59.485 [info] (ssh_tunnel) stderr: debug1: get_agent_identities: ssh_fetch_identitylist: agent contains no identities\ndebug1: Will attempt key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\ndebug1: Offering public key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\n\n2025-07-16 12:13:59.521 [info] (ssh_tunnel) stderr: debug1: Authentications that can continue: publickey,gssapi-keyex,gssapi-with-mic,password,keyboard-interactive\ndebug1: Next authentication method: keyboard-interactive\n\n2025-07-16 12:13:59.588 [info] (ssh_tunnel) stderr: debug1: read_passphrase: requested to askpass\n\n2025-07-16 12:14:02.724 [info] Askpass server received request: POST /\n2025-07-16 12:14:02.725 [info] Askpass server received request body: {""request"":""(franz.srambical@login.haicore.berlin) Password: ""}\n2025-07-16 12:14:02.725 [info] Received SSH askpass request: (franz.srambical@login.haicore.berlin) Password: \n2025-07-16 12:14:13.637 [error] Password authentication cancelled\n2025-07-16 12:14:13.640 [info] (ssh_tunnel) stderr: Server returned status code: 500\n\n2025-07-16 12:14:15.799 [info] (ssh_tunnel) stderr: debug1: Authentications that can continue: publickey,gssapi-keyex,gssapi-with-mic,password,keyboard-interactive\n\n2025-07-16 12:14:15.862 [info] (ssh_tunnel) stderr: debug1: read_passphrase: requested to askpass\n\n2025-07-16 12:14:15.945 [info] Askpass server received request: POST /\n2025-07-16 12:14:15.946 [info] Askpass server received request body: {""request"":""(franz.srambical@login.haicore.berlin) Password: ""}\n2025-07-16 12:14:15.946 [info] Received SSH askpass request: (franz.srambical@login.haicore.berlin) Password: \n2025-07-16 12:14:21.275 [error] Password authentication cancelled\n2025-07-16 12:14:21.279 [info] (ssh_tunnel) stderr: Server returned status code: 500\n\n2025-07-16 12:14:23.556 [info] (ssh_tunnel) stderr: debug1: Authentications that can continue: publickey,gssapi-keyex,gssapi-with-mic,password,keyboard-interactive\n\n2025-07-16 12:14:23.612 [info] (ssh_tunnel) stderr: debug1: read_passphrase: requested to askpass\n\n2025-07-16 12:14:23.704 [info] Askpass server received request: POST /\n2025-07-16 12:14:23.704 [info] Askpass server received request body: {""request"":""(franz.srambical@login.haicore.berlin) Password: ""}\n2025-07-16 12:14:23.704 [info] Received SSH askpass request: (franz.srambical@login.haicore.berlin) Password: \n2025-07-16 12:14:24.805 [error] Password authentication cancelled\n2025-07-16 12:14:24.807 [info] (ssh_tunnel) stderr: Server returned status code: 500\n\n2025-07-16 12:14:26.907 [info] Resolving ssh remote authority 'login.haicore.berlin' (Unparsed 'ssh-remote+7b22686f73744e616d65223a226c6f67696e2e686169636f72652e6265726c696e227d') (attempt #1)\n2025-07-16 
12:14:26.918 [info] SSH askpass server listening on /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-2ySBKB/socket.sock\n2025-07-16 12:14:26.919 [info] Using configured platform linux for remote host login.haicore.berlin\n2025-07-16 12:14:26.919 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-2ySBKB/socket.sock\n2025-07-16 12:14:26.922 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_51705.sh"" | ssh -v -T -D 57974 login.haicore.berlin bash --login -c bash\n2025-07-16 12:14:26.922 [info] Started installation script. Waiting for it to finish...\n2025-07-16 12:14:26.922 [info] Waiting for server to install via process(61926)...\n2025-07-16 12:14:26.927 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\n\n2025-07-16 12:14:26.927 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-16 12:14:26.927 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-16 12:14:26.927 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-16 12:14:26.927 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-16 12:14:26.989 [info] (ssh_tunnel) stderr: debug1: Connection established.\n\n2025-07-16 12:14:26.989 [info] (ssh_tunnel) stderr: debug1: identity file /Users/franzsrambical/.ssh/id_ed25519 type 3\ndebug1: identity file /Users/franzsrambical/.ssh/id_ed25519-cert type -1\ndebug1: Local version string SSH-2.0-OpenSSH_9.9\n\n2025-07-16 12:14:27.065 [info] (ssh_tunnel) stderr: debug1: Remote protocol version 2.0, remote software version OpenSSH_8.7\ndebug1: compat_banner: match: OpenSSH_8.7 pat OpenSSH* compat 0x04000000\ndebug1: Authenticating to login.haicore.berlin:22 as 'franz.srambical'\n\n2025-07-16 12:14:27.066 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /Users/franzsrambical/.ssh/known_hosts2: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory\n\n2025-07-16 12:14:27.066 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEXINIT sent\n\n2025-07-16 12:14:27.089 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEXINIT received\ndebug1: kex: algorithm: ecdh-sha2-nistp256\ndebug1: kex: host key algorithm: ssh-ed25519\ndebug1: kex: server->client cipher: aes128-gcm@openssh.com MAC: compression: none\ndebug1: kex: client->server cipher: aes128-gcm@openssh.com MAC: compression: none\n\n2025-07-16 12:14:27.089 [info] (ssh_tunnel) stderr: debug1: expecting SSH2_MSG_KEX_ECDH_REPLY\n\n2025-07-16 12:14:27.115 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEX_ECDH_REPLY received\ndebug1: Server host key: ssh-ed25519 SHA256:3/BGZ1UNXR9SufKdsZVtx4Yd+kZTnZzSvRH0l6rtbvo\n\n2025-07-16 12:14:27.116 [info] (ssh_tunnel) stderr: 
debug1: load_hostkeys: fopen /Users/franzsrambical/.ssh/known_hosts2: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory\n\n2025-07-16 12:14:27.116 [info] (ssh_tunnel) stderr: debug1: Host 'login.haicore.berlin' is known and matches the ED25519 host key.\ndebug1: Found key in /Users/franzsrambical/.ssh/known_hosts:17\n\n2025-07-16 12:14:27.119 [info] (ssh_tunnel) stderr: debug1: ssh_packet_send2_wrapped: resetting send seqnr 3\ndebug1: rekey out after 4294967296 blocks\ndebug1: SSH2_MSG_NEWKEYS sent\n\n2025-07-16 12:14:27.119 [info] (ssh_tunnel) stderr: debug1: expecting SSH2_MSG_NEWKEYS\ndebug1: ssh_packet_read_poll2: resetting read seqnr 3\ndebug1: SSH2_MSG_NEWKEYS received\ndebug1: rekey in after 4294967296 blocks\ndebug1: SSH2_MSG_EXT_INFO received\ndebug1: kex_ext_info_client_parse: server-sig-algs=\n\n2025-07-16 12:14:27.208 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_SERVICE_ACCEPT received\n\n2025-07-16 12:14:27.239 [info] (ssh_tunnel) stderr: debug1: Authentications that can continue: publickey,gssapi-keyex,gssapi-with-mic,password,keyboard-interactive\ndebug1: Next authentication method: publickey\n\n2025-07-16 12:14:27.244 [info] (ssh_tunnel) stderr: debug1: get_agent_identities: bound agent to hostkey\n\n2025-07-16 12:14:27.244 [info] (ssh_tunnel) stderr: debug1: get_agent_identities: ssh_fetch_identitylist: agent contains no identities\ndebug1: Will attempt key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\ndebug1: Offering public key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\n\n2025-07-16 12:14:27.283 [info] (ssh_tunnel) stderr: debug1: Server accepts key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\n\n2025-07-16 12:14:27.351 [info] (ssh_tunnel) stderr: Authenticated to login.haicore.berlin ([141.80.150.4]:22) using ""publickey"".\ndebug1: Local connections to LOCALHOST:57974 forwarded to remote address socks:0\n\n2025-07-16 12:14:27.351 [info] (ssh_tunnel) stderr: debug1: Local forwarding listening on ::1 port 57974.\n\n2025-07-16 12:14:27.351 [info] (ssh_tunnel) stderr: debug1: channel 0: new port-listener [port listener] (inactive timeout: 0)\ndebug1: Local forwarding listening on 127.0.0.1 port 57974.\ndebug1: channel 1: new port-listener [port listener] (inactive timeout: 0)\ndebug1: channel 2: new session [client-session] (inactive timeout: 0)\n\n2025-07-16 12:14:27.351 [info] (ssh_tunnel) stderr: debug1: Requesting no-more-sessions@openssh.com\ndebug1: Entering interactive session.\ndebug1: pledge: filesystem\n\n2025-07-16 12:14:27.527 [info] (ssh_tunnel) stderr: debug1: client_input_global_request: rtype hostkeys-00@openssh.com want_reply 0\n\n2025-07-16 12:14:27.529 [info] (ssh_tunnel) stderr: debug1: client_input_hostkeys: searching /Users/franzsrambical/.ssh/known_hosts for login.haicore.berlin / (none)\n\n2025-07-16 12:14:27.537 [info] (ssh_tunnel) stderr: debug1: client_input_hostkeys: searching /Users/franzsrambical/.ssh/known_hosts2 for login.haicore.berlin / (none)\ndebug1: client_input_hostkeys: hostkeys file /Users/franzsrambical/.ssh/known_hosts2 does not exist\ndebug1: client_input_hostkeys: no new or deprecated keys from server\n\n2025-07-16 12:14:27.538 [info] (ssh_tunnel) stderr: debug1: Remote: 
/home/franz.srambical/.ssh/authorized_keys:1: key options: agent-forwarding port-forwarding pty user-rc x11-forwarding\n\n2025-07-16 12:14:27.551 [info] (ssh_tunnel) stderr: debug1: Remote: /home/franz.srambical/.ssh/authorized_keys:1: key options: agent-forwarding port-forwarding pty user-rc x11-forwarding\ndebug1: Sending environment.\ndebug1: Sending command: bash --login -c bash\ndebug1: pledge: network\n\n2025-07-16 12:14:27.944 [info] (ssh_tunnel) stdout: Using TMP_DIR: /run/user/961800067\n\n2025-07-16 12:14:27.981 [info] (ssh_tunnel) stdout: Locking /run/user/961800067/cursor-remote-lock.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-16 12:14:27.985 [info] (ssh_tunnel) stdout: Server script already installed in /home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/bin/cursor-server\nChecking node executable\n\n2025-07-16 12:14:28.131 [info] (ssh_tunnel) stdout: v20.18.2\n\n2025-07-16 12:14:28.135 [info] (ssh_tunnel) stdout: Checking for running multiplex server: /home/franz.srambical/.cursor-server/bin/multiplex-server/45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15.js\n\n2025-07-16 12:14:28.153 [info] (ssh_tunnel) stdout: Running multiplex server: \n\n2025-07-16 12:14:28.155 [info] (ssh_tunnel) stdout: Creating multiplex server token file /run/user/961800067/cursor-remote-multiplex.token.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\n\n2025-07-16 12:14:28.157 [info] (ssh_tunnel) stdout: Creating directory for multiplex server: /home/franz.srambical/.cursor-server/bin/multiplex-server\n\n2025-07-16 12:14:28.158 [info] (ssh_tunnel) stdout: Writing multiplex server script to /home/franz.srambical/.cursor-server/bin/multiplex-server/45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15.js\n\n2025-07-16 12:14:28.164 [info] (ssh_tunnel) stdout: Starting multiplex server: /home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/node /home/franz.srambical/.cursor-server/bin/multiplex-server/45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15.js de7d54cc-97ec-4acb-a7dc-909a8230ad14\n\n2025-07-16 12:14:28.165 [info] (ssh_tunnel) stdout: Multiplex server started with PID 3478950 and wrote pid to file /run/user/961800067/cursor-remote-multiplex.pid.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\nReading multiplex server token file /run/user/961800067/cursor-remote-multiplex.token.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\nMultiplex server token file found\n\n2025-07-16 12:14:28.167 [info] (ssh_tunnel) stdout: Reading multiplex server log file /run/user/961800067/cursor-remote-multiplex.log.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\n\n2025-07-16 12:14:28.673 [info] (ssh_tunnel) stdout: Checking for code servers\n\n2025-07-16 12:14:28.691 [info] (ssh_tunnel) stdout: Code server script is not running\nCreating code server token file /run/user/961800067/cursor-remote-code.token.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-16 12:14:28.692 [info] (ssh_tunnel) stdout: Starting code server script /home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/bin/cursor-server --start-server --host=127.0.0.1 --port 0 --connection-token-file /run/user/961800067/cursor-remote-code.token.c403edc4db82e26fa41a0903d75ac6d0 --telemetry-level off --enable-remote-auto-shutdown 
--accept-server-license-terms &> /run/user/961800067/cursor-remote-code.log.c403edc4db82e26fa41a0903d75ac6d0 &\n\n2025-07-16 12:14:28.694 [info] (ssh_tunnel) stdout: Code server started with PID 3478974 and wrote pid to file /run/user/961800067/cursor-remote-code.pid.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-16 12:14:28.695 [info] (ssh_tunnel) stdout: Code server log file is /run/user/961800067/cursor-remote-code.log.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-16 12:14:29.206 [info] (ssh_tunnel) stdout: 6688dccd9f7fdc775c19767c: start\nexitCode==0==\nnodeExecutable==/home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/node==\nerrorMessage====\nisFatalError==false==\nmultiplexListeningOn==37309==\n\n2025-07-16 12:14:29.207 [info] (ssh_tunnel) stdout: multiplexConnectionToken==de7d54cc-97ec-4acb-a7dc-909a8230ad14==\ncodeListeningOn==46727==\ncodeConnectionToken==5538bc56-13b0-4367-ac8c-e67bd8713128==\ndetectedPlatform==linux==\narch==x64==\nSSH_AUTH_SOCK====\n6688dccd9f7fdc775c19767c: end\n\n2025-07-16 12:14:29.209 [info] Server install command exit code: 0\n2025-07-16 12:14:29.209 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_51705.sh\n2025-07-16 12:14:29.215 [info] [forwarding][code] creating new forwarding server\n2025-07-16 12:14:29.216 [info] [forwarding][code] server listening on 57977\n2025-07-16 12:14:29.216 [info] [forwarding][code] Set up server\n2025-07-16 12:14:29.217 [info] [remote-ssh] codeListeningOn (remote=46727; local=57977) codeConnectionToken: 5538bc56-13b0-4367-ac8c-e67bd8713128\n2025-07-16 12:14:29.217 [info] [forwarding][multiplex] creating new forwarding server\n2025-07-16 12:14:29.217 [info] [forwarding][multiplex] server listening on 57978\n2025-07-16 12:14:29.217 [info] [forwarding][multiplex] Set up server\n2025-07-16 12:14:29.219 [info] [remote-ssh] multiplexListeningOn (remote=37309; local=57978) multiplexConnectionToken: de7d54cc-97ec-4acb-a7dc-909a8230ad14\n2025-07-16 12:14:29.219 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:14:29.222 [info] (ssh_tunnel) stdout: Unlocking /run/user/961800067/cursor-remote-lock.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-16 12:14:29.222 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][169965cf-637a-4cd5-95c0-1701f6093201] received connection request\n2025-07-16 12:14:29.223 [info] [command][61b2187a-411a-4844-b411-0ed72df546d8] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""61b2187a-411a-4844-b411-0ed72df546d8""}\n2025-07-16 12:14:29.223 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 3: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:14:29.232 [info] [forwarding][code][127.0.0.1:57977 -> 127.0.0.1:46727][cf33d4e1-5b58-4ca1-b31a-b1998647bb45] received connection request\n2025-07-16 12:14:29.233 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:14:29.250 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][169965cf-637a-4cd5-95c0-1701f6093201] socks forwarding established\n2025-07-16 12:14:29.318 [info] [forwarding][code][127.0.0.1:57977 -> 127.0.0.1:57974 -> 127.0.0.1:46727][cf33d4e1-5b58-4ca1-b31a-b1998647bb45] socks forwarding established\n2025-07-16 
12:14:29.318 [info] [command][61b2187a-411a-4844-b411-0ed72df546d8] Process exited with code 0\n2025-07-16 12:14:29.319 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][169965cf-637a-4cd5-95c0-1701f6093201] socks connection closed\n2025-07-16 12:14:29.319 [info] [command][61b2187a-411a-4844-b411-0ed72df546d8] Socket close event received\n2025-07-16 12:14:29.342 [info] (ssh_tunnel) stderr: debug1: channel 3: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57980 to 127.0.0.1 port 57974, nchannels 5\n\n2025-07-16 12:14:29.356 [info] Successfully connected to Cursor server at http://127.0.0.1:57977/version\n2025-07-16 12:14:29.356 [info] [execServer][spawn] command: echo, args: 1, options: {}\n2025-07-16 12:14:29.357 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][b67f21f6-2b29-42be-93bd-e68effded320] received connection request\n2025-07-16 12:14:29.357 [info] [command][1158e9ce-e250-4068-9b7f-a3ade39b29f7] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""1158e9ce-e250-4068-9b7f-a3ade39b29f7""}\n2025-07-16 12:14:29.357 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 3: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:14:29.383 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b67f21f6-2b29-42be-93bd-e68effded320] socks forwarding established\n2025-07-16 12:14:29.410 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b67f21f6-2b29-42be-93bd-e68effded320] socks connection closed\n2025-07-16 12:14:29.410 [info] [command][1158e9ce-e250-4068-9b7f-a3ade39b29f7] Process exited with code 0\n2025-07-16 12:14:29.410 [info] Successfully ran 'echo 1' against the multiplex server\n2025-07-16 12:14:29.411 [info] [remote-ssh] Resolved exec server. Socks port: 57974\n2025-07-16 12:14:29.411 [info] [remote-ssh] Resolved authority: {""host"":""127.0.0.1"",""port"":57977,""connectionToken"":""5538bc56-13b0-4367-ac8c-e67bd8713128"",""extensionHostEnv"":{}}. 
Socks port: 57974\n2025-07-16 12:14:29.411 [info] [command][1158e9ce-e250-4068-9b7f-a3ade39b29f7] Socket close event received\n2025-07-16 12:14:29.427 [info] [forwarding][code][127.0.0.1:57977 -> 127.0.0.1:46727][a28e2b9d-8aa2-4937-9fa5-64a130a89a90] received connection request\n2025-07-16 12:14:29.427 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 12:14:29.427 [info] (ssh_tunnel) stderr: debug1: channel 5: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:14:29.434 [info] (ssh_tunnel) stderr: debug1: channel 3: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57984 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:14:29.453 [info] [forwarding][code][127.0.0.1:57977 -> 127.0.0.1:57974 -> 127.0.0.1:46727][a28e2b9d-8aa2-4937-9fa5-64a130a89a90] socks forwarding established\n2025-07-16 12:14:29.492 [info] [forwarding][code][127.0.0.1:57977 -> 127.0.0.1:46727][9243c5a5-29cb-40d6-a147-75b8db807ee6] received connection request\n2025-07-16 12:14:29.492 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 3: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:14:29.516 [info] [forwarding][code][127.0.0.1:57977 -> 127.0.0.1:57974 -> 127.0.0.1:46727][9243c5a5-29cb-40d6-a147-75b8db807ee6] socks forwarding established\n2025-07-16 12:14:29.613 [info] Saved platform linux for remote host login.haicore.berlin\n2025-07-16 12:14:32.381 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 46727, connect from 127.0.0.1 port 57982 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:14:32.381 [info] [forwarding][code][127.0.0.1:57977 -> 127.0.0.1:57974 -> 127.0.0.1:46727][cf33d4e1-5b58-4ca1-b31a-b1998647bb45] socks connection closed\n2025-07-16 12:15:29.321 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:15:29.322 [info] [command][1f58e9ed-ed33-4da2-88d6-26887959b514] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""1f58e9ed-ed33-4da2-88d6-26887959b514""}\n2025-07-16 12:15:29.322 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][bed33dab-cbeb-4272-9544-b831512cb47f] received connection request\n2025-07-16 12:15:29.323 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:15:29.347 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][bed33dab-cbeb-4272-9544-b831512cb47f] socks forwarding established\n2025-07-16 12:15:29.379 [info] [command][1f58e9ed-ed33-4da2-88d6-26887959b514] Process exited with code 0\n2025-07-16 12:15:29.379 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][bed33dab-cbeb-4272-9544-b831512cb47f] socks connection closed\n2025-07-16 12:15:29.379 [info] [command][1f58e9ed-ed33-4da2-88d6-26887959b514] Socket close event received\n2025-07-16 12:15:29.403 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58071 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:16:29.384 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:16:29.384 [info] [command][1cc02a7b-d7f3-4578-a437-b612456078c2] 
Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""1cc02a7b-d7f3-4578-a437-b612456078c2""}\n2025-07-16 12:16:29.385 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][30a34b0e-deb8-4a64-91bd-2c1219aef01e] received connection request\n2025-07-16 12:16:29.385 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:16:29.409 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][30a34b0e-deb8-4a64-91bd-2c1219aef01e] socks forwarding established\n2025-07-16 12:16:29.435 [info] [command][1cc02a7b-d7f3-4578-a437-b612456078c2] Process exited with code 0\n2025-07-16 12:16:29.435 [info] [command][1cc02a7b-d7f3-4578-a437-b612456078c2] Socket close event received\n2025-07-16 12:16:29.435 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][30a34b0e-deb8-4a64-91bd-2c1219aef01e] socks connection closed\n2025-07-16 12:16:29.458 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58104 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:17:29.440 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:17:29.441 [info] [command][2e2b4f3a-a88d-4ad3-ae1e-6ef0dfb2a1ae] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""2e2b4f3a-a88d-4ad3-ae1e-6ef0dfb2a1ae""}\n2025-07-16 12:17:29.442 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][4b755936-e9ce-4a44-97f6-fb617d8c5bca] received connection request\n2025-07-16 12:17:29.442 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:17:29.466 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4b755936-e9ce-4a44-97f6-fb617d8c5bca] socks forwarding established\n2025-07-16 12:17:29.493 [info] [command][2e2b4f3a-a88d-4ad3-ae1e-6ef0dfb2a1ae] Process exited with code 0\n2025-07-16 12:17:29.493 [info] [command][2e2b4f3a-a88d-4ad3-ae1e-6ef0dfb2a1ae] Socket close event received\n2025-07-16 12:17:29.494 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4b755936-e9ce-4a44-97f6-fb617d8c5bca] socks connection closed\n2025-07-16 12:17:29.518 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58162 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:18:29.498 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:18:29.500 [info] [command][ff9bd3b4-2e6f-48f6-adac-0e50fbbffe0b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ff9bd3b4-2e6f-48f6-adac-0e50fbbffe0b""}\n2025-07-16 12:18:29.500 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][c649c99b-317c-4a74-a746-82a10e45fd58] received connection request\n2025-07-16 12:18:29.501 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:18:29.528 [info] [forwarding][multiplex][127.0.0.1:57978 
-> 127.0.0.1:57974 -> 127.0.0.1:37309][c649c99b-317c-4a74-a746-82a10e45fd58] socks forwarding established\n2025-07-16 12:18:29.558 [info] [command][ff9bd3b4-2e6f-48f6-adac-0e50fbbffe0b] Process exited with code 0\n2025-07-16 12:18:29.558 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c649c99b-317c-4a74-a746-82a10e45fd58] socks connection closed\n2025-07-16 12:18:29.558 [info] [command][ff9bd3b4-2e6f-48f6-adac-0e50fbbffe0b] Socket close event received\n2025-07-16 12:18:29.582 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58192 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:19:29.562 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:19:29.564 [info] [command][2e9d5b1f-beb6-4ae2-b6c0-56ecfa678e6f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""2e9d5b1f-beb6-4ae2-b6c0-56ecfa678e6f""}\n2025-07-16 12:19:29.565 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][b7e65cbe-8b6f-4f78-9dd5-85e1c1538491] received connection request\n2025-07-16 12:19:29.565 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:19:29.597 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b7e65cbe-8b6f-4f78-9dd5-85e1c1538491] socks forwarding established\n2025-07-16 12:19:29.623 [info] [command][2e9d5b1f-beb6-4ae2-b6c0-56ecfa678e6f] Process exited with code 0\n2025-07-16 12:19:29.624 [info] [command][2e9d5b1f-beb6-4ae2-b6c0-56ecfa678e6f] Socket close event received\n2025-07-16 12:19:29.624 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b7e65cbe-8b6f-4f78-9dd5-85e1c1538491] socks connection closed\n2025-07-16 12:19:29.647 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58241 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:20:29.627 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:20:29.628 [info] [command][ca37be40-18a4-4b3a-b501-a1b7b94f1b5a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ca37be40-18a4-4b3a-b501-a1b7b94f1b5a""}\n2025-07-16 12:20:29.629 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][a915a890-064a-4a67-bacb-38b7c571c5ab] received connection request\n2025-07-16 12:20:29.630 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:20:29.658 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a915a890-064a-4a67-bacb-38b7c571c5ab] socks forwarding established\n2025-07-16 12:20:29.686 [info] [command][ca37be40-18a4-4b3a-b501-a1b7b94f1b5a] Process exited with code 0\n2025-07-16 12:20:29.686 [info] [command][ca37be40-18a4-4b3a-b501-a1b7b94f1b5a] Socket close event received\n2025-07-16 12:20:29.687 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a915a890-064a-4a67-bacb-38b7c571c5ab] socks connection closed\n2025-07-16 12:20:29.712 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 
for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58272 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:21:29.688 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:21:29.690 [info] [command][d910d192-cb90-4c7e-a1a9-a8dc992e7ec9] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d910d192-cb90-4c7e-a1a9-a8dc992e7ec9""}\n2025-07-16 12:21:29.690 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][afbba6c8-6d38-4e1a-8cdc-aeaf3c91de84] received connection request\n2025-07-16 12:21:29.690 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:21:29.715 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][afbba6c8-6d38-4e1a-8cdc-aeaf3c91de84] socks forwarding established\n2025-07-16 12:21:29.745 [info] [command][d910d192-cb90-4c7e-a1a9-a8dc992e7ec9] Process exited with code 0\n2025-07-16 12:21:29.745 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][afbba6c8-6d38-4e1a-8cdc-aeaf3c91de84] socks connection closed\n2025-07-16 12:21:29.746 [info] [command][d910d192-cb90-4c7e-a1a9-a8dc992e7ec9] Socket close event received\n2025-07-16 12:21:29.772 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58295 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:22:29.747 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:22:29.749 [info] [command][f77ae0d1-bfc4-4815-8b29-18073a8f33ab] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""f77ae0d1-bfc4-4815-8b29-18073a8f33ab""}\n2025-07-16 12:22:29.749 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][a5ac21c0-c8c3-439c-a3d3-469a8a6f0048] received connection request\n2025-07-16 12:22:29.750 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:22:29.775 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a5ac21c0-c8c3-439c-a3d3-469a8a6f0048] socks forwarding established\n2025-07-16 12:22:29.804 [info] [command][f77ae0d1-bfc4-4815-8b29-18073a8f33ab] Process exited with code 0\n2025-07-16 12:22:29.805 [info] [command][f77ae0d1-bfc4-4815-8b29-18073a8f33ab] Socket close event received\n2025-07-16 12:22:29.805 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a5ac21c0-c8c3-439c-a3d3-469a8a6f0048] socks connection closed\n2025-07-16 12:22:29.830 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58343 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:23:29.810 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:23:29.812 [info] [command][48ae1a1d-2cd0-4078-9ea1-f85bdad95b5b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""48ae1a1d-2cd0-4078-9ea1-f85bdad95b5b""}\n2025-07-16 12:23:29.813 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][94e09560-26f9-4ae9-86e9-a5449e035ffd] received connection request\n2025-07-16 
12:23:29.814 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:23:29.838 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][94e09560-26f9-4ae9-86e9-a5449e035ffd] socks forwarding established\n2025-07-16 12:23:29.867 [info] [command][48ae1a1d-2cd0-4078-9ea1-f85bdad95b5b] Process exited with code 0\n2025-07-16 12:23:29.867 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][94e09560-26f9-4ae9-86e9-a5449e035ffd] socks connection closed\n2025-07-16 12:23:29.867 [info] [command][48ae1a1d-2cd0-4078-9ea1-f85bdad95b5b] Socket close event received\n2025-07-16 12:23:29.891 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58367 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:24:29.870 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:24:29.871 [info] [command][32e85efb-aded-4caa-8583-fdfaabfa5bd3] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""32e85efb-aded-4caa-8583-fdfaabfa5bd3""}\n2025-07-16 12:24:29.872 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][7c1ffcb8-7306-4229-807f-d741214bfeeb] received connection request\n2025-07-16 12:24:29.873 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:24:29.899 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7c1ffcb8-7306-4229-807f-d741214bfeeb] socks forwarding established\n2025-07-16 12:24:29.928 [info] [command][32e85efb-aded-4caa-8583-fdfaabfa5bd3] Process exited with code 0\n2025-07-16 12:24:29.928 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7c1ffcb8-7306-4229-807f-d741214bfeeb] socks connection closed\n2025-07-16 12:24:29.928 [info] [command][32e85efb-aded-4caa-8583-fdfaabfa5bd3] Socket close event received\n2025-07-16 12:24:29.952 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58405 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:25:29.929 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:25:29.930 [info] [command][461724b3-d880-4e48-8ee5-7b9524e3c5c8] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""461724b3-d880-4e48-8ee5-7b9524e3c5c8""}\n2025-07-16 12:25:29.930 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][f91847a7-4421-452a-9935-06c20ce9f43c] received connection request\n2025-07-16 12:25:29.930 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:25:29.955 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f91847a7-4421-452a-9935-06c20ce9f43c] socks forwarding established\n2025-07-16 12:25:29.980 [info] [command][461724b3-d880-4e48-8ee5-7b9524e3c5c8] Process exited with code 0\n2025-07-16 12:25:29.980 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][f91847a7-4421-452a-9935-06c20ce9f43c] socks connection closed\n2025-07-16 12:25:29.980 [info] [command][461724b3-d880-4e48-8ee5-7b9524e3c5c8] Socket close event received\n2025-07-16 12:25:30.006 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58429 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:26:29.986 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:26:29.988 [info] [command][ca7a7e42-b095-4bbc-80e2-15d0ac687b38] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ca7a7e42-b095-4bbc-80e2-15d0ac687b38""}\n2025-07-16 12:26:29.988 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][15583d29-6a3d-4bc7-87d2-80b190c382a7] received connection request\n2025-07-16 12:26:29.989 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:26:30.018 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][15583d29-6a3d-4bc7-87d2-80b190c382a7] socks forwarding established\n2025-07-16 12:26:30.045 [info] [command][ca7a7e42-b095-4bbc-80e2-15d0ac687b38] Process exited with code 0\n2025-07-16 12:26:30.045 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][15583d29-6a3d-4bc7-87d2-80b190c382a7] socks connection closed\n2025-07-16 12:26:30.045 [info] [command][ca7a7e42-b095-4bbc-80e2-15d0ac687b38] Socket close event received\n2025-07-16 12:26:30.068 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58454 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:27:30.051 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:27:30.052 [info] [command][e3b8c7cb-2ced-4494-ab61-f44780ae714a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""e3b8c7cb-2ced-4494-ab61-f44780ae714a""}\n2025-07-16 12:27:30.053 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][be9a8d5b-e2f6-483a-b135-ef314928f770] received connection request\n2025-07-16 12:27:30.054 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:27:30.078 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][be9a8d5b-e2f6-483a-b135-ef314928f770] socks forwarding established\n2025-07-16 12:27:30.227 [info] [command][e3b8c7cb-2ced-4494-ab61-f44780ae714a] Process exited with code 0\n2025-07-16 12:27:30.228 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][be9a8d5b-e2f6-483a-b135-ef314928f770] socks connection closed\n2025-07-16 12:27:30.228 [info] [command][e3b8c7cb-2ced-4494-ab61-f44780ae714a] Socket close event received\n2025-07-16 12:27:30.250 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58508 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:28:30.232 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:28:30.234 [info] [command][1fbf5ced-1413-4e34-be8f-4605d96a4c8d] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""1fbf5ced-1413-4e34-be8f-4605d96a4c8d""}\n2025-07-16 12:28:30.234 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d865ed4c-ad4d-4ad3-a0d7-b3cbf41cc9ab] received connection request\n2025-07-16 12:28:30.235 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:28:30.337 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d865ed4c-ad4d-4ad3-a0d7-b3cbf41cc9ab] socks forwarding established\n2025-07-16 12:28:30.484 [info] [command][1fbf5ced-1413-4e34-be8f-4605d96a4c8d] Process exited with code 0\n2025-07-16 12:28:30.485 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d865ed4c-ad4d-4ad3-a0d7-b3cbf41cc9ab] socks connection closed\n2025-07-16 12:28:30.485 [info] [command][1fbf5ced-1413-4e34-be8f-4605d96a4c8d] Socket close event received\n2025-07-16 12:28:30.508 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58537 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:29:30.490 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:29:30.492 [info] [command][532dfdcd-03a1-4281-9b6e-24010fcdd709] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""532dfdcd-03a1-4281-9b6e-24010fcdd709""}\n2025-07-16 12:29:30.493 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][8a71f5b2-5a2a-4843-b88c-f7ce01f642bb] received connection request\n2025-07-16 12:29:30.493 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:29:30.518 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8a71f5b2-5a2a-4843-b88c-f7ce01f642bb] socks forwarding established\n2025-07-16 12:29:30.547 [info] [command][532dfdcd-03a1-4281-9b6e-24010fcdd709] Process exited with code 0\n2025-07-16 12:29:30.547 [info] [command][532dfdcd-03a1-4281-9b6e-24010fcdd709] Socket close event received\n2025-07-16 12:29:30.570 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8a71f5b2-5a2a-4843-b88c-f7ce01f642bb] socks connection closed\n2025-07-16 12:29:30.572 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58574 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:30:30.549 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:30:30.551 [info] [command][80e63837-9416-49a3-9660-75a3ff5d3885] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""80e63837-9416-49a3-9660-75a3ff5d3885""}\n2025-07-16 12:30:30.552 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][e5e10ff0-1066-4730-88a7-a027e133fa4f] received connection request\n2025-07-16 12:30:30.553 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:30:30.576 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][e5e10ff0-1066-4730-88a7-a027e133fa4f] socks forwarding established\n2025-07-16 12:30:30.602 [info] [command][80e63837-9416-49a3-9660-75a3ff5d3885] Process exited with code 0\n2025-07-16 12:30:30.602 [info] [command][80e63837-9416-49a3-9660-75a3ff5d3885] Socket close event received\n2025-07-16 12:30:30.624 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e5e10ff0-1066-4730-88a7-a027e133fa4f] socks connection closed\n2025-07-16 12:30:30.626 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58603 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:31:30.610 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:31:30.611 [info] [command][c82d4336-c1ea-4ad5-9135-b37f773c4b86] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""c82d4336-c1ea-4ad5-9135-b37f773c4b86""}\n2025-07-16 12:31:30.612 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][2dc17a2f-b93d-4909-a5e3-0fe55ad958d6] received connection request\n2025-07-16 12:31:30.613 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:31:30.638 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][2dc17a2f-b93d-4909-a5e3-0fe55ad958d6] socks forwarding established\n2025-07-16 12:31:30.665 [info] [command][c82d4336-c1ea-4ad5-9135-b37f773c4b86] Process exited with code 0\n2025-07-16 12:31:30.666 [info] [command][c82d4336-c1ea-4ad5-9135-b37f773c4b86] Socket close event received\n2025-07-16 12:31:30.666 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][2dc17a2f-b93d-4909-a5e3-0fe55ad958d6] socks connection closed\n2025-07-16 12:31:30.690 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58633 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:32:30.676 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:32:30.678 [info] [command][fff9ec53-64c2-4c0e-9362-ceef4d737526] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""fff9ec53-64c2-4c0e-9362-ceef4d737526""}\n2025-07-16 12:32:30.679 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][a06691e4-2e7e-47c1-a877-e6cf21cc24f3] received connection request\n2025-07-16 12:32:30.679 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:32:30.704 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a06691e4-2e7e-47c1-a877-e6cf21cc24f3] socks forwarding established\n2025-07-16 12:32:30.731 [info] [command][fff9ec53-64c2-4c0e-9362-ceef4d737526] Process exited with code 0\n2025-07-16 12:32:30.731 [info] [command][fff9ec53-64c2-4c0e-9362-ceef4d737526] Socket close event received\n2025-07-16 12:32:30.732 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a06691e4-2e7e-47c1-a877-e6cf21cc24f3] socks connection closed\n2025-07-16 12:32:30.755 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 
37309, connect from 127.0.0.1 port 58695 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:33:30.741 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:33:30.742 [info] [command][4aa6e22d-e776-444e-8ac5-5fae70149fe6] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""4aa6e22d-e776-444e-8ac5-5fae70149fe6""}\n2025-07-16 12:33:30.743 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][8ac50093-3ff2-45b8-a8fb-866d7a969719] received connection request\n2025-07-16 12:33:30.743 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:33:30.769 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8ac50093-3ff2-45b8-a8fb-866d7a969719] socks forwarding established\n2025-07-16 12:33:30.797 [info] [command][4aa6e22d-e776-444e-8ac5-5fae70149fe6] Process exited with code 0\n2025-07-16 12:33:30.798 [info] [command][4aa6e22d-e776-444e-8ac5-5fae70149fe6] Socket close event received\n2025-07-16 12:33:30.821 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8ac50093-3ff2-45b8-a8fb-866d7a969719] socks connection closed\n2025-07-16 12:33:30.822 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58726 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:34:30.809 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:34:30.811 [info] [command][6996e2cd-3784-42d2-939d-fb88d9705ed0] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""6996e2cd-3784-42d2-939d-fb88d9705ed0""}\n2025-07-16 12:34:30.811 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][7c9dc65b-7c54-4563-9610-70cf1b81a711] received connection request\n2025-07-16 12:34:30.812 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:34:30.836 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7c9dc65b-7c54-4563-9610-70cf1b81a711] socks forwarding established\n2025-07-16 12:34:30.863 [info] [command][6996e2cd-3784-42d2-939d-fb88d9705ed0] Process exited with code 0\n2025-07-16 12:34:30.863 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7c9dc65b-7c54-4563-9610-70cf1b81a711] socks connection closed\n2025-07-16 12:34:30.863 [info] [command][6996e2cd-3784-42d2-939d-fb88d9705ed0] Socket close event received\n2025-07-16 12:34:30.887 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58769 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:35:30.873 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:35:30.875 [info] [command][1434aef7-5736-46eb-b877-159673308af5] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""1434aef7-5736-46eb-b877-159673308af5""}\n2025-07-16 12:35:30.876 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][489e63a2-9b02-4afe-aaee-2f1bf64ee09d] received connection request\n2025-07-16 12:35:30.877 [info] 
(ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:35:30.904 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][489e63a2-9b02-4afe-aaee-2f1bf64ee09d] socks forwarding established\n2025-07-16 12:35:30.931 [info] [command][1434aef7-5736-46eb-b877-159673308af5] Process exited with code 0\n2025-07-16 12:35:30.931 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][489e63a2-9b02-4afe-aaee-2f1bf64ee09d] socks connection closed\n2025-07-16 12:35:30.931 [info] [command][1434aef7-5736-46eb-b877-159673308af5] Socket close event received\n2025-07-16 12:35:30.956 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58793 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:36:30.941 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:36:30.943 [info] [command][e1f60fcc-2b00-450b-9f63-a910349695fc] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""e1f60fcc-2b00-450b-9f63-a910349695fc""}\n2025-07-16 12:36:30.944 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0da27eed-38d9-49a6-8cf7-04c5e4331123] received connection request\n2025-07-16 12:36:30.945 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:36:30.972 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0da27eed-38d9-49a6-8cf7-04c5e4331123] socks forwarding established\n2025-07-16 12:36:30.998 [info] [command][e1f60fcc-2b00-450b-9f63-a910349695fc] Process exited with code 0\n2025-07-16 12:36:30.998 [info] [command][e1f60fcc-2b00-450b-9f63-a910349695fc] Socket close event received\n2025-07-16 12:36:30.999 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0da27eed-38d9-49a6-8cf7-04c5e4331123] socks connection closed\n2025-07-16 12:36:31.024 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58812 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:37:31.009 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:37:31.010 [info] [command][a557a91b-8bab-48a6-a8db-c58d925ba4a1] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""a557a91b-8bab-48a6-a8db-c58d925ba4a1""}\n2025-07-16 12:37:31.011 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0287f1ec-ea95-46f0-9764-e5ceda15ffd6] received connection request\n2025-07-16 12:37:31.011 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:37:31.036 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0287f1ec-ea95-46f0-9764-e5ceda15ffd6] socks forwarding established\n2025-07-16 12:37:31.063 [info] [command][a557a91b-8bab-48a6-a8db-c58d925ba4a1] Process exited with code 0\n2025-07-16 12:37:31.064 [info] [command][a557a91b-8bab-48a6-a8db-c58d925ba4a1] Socket close event received\n2025-07-16 12:37:31.064 [info] 
[forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0287f1ec-ea95-46f0-9764-e5ceda15ffd6] socks connection closed\n2025-07-16 12:37:31.088 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58834 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:38:31.071 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:38:31.073 [info] [command][16be1c0f-c28b-4b7a-8ef4-e0d915813fbe] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""16be1c0f-c28b-4b7a-8ef4-e0d915813fbe""}\n2025-07-16 12:38:31.074 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0628e3ef-f24d-4365-a961-63400ce065f8] received connection request\n2025-07-16 12:38:31.074 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:38:31.100 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0628e3ef-f24d-4365-a961-63400ce065f8] socks forwarding established\n2025-07-16 12:38:31.129 [info] [command][16be1c0f-c28b-4b7a-8ef4-e0d915813fbe] Process exited with code 0\n2025-07-16 12:38:31.130 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0628e3ef-f24d-4365-a961-63400ce065f8] socks connection closed\n2025-07-16 12:38:31.130 [info] [command][16be1c0f-c28b-4b7a-8ef4-e0d915813fbe] Socket close event received\n2025-07-16 12:38:31.154 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58890 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:39:31.130 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:39:31.132 [info] [command][df8c6fca-5e8b-446c-af6e-5aca8d397eba] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""df8c6fca-5e8b-446c-af6e-5aca8d397eba""}\n2025-07-16 12:39:31.133 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][c2718abb-f52b-4671-95d4-c3eb5db106e0] received connection request\n2025-07-16 12:39:31.134 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:39:31.182 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c2718abb-f52b-4671-95d4-c3eb5db106e0] socks forwarding established\n2025-07-16 12:39:31.210 [info] [command][df8c6fca-5e8b-446c-af6e-5aca8d397eba] Process exited with code 0\n2025-07-16 12:39:31.210 [info] [command][df8c6fca-5e8b-446c-af6e-5aca8d397eba] Socket close event received\n2025-07-16 12:39:31.211 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c2718abb-f52b-4671-95d4-c3eb5db106e0] socks connection closed\n2025-07-16 12:39:31.234 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58915 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:40:31.221 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:40:31.222 [info] [command][33c758b7-b6c8-4875-8efa-78c353e6d409] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""33c758b7-b6c8-4875-8efa-78c353e6d409""}\n2025-07-16 12:40:31.223 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][9293e15c-00fd-4d57-b730-a085196fe81f] received connection request\n2025-07-16 12:40:31.224 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:40:31.248 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][9293e15c-00fd-4d57-b730-a085196fe81f] socks forwarding established\n2025-07-16 12:40:31.391 [info] [command][33c758b7-b6c8-4875-8efa-78c353e6d409] Process exited with code 0\n2025-07-16 12:40:31.392 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][9293e15c-00fd-4d57-b730-a085196fe81f] socks connection closed\n2025-07-16 12:40:31.392 [info] [command][33c758b7-b6c8-4875-8efa-78c353e6d409] Socket close event received\n2025-07-16 12:40:31.420 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58952 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:41:31.400 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:41:31.401 [info] [command][987bd5bb-9bd0-4be8-8346-8f4089ab6b33] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""987bd5bb-9bd0-4be8-8346-8f4089ab6b33""}\n2025-07-16 12:41:31.402 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][8cc76284-6b64-4acb-b9db-d63e22abc703] received connection request\n2025-07-16 12:41:31.402 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:41:31.429 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8cc76284-6b64-4acb-b9db-d63e22abc703] socks forwarding established\n2025-07-16 12:41:31.459 [info] [command][987bd5bb-9bd0-4be8-8346-8f4089ab6b33] Process exited with code 0\n2025-07-16 12:41:31.459 [info] [command][987bd5bb-9bd0-4be8-8346-8f4089ab6b33] Socket close event received\n2025-07-16 12:41:31.460 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8cc76284-6b64-4acb-b9db-d63e22abc703] socks connection closed\n2025-07-16 12:41:31.483 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58978 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:42:31.468 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:42:31.471 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][88188c84-00f1-43c4-ab41-8065889a7595] received connection request\n2025-07-16 12:42:31.472 [info] [command][75a4bd47-17c0-4e13-89fd-e01b5368c63b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""75a4bd47-17c0-4e13-89fd-e01b5368c63b""}\n2025-07-16 12:42:31.473 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:42:31.499 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][88188c84-00f1-43c4-ab41-8065889a7595] socks forwarding established\n2025-07-16 12:42:31.527 [info] [command][75a4bd47-17c0-4e13-89fd-e01b5368c63b] Process exited with code 0\n2025-07-16 12:42:31.528 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][88188c84-00f1-43c4-ab41-8065889a7595] socks connection closed\n2025-07-16 12:42:31.528 [info] [command][75a4bd47-17c0-4e13-89fd-e01b5368c63b] Socket close event received\n2025-07-16 12:42:31.551 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59001 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:43:31.538 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:43:31.539 [info] [command][ec9c3e89-1bf5-4122-8a45-bea08b5cabc5] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ec9c3e89-1bf5-4122-8a45-bea08b5cabc5""}\n2025-07-16 12:43:31.541 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][346157f9-0124-4ba1-9fc6-50be4db3e026] received connection request\n2025-07-16 12:43:31.541 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 12:43:31.542 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:43:31.567 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][346157f9-0124-4ba1-9fc6-50be4db3e026] socks forwarding established\n2025-07-16 12:43:31.596 [info] [command][ec9c3e89-1bf5-4122-8a45-bea08b5cabc5] Process exited with code 0\n2025-07-16 12:43:31.596 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][346157f9-0124-4ba1-9fc6-50be4db3e026] socks connection closed\n2025-07-16 12:43:31.596 [info] [command][ec9c3e89-1bf5-4122-8a45-bea08b5cabc5] Socket close event received\n2025-07-16 12:43:31.621 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59053 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:44:31.606 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:44:31.607 [info] [command][ea747e35-07a4-4008-bb9b-4b9c9243f4fb] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ea747e35-07a4-4008-bb9b-4b9c9243f4fb""}\n2025-07-16 12:44:31.608 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][8c898eea-2fee-481b-b7ab-091b2dd1f109] received connection request\n2025-07-16 12:44:31.609 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:44:31.633 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8c898eea-2fee-481b-b7ab-091b2dd1f109] socks forwarding established\n2025-07-16 12:44:31.661 [info] [command][ea747e35-07a4-4008-bb9b-4b9c9243f4fb] Process exited with code 0\n2025-07-16 12:44:31.661 [info] [command][ea747e35-07a4-4008-bb9b-4b9c9243f4fb] Socket close event received\n2025-07-16 12:44:31.662 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8c898eea-2fee-481b-b7ab-091b2dd1f109] socks connection closed\n2025-07-16 12:44:31.779 [info] (ssh_tunnel) stderr: debug1: channel 4: free: 
direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59082 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:45:31.671 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:45:31.672 [info] [command][4fda897e-5a65-4642-be29-31ded4f4e59e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""4fda897e-5a65-4642-be29-31ded4f4e59e""}\n2025-07-16 12:45:31.673 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d45bdde7-4085-4a65-8927-fec202b04573] received connection request\n2025-07-16 12:45:31.674 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:45:31.700 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d45bdde7-4085-4a65-8927-fec202b04573] socks forwarding established\n2025-07-16 12:45:31.727 [info] [command][4fda897e-5a65-4642-be29-31ded4f4e59e] Process exited with code 0\n2025-07-16 12:45:31.727 [info] [command][4fda897e-5a65-4642-be29-31ded4f4e59e] Socket close event received\n2025-07-16 12:45:31.727 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d45bdde7-4085-4a65-8927-fec202b04573] socks connection closed\n2025-07-16 12:45:31.751 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59117 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:46:31.736 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:46:31.738 [info] [command][b8ef114d-45e1-4955-affe-b4c6e183e072] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""b8ef114d-45e1-4955-affe-b4c6e183e072""}\n2025-07-16 12:46:31.739 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][de1db15e-5e36-4d57-b5cf-b4c35a0128a9] received connection request\n2025-07-16 12:46:31.740 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:46:31.764 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][de1db15e-5e36-4d57-b5cf-b4c35a0128a9] socks forwarding established\n2025-07-16 12:46:31.793 [info] [command][b8ef114d-45e1-4955-affe-b4c6e183e072] Process exited with code 0\n2025-07-16 12:46:31.794 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][de1db15e-5e36-4d57-b5cf-b4c35a0128a9] socks connection closed\n2025-07-16 12:46:31.794 [info] [command][b8ef114d-45e1-4955-affe-b4c6e183e072] Socket close event received\n2025-07-16 12:46:31.820 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59140 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:47:31.797 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:47:31.799 [info] [command][1d9971c9-0285-408d-acd8-94df01746f9d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""1d9971c9-0285-408d-acd8-94df01746f9d""}\n2025-07-16 12:47:31.800 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][55246408-3cfc-4e80-8efb-a92675149625] 
received connection request\n2025-07-16 12:47:31.800 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:47:31.828 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][55246408-3cfc-4e80-8efb-a92675149625] socks forwarding established\n2025-07-16 12:47:31.856 [info] [command][1d9971c9-0285-408d-acd8-94df01746f9d] Process exited with code 0\n2025-07-16 12:47:31.857 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][55246408-3cfc-4e80-8efb-a92675149625] socks connection closed\n2025-07-16 12:47:31.857 [info] [command][1d9971c9-0285-408d-acd8-94df01746f9d] Socket close event received\n2025-07-16 12:47:31.883 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59165 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:48:31.867 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:48:31.869 [info] [command][5fd8dd84-64d1-40f9-a20b-d2537d44a92c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""5fd8dd84-64d1-40f9-a20b-d2537d44a92c""}\n2025-07-16 12:48:31.870 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][3127e921-494a-4bbe-a705-8d432e60c67f] received connection request\n2025-07-16 12:48:31.871 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:48:31.899 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3127e921-494a-4bbe-a705-8d432e60c67f] socks forwarding established\n2025-07-16 12:48:31.928 [info] [command][5fd8dd84-64d1-40f9-a20b-d2537d44a92c] Process exited with code 0\n2025-07-16 12:48:31.928 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3127e921-494a-4bbe-a705-8d432e60c67f] socks connection closed\n2025-07-16 12:48:31.928 [info] [command][5fd8dd84-64d1-40f9-a20b-d2537d44a92c] Socket close event received\n2025-07-16 12:48:31.959 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59214 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:49:31.935 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:49:31.937 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][21285d6b-5865-4137-9c53-16e7d506342e] received connection request\n2025-07-16 12:49:31.937 [info] [command][deb87e10-7870-497b-9d32-306fb8775ebb] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""deb87e10-7870-497b-9d32-306fb8775ebb""}\n2025-07-16 12:49:31.938 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:49:31.966 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][21285d6b-5865-4137-9c53-16e7d506342e] socks forwarding established\n2025-07-16 12:49:31.995 [info] [command][deb87e10-7870-497b-9d32-306fb8775ebb] Process exited with code 0\n2025-07-16 12:49:31.995 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 
-> 127.0.0.1:37309][21285d6b-5865-4137-9c53-16e7d506342e] socks connection closed\n2025-07-16 12:49:31.995 [info] [command][deb87e10-7870-497b-9d32-306fb8775ebb] Socket close event received\n2025-07-16 12:49:32.021 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59246 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:50:31.998 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:50:32.000 [info] [command][2999a297-0dca-482b-a2a9-1f520c35f287] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""2999a297-0dca-482b-a2a9-1f520c35f287""}\n2025-07-16 12:50:32.002 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][865588a9-d668-459d-afcf-82acd1f883b6] received connection request\n2025-07-16 12:50:32.003 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:50:32.031 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][865588a9-d668-459d-afcf-82acd1f883b6] socks forwarding established\n2025-07-16 12:50:32.059 [info] [command][2999a297-0dca-482b-a2a9-1f520c35f287] Process exited with code 0\n2025-07-16 12:50:32.059 [info] [command][2999a297-0dca-482b-a2a9-1f520c35f287] Socket close event received\n2025-07-16 12:50:32.060 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][865588a9-d668-459d-afcf-82acd1f883b6] socks connection closed\n2025-07-16 12:50:32.085 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59281 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:51:32.061 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:51:32.063 [info] [command][22d98a51-5a6e-410c-9948-fd68a21420b6] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""22d98a51-5a6e-410c-9948-fd68a21420b6""}\n2025-07-16 12:51:32.064 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][ee47a392-0ddc-4997-8457-d57d1f2e9bae] received connection request\n2025-07-16 12:51:32.064 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:51:32.090 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ee47a392-0ddc-4997-8457-d57d1f2e9bae] socks forwarding established\n2025-07-16 12:51:32.118 [info] [command][22d98a51-5a6e-410c-9948-fd68a21420b6] Process exited with code 0\n2025-07-16 12:51:32.118 [info] [command][22d98a51-5a6e-410c-9948-fd68a21420b6] Socket close event received\n2025-07-16 12:51:32.121 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ee47a392-0ddc-4997-8457-d57d1f2e9bae] socks connection closed\n2025-07-16 12:51:32.147 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59305 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:52:32.130 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:52:32.131 [info] [command][d02f7489-ee26-4992-ad12-be6183cec190] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d02f7489-ee26-4992-ad12-be6183cec190""}\n2025-07-16 12:52:32.132 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][a6ed5225-8ef2-41ad-88dc-7e3fe827af59] received connection request\n2025-07-16 12:52:32.133 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:52:32.167 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a6ed5225-8ef2-41ad-88dc-7e3fe827af59] socks forwarding established\n2025-07-16 12:52:32.294 [info] [command][d02f7489-ee26-4992-ad12-be6183cec190] Process exited with code 0\n2025-07-16 12:52:32.294 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a6ed5225-8ef2-41ad-88dc-7e3fe827af59] socks connection closed\n2025-07-16 12:52:32.294 [info] [command][d02f7489-ee26-4992-ad12-be6183cec190] Socket close event received\n2025-07-16 12:52:32.319 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59333 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:53:32.296 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:53:32.299 [info] [command][d45f31d4-17ae-4ad4-8537-c722a688b902] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d45f31d4-17ae-4ad4-8537-c722a688b902""}\n2025-07-16 12:53:32.300 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][4578e3e8-38f4-492a-a315-072e81a5bf99] received connection request\n2025-07-16 12:53:32.301 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:53:32.339 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4578e3e8-38f4-492a-a315-072e81a5bf99] socks forwarding established\n2025-07-16 12:53:32.497 [info] [command][d45f31d4-17ae-4ad4-8537-c722a688b902] Process exited with code 0\n2025-07-16 12:53:32.498 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4578e3e8-38f4-492a-a315-072e81a5bf99] socks connection closed\n2025-07-16 12:53:32.498 [info] [command][d45f31d4-17ae-4ad4-8537-c722a688b902] Socket close event received\n2025-07-16 12:53:32.523 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59383 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:54:32.499 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:54:32.500 [info] [command][d94f114d-1ee6-4167-b720-04c3e2dd0ffe] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d94f114d-1ee6-4167-b720-04c3e2dd0ffe""}\n2025-07-16 12:54:32.501 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][265c309d-c51b-4213-937f-3bac566322a4] received connection request\n2025-07-16 12:54:32.501 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:54:32.610 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][265c309d-c51b-4213-937f-3bac566322a4] socks forwarding established\n2025-07-16 12:54:32.686 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][265c309d-c51b-4213-937f-3bac566322a4] socks connection closed\n2025-07-16 12:54:32.687 [info] [command][d94f114d-1ee6-4167-b720-04c3e2dd0ffe] Process exited with code 0\n2025-07-16 12:54:32.687 [info] [command][d94f114d-1ee6-4167-b720-04c3e2dd0ffe] Socket close event received\n2025-07-16 12:54:32.711 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59409 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:55:32.697 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:55:32.700 [info] [command][6145e914-c99d-489a-8613-e0b22a9b4bf9] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""6145e914-c99d-489a-8613-e0b22a9b4bf9""}\n2025-07-16 12:55:32.701 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][83f669ac-478a-4bb2-8c7b-84e1552cf15d] received connection request\n2025-07-16 12:55:32.702 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:55:32.753 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][83f669ac-478a-4bb2-8c7b-84e1552cf15d] socks forwarding established\n2025-07-16 12:55:32.910 [info] [command][6145e914-c99d-489a-8613-e0b22a9b4bf9] Process exited with code 0\n2025-07-16 12:55:32.911 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][83f669ac-478a-4bb2-8c7b-84e1552cf15d] socks connection closed\n2025-07-16 12:55:32.911 [info] [command][6145e914-c99d-489a-8613-e0b22a9b4bf9] Socket close event received\n2025-07-16 12:55:32.937 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59448 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:56:32.919 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:56:32.921 [info] [command][76dfee9c-947a-495a-9d6f-0a12436c36cb] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""76dfee9c-947a-495a-9d6f-0a12436c36cb""}\n2025-07-16 12:56:32.922 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][875c0579-4ef5-4e23-b546-e986b3d15ec6] received connection request\n2025-07-16 12:56:32.923 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:56:32.952 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][875c0579-4ef5-4e23-b546-e986b3d15ec6] socks forwarding established\n2025-07-16 12:56:33.057 [info] [command][76dfee9c-947a-495a-9d6f-0a12436c36cb] Process exited with code 0\n2025-07-16 12:56:33.058 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][875c0579-4ef5-4e23-b546-e986b3d15ec6] socks connection closed\n2025-07-16 12:56:33.058 [info] [command][76dfee9c-947a-495a-9d6f-0a12436c36cb] Socket close event received\n2025-07-16 12:56:33.083 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 
37309, connect from 127.0.0.1 port 59470 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:57:33.068 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:57:33.069 [info] [command][18f18d8c-7356-4452-9f13-c44c017debfc] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""18f18d8c-7356-4452-9f13-c44c017debfc""}\n2025-07-16 12:57:33.070 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][190fcdc9-524b-4f54-8d3b-36cdde2e7faa] received connection request\n2025-07-16 12:57:33.070 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:57:33.095 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][190fcdc9-524b-4f54-8d3b-36cdde2e7faa] socks forwarding established\n2025-07-16 12:57:33.122 [info] [command][18f18d8c-7356-4452-9f13-c44c017debfc] Process exited with code 0\n2025-07-16 12:57:33.123 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][190fcdc9-524b-4f54-8d3b-36cdde2e7faa] socks connection closed\n2025-07-16 12:57:33.123 [info] [command][18f18d8c-7356-4452-9f13-c44c017debfc] Socket close event received\n2025-07-16 12:57:33.147 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59497 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:58:33.128 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:58:33.132 [info] [command][95927c70-2abb-4e75-9169-bf2005efa570] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""95927c70-2abb-4e75-9169-bf2005efa570""}\n2025-07-16 12:58:33.133 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][fcf78e25-90de-42c2-9d84-de104ae0529d] received connection request\n2025-07-16 12:58:33.134 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:58:33.160 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][fcf78e25-90de-42c2-9d84-de104ae0529d] socks forwarding established\n2025-07-16 12:58:33.188 [info] [command][95927c70-2abb-4e75-9169-bf2005efa570] Process exited with code 0\n2025-07-16 12:58:33.188 [info] [command][95927c70-2abb-4e75-9169-bf2005efa570] Socket close event received\n2025-07-16 12:58:33.189 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][fcf78e25-90de-42c2-9d84-de104ae0529d] socks connection closed\n2025-07-16 12:58:33.213 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59559 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 12:59:33.189 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 12:59:33.190 [info] [command][de1477f6-f784-4408-bc8d-641583126303] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""de1477f6-f784-4408-bc8d-641583126303""}\n2025-07-16 12:59:33.191 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][c8da11e1-8419-4ccb-aef8-1c6017ac79dc] received connection request\n2025-07-16 12:59:33.192 [info] 
(ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 12:59:33.217 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c8da11e1-8419-4ccb-aef8-1c6017ac79dc] socks forwarding established\n2025-07-16 12:59:33.244 [info] [command][de1477f6-f784-4408-bc8d-641583126303] Process exited with code 0\n2025-07-16 12:59:33.245 [info] [command][de1477f6-f784-4408-bc8d-641583126303] Socket close event received\n2025-07-16 12:59:33.245 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c8da11e1-8419-4ccb-aef8-1c6017ac79dc] socks connection closed\n2025-07-16 12:59:33.270 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59592 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:00:33.249 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:00:33.250 [info] [command][6035553b-9962-4df3-9530-9a1e29c5de35] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""6035553b-9962-4df3-9530-9a1e29c5de35""}\n2025-07-16 13:00:33.251 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][51f2fdae-4a1d-454e-98b6-a91c7f0afc68] received connection request\n2025-07-16 13:00:33.251 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:00:33.277 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][51f2fdae-4a1d-454e-98b6-a91c7f0afc68] socks forwarding established\n2025-07-16 13:00:33.305 [info] [command][6035553b-9962-4df3-9530-9a1e29c5de35] Process exited with code 0\n2025-07-16 13:00:33.306 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][51f2fdae-4a1d-454e-98b6-a91c7f0afc68] socks connection closed\n2025-07-16 13:00:33.306 [info] [command][6035553b-9962-4df3-9530-9a1e29c5de35] Socket close event received\n2025-07-16 13:00:33.331 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59638 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:01:33.311 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:01:33.313 [info] [command][3c49114f-b031-4301-bc82-6b8c8027ab12] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""3c49114f-b031-4301-bc82-6b8c8027ab12""}\n2025-07-16 13:01:33.314 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d03f4081-729b-4e2b-b779-b6ea713b1100] received connection request\n2025-07-16 13:01:33.314 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:01:33.340 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d03f4081-729b-4e2b-b779-b6ea713b1100] socks forwarding established\n2025-07-16 13:01:33.367 [info] [command][3c49114f-b031-4301-bc82-6b8c8027ab12] Process exited with code 0\n2025-07-16 13:01:33.367 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d03f4081-729b-4e2b-b779-b6ea713b1100] 
socks connection closed\n2025-07-16 13:01:33.367 [info] [command][3c49114f-b031-4301-bc82-6b8c8027ab12] Socket close event received\n2025-07-16 13:01:33.393 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59659 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:02:33.373 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:02:33.374 [info] [command][d84d4349-4fc8-46b9-935a-9f5da5eb8c06] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d84d4349-4fc8-46b9-935a-9f5da5eb8c06""}\n2025-07-16 13:02:33.375 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][b9ba3d9f-5081-4d20-83a4-41b4e1525e31] received connection request\n2025-07-16 13:02:33.376 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:02:33.400 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b9ba3d9f-5081-4d20-83a4-41b4e1525e31] socks forwarding established\n2025-07-16 13:02:33.427 [info] [command][d84d4349-4fc8-46b9-935a-9f5da5eb8c06] Process exited with code 0\n2025-07-16 13:02:33.428 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b9ba3d9f-5081-4d20-83a4-41b4e1525e31] socks connection closed\n2025-07-16 13:02:33.428 [info] [command][d84d4349-4fc8-46b9-935a-9f5da5eb8c06] Socket close event received\n2025-07-16 13:02:33.451 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59716 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:03:33.433 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:03:33.434 [info] [command][3f2f106e-53c2-436c-a98e-d9d14f77455c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""3f2f106e-53c2-436c-a98e-d9d14f77455c""}\n2025-07-16 13:03:33.434 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][47ffaa99-e5dd-4442-a6e9-4ffc15c8876c] received connection request\n2025-07-16 13:03:33.434 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:03:33.458 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][47ffaa99-e5dd-4442-a6e9-4ffc15c8876c] socks forwarding established\n2025-07-16 13:03:33.486 [info] [command][3f2f106e-53c2-436c-a98e-d9d14f77455c] Process exited with code 0\n2025-07-16 13:03:33.486 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][47ffaa99-e5dd-4442-a6e9-4ffc15c8876c] socks connection closed\n2025-07-16 13:03:33.486 [info] [command][3f2f106e-53c2-436c-a98e-d9d14f77455c] Socket close event received\n2025-07-16 13:03:33.512 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59743 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:04:33.492 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:04:33.494 [info] [command][2b7e9f7e-c3c2-4067-9bcb-39cb06c9fba8] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""2b7e9f7e-c3c2-4067-9bcb-39cb06c9fba8""}\n2025-07-16 13:04:33.494 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][532edc95-e373-4031-b2ea-a4ec27088ac0] received connection request\n2025-07-16 13:04:33.495 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:04:33.518 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][532edc95-e373-4031-b2ea-a4ec27088ac0] socks forwarding established\n2025-07-16 13:04:33.545 [info] [command][2b7e9f7e-c3c2-4067-9bcb-39cb06c9fba8] Process exited with code 0\n2025-07-16 13:04:33.545 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][532edc95-e373-4031-b2ea-a4ec27088ac0] socks connection closed\n2025-07-16 13:04:33.545 [info] [command][2b7e9f7e-c3c2-4067-9bcb-39cb06c9fba8] Socket close event received\n2025-07-16 13:04:33.569 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59769 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:05:33.549 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:05:33.552 [info] [command][eab28431-2239-4ab0-a592-5daee580a544] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""eab28431-2239-4ab0-a592-5daee580a544""}\n2025-07-16 13:05:33.552 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][17d99e9c-c573-4f46-89ac-f472a0f21196] received connection request\n2025-07-16 13:05:33.552 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:05:33.576 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][17d99e9c-c573-4f46-89ac-f472a0f21196] socks forwarding established\n2025-07-16 13:05:33.604 [info] [command][eab28431-2239-4ab0-a592-5daee580a544] Process exited with code 0\n2025-07-16 13:05:33.604 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][17d99e9c-c573-4f46-89ac-f472a0f21196] socks connection closed\n2025-07-16 13:05:33.604 [info] [command][eab28431-2239-4ab0-a592-5daee580a544] Socket close event received\n2025-07-16 13:05:33.628 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59812 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:06:33.607 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:06:33.610 [info] [command][b1ffd431-62d2-4c1a-8b46-8473d11269ef] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""b1ffd431-62d2-4c1a-8b46-8473d11269ef""}\n2025-07-16 13:06:33.610 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][ed655557-a2d0-4223-af79-ef214dd9d8b7] received connection request\n2025-07-16 13:06:33.611 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:06:33.635 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][ed655557-a2d0-4223-af79-ef214dd9d8b7] socks forwarding established\n2025-07-16 13:06:33.664 [info] [command][b1ffd431-62d2-4c1a-8b46-8473d11269ef] Process exited with code 0\n2025-07-16 13:06:33.664 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ed655557-a2d0-4223-af79-ef214dd9d8b7] socks connection closed\n2025-07-16 13:06:33.666 [info] [command][b1ffd431-62d2-4c1a-8b46-8473d11269ef] Socket close event received\n2025-07-16 13:06:33.689 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59840 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:07:33.671 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:07:33.672 [info] [command][79190c08-bc6f-4fc3-a399-a20dc6ddc49c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""79190c08-bc6f-4fc3-a399-a20dc6ddc49c""}\n2025-07-16 13:07:33.673 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][599b6116-da93-466e-9ecf-0bfe39a1a0b8] received connection request\n2025-07-16 13:07:33.673 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:07:33.699 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][599b6116-da93-466e-9ecf-0bfe39a1a0b8] socks forwarding established\n2025-07-16 13:07:33.727 [info] [command][79190c08-bc6f-4fc3-a399-a20dc6ddc49c] Process exited with code 0\n2025-07-16 13:07:33.728 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][599b6116-da93-466e-9ecf-0bfe39a1a0b8] socks connection closed\n2025-07-16 13:07:33.728 [info] [command][79190c08-bc6f-4fc3-a399-a20dc6ddc49c] Socket close event received\n2025-07-16 13:07:33.751 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59895 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:08:33.733 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:08:33.735 [info] [command][f95d653d-428c-4c7b-988c-66ca7755f2aa] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""f95d653d-428c-4c7b-988c-66ca7755f2aa""}\n2025-07-16 13:08:33.736 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][1f2628f0-ab2e-416b-875e-e8fdd4d78455] received connection request\n2025-07-16 13:08:33.736 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:08:33.761 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1f2628f0-ab2e-416b-875e-e8fdd4d78455] socks forwarding established\n2025-07-16 13:08:33.787 [info] [command][f95d653d-428c-4c7b-988c-66ca7755f2aa] Process exited with code 0\n2025-07-16 13:08:33.787 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1f2628f0-ab2e-416b-875e-e8fdd4d78455] socks connection closed\n2025-07-16 13:08:33.787 [info] [command][f95d653d-428c-4c7b-988c-66ca7755f2aa] Socket close event received\n2025-07-16 13:08:33.848 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 
37309, connect from 127.0.0.1 port 59920 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:09:33.792 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:09:33.794 [info] [command][f5858c91-2b19-4d89-a3bf-0b06fd568b88] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""f5858c91-2b19-4d89-a3bf-0b06fd568b88""}\n2025-07-16 13:09:33.795 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][e73ffe8a-b226-40d2-9fd9-cd0d099c8028] received connection request\n2025-07-16 13:09:33.795 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:09:33.820 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e73ffe8a-b226-40d2-9fd9-cd0d099c8028] socks forwarding established\n2025-07-16 13:09:33.846 [info] [command][f5858c91-2b19-4d89-a3bf-0b06fd568b88] Process exited with code 0\n2025-07-16 13:09:33.846 [info] [command][f5858c91-2b19-4d89-a3bf-0b06fd568b88] Socket close event received\n2025-07-16 13:09:33.847 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e73ffe8a-b226-40d2-9fd9-cd0d099c8028] socks connection closed\n2025-07-16 13:09:33.871 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 59993 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:10:33.853 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:10:33.855 [info] [command][e0545a7c-ed0e-4b38-a979-81977828c9df] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""e0545a7c-ed0e-4b38-a979-81977828c9df""}\n2025-07-16 13:10:33.856 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][ae7f630f-0fc4-4cc0-97d0-590593d71731] received connection request\n2025-07-16 13:10:33.857 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:10:33.966 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ae7f630f-0fc4-4cc0-97d0-590593d71731] socks forwarding established\n2025-07-16 13:10:33.995 [info] [command][e0545a7c-ed0e-4b38-a979-81977828c9df] Process exited with code 0\n2025-07-16 13:10:33.996 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ae7f630f-0fc4-4cc0-97d0-590593d71731] socks connection closed\n2025-07-16 13:10:33.996 [info] [command][e0545a7c-ed0e-4b38-a979-81977828c9df] Socket close event received\n2025-07-16 13:10:34.025 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60020 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:11:33.999 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:11:34.001 [info] [command][8189c103-9d6f-42d3-bbf8-105038609ba1] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""8189c103-9d6f-42d3-bbf8-105038609ba1""}\n2025-07-16 13:11:34.002 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][3284741f-98ed-4350-a905-7a7a39d3449d] received connection request\n2025-07-16 13:11:34.002 [info] 
(ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:11:34.025 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3284741f-98ed-4350-a905-7a7a39d3449d] socks forwarding established\n2025-07-16 13:11:34.050 [info] [command][8189c103-9d6f-42d3-bbf8-105038609ba1] Process exited with code 0\n2025-07-16 13:11:34.051 [info] [command][8189c103-9d6f-42d3-bbf8-105038609ba1] Socket close event received\n2025-07-16 13:11:34.051 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3284741f-98ed-4350-a905-7a7a39d3449d] socks connection closed\n2025-07-16 13:11:34.074 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60079 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:12:34.056 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:12:34.057 [info] [command][88bbf688-d9dd-4dfe-822e-21bd74bad14f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""88bbf688-d9dd-4dfe-822e-21bd74bad14f""}\n2025-07-16 13:12:34.057 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][fc73627c-6b0f-4bba-92f5-596e3762a59f] received connection request\n2025-07-16 13:12:34.058 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:12:34.081 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][fc73627c-6b0f-4bba-92f5-596e3762a59f] socks forwarding established\n2025-07-16 13:12:34.106 [info] [command][88bbf688-d9dd-4dfe-822e-21bd74bad14f] Process exited with code 0\n2025-07-16 13:12:34.107 [info] [command][88bbf688-d9dd-4dfe-822e-21bd74bad14f] Socket close event received\n2025-07-16 13:12:34.107 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][fc73627c-6b0f-4bba-92f5-596e3762a59f] socks connection closed\n2025-07-16 13:12:34.130 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60133 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:13:34.109 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:13:34.111 [info] [command][6285fe4f-db60-4e8d-b216-300bd4f0778e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""6285fe4f-db60-4e8d-b216-300bd4f0778e""}\n2025-07-16 13:13:34.112 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][a620636b-4f18-4afb-b152-c9dc6db34770] received connection request\n2025-07-16 13:13:34.112 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:13:34.136 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a620636b-4f18-4afb-b152-c9dc6db34770] socks forwarding established\n2025-07-16 13:13:34.163 [info] [command][6285fe4f-db60-4e8d-b216-300bd4f0778e] Process exited with code 0\n2025-07-16 13:13:34.164 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a620636b-4f18-4afb-b152-c9dc6db34770] 
socks connection closed\n2025-07-16 13:13:34.164 [info] [command][6285fe4f-db60-4e8d-b216-300bd4f0778e] Socket close event received\n2025-07-16 13:13:34.186 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60161 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:14:34.168 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:14:34.171 [info] [command][e6bc87bf-5a7f-444d-9851-e0c5ac8995cf] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""e6bc87bf-5a7f-444d-9851-e0c5ac8995cf""}\n2025-07-16 13:14:34.171 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][6fda740c-7484-4e95-8c87-3d168c0d02c1] received connection request\n2025-07-16 13:14:34.172 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:14:34.195 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6fda740c-7484-4e95-8c87-3d168c0d02c1] socks forwarding established\n2025-07-16 13:14:34.222 [info] [command][e6bc87bf-5a7f-444d-9851-e0c5ac8995cf] Process exited with code 0\n2025-07-16 13:14:34.223 [info] [command][e6bc87bf-5a7f-444d-9851-e0c5ac8995cf] Socket close event received\n2025-07-16 13:14:34.223 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6fda740c-7484-4e95-8c87-3d168c0d02c1] socks connection closed\n2025-07-16 13:14:34.246 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60208 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:15:34.225 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:15:34.227 [info] [command][8dcd6bf7-e644-4bf7-9141-5c36e75797e3] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""8dcd6bf7-e644-4bf7-9141-5c36e75797e3""}\n2025-07-16 13:15:34.228 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d4c07564-0fe7-4420-a74e-c371990b1bee] received connection request\n2025-07-16 13:15:34.228 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:15:34.257 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d4c07564-0fe7-4420-a74e-c371990b1bee] socks forwarding established\n2025-07-16 13:15:34.285 [info] [command][8dcd6bf7-e644-4bf7-9141-5c36e75797e3] Process exited with code 0\n2025-07-16 13:15:34.285 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d4c07564-0fe7-4420-a74e-c371990b1bee] socks connection closed\n2025-07-16 13:15:34.285 [info] [command][8dcd6bf7-e644-4bf7-9141-5c36e75797e3] Socket close event received\n2025-07-16 13:15:34.309 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60233 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:16:34.286 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:16:34.288 [info] [command][a3138fbe-a391-466a-9873-3e844bd8142f] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""a3138fbe-a391-466a-9873-3e844bd8142f""}\n2025-07-16 13:16:34.289 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][44924c89-ddcd-4758-8460-d1ce11f0c5d4] received connection request\n2025-07-16 13:16:34.289 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:16:34.340 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][44924c89-ddcd-4758-8460-d1ce11f0c5d4] socks forwarding established\n2025-07-16 13:16:34.368 [info] [command][a3138fbe-a391-466a-9873-3e844bd8142f] Process exited with code 0\n2025-07-16 13:16:34.368 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][44924c89-ddcd-4758-8460-d1ce11f0c5d4] socks connection closed\n2025-07-16 13:16:34.368 [info] [command][a3138fbe-a391-466a-9873-3e844bd8142f] Socket close event received\n2025-07-16 13:16:34.396 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60259 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:17:34.369 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:17:34.371 [info] [command][fc00b4a0-e493-4aa2-bce6-0e5779aab6e0] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""fc00b4a0-e493-4aa2-bce6-0e5779aab6e0""}\n2025-07-16 13:17:34.372 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][67c1892f-3b8f-4d45-806f-22a15bcf3a60] received connection request\n2025-07-16 13:17:34.372 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:17:34.396 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][67c1892f-3b8f-4d45-806f-22a15bcf3a60] socks forwarding established\n2025-07-16 13:17:34.422 [info] [command][fc00b4a0-e493-4aa2-bce6-0e5779aab6e0] Process exited with code 0\n2025-07-16 13:17:34.422 [info] [command][fc00b4a0-e493-4aa2-bce6-0e5779aab6e0] Socket close event received\n2025-07-16 13:17:34.423 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][67c1892f-3b8f-4d45-806f-22a15bcf3a60] socks connection closed\n2025-07-16 13:17:34.447 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60323 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:18:34.423 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:18:34.425 [info] [command][04e90238-88a0-401f-99c1-2d9353190296] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""04e90238-88a0-401f-99c1-2d9353190296""}\n2025-07-16 13:18:34.425 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][7da76dfc-3ab8-43a1-bd12-771ef41e2229] received connection request\n2025-07-16 13:18:34.425 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:18:34.493 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][7da76dfc-3ab8-43a1-bd12-771ef41e2229] socks forwarding established\n2025-07-16 13:18:34.519 [info] [command][04e90238-88a0-401f-99c1-2d9353190296] Process exited with code 0\n2025-07-16 13:18:34.519 [info] [command][04e90238-88a0-401f-99c1-2d9353190296] Socket close event received\n2025-07-16 13:18:34.520 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7da76dfc-3ab8-43a1-bd12-771ef41e2229] socks connection closed\n2025-07-16 13:18:34.542 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60369 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:19:34.524 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:19:34.525 [info] [command][92c4a44e-abb0-4ce9-9d7c-941fcd50c0ec] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""92c4a44e-abb0-4ce9-9d7c-941fcd50c0ec""}\n2025-07-16 13:19:34.526 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][9fa6086e-da01-4576-a0ec-4aab59ea899a] received connection request\n2025-07-16 13:19:34.527 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:19:34.552 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][9fa6086e-da01-4576-a0ec-4aab59ea899a] socks forwarding established\n2025-07-16 13:19:34.580 [info] [command][92c4a44e-abb0-4ce9-9d7c-941fcd50c0ec] Process exited with code 0\n2025-07-16 13:19:34.581 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][9fa6086e-da01-4576-a0ec-4aab59ea899a] socks connection closed\n2025-07-16 13:19:34.581 [info] [command][92c4a44e-abb0-4ce9-9d7c-941fcd50c0ec] Socket close event received\n2025-07-16 13:19:34.607 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60413 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:20:34.586 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:20:34.587 [info] [command][944634f9-6f7f-4380-a1b3-f9c50953069a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""944634f9-6f7f-4380-a1b3-f9c50953069a""}\n2025-07-16 13:20:34.588 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][80637473-4abe-42a3-862e-bdb14dd550dd] received connection request\n2025-07-16 13:20:34.589 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:20:34.612 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][80637473-4abe-42a3-862e-bdb14dd550dd] socks forwarding established\n2025-07-16 13:20:34.644 [info] [command][944634f9-6f7f-4380-a1b3-f9c50953069a] Process exited with code 0\n2025-07-16 13:20:34.644 [info] [command][944634f9-6f7f-4380-a1b3-f9c50953069a] Socket close event received\n2025-07-16 13:20:34.662 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][80637473-4abe-42a3-862e-bdb14dd550dd] socks connection closed\n2025-07-16 13:20:34.668 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 
37309, connect from 127.0.0.1 port 60442 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:21:34.648 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:21:34.649 [info] [command][1720030d-59e0-445d-9007-a9e45293e03a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""1720030d-59e0-445d-9007-a9e45293e03a""}\n2025-07-16 13:21:34.650 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][2bd4cbfd-470f-4c05-aef5-938d81dc6120] received connection request\n2025-07-16 13:21:34.651 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:21:34.675 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][2bd4cbfd-470f-4c05-aef5-938d81dc6120] socks forwarding established\n2025-07-16 13:21:34.704 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][2bd4cbfd-470f-4c05-aef5-938d81dc6120] socks connection closed\n2025-07-16 13:21:34.704 [info] [command][1720030d-59e0-445d-9007-a9e45293e03a] Process exited with code 0\n2025-07-16 13:21:34.704 [info] [command][1720030d-59e0-445d-9007-a9e45293e03a] Socket close event received\n2025-07-16 13:21:34.813 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60480 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:22:34.710 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:22:34.711 [info] [command][a657cc4d-b0e3-4c92-8615-21d77ce8f5f7] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""a657cc4d-b0e3-4c92-8615-21d77ce8f5f7""}\n2025-07-16 13:22:34.712 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][e4172b64-bf13-472d-931e-44207e3dd0c8] received connection request\n2025-07-16 13:22:34.713 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:22:34.753 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e4172b64-bf13-472d-931e-44207e3dd0c8] socks forwarding established\n2025-07-16 13:22:34.782 [info] [command][a657cc4d-b0e3-4c92-8615-21d77ce8f5f7] Process exited with code 0\n2025-07-16 13:22:34.782 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e4172b64-bf13-472d-931e-44207e3dd0c8] socks connection closed\n2025-07-16 13:22:34.782 [info] [command][a657cc4d-b0e3-4c92-8615-21d77ce8f5f7] Socket close event received\n2025-07-16 13:22:34.805 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60537 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:23:34.787 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:23:34.788 [info] [command][3228515f-0adb-4fc5-b12b-96c30d8bbb77] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""3228515f-0adb-4fc5-b12b-96c30d8bbb77""}\n2025-07-16 13:23:34.789 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][b238a096-6d33-4d55-a229-ff4dbe6ad184] received connection request\n2025-07-16 13:23:34.789 [info] 
(ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:23:34.819 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b238a096-6d33-4d55-a229-ff4dbe6ad184] socks forwarding established\n2025-07-16 13:23:34.846 [info] [command][3228515f-0adb-4fc5-b12b-96c30d8bbb77] Process exited with code 0\n2025-07-16 13:23:34.846 [info] [command][3228515f-0adb-4fc5-b12b-96c30d8bbb77] Socket close event received\n2025-07-16 13:23:34.847 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b238a096-6d33-4d55-a229-ff4dbe6ad184] socks connection closed\n2025-07-16 13:23:34.869 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60560 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:24:34.849 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:24:34.850 [info] [command][c393b76f-9864-4de6-b471-5ff5f0ff31c3] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""c393b76f-9864-4de6-b471-5ff5f0ff31c3""}\n2025-07-16 13:24:34.850 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][cb778d7d-f5c6-4e33-947e-c4410fccef41] received connection request\n2025-07-16 13:24:34.851 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:24:34.874 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][cb778d7d-f5c6-4e33-947e-c4410fccef41] socks forwarding established\n2025-07-16 13:24:34.900 [info] [command][c393b76f-9864-4de6-b471-5ff5f0ff31c3] Process exited with code 0\n2025-07-16 13:24:34.900 [info] [command][c393b76f-9864-4de6-b471-5ff5f0ff31c3] Socket close event received\n2025-07-16 13:24:34.918 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][cb778d7d-f5c6-4e33-947e-c4410fccef41] socks connection closed\n2025-07-16 13:24:34.923 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60604 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:25:34.902 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:25:34.904 [info] [command][f7a90bb8-4e97-4213-9446-fea554701c16] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""f7a90bb8-4e97-4213-9446-fea554701c16""}\n2025-07-16 13:25:34.905 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][e74c57ee-6974-4353-b860-d5f8199a31e0] received connection request\n2025-07-16 13:25:34.906 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:25:34.930 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e74c57ee-6974-4353-b860-d5f8199a31e0] socks forwarding established\n2025-07-16 13:25:34.956 [info] [command][f7a90bb8-4e97-4213-9446-fea554701c16] Process exited with code 0\n2025-07-16 13:25:34.956 [info] [command][f7a90bb8-4e97-4213-9446-fea554701c16] Socket close event received\n2025-07-16 13:25:34.957 [info] 
[forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e74c57ee-6974-4353-b860-d5f8199a31e0] socks connection closed\n2025-07-16 13:25:34.980 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60642 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:26:34.957 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:26:34.959 [info] [command][f2202e26-1c51-46f2-85d1-c163afaf4e41] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""f2202e26-1c51-46f2-85d1-c163afaf4e41""}\n2025-07-16 13:26:34.960 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][aac82229-6d10-446c-83d4-b4b23eacb271] received connection request\n2025-07-16 13:26:34.960 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:26:34.984 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][aac82229-6d10-446c-83d4-b4b23eacb271] socks forwarding established\n2025-07-16 13:26:35.015 [info] [command][f2202e26-1c51-46f2-85d1-c163afaf4e41] Process exited with code 0\n2025-07-16 13:26:35.015 [info] [command][f2202e26-1c51-46f2-85d1-c163afaf4e41] Socket close event received\n2025-07-16 13:26:35.016 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][aac82229-6d10-446c-83d4-b4b23eacb271] socks connection closed\n2025-07-16 13:26:35.038 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60668 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:27:35.021 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:27:35.022 [info] [command][3217f505-0c0f-41f2-a0c1-5da64c95407c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""3217f505-0c0f-41f2-a0c1-5da64c95407c""}\n2025-07-16 13:27:35.023 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][7ab7b899-f2e7-49c4-9451-79cee686b56a] received connection request\n2025-07-16 13:27:35.024 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:27:35.047 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7ab7b899-f2e7-49c4-9451-79cee686b56a] socks forwarding established\n2025-07-16 13:27:35.075 [info] [command][3217f505-0c0f-41f2-a0c1-5da64c95407c] Process exited with code 0\n2025-07-16 13:27:35.075 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7ab7b899-f2e7-49c4-9451-79cee686b56a] socks connection closed\n2025-07-16 13:27:35.075 [info] [command][3217f505-0c0f-41f2-a0c1-5da64c95407c] Socket close event received\n2025-07-16 13:27:35.098 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60737 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:28:35.080 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:28:35.081 [info] [command][d584e9c9-6cc3-46fa-9ee9-4c41b693be41] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d584e9c9-6cc3-46fa-9ee9-4c41b693be41""}\n2025-07-16 13:28:35.082 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][b69af210-f1f5-4ded-b7eb-68bac26e758d] received connection request\n2025-07-16 13:28:35.083 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:28:35.184 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b69af210-f1f5-4ded-b7eb-68bac26e758d] socks forwarding established\n2025-07-16 13:28:35.213 [info] [command][d584e9c9-6cc3-46fa-9ee9-4c41b693be41] Process exited with code 0\n2025-07-16 13:28:35.213 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b69af210-f1f5-4ded-b7eb-68bac26e758d] socks connection closed\n2025-07-16 13:28:35.213 [info] [command][d584e9c9-6cc3-46fa-9ee9-4c41b693be41] Socket close event received\n2025-07-16 13:28:35.237 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60766 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:29:35.216 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:29:35.219 [info] [command][1e4eaf48-acd7-4565-be37-a132171224a6] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""1e4eaf48-acd7-4565-be37-a132171224a6""}\n2025-07-16 13:29:35.219 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][803785fa-25ae-458b-b43c-806e086ea92f] received connection request\n2025-07-16 13:29:35.220 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:29:35.245 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][803785fa-25ae-458b-b43c-806e086ea92f] socks forwarding established\n2025-07-16 13:29:35.270 [info] [command][1e4eaf48-acd7-4565-be37-a132171224a6] Process exited with code 0\n2025-07-16 13:29:35.270 [info] [command][1e4eaf48-acd7-4565-be37-a132171224a6] Socket close event received\n2025-07-16 13:29:35.271 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][803785fa-25ae-458b-b43c-806e086ea92f] socks connection closed\n2025-07-16 13:29:35.294 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60812 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:30:35.273 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:30:35.275 [info] [command][4c646d89-d06a-49be-b157-2e0b05c8b4a8] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""4c646d89-d06a-49be-b157-2e0b05c8b4a8""}\n2025-07-16 13:30:35.276 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][e57e1d73-9120-4488-8501-f01803f7716b] received connection request\n2025-07-16 13:30:35.276 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:30:35.300 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][e57e1d73-9120-4488-8501-f01803f7716b] socks forwarding established\n2025-07-16 13:30:35.328 [info] [command][4c646d89-d06a-49be-b157-2e0b05c8b4a8] Process exited with code 0\n2025-07-16 13:30:35.328 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e57e1d73-9120-4488-8501-f01803f7716b] socks connection closed\n2025-07-16 13:30:35.328 [info] [command][4c646d89-d06a-49be-b157-2e0b05c8b4a8] Socket close event received\n2025-07-16 13:30:35.353 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60854 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:31:35.330 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:31:35.331 [info] [command][b5640771-eeed-4b9a-9d72-5865ee180731] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""b5640771-eeed-4b9a-9d72-5865ee180731""}\n2025-07-16 13:31:35.332 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0a6da67c-33a9-44f6-a1c4-ca132f4f0aa8] received connection request\n2025-07-16 13:31:35.332 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:31:35.356 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0a6da67c-33a9-44f6-a1c4-ca132f4f0aa8] socks forwarding established\n2025-07-16 13:31:35.383 [info] [command][b5640771-eeed-4b9a-9d72-5865ee180731] Process exited with code 0\n2025-07-16 13:31:35.383 [info] [command][b5640771-eeed-4b9a-9d72-5865ee180731] Socket close event received\n2025-07-16 13:31:35.384 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0a6da67c-33a9-44f6-a1c4-ca132f4f0aa8] socks connection closed\n2025-07-16 13:31:35.408 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60877 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:32:35.391 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:32:35.393 [info] [command][3a0ed10d-17cf-4379-b3d8-15f80b389fd5] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""3a0ed10d-17cf-4379-b3d8-15f80b389fd5""}\n2025-07-16 13:32:35.394 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][94aaf0f6-2e8f-42e5-b92b-960c7d6edfdf] received connection request\n2025-07-16 13:32:35.395 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:32:35.418 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][94aaf0f6-2e8f-42e5-b92b-960c7d6edfdf] socks forwarding established\n2025-07-16 13:32:35.445 [info] [command][3a0ed10d-17cf-4379-b3d8-15f80b389fd5] Process exited with code 0\n2025-07-16 13:32:35.446 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][94aaf0f6-2e8f-42e5-b92b-960c7d6edfdf] socks connection closed\n2025-07-16 13:32:35.446 [info] [command][3a0ed10d-17cf-4379-b3d8-15f80b389fd5] Socket close event received\n2025-07-16 13:32:35.468 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 
37309, connect from 127.0.0.1 port 60938 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:33:35.451 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:33:35.453 [info] [command][4c78a4b9-9e99-4b90-b67d-4fda378665ba] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""4c78a4b9-9e99-4b90-b67d-4fda378665ba""}\n2025-07-16 13:33:35.454 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][b5cb9da6-cee4-45a2-bb79-965445b399e2] received connection request\n2025-07-16 13:33:35.455 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:33:35.479 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b5cb9da6-cee4-45a2-bb79-965445b399e2] socks forwarding established\n2025-07-16 13:33:35.506 [info] [command][4c78a4b9-9e99-4b90-b67d-4fda378665ba] Process exited with code 0\n2025-07-16 13:33:35.506 [info] [command][4c78a4b9-9e99-4b90-b67d-4fda378665ba] Socket close event received\n2025-07-16 13:33:35.507 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b5cb9da6-cee4-45a2-bb79-965445b399e2] socks connection closed\n2025-07-16 13:33:35.531 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60978 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:34:35.512 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:34:35.513 [info] [command][1d767930-5c6c-4676-baa0-4ff541bc07db] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""1d767930-5c6c-4676-baa0-4ff541bc07db""}\n2025-07-16 13:34:35.513 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][7e25ef02-c7a3-4f7c-bfbd-fe7bb2950513] received connection request\n2025-07-16 13:34:35.513 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 13:34:35.513 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:34:35.536 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7e25ef02-c7a3-4f7c-bfbd-fe7bb2950513] socks forwarding established\n2025-07-16 13:34:35.561 [info] [command][1d767930-5c6c-4676-baa0-4ff541bc07db] Process exited with code 0\n2025-07-16 13:34:35.561 [info] [command][1d767930-5c6c-4676-baa0-4ff541bc07db] Socket close event received\n2025-07-16 13:34:35.562 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7e25ef02-c7a3-4f7c-bfbd-fe7bb2950513] socks connection closed\n2025-07-16 13:34:35.584 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61018 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:35:35.567 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:35:35.569 [info] [command][58e0dc55-18e3-4cff-a167-06a677323c00] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""58e0dc55-18e3-4cff-a167-06a677323c00""}\n2025-07-16 13:35:35.570 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][91c55f65-1dda-4a9d-beab-f5e0d3a7c640] 
received connection request\n2025-07-16 13:35:35.571 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:35:35.594 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][91c55f65-1dda-4a9d-beab-f5e0d3a7c640] socks forwarding established\n2025-07-16 13:35:35.621 [info] [command][58e0dc55-18e3-4cff-a167-06a677323c00] Process exited with code 0\n2025-07-16 13:35:35.622 [info] [command][58e0dc55-18e3-4cff-a167-06a677323c00] Socket close event received\n2025-07-16 13:35:35.622 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][91c55f65-1dda-4a9d-beab-f5e0d3a7c640] socks connection closed\n2025-07-16 13:35:35.645 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61047 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:36:35.626 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:36:35.627 [info] [command][3cb50b9f-3989-465e-ab36-e4f850f7cdca] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""3cb50b9f-3989-465e-ab36-e4f850f7cdca""}\n2025-07-16 13:36:35.628 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][fde6243b-35a2-41f2-a3d1-b245ccf1dc86] received connection request\n2025-07-16 13:36:35.628 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:36:35.652 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][fde6243b-35a2-41f2-a3d1-b245ccf1dc86] socks forwarding established\n2025-07-16 13:36:35.680 [info] [command][3cb50b9f-3989-465e-ab36-e4f850f7cdca] Process exited with code 0\n2025-07-16 13:36:35.680 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][fde6243b-35a2-41f2-a3d1-b245ccf1dc86] socks connection closed\n2025-07-16 13:36:35.681 [info] [command][3cb50b9f-3989-465e-ab36-e4f850f7cdca] Socket close event received\n2025-07-16 13:36:35.709 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61072 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:37:35.686 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:37:35.687 [info] [command][f08c1615-942a-4b9a-9be0-c4e71878b665] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""f08c1615-942a-4b9a-9be0-c4e71878b665""}\n2025-07-16 13:37:35.688 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][25bc952b-8c95-44bd-bf3d-7bd47e45811b] received connection request\n2025-07-16 13:37:35.689 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:37:35.717 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][25bc952b-8c95-44bd-bf3d-7bd47e45811b] socks forwarding established\n2025-07-16 13:37:35.744 [info] [command][f08c1615-942a-4b9a-9be0-c4e71878b665] Process exited with code 0\n2025-07-16 13:37:35.745 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 
-> 127.0.0.1:37309][25bc952b-8c95-44bd-bf3d-7bd47e45811b] socks connection closed\n2025-07-16 13:37:35.745 [info] [command][f08c1615-942a-4b9a-9be0-c4e71878b665] Socket close event received\n2025-07-16 13:37:35.767 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61135 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:38:35.750 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:38:35.751 [info] [command][60550068-1a77-463e-98c4-8346598976bc] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""60550068-1a77-463e-98c4-8346598976bc""}\n2025-07-16 13:38:35.752 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][71fc2e36-7284-437b-9127-82359a31b363] received connection request\n2025-07-16 13:38:35.752 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:38:35.916 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][71fc2e36-7284-437b-9127-82359a31b363] socks forwarding established\n2025-07-16 13:38:35.942 [info] [command][60550068-1a77-463e-98c4-8346598976bc] Process exited with code 0\n2025-07-16 13:38:35.942 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][71fc2e36-7284-437b-9127-82359a31b363] socks connection closed\n2025-07-16 13:38:35.942 [info] [command][60550068-1a77-463e-98c4-8346598976bc] Socket close event received\n2025-07-16 13:38:36.096 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61163 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:39:35.947 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:39:35.949 [info] [command][9739260c-513f-4e4b-a29a-c9a78289cbab] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""9739260c-513f-4e4b-a29a-c9a78289cbab""}\n2025-07-16 13:39:35.950 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][ec0cf114-1ff3-4a7a-a86d-6d959e6b908b] received connection request\n2025-07-16 13:39:35.950 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:39:35.973 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ec0cf114-1ff3-4a7a-a86d-6d959e6b908b] socks forwarding established\n2025-07-16 13:39:36.000 [info] [command][9739260c-513f-4e4b-a29a-c9a78289cbab] Process exited with code 0\n2025-07-16 13:39:36.001 [info] [command][9739260c-513f-4e4b-a29a-c9a78289cbab] Socket close event received\n2025-07-16 13:39:36.001 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ec0cf114-1ff3-4a7a-a86d-6d959e6b908b] socks connection closed\n2025-07-16 13:39:36.024 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61205 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:40:36.005 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:40:36.006 [info] [command][3ee3d5ac-92b2-4e66-8ad4-19cc504efe28] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""3ee3d5ac-92b2-4e66-8ad4-19cc504efe28""}\n2025-07-16 13:40:36.007 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][1c15e49a-9309-4de4-ae45-2de7da822dad] received connection request\n2025-07-16 13:40:36.007 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:40:36.030 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1c15e49a-9309-4de4-ae45-2de7da822dad] socks forwarding established\n2025-07-16 13:40:36.059 [info] [command][3ee3d5ac-92b2-4e66-8ad4-19cc504efe28] Process exited with code 0\n2025-07-16 13:40:36.059 [info] [command][3ee3d5ac-92b2-4e66-8ad4-19cc504efe28] Socket close event received\n2025-07-16 13:40:36.059 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1c15e49a-9309-4de4-ae45-2de7da822dad] socks connection closed\n2025-07-16 13:40:36.083 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61228 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:41:36.065 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:41:36.068 [info] [command][ec0b23c9-0ba0-4470-b794-b50397fbba35] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ec0b23c9-0ba0-4470-b794-b50397fbba35""}\n2025-07-16 13:41:36.068 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][8287411d-eea8-4b05-a74c-e4a4f4e6a707] received connection request\n2025-07-16 13:41:36.069 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:41:36.092 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8287411d-eea8-4b05-a74c-e4a4f4e6a707] socks forwarding established\n2025-07-16 13:41:36.119 [info] [command][ec0b23c9-0ba0-4470-b794-b50397fbba35] Process exited with code 0\n2025-07-16 13:41:36.119 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8287411d-eea8-4b05-a74c-e4a4f4e6a707] socks connection closed\n2025-07-16 13:41:36.119 [info] [command][ec0b23c9-0ba0-4470-b794-b50397fbba35] Socket close event received\n2025-07-16 13:41:36.142 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61247 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:42:36.122 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:42:36.124 [info] [command][f061ec45-e96c-478e-a84e-9640611ab7da] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""f061ec45-e96c-478e-a84e-9640611ab7da""}\n2025-07-16 13:42:36.124 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][33464e36-5a41-4eae-9ba2-b67195159f17] received connection request\n2025-07-16 13:42:36.125 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:42:36.223 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][33464e36-5a41-4eae-9ba2-b67195159f17] socks forwarding established\n2025-07-16 13:42:36.386 [info] [command][f061ec45-e96c-478e-a84e-9640611ab7da] Process exited with code 0\n2025-07-16 13:42:36.387 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][33464e36-5a41-4eae-9ba2-b67195159f17] socks connection closed\n2025-07-16 13:42:36.387 [info] [command][f061ec45-e96c-478e-a84e-9640611ab7da] Socket close event received\n2025-07-16 13:42:36.701 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61283 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:43:36.392 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:43:36.395 [info] [command][ee0e27e0-ef87-4e42-8f53-066551568498] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ee0e27e0-ef87-4e42-8f53-066551568498""}\n2025-07-16 13:43:36.396 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][fdb388b8-8dde-4217-a9f0-bb0b7e9af37e] received connection request\n2025-07-16 13:43:36.396 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:43:36.421 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][fdb388b8-8dde-4217-a9f0-bb0b7e9af37e] socks forwarding established\n2025-07-16 13:43:36.452 [info] [command][ee0e27e0-ef87-4e42-8f53-066551568498] Process exited with code 0\n2025-07-16 13:43:36.453 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][fdb388b8-8dde-4217-a9f0-bb0b7e9af37e] socks connection closed\n2025-07-16 13:43:36.453 [info] [command][ee0e27e0-ef87-4e42-8f53-066551568498] Socket close event received\n2025-07-16 13:43:36.478 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61327 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:44:36.458 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:44:36.459 [info] [command][8f99b2ff-0b71-4866-bb78-33c9f7b96f22] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""8f99b2ff-0b71-4866-bb78-33c9f7b96f22""}\n2025-07-16 13:44:36.461 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][010e1ced-c631-4570-bfb7-46d941b391a7] received connection request\n2025-07-16 13:44:36.461 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:44:36.485 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][010e1ced-c631-4570-bfb7-46d941b391a7] socks forwarding established\n2025-07-16 13:44:36.512 [info] [command][8f99b2ff-0b71-4866-bb78-33c9f7b96f22] Process exited with code 0\n2025-07-16 13:44:36.512 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][010e1ced-c631-4570-bfb7-46d941b391a7] socks connection closed\n2025-07-16 13:44:36.512 [info] [command][8f99b2ff-0b71-4866-bb78-33c9f7b96f22] Socket close event received\n2025-07-16 13:44:36.536 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 
37309, connect from 127.0.0.1 port 61368 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:45:36.513 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:45:36.514 [info] [command][36be5c96-f307-4cf2-9d00-07071614a14b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""36be5c96-f307-4cf2-9d00-07071614a14b""}\n2025-07-16 13:45:36.514 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][6b2a9ec8-9b1e-4524-be66-9d496d2726e4] received connection request\n2025-07-16 13:45:36.515 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:45:36.538 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6b2a9ec8-9b1e-4524-be66-9d496d2726e4] socks forwarding established\n2025-07-16 13:45:36.564 [info] [command][36be5c96-f307-4cf2-9d00-07071614a14b] Process exited with code 0\n2025-07-16 13:45:36.564 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6b2a9ec8-9b1e-4524-be66-9d496d2726e4] socks connection closed\n2025-07-16 13:45:36.564 [info] [command][36be5c96-f307-4cf2-9d00-07071614a14b] Socket close event received\n2025-07-16 13:45:36.588 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61393 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:46:36.574 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:46:36.575 [info] [command][d71ca4a3-d2d0-443a-86d4-3aa18fd897d8] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d71ca4a3-d2d0-443a-86d4-3aa18fd897d8""}\n2025-07-16 13:46:36.576 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][cafd9f33-5517-475d-8b79-fbfcb57b777a] received connection request\n2025-07-16 13:46:36.577 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:46:36.642 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][cafd9f33-5517-475d-8b79-fbfcb57b777a] socks forwarding established\n2025-07-16 13:46:36.677 [info] [command][d71ca4a3-d2d0-443a-86d4-3aa18fd897d8] Process exited with code 0\n2025-07-16 13:46:36.677 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][cafd9f33-5517-475d-8b79-fbfcb57b777a] socks connection closed\n2025-07-16 13:46:36.677 [info] [command][d71ca4a3-d2d0-443a-86d4-3aa18fd897d8] Socket close event received\n2025-07-16 13:46:36.704 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61411 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:47:36.687 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:47:36.688 [info] [command][c7e2316e-82f1-4840-a29a-d48cc31d5abd] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""c7e2316e-82f1-4840-a29a-d48cc31d5abd""}\n2025-07-16 13:47:36.689 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][8b1cebb1-eb32-4130-8a08-466bcab53f11] received connection request\n2025-07-16 13:47:36.689 [info] 
(ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:47:36.715 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8b1cebb1-eb32-4130-8a08-466bcab53f11] socks forwarding established\n2025-07-16 13:47:36.829 [info] [command][c7e2316e-82f1-4840-a29a-d48cc31d5abd] Process exited with code 0\n2025-07-16 13:47:36.829 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8b1cebb1-eb32-4130-8a08-466bcab53f11] socks connection closed\n2025-07-16 13:47:36.829 [info] [command][c7e2316e-82f1-4840-a29a-d48cc31d5abd] Socket close event received\n2025-07-16 13:47:36.852 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61434 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:48:36.839 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:48:36.840 [info] [command][aed54258-4c6c-4b2f-a1bf-e0ba8e7a6fe0] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""aed54258-4c6c-4b2f-a1bf-e0ba8e7a6fe0""}\n2025-07-16 13:48:36.841 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][efc33a4f-9a76-4d7a-8bbc-29406117eeb7] received connection request\n2025-07-16 13:48:36.842 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:48:36.867 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][efc33a4f-9a76-4d7a-8bbc-29406117eeb7] socks forwarding established\n2025-07-16 13:48:36.894 [info] [command][aed54258-4c6c-4b2f-a1bf-e0ba8e7a6fe0] Process exited with code 0\n2025-07-16 13:48:36.894 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][efc33a4f-9a76-4d7a-8bbc-29406117eeb7] socks connection closed\n2025-07-16 13:48:36.894 [info] [command][aed54258-4c6c-4b2f-a1bf-e0ba8e7a6fe0] Socket close event received\n2025-07-16 13:48:36.918 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61482 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:49:36.896 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:49:36.898 [info] [command][cfedf650-bf2e-46d0-8371-ba444ab2de7d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""cfedf650-bf2e-46d0-8371-ba444ab2de7d""}\n2025-07-16 13:49:36.898 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][1cf5052d-632d-491d-91bf-3e644ccfbf43] received connection request\n2025-07-16 13:49:36.898 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:49:36.922 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1cf5052d-632d-491d-91bf-3e644ccfbf43] socks forwarding established\n2025-07-16 13:49:36.950 [info] [command][cfedf650-bf2e-46d0-8371-ba444ab2de7d] Process exited with code 0\n2025-07-16 13:49:36.950 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1cf5052d-632d-491d-91bf-3e644ccfbf43] 
socks connection closed\n2025-07-16 13:49:36.950 [info] [command][cfedf650-bf2e-46d0-8371-ba444ab2de7d] Socket close event received\n2025-07-16 13:49:37.050 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61558 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:50:36.953 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:50:36.956 [info] [command][8b0a6c69-f1a1-4d76-a4a2-ed97a08c9570] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""8b0a6c69-f1a1-4d76-a4a2-ed97a08c9570""}\n2025-07-16 13:50:36.957 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][1ab0e6f0-a672-4d0a-ace9-6c8119146792] received connection request\n2025-07-16 13:50:36.957 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:50:37.030 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1ab0e6f0-a672-4d0a-ace9-6c8119146792] socks forwarding established\n2025-07-16 13:50:37.311 [info] [command][8b0a6c69-f1a1-4d76-a4a2-ed97a08c9570] Process exited with code 0\n2025-07-16 13:50:37.365 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1ab0e6f0-a672-4d0a-ace9-6c8119146792] socks connection closed\n2025-07-16 13:50:37.365 [info] [command][8b0a6c69-f1a1-4d76-a4a2-ed97a08c9570] Socket close event received\n2025-07-16 13:50:37.413 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61607 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:51:37.315 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:51:37.315 [info] [command][0bf74b8d-2d3c-4e3a-b6c7-de6eaf93a1b8] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""0bf74b8d-2d3c-4e3a-b6c7-de6eaf93a1b8""}\n2025-07-16 13:51:37.316 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][7e2f3dce-ce45-4d47-816d-ae5618163eb9] received connection request\n2025-07-16 13:51:37.316 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:51:37.433 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7e2f3dce-ce45-4d47-816d-ae5618163eb9] socks forwarding established\n2025-07-16 13:51:37.592 [info] [command][0bf74b8d-2d3c-4e3a-b6c7-de6eaf93a1b8] Process exited with code 0\n2025-07-16 13:51:37.593 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7e2f3dce-ce45-4d47-816d-ae5618163eb9] socks connection closed\n2025-07-16 13:51:37.593 [info] [command][0bf74b8d-2d3c-4e3a-b6c7-de6eaf93a1b8] Socket close event received\n2025-07-16 13:51:37.617 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61671 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:52:37.598 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:52:37.600 [info] [command][2dfbc471-43b4-48c8-bff4-eb56bfc723b1] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""2dfbc471-43b4-48c8-bff4-eb56bfc723b1""}\n2025-07-16 13:52:37.601 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][47afc42a-54d7-438e-b4cc-2f833d077551] received connection request\n2025-07-16 13:52:37.601 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:52:37.626 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][47afc42a-54d7-438e-b4cc-2f833d077551] socks forwarding established\n2025-07-16 13:52:37.660 [info] [command][2dfbc471-43b4-48c8-bff4-eb56bfc723b1] Process exited with code 0\n2025-07-16 13:52:37.660 [info] [command][2dfbc471-43b4-48c8-bff4-eb56bfc723b1] Socket close event received\n2025-07-16 13:52:37.684 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][47afc42a-54d7-438e-b4cc-2f833d077551] socks connection closed\n2025-07-16 13:52:37.686 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61724 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:53:37.664 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:53:37.666 [info] [command][1574107b-e667-4280-98ee-6c023add1f97] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""1574107b-e667-4280-98ee-6c023add1f97""}\n2025-07-16 13:53:37.667 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0e310618-f10a-4580-b4df-a5796d16ee31] received connection request\n2025-07-16 13:53:37.667 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:53:37.693 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0e310618-f10a-4580-b4df-a5796d16ee31] socks forwarding established\n2025-07-16 13:53:37.721 [info] [command][1574107b-e667-4280-98ee-6c023add1f97] Process exited with code 0\n2025-07-16 13:53:37.722 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0e310618-f10a-4580-b4df-a5796d16ee31] socks connection closed\n2025-07-16 13:53:37.722 [info] [command][1574107b-e667-4280-98ee-6c023add1f97] Socket close event received\n2025-07-16 13:53:37.746 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61754 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:54:37.724 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:54:37.728 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][3defc2eb-3c4b-4a71-8ca0-f7f15e24e7bb] received connection request\n2025-07-16 13:54:37.728 [info] [command][7aae6dd4-44c6-402e-803b-65c5ce508890] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""7aae6dd4-44c6-402e-803b-65c5ce508890""}\n2025-07-16 13:54:37.728 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:54:37.753 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][3defc2eb-3c4b-4a71-8ca0-f7f15e24e7bb] socks forwarding established\n2025-07-16 13:54:37.780 [info] [command][7aae6dd4-44c6-402e-803b-65c5ce508890] Process exited with code 0\n2025-07-16 13:54:37.780 [info] [command][7aae6dd4-44c6-402e-803b-65c5ce508890] Socket close event received\n2025-07-16 13:54:37.781 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3defc2eb-3c4b-4a71-8ca0-f7f15e24e7bb] socks connection closed\n2025-07-16 13:54:37.804 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61801 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:55:37.783 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:55:37.785 [info] [command][8c37f24f-6a76-4438-95a3-31db96ce2268] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""8c37f24f-6a76-4438-95a3-31db96ce2268""}\n2025-07-16 13:55:37.785 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][8303dea6-6779-43d9-b97a-487bc393a0d0] received connection request\n2025-07-16 13:55:37.786 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 13:55:37.786 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:55:37.810 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8303dea6-6779-43d9-b97a-487bc393a0d0] socks forwarding established\n2025-07-16 13:55:37.837 [info] [command][8c37f24f-6a76-4438-95a3-31db96ce2268] Process exited with code 0\n2025-07-16 13:55:37.837 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8303dea6-6779-43d9-b97a-487bc393a0d0] socks connection closed\n2025-07-16 13:55:37.837 [info] [command][8c37f24f-6a76-4438-95a3-31db96ce2268] Socket close event received\n2025-07-16 13:55:37.862 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61832 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:56:37.842 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:56:37.844 [info] [command][eb2c17e1-34ca-490f-8fc2-3e0a99179529] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""eb2c17e1-34ca-490f-8fc2-3e0a99179529""}\n2025-07-16 13:56:37.844 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][ada81f2a-aab4-49f5-99ed-9784c223bc98] received connection request\n2025-07-16 13:56:37.845 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:56:37.871 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ada81f2a-aab4-49f5-99ed-9784c223bc98] socks forwarding established\n2025-07-16 13:56:37.896 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ada81f2a-aab4-49f5-99ed-9784c223bc98] socks connection closed\n2025-07-16 13:56:37.897 [info] [command][eb2c17e1-34ca-490f-8fc2-3e0a99179529] Process exited with code 0\n2025-07-16 13:56:37.897 [info] [command][eb2c17e1-34ca-490f-8fc2-3e0a99179529] Socket close event received\n2025-07-16 13:56:37.920 [info] (ssh_tunnel) stderr: debug1: channel 4: free: 
direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61857 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:57:37.898 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:57:37.901 [info] [command][2d075e48-1869-459a-9c98-9eb2a8d6b2f2] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""2d075e48-1869-459a-9c98-9eb2a8d6b2f2""}\n2025-07-16 13:57:37.902 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][32de4aa2-0089-4d34-959b-e88e325fed18] received connection request\n2025-07-16 13:57:37.902 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:57:37.928 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][32de4aa2-0089-4d34-959b-e88e325fed18] socks forwarding established\n2025-07-16 13:57:37.956 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][32de4aa2-0089-4d34-959b-e88e325fed18] socks connection closed\n2025-07-16 13:57:37.956 [info] [command][2d075e48-1869-459a-9c98-9eb2a8d6b2f2] Process exited with code 0\n2025-07-16 13:57:37.956 [info] [command][2d075e48-1869-459a-9c98-9eb2a8d6b2f2] Socket close event received\n2025-07-16 13:57:37.980 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61919 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:58:37.962 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:58:37.964 [info] [command][0b9a3f91-cd17-4ebd-82f8-6a80dc0e0064] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""0b9a3f91-cd17-4ebd-82f8-6a80dc0e0064""}\n2025-07-16 13:58:37.964 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][5c4bd88d-062c-4cc1-9e03-f8f5b6c66bc2] received connection request\n2025-07-16 13:58:37.965 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:58:37.988 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5c4bd88d-062c-4cc1-9e03-f8f5b6c66bc2] socks forwarding established\n2025-07-16 13:58:38.034 [info] [command][0b9a3f91-cd17-4ebd-82f8-6a80dc0e0064] Process exited with code 0\n2025-07-16 13:58:38.035 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5c4bd88d-062c-4cc1-9e03-f8f5b6c66bc2] socks connection closed\n2025-07-16 13:58:38.035 [info] [command][0b9a3f91-cd17-4ebd-82f8-6a80dc0e0064] Socket close event received\n2025-07-16 13:58:38.058 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61946 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 13:59:38.037 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 13:59:38.037 [info] [command][e5678c61-411c-4aa5-b79d-905745355b40] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""e5678c61-411c-4aa5-b79d-905745355b40""}\n2025-07-16 13:59:38.037 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][cbaeff77-1b9d-48a0-8693-a8d94312c5ce] 
received connection request\n2025-07-16 13:59:38.038 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 13:59:38.061 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][cbaeff77-1b9d-48a0-8693-a8d94312c5ce] socks forwarding established\n2025-07-16 13:59:38.088 [info] [command][e5678c61-411c-4aa5-b79d-905745355b40] Process exited with code 0\n2025-07-16 13:59:38.088 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][cbaeff77-1b9d-48a0-8693-a8d94312c5ce] socks connection closed\n2025-07-16 13:59:38.088 [info] [command][e5678c61-411c-4aa5-b79d-905745355b40] Socket close event received\n2025-07-16 13:59:38.112 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61985 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:00:38.091 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:00:38.092 [info] [command][9dd5d459-c6b8-4632-ac00-62a61bbe7dfb] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""9dd5d459-c6b8-4632-ac00-62a61bbe7dfb""}\n2025-07-16 14:00:38.093 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][3bd754e2-b659-46b8-b68b-3a70a53d7a6e] received connection request\n2025-07-16 14:00:38.093 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 14:00:38.093 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:00:38.117 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3bd754e2-b659-46b8-b68b-3a70a53d7a6e] socks forwarding established\n2025-07-16 14:00:38.146 [info] [command][9dd5d459-c6b8-4632-ac00-62a61bbe7dfb] Process exited with code 0\n2025-07-16 14:00:38.147 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3bd754e2-b659-46b8-b68b-3a70a53d7a6e] socks connection closed\n2025-07-16 14:00:38.147 [info] [command][9dd5d459-c6b8-4632-ac00-62a61bbe7dfb] Socket close event received\n2025-07-16 14:00:38.172 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62033 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:01:38.152 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:01:38.154 [info] [command][db530381-80a8-4005-ace8-a38e97222fde] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""db530381-80a8-4005-ace8-a38e97222fde""}\n2025-07-16 14:01:38.155 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][88be499e-8d04-4260-8241-778b6f94dbd7] received connection request\n2025-07-16 14:01:38.155 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:01:38.180 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][88be499e-8d04-4260-8241-778b6f94dbd7] socks forwarding established\n2025-07-16 14:01:38.230 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][88be499e-8d04-4260-8241-778b6f94dbd7] socks connection closed\n2025-07-16 14:01:38.230 [info] [command][db530381-80a8-4005-ace8-a38e97222fde] Process exited with code 0\n2025-07-16 14:01:38.230 [info] [command][db530381-80a8-4005-ace8-a38e97222fde] Socket close event received\n2025-07-16 14:01:38.253 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62081 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:02:38.233 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:02:38.234 [info] [command][f8821792-4e26-4287-8503-4f7b47d91ca8] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""f8821792-4e26-4287-8503-4f7b47d91ca8""}\n2025-07-16 14:02:38.234 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][91c00107-e73a-46ef-88bd-dfd1aa405f09] received connection request\n2025-07-16 14:02:38.234 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:02:38.258 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][91c00107-e73a-46ef-88bd-dfd1aa405f09] socks forwarding established\n2025-07-16 14:02:38.285 [info] [command][f8821792-4e26-4287-8503-4f7b47d91ca8] Process exited with code 0\n2025-07-16 14:02:38.285 [info] [command][f8821792-4e26-4287-8503-4f7b47d91ca8] Socket close event received\n2025-07-16 14:02:38.307 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][91c00107-e73a-46ef-88bd-dfd1aa405f09] socks connection closed\n2025-07-16 14:02:38.309 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62138 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:03:38.291 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:03:38.293 [info] [command][2fc6566e-80ec-42e7-aab1-a0cdb9da1389] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""2fc6566e-80ec-42e7-aab1-a0cdb9da1389""}\n2025-07-16 14:03:38.293 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][493f2cad-1fa5-47d2-8602-c23bd0055593] received connection request\n2025-07-16 14:03:38.293 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:03:38.395 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][493f2cad-1fa5-47d2-8602-c23bd0055593] socks forwarding established\n2025-07-16 14:03:38.422 [info] [command][2fc6566e-80ec-42e7-aab1-a0cdb9da1389] Process exited with code 0\n2025-07-16 14:03:38.422 [info] [command][2fc6566e-80ec-42e7-aab1-a0cdb9da1389] Socket close event received\n2025-07-16 14:03:38.445 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][493f2cad-1fa5-47d2-8602-c23bd0055593] socks connection closed\n2025-07-16 14:03:38.447 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62162 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:04:38.424 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 
14:04:38.425 [info] [command][65b51377-733a-43dd-85d3-f854f07d93fd] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""65b51377-733a-43dd-85d3-f854f07d93fd""}\n2025-07-16 14:04:38.426 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][ff372c2d-15b3-4583-a0f6-b0c87773442d] received connection request\n2025-07-16 14:04:38.426 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:04:38.451 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ff372c2d-15b3-4583-a0f6-b0c87773442d] socks forwarding established\n2025-07-16 14:04:38.480 [info] [command][65b51377-733a-43dd-85d3-f854f07d93fd] Process exited with code 0\n2025-07-16 14:04:38.480 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ff372c2d-15b3-4583-a0f6-b0c87773442d] socks connection closed\n2025-07-16 14:04:38.481 [info] [command][65b51377-733a-43dd-85d3-f854f07d93fd] Socket close event received\n2025-07-16 14:04:38.505 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62200 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:05:38.483 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:05:38.484 [info] [command][cc447e93-8cde-457f-b163-7e18da6e7cae] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""cc447e93-8cde-457f-b163-7e18da6e7cae""}\n2025-07-16 14:05:38.484 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][db0fe83c-27aa-4020-b8c6-8ad1c5e8318b] received connection request\n2025-07-16 14:05:38.485 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:05:38.511 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][db0fe83c-27aa-4020-b8c6-8ad1c5e8318b] socks forwarding established\n2025-07-16 14:05:38.543 [info] [command][cc447e93-8cde-457f-b163-7e18da6e7cae] Process exited with code 0\n2025-07-16 14:05:38.543 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][db0fe83c-27aa-4020-b8c6-8ad1c5e8318b] socks connection closed\n2025-07-16 14:05:38.543 [info] [command][cc447e93-8cde-457f-b163-7e18da6e7cae] Socket close event received\n2025-07-16 14:05:38.567 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62227 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:06:38.545 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:06:38.547 [info] [command][228d54e1-39c8-46d5-87a0-c346208de626] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""228d54e1-39c8-46d5-87a0-c346208de626""}\n2025-07-16 14:06:38.548 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][77487245-fd7f-4bf8-a2d5-eea590a875ab] received connection request\n2025-07-16 14:06:38.549 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 14:06:38.549 [info] (ssh_tunnel) stderr: debug1: channel 4: new 
dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:06:38.584 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][77487245-fd7f-4bf8-a2d5-eea590a875ab] socks forwarding established\n2025-07-16 14:06:38.615 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][77487245-fd7f-4bf8-a2d5-eea590a875ab] socks connection closed\n2025-07-16 14:06:38.616 [info] [command][228d54e1-39c8-46d5-87a0-c346208de626] Process exited with code 0\n2025-07-16 14:06:38.616 [info] [command][228d54e1-39c8-46d5-87a0-c346208de626] Socket close event received\n2025-07-16 14:06:38.640 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62259 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:07:38.620 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:07:38.622 [info] [command][bee981db-4a64-4f53-913e-c0a7d285410f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""bee981db-4a64-4f53-913e-c0a7d285410f""}\n2025-07-16 14:07:38.622 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][aef22078-646c-417c-a0cf-90aff2fa351f] received connection request\n2025-07-16 14:07:38.622 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:07:38.669 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][aef22078-646c-417c-a0cf-90aff2fa351f] socks forwarding established\n2025-07-16 14:07:38.773 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][aef22078-646c-417c-a0cf-90aff2fa351f] socks connection closed\n2025-07-16 14:07:38.773 [info] [command][bee981db-4a64-4f53-913e-c0a7d285410f] Process exited with code 0\n2025-07-16 14:07:38.773 [info] [command][bee981db-4a64-4f53-913e-c0a7d285410f] Socket close event received\n2025-07-16 14:07:38.797 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62320 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:08:38.776 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:08:38.778 [info] [command][ec240878-d005-419f-b541-f1c6fcfa4a27] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ec240878-d005-419f-b541-f1c6fcfa4a27""}\n2025-07-16 14:08:38.778 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][85aa1219-28b6-4f2f-9ea9-e73aae95b850] received connection request\n2025-07-16 14:08:38.778 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:08:38.821 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][85aa1219-28b6-4f2f-9ea9-e73aae95b850] socks forwarding established\n2025-07-16 14:08:38.981 [info] [command][ec240878-d005-419f-b541-f1c6fcfa4a27] Process exited with code 0\n2025-07-16 14:08:38.982 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][85aa1219-28b6-4f2f-9ea9-e73aae95b850] socks connection closed\n2025-07-16 14:08:38.982 [info] [command][ec240878-d005-419f-b541-f1c6fcfa4a27] Socket close 
event received\n2025-07-16 14:08:39.006 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62364 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:09:38.984 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:09:38.986 [info] [command][7648cea8-98cc-4518-9bde-81404468d26f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""7648cea8-98cc-4518-9bde-81404468d26f""}\n2025-07-16 14:09:38.986 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][2c7c1bee-229d-4926-901b-3fdab559f32d] received connection request\n2025-07-16 14:09:38.987 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 14:09:38.987 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:09:39.022 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][2c7c1bee-229d-4926-901b-3fdab559f32d] socks forwarding established\n2025-07-16 14:09:39.060 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][2c7c1bee-229d-4926-901b-3fdab559f32d] socks connection closed\n2025-07-16 14:09:39.060 [info] [command][7648cea8-98cc-4518-9bde-81404468d26f] Process exited with code 0\n2025-07-16 14:09:39.060 [info] [command][7648cea8-98cc-4518-9bde-81404468d26f] Socket close event received\n2025-07-16 14:09:39.083 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62432 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:10:39.061 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:10:39.063 [info] [command][717f03f4-55bb-491b-b913-315d2772c5ad] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""717f03f4-55bb-491b-b913-315d2772c5ad""}\n2025-07-16 14:10:39.064 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][12480051-99c6-43dd-9e85-d0f9001b3b9a] received connection request\n2025-07-16 14:10:39.064 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:10:39.090 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][12480051-99c6-43dd-9e85-d0f9001b3b9a] socks forwarding established\n2025-07-16 14:10:39.241 [info] [command][717f03f4-55bb-491b-b913-315d2772c5ad] Process exited with code 0\n2025-07-16 14:10:39.241 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][12480051-99c6-43dd-9e85-d0f9001b3b9a] socks connection closed\n2025-07-16 14:10:39.242 [info] [command][717f03f4-55bb-491b-b913-315d2772c5ad] Socket close event received\n2025-07-16 14:10:39.265 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62461 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:11:39.247 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:11:39.249 [info] [command][23299e42-5887-4920-8264-560a361dbb1b] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""23299e42-5887-4920-8264-560a361dbb1b""}\n2025-07-16 14:11:39.250 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][dbfd4135-ce80-4418-9ce3-1560579c8a03] received connection request\n2025-07-16 14:11:39.251 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:11:39.278 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][dbfd4135-ce80-4418-9ce3-1560579c8a03] socks forwarding established\n2025-07-16 14:11:39.344 [info] [command][23299e42-5887-4920-8264-560a361dbb1b] Process exited with code 0\n2025-07-16 14:11:39.344 [info] [command][23299e42-5887-4920-8264-560a361dbb1b] Socket close event received\n2025-07-16 14:11:39.345 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][dbfd4135-ce80-4418-9ce3-1560579c8a03] socks connection closed\n2025-07-16 14:11:39.445 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62486 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:12:39.347 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:12:39.349 [info] [command][413cab2f-4f16-414a-a90c-c93d49251350] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""413cab2f-4f16-414a-a90c-c93d49251350""}\n2025-07-16 14:12:39.350 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][242e499d-4587-4cfd-abb0-2c0f9bf2065c] received connection request\n2025-07-16 14:12:39.350 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:12:39.391 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][242e499d-4587-4cfd-abb0-2c0f9bf2065c] socks forwarding established\n2025-07-16 14:12:39.419 [info] [command][413cab2f-4f16-414a-a90c-c93d49251350] Process exited with code 0\n2025-07-16 14:12:39.419 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][242e499d-4587-4cfd-abb0-2c0f9bf2065c] socks connection closed\n2025-07-16 14:12:39.419 [info] [command][413cab2f-4f16-414a-a90c-c93d49251350] Socket close event received\n2025-07-16 14:12:39.445 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62552 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:13:39.422 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:13:39.423 [info] [command][88773b61-e2c0-4ca8-8e5d-1b8c23a4b419] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""88773b61-e2c0-4ca8-8e5d-1b8c23a4b419""}\n2025-07-16 14:13:39.424 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][a864af27-9653-4d64-bba7-ab7ef4ba17a8] received connection request\n2025-07-16 14:13:39.424 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:13:39.576 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][a864af27-9653-4d64-bba7-ab7ef4ba17a8] socks forwarding established\n2025-07-16 14:13:39.700 [info] [command][88773b61-e2c0-4ca8-8e5d-1b8c23a4b419] Process exited with code 0\n2025-07-16 14:13:39.700 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a864af27-9653-4d64-bba7-ab7ef4ba17a8] socks connection closed\n2025-07-16 14:13:39.700 [info] [command][88773b61-e2c0-4ca8-8e5d-1b8c23a4b419] Socket close event received\n2025-07-16 14:13:39.724 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62580 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:14:39.701 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:14:39.701 [info] [command][ab641dde-694d-4d04-8ec1-80deb35b7391] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ab641dde-694d-4d04-8ec1-80deb35b7391""}\n2025-07-16 14:14:39.702 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][39b75c47-69a3-45a8-bd20-50260548d55b] received connection request\n2025-07-16 14:14:39.702 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:14:39.746 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][39b75c47-69a3-45a8-bd20-50260548d55b] socks forwarding established\n2025-07-16 14:14:39.778 [info] [command][ab641dde-694d-4d04-8ec1-80deb35b7391] Process exited with code 0\n2025-07-16 14:14:39.778 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][39b75c47-69a3-45a8-bd20-50260548d55b] socks connection closed\n2025-07-16 14:14:39.778 [info] [command][ab641dde-694d-4d04-8ec1-80deb35b7391] Socket close event received\n2025-07-16 14:14:39.802 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62628 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:15:39.782 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:15:39.783 [info] [command][dac8e508-0c00-4265-b202-3a5f3814b89b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""dac8e508-0c00-4265-b202-3a5f3814b89b""}\n2025-07-16 14:15:39.784 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d844324b-5110-4f96-af86-0aab501b6e87] received connection request\n2025-07-16 14:15:39.784 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:15:39.807 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d844324b-5110-4f96-af86-0aab501b6e87] socks forwarding established\n2025-07-16 14:15:39.835 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d844324b-5110-4f96-af86-0aab501b6e87] socks connection closed\n2025-07-16 14:15:39.836 [info] [command][dac8e508-0c00-4265-b202-3a5f3814b89b] Process exited with code 0\n2025-07-16 14:15:39.836 [info] [command][dac8e508-0c00-4265-b202-3a5f3814b89b] Socket close event received\n2025-07-16 14:15:39.860 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 
37309, connect from 127.0.0.1 port 62657 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:16:39.840 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:16:39.842 [info] [command][a7593e30-ca2d-47f0-b4f8-8c339329eb1d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""a7593e30-ca2d-47f0-b4f8-8c339329eb1d""}\n2025-07-16 14:16:39.842 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][4d07aa5d-b8cf-4deb-b5c5-1d7afdf5ec3c] received connection request\n2025-07-16 14:16:39.842 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 14:16:39.842 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:16:39.866 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4d07aa5d-b8cf-4deb-b5c5-1d7afdf5ec3c] socks forwarding established\n2025-07-16 14:16:39.910 [info] [command][a7593e30-ca2d-47f0-b4f8-8c339329eb1d] Process exited with code 0\n2025-07-16 14:16:39.910 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4d07aa5d-b8cf-4deb-b5c5-1d7afdf5ec3c] socks connection closed\n2025-07-16 14:16:39.910 [info] [command][a7593e30-ca2d-47f0-b4f8-8c339329eb1d] Socket close event received\n2025-07-16 14:16:39.934 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62704 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:17:39.911 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:17:39.913 [info] [command][302b6a21-cc18-417c-b21c-ebfacbe9d12c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""302b6a21-cc18-417c-b21c-ebfacbe9d12c""}\n2025-07-16 14:17:39.914 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][662cf50a-5a13-4200-ad86-058f461e3bf5] received connection request\n2025-07-16 14:17:39.915 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:17:39.939 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][662cf50a-5a13-4200-ad86-058f461e3bf5] socks forwarding established\n2025-07-16 14:17:39.966 [info] [command][302b6a21-cc18-417c-b21c-ebfacbe9d12c] Process exited with code 0\n2025-07-16 14:17:39.966 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][662cf50a-5a13-4200-ad86-058f461e3bf5] socks connection closed\n2025-07-16 14:17:39.966 [info] [command][302b6a21-cc18-417c-b21c-ebfacbe9d12c] Socket close event received\n2025-07-16 14:17:39.990 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62764 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:18:39.968 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:18:39.970 [info] [command][167422f0-5cd8-4bef-8b9a-31787eeac319] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""167422f0-5cd8-4bef-8b9a-31787eeac319""}\n2025-07-16 14:18:39.971 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][05573b50-993b-4662-a169-a71528cdaed4] 
received connection request\n2025-07-16 14:18:39.971 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:18:40.004 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][05573b50-993b-4662-a169-a71528cdaed4] socks forwarding established\n2025-07-16 14:18:40.058 [info] [command][167422f0-5cd8-4bef-8b9a-31787eeac319] Process exited with code 0\n2025-07-16 14:18:40.058 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][05573b50-993b-4662-a169-a71528cdaed4] socks connection closed\n2025-07-16 14:18:40.058 [info] [command][167422f0-5cd8-4bef-8b9a-31787eeac319] Socket close event received\n2025-07-16 14:18:40.083 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62793 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:19:40.062 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:19:40.064 [info] [command][a868e9ae-094e-42e6-8e31-d32f8f7c1aca] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""a868e9ae-094e-42e6-8e31-d32f8f7c1aca""}\n2025-07-16 14:19:40.064 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0c0ca220-ca33-46ad-bb41-5fb90d9493dc] received connection request\n2025-07-16 14:19:40.064 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:19:40.088 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0c0ca220-ca33-46ad-bb41-5fb90d9493dc] socks forwarding established\n2025-07-16 14:19:40.115 [info] [command][a868e9ae-094e-42e6-8e31-d32f8f7c1aca] Process exited with code 0\n2025-07-16 14:19:40.115 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0c0ca220-ca33-46ad-bb41-5fb90d9493dc] socks connection closed\n2025-07-16 14:19:40.115 [info] [command][a868e9ae-094e-42e6-8e31-d32f8f7c1aca] Socket close event received\n2025-07-16 14:19:40.139 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62832 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:20:40.120 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:20:40.121 [info] [command][eeab18f7-fbef-4f30-a43d-c84381e5dee5] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""eeab18f7-fbef-4f30-a43d-c84381e5dee5""}\n2025-07-16 14:20:40.121 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][4cd2ca1d-50bb-4531-9684-eaf9607bc5c9] received connection request\n2025-07-16 14:20:40.121 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:20:40.148 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4cd2ca1d-50bb-4531-9684-eaf9607bc5c9] socks forwarding established\n2025-07-16 14:20:40.173 [info] [command][eeab18f7-fbef-4f30-a43d-c84381e5dee5] Process exited with code 0\n2025-07-16 14:20:40.173 [info] [command][eeab18f7-fbef-4f30-a43d-c84381e5dee5] Socket 
close event received\n2025-07-16 14:20:40.173 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4cd2ca1d-50bb-4531-9684-eaf9607bc5c9] socks connection closed\n2025-07-16 14:20:40.198 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62861 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:21:40.175 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:21:40.175 [info] [command][9b9457f4-7a9c-4fb8-9ef5-72ad3a14c9db] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""9b9457f4-7a9c-4fb8-9ef5-72ad3a14c9db""}\n2025-07-16 14:21:40.176 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][673f8e60-7f4c-4f8c-8036-e18a541b0b42] received connection request\n2025-07-16 14:21:40.176 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:21:40.200 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][673f8e60-7f4c-4f8c-8036-e18a541b0b42] socks forwarding established\n2025-07-16 14:21:40.299 [info] [command][9b9457f4-7a9c-4fb8-9ef5-72ad3a14c9db] Process exited with code 0\n2025-07-16 14:21:40.299 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][673f8e60-7f4c-4f8c-8036-e18a541b0b42] socks connection closed\n2025-07-16 14:21:40.299 [info] [command][9b9457f4-7a9c-4fb8-9ef5-72ad3a14c9db] Socket close event received\n2025-07-16 14:21:40.324 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62889 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:22:40.304 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:22:40.307 [info] [command][4c8b7bf4-30ed-4a08-b70f-e7f048a577eb] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""4c8b7bf4-30ed-4a08-b70f-e7f048a577eb""}\n2025-07-16 14:22:40.308 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][359e100c-af92-48a5-8a4c-c5c5eebedc0e] received connection request\n2025-07-16 14:22:40.308 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:22:40.373 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][359e100c-af92-48a5-8a4c-c5c5eebedc0e] socks forwarding established\n2025-07-16 14:22:40.532 [info] [command][4c8b7bf4-30ed-4a08-b70f-e7f048a577eb] Process exited with code 0\n2025-07-16 14:22:40.533 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][359e100c-af92-48a5-8a4c-c5c5eebedc0e] socks connection closed\n2025-07-16 14:22:40.533 [info] [command][4c8b7bf4-30ed-4a08-b70f-e7f048a577eb] Socket close event received\n2025-07-16 14:22:40.558 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62949 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:23:40.534 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:23:40.536 [info] [command][66fe7358-1685-41bf-bc5c-290e8a107071] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""66fe7358-1685-41bf-bc5c-290e8a107071""}\n2025-07-16 14:23:40.536 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][afad6bda-f105-42a3-b75e-a65b6a302e0d] received connection request\n2025-07-16 14:23:40.536 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:23:40.559 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][afad6bda-f105-42a3-b75e-a65b6a302e0d] socks forwarding established\n2025-07-16 14:23:40.585 [info] [command][66fe7358-1685-41bf-bc5c-290e8a107071] Process exited with code 0\n2025-07-16 14:23:40.585 [info] [command][66fe7358-1685-41bf-bc5c-290e8a107071] Socket close event received\n2025-07-16 14:23:40.586 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][afad6bda-f105-42a3-b75e-a65b6a302e0d] socks connection closed\n2025-07-16 14:23:40.615 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 62971 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:24:40.590 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:24:40.592 [info] [command][6a057e7e-b88f-436c-8c3d-3b57e123af6b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""6a057e7e-b88f-436c-8c3d-3b57e123af6b""}\n2025-07-16 14:24:40.592 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][f61e33d5-1528-4d88-a7f8-63ca22b5ce13] received connection request\n2025-07-16 14:24:40.592 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:24:40.620 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f61e33d5-1528-4d88-a7f8-63ca22b5ce13] socks forwarding established\n2025-07-16 14:24:40.649 [info] [command][6a057e7e-b88f-436c-8c3d-3b57e123af6b] Process exited with code 0\n2025-07-16 14:24:40.650 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f61e33d5-1528-4d88-a7f8-63ca22b5ce13] socks connection closed\n2025-07-16 14:24:40.650 [info] [command][6a057e7e-b88f-436c-8c3d-3b57e123af6b] Socket close event received\n2025-07-16 14:24:40.675 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63033 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:25:40.651 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:25:40.654 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][b22ca9cc-b0f5-48ba-bc4a-aa8b66d95dd4] received connection request\n2025-07-16 14:25:40.654 [info] [command][e5765723-0277-4ebb-ae0a-71beefcfb578] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""e5765723-0277-4ebb-ae0a-71beefcfb578""}\n2025-07-16 14:25:40.654 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:25:40.773 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][b22ca9cc-b0f5-48ba-bc4a-aa8b66d95dd4] socks forwarding established\n2025-07-16 14:25:40.931 [info] [command][e5765723-0277-4ebb-ae0a-71beefcfb578] Process exited with code 0\n2025-07-16 14:25:40.932 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b22ca9cc-b0f5-48ba-bc4a-aa8b66d95dd4] socks connection closed\n2025-07-16 14:25:40.932 [info] [command][e5765723-0277-4ebb-ae0a-71beefcfb578] Socket close event received\n2025-07-16 14:25:40.954 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63066 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:26:40.932 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:26:40.933 [info] [command][2dde2eb3-b6b1-4d8b-ad92-8255f25fd609] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""2dde2eb3-b6b1-4d8b-ad92-8255f25fd609""}\n2025-07-16 14:26:40.934 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][2b0cb117-ffff-4d02-b2f9-6fe28296a842] received connection request\n2025-07-16 14:26:40.934 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:26:41.041 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][2b0cb117-ffff-4d02-b2f9-6fe28296a842] socks forwarding established\n2025-07-16 14:26:41.200 [info] [command][2dde2eb3-b6b1-4d8b-ad92-8255f25fd609] Process exited with code 0\n2025-07-16 14:26:41.200 [info] [command][2dde2eb3-b6b1-4d8b-ad92-8255f25fd609] Socket close event received\n2025-07-16 14:26:41.204 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][2b0cb117-ffff-4d02-b2f9-6fe28296a842] socks connection closed\n2025-07-16 14:26:41.223 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63101 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:27:41.205 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:27:41.208 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0120f4fc-fad8-4149-bb8d-4f56d3db9bc3] received connection request\n2025-07-16 14:27:41.209 [info] [command][02f67242-5afd-46ce-bbb8-2d62df963a62] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""02f67242-5afd-46ce-bbb8-2d62df963a62""}\n2025-07-16 14:27:41.209 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:27:41.234 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0120f4fc-fad8-4149-bb8d-4f56d3db9bc3] socks forwarding established\n2025-07-16 14:27:41.263 [info] [command][02f67242-5afd-46ce-bbb8-2d62df963a62] Process exited with code 0\n2025-07-16 14:27:41.263 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0120f4fc-fad8-4149-bb8d-4f56d3db9bc3] socks connection closed\n2025-07-16 14:27:41.263 [info] [command][02f67242-5afd-46ce-bbb8-2d62df963a62] Socket close event received\n2025-07-16 14:27:41.287 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 
37309, connect from 127.0.0.1 port 63159 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:28:41.265 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:28:41.266 [info] [command][8f3ed456-208f-43f1-943e-8ffd5acc467c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""8f3ed456-208f-43f1-943e-8ffd5acc467c""}\n2025-07-16 14:28:41.267 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][3d4fa212-a34f-4700-add1-a1406fcb50fa] received connection request\n2025-07-16 14:28:41.267 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:28:41.327 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3d4fa212-a34f-4700-add1-a1406fcb50fa] socks forwarding established\n2025-07-16 14:28:41.352 [info] [command][8f3ed456-208f-43f1-943e-8ffd5acc467c] Process exited with code 0\n2025-07-16 14:28:41.353 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3d4fa212-a34f-4700-add1-a1406fcb50fa] socks connection closed\n2025-07-16 14:28:41.353 [info] [command][8f3ed456-208f-43f1-943e-8ffd5acc467c] Socket close event received\n2025-07-16 14:28:41.375 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63187 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:29:41.358 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:29:41.359 [info] [command][8fdf3200-aeb0-44a9-a72f-d0d91a80eb49] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""8fdf3200-aeb0-44a9-a72f-d0d91a80eb49""}\n2025-07-16 14:29:41.360 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][88899740-e105-425c-9603-eb0a4251674b] received connection request\n2025-07-16 14:29:41.361 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:29:41.428 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][88899740-e105-425c-9603-eb0a4251674b] socks forwarding established\n2025-07-16 14:29:41.540 [info] [command][8fdf3200-aeb0-44a9-a72f-d0d91a80eb49] Process exited with code 0\n2025-07-16 14:29:41.540 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][88899740-e105-425c-9603-eb0a4251674b] socks connection closed\n2025-07-16 14:29:41.540 [info] [command][8fdf3200-aeb0-44a9-a72f-d0d91a80eb49] Socket close event received\n2025-07-16 14:29:41.566 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63235 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:30:41.542 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:30:41.543 [info] [command][c05af8b5-70c0-49b1-b263-9eda157582ff] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""c05af8b5-70c0-49b1-b263-9eda157582ff""}\n2025-07-16 14:30:41.543 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][7bdc3754-127a-4c43-8bd7-1377faacbad5] received connection request\n2025-07-16 14:30:41.543 [info] 
(ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:30:41.569 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7bdc3754-127a-4c43-8bd7-1377faacbad5] socks forwarding established\n2025-07-16 14:30:41.595 [info] [command][c05af8b5-70c0-49b1-b263-9eda157582ff] Process exited with code 0\n2025-07-16 14:30:41.595 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7bdc3754-127a-4c43-8bd7-1377faacbad5] socks connection closed\n2025-07-16 14:30:41.595 [info] [command][c05af8b5-70c0-49b1-b263-9eda157582ff] Socket close event received\n2025-07-16 14:30:41.620 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63277 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:31:41.597 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:31:41.599 [info] [command][7195b6d5-94da-49fd-a4b8-bd00f77c3e74] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""7195b6d5-94da-49fd-a4b8-bd00f77c3e74""}\n2025-07-16 14:31:41.600 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][ff30b98f-abdc-4ea8-8bef-36b94a903dfa] received connection request\n2025-07-16 14:31:41.601 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:31:41.629 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ff30b98f-abdc-4ea8-8bef-36b94a903dfa] socks forwarding established\n2025-07-16 14:31:41.656 [info] [command][7195b6d5-94da-49fd-a4b8-bd00f77c3e74] Process exited with code 0\n2025-07-16 14:31:41.656 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ff30b98f-abdc-4ea8-8bef-36b94a903dfa] socks connection closed\n2025-07-16 14:31:41.656 [info] [command][7195b6d5-94da-49fd-a4b8-bd00f77c3e74] Socket close event received\n2025-07-16 14:31:41.679 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63313 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:32:41.661 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:32:41.663 [info] [command][bd2c4a17-c5fd-4f04-9c5e-889cc8efd5e2] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""bd2c4a17-c5fd-4f04-9c5e-889cc8efd5e2""}\n2025-07-16 14:32:41.664 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][861d817b-c0d0-4c3a-94a9-cc7fd749b407] received connection request\n2025-07-16 14:32:41.664 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:32:41.690 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][861d817b-c0d0-4c3a-94a9-cc7fd749b407] socks forwarding established\n2025-07-16 14:32:41.718 [info] [command][bd2c4a17-c5fd-4f04-9c5e-889cc8efd5e2] Process exited with code 0\n2025-07-16 14:32:41.718 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][861d817b-c0d0-4c3a-94a9-cc7fd749b407] 
socks connection closed\n2025-07-16 14:32:41.718 [info] [command][bd2c4a17-c5fd-4f04-9c5e-889cc8efd5e2] Socket close event received\n2025-07-16 14:32:41.742 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63373 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:33:41.722 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:33:41.725 [info] [command][7ad63e8b-43a7-44d5-9cfa-7140d58fca27] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""7ad63e8b-43a7-44d5-9cfa-7140d58fca27""}\n2025-07-16 14:33:41.726 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][840879ec-cd54-4c61-a4d5-289b30786240] received connection request\n2025-07-16 14:33:41.726 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:33:41.752 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][840879ec-cd54-4c61-a4d5-289b30786240] socks forwarding established\n2025-07-16 14:33:41.780 [info] [command][7ad63e8b-43a7-44d5-9cfa-7140d58fca27] Process exited with code 0\n2025-07-16 14:33:41.780 [info] [command][7ad63e8b-43a7-44d5-9cfa-7140d58fca27] Socket close event received\n2025-07-16 14:33:41.787 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][840879ec-cd54-4c61-a4d5-289b30786240] socks connection closed\n2025-07-16 14:33:41.811 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63395 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:34:41.785 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:34:41.786 [info] [command][a9fe8113-802c-447b-af58-118ad4424d83] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""a9fe8113-802c-447b-af58-118ad4424d83""}\n2025-07-16 14:34:41.787 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][62463bdb-8cf5-474c-b6a7-cf710c05babb] received connection request\n2025-07-16 14:34:41.788 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:34:41.814 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][62463bdb-8cf5-474c-b6a7-cf710c05babb] socks forwarding established\n2025-07-16 14:34:41.841 [info] [command][a9fe8113-802c-447b-af58-118ad4424d83] Process exited with code 0\n2025-07-16 14:34:41.842 [info] [command][a9fe8113-802c-447b-af58-118ad4424d83] Socket close event received\n2025-07-16 14:34:41.842 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][62463bdb-8cf5-474c-b6a7-cf710c05babb] socks connection closed\n2025-07-16 14:34:41.867 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63439 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:35:41.848 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:35:41.850 [info] [command][88e608ed-cfdc-4022-ad1f-b1c771269269] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""88e608ed-cfdc-4022-ad1f-b1c771269269""}\n2025-07-16 14:35:41.850 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][af807d01-a475-4331-bf7b-7f993b1738ec] received connection request\n2025-07-16 14:35:41.851 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:35:41.875 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][af807d01-a475-4331-bf7b-7f993b1738ec] socks forwarding established\n2025-07-16 14:35:41.904 [info] [command][88e608ed-cfdc-4022-ad1f-b1c771269269] Process exited with code 0\n2025-07-16 14:35:41.904 [info] [command][88e608ed-cfdc-4022-ad1f-b1c771269269] Socket close event received\n2025-07-16 14:35:41.905 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][af807d01-a475-4331-bf7b-7f993b1738ec] socks connection closed\n2025-07-16 14:35:41.930 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63491 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:36:41.907 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:36:41.908 [info] [command][983158c4-53a0-4ff1-b075-33522424a90b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""983158c4-53a0-4ff1-b075-33522424a90b""}\n2025-07-16 14:36:41.909 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][c7339a87-1298-4672-b847-144190cd79d2] received connection request\n2025-07-16 14:36:41.909 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:36:41.934 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c7339a87-1298-4672-b847-144190cd79d2] socks forwarding established\n2025-07-16 14:36:41.963 [info] [command][983158c4-53a0-4ff1-b075-33522424a90b] Process exited with code 0\n2025-07-16 14:36:41.963 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c7339a87-1298-4672-b847-144190cd79d2] socks connection closed\n2025-07-16 14:36:41.963 [info] [command][983158c4-53a0-4ff1-b075-33522424a90b] Socket close event received\n2025-07-16 14:36:41.989 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63546 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:37:41.964 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:37:41.966 [info] [command][e8916afc-fa82-441b-b4ad-6d92cc7cb897] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""e8916afc-fa82-441b-b4ad-6d92cc7cb897""}\n2025-07-16 14:37:41.967 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0a46fe78-8131-4f79-b3fd-e6c93e9620da] received connection request\n2025-07-16 14:37:41.967 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:37:41.991 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][0a46fe78-8131-4f79-b3fd-e6c93e9620da] socks forwarding established\n2025-07-16 14:37:42.019 [info] [command][e8916afc-fa82-441b-b4ad-6d92cc7cb897] Process exited with code 0\n2025-07-16 14:37:42.019 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0a46fe78-8131-4f79-b3fd-e6c93e9620da] socks connection closed\n2025-07-16 14:37:42.019 [info] [command][e8916afc-fa82-441b-b4ad-6d92cc7cb897] Socket close event received\n2025-07-16 14:37:42.042 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63599 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:38:42.024 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:38:42.026 [info] [command][a115a098-8307-4787-a1f9-b62dfd6c564a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""a115a098-8307-4787-a1f9-b62dfd6c564a""}\n2025-07-16 14:38:42.026 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][3c6ff8bd-db5d-4cc7-900d-c57c8fa8a1a3] received connection request\n2025-07-16 14:38:42.026 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:38:42.050 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3c6ff8bd-db5d-4cc7-900d-c57c8fa8a1a3] socks forwarding established\n2025-07-16 14:38:42.076 [info] [command][a115a098-8307-4787-a1f9-b62dfd6c564a] Process exited with code 0\n2025-07-16 14:38:42.076 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3c6ff8bd-db5d-4cc7-900d-c57c8fa8a1a3] socks connection closed\n2025-07-16 14:38:42.076 [info] [command][a115a098-8307-4787-a1f9-b62dfd6c564a] Socket close event received\n2025-07-16 14:38:42.099 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63621 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:39:42.076 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:39:42.078 [info] [command][a0032c69-790a-48a9-a097-8c49cef9ad60] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""a0032c69-790a-48a9-a097-8c49cef9ad60""}\n2025-07-16 14:39:42.078 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][76a11a68-14a8-462e-9a45-2ce64a44f7cf] received connection request\n2025-07-16 14:39:42.078 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:39:42.103 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][76a11a68-14a8-462e-9a45-2ce64a44f7cf] socks forwarding established\n2025-07-16 14:39:42.130 [info] [command][a0032c69-790a-48a9-a097-8c49cef9ad60] Process exited with code 0\n2025-07-16 14:39:42.130 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][76a11a68-14a8-462e-9a45-2ce64a44f7cf] socks connection closed\n2025-07-16 14:39:42.130 [info] [command][a0032c69-790a-48a9-a097-8c49cef9ad60] Socket close event received\n2025-07-16 14:39:42.153 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 
37309, connect from 127.0.0.1 port 63661 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:40:42.132 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:40:42.134 [info] [command][547e3f37-1d1a-49e3-bcf8-2abba6a9d3f9] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""547e3f37-1d1a-49e3-bcf8-2abba6a9d3f9""}\n2025-07-16 14:40:42.135 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][39002893-2dba-4a6e-8bad-bece870f0e0b] received connection request\n2025-07-16 14:40:42.135 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:40:42.162 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][39002893-2dba-4a6e-8bad-bece870f0e0b] socks forwarding established\n2025-07-16 14:40:42.189 [info] [command][547e3f37-1d1a-49e3-bcf8-2abba6a9d3f9] Process exited with code 0\n2025-07-16 14:40:42.190 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][39002893-2dba-4a6e-8bad-bece870f0e0b] socks connection closed\n2025-07-16 14:40:42.190 [info] [command][547e3f37-1d1a-49e3-bcf8-2abba6a9d3f9] Socket close event received\n2025-07-16 14:40:42.212 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63694 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:41:42.198 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:41:42.200 [info] [command][48d84b18-e390-475f-b988-be2bd1948ab3] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""48d84b18-e390-475f-b988-be2bd1948ab3""}\n2025-07-16 14:41:42.200 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][c34c0a76-6cce-436e-894f-92c68d5ae62d] received connection request\n2025-07-16 14:41:42.200 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:41:42.223 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c34c0a76-6cce-436e-894f-92c68d5ae62d] socks forwarding established\n2025-07-16 14:41:42.248 [info] [command][48d84b18-e390-475f-b988-be2bd1948ab3] Process exited with code 0\n2025-07-16 14:41:42.249 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c34c0a76-6cce-436e-894f-92c68d5ae62d] socks connection closed\n2025-07-16 14:41:42.249 [info] [command][48d84b18-e390-475f-b988-be2bd1948ab3] Socket close event received\n2025-07-16 14:41:42.271 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63716 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:42:42.254 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:42:42.257 [info] [command][90e2ea48-087e-4946-85f1-c7620b49992f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""90e2ea48-087e-4946-85f1-c7620b49992f""}\n2025-07-16 14:42:42.258 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][19369873-1811-4634-b06e-bf85e40f54b6] received connection request\n2025-07-16 14:42:42.258 [info] 
(ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:42:42.283 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][19369873-1811-4634-b06e-bf85e40f54b6] socks forwarding established\n2025-07-16 14:42:42.312 [info] [command][90e2ea48-087e-4946-85f1-c7620b49992f] Process exited with code 0\n2025-07-16 14:42:42.313 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][19369873-1811-4634-b06e-bf85e40f54b6] socks connection closed\n2025-07-16 14:42:42.313 [info] [command][90e2ea48-087e-4946-85f1-c7620b49992f] Socket close event received\n2025-07-16 14:42:42.336 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63769 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:43:42.315 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:43:42.317 [info] [command][de8b77d6-981e-4f99-a7d3-aa1cbcb3e2f2] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""de8b77d6-981e-4f99-a7d3-aa1cbcb3e2f2""}\n2025-07-16 14:43:42.318 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][cff6e3a9-f1bb-44b6-9bd4-e04a2058c155] received connection request\n2025-07-16 14:43:42.318 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:43:42.342 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][cff6e3a9-f1bb-44b6-9bd4-e04a2058c155] socks forwarding established\n2025-07-16 14:43:42.368 [info] [command][de8b77d6-981e-4f99-a7d3-aa1cbcb3e2f2] Process exited with code 0\n2025-07-16 14:43:42.369 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][cff6e3a9-f1bb-44b6-9bd4-e04a2058c155] socks connection closed\n2025-07-16 14:43:42.369 [info] [command][de8b77d6-981e-4f99-a7d3-aa1cbcb3e2f2] Socket close event received\n2025-07-16 14:43:42.391 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63795 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:44:42.370 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:44:42.373 [info] [command][e222430a-33ed-4120-b242-6b4194847b12] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""e222430a-33ed-4120-b242-6b4194847b12""}\n2025-07-16 14:44:42.374 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][f21acfc3-4146-4687-87ec-b5f77b590ae6] received connection request\n2025-07-16 14:44:42.374 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:44:42.399 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f21acfc3-4146-4687-87ec-b5f77b590ae6] socks forwarding established\n2025-07-16 14:44:42.426 [info] [command][e222430a-33ed-4120-b242-6b4194847b12] Process exited with code 0\n2025-07-16 14:44:42.426 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f21acfc3-4146-4687-87ec-b5f77b590ae6] 
socks connection closed\n2025-07-16 14:44:42.426 [info] [command][e222430a-33ed-4120-b242-6b4194847b12] Socket close event received\n2025-07-16 14:44:42.450 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63861 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:45:42.427 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:45:42.428 [info] [command][39892df0-8a06-412c-8f8b-9b0415eedfa0] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""39892df0-8a06-412c-8f8b-9b0415eedfa0""}\n2025-07-16 14:45:42.429 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0558e7e3-a2e0-42ac-aad8-caa1b1a3a64d] received connection request\n2025-07-16 14:45:42.429 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:45:42.456 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0558e7e3-a2e0-42ac-aad8-caa1b1a3a64d] socks forwarding established\n2025-07-16 14:45:42.485 [info] [command][39892df0-8a06-412c-8f8b-9b0415eedfa0] Process exited with code 0\n2025-07-16 14:45:42.485 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0558e7e3-a2e0-42ac-aad8-caa1b1a3a64d] socks connection closed\n2025-07-16 14:45:42.485 [info] [command][39892df0-8a06-412c-8f8b-9b0415eedfa0] Socket close event received\n2025-07-16 14:45:42.510 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63915 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:46:42.491 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:46:42.494 [info] [command][44ff7dee-1620-434c-968f-2740274f73e9] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""44ff7dee-1620-434c-968f-2740274f73e9""}\n2025-07-16 14:46:42.495 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][1f866119-fe16-4945-b9e1-64875a9cfacb] received connection request\n2025-07-16 14:46:42.495 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:46:42.519 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1f866119-fe16-4945-b9e1-64875a9cfacb] socks forwarding established\n2025-07-16 14:46:42.545 [info] [command][44ff7dee-1620-434c-968f-2740274f73e9] Process exited with code 0\n2025-07-16 14:46:42.545 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1f866119-fe16-4945-b9e1-64875a9cfacb] socks connection closed\n2025-07-16 14:46:42.546 [info] [command][44ff7dee-1620-434c-968f-2740274f73e9] Socket close event received\n2025-07-16 14:46:42.568 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 63943 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:47:42.551 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:47:42.554 [info] [command][7681e54c-182e-479d-95e2-4e964ad2feb3] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""7681e54c-182e-479d-95e2-4e964ad2feb3""}\n2025-07-16 14:47:42.555 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0f0c1648-5fae-4f52-8a8b-875f9856731d] received connection request\n2025-07-16 14:47:42.555 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:47:42.583 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0f0c1648-5fae-4f52-8a8b-875f9856731d] socks forwarding established\n2025-07-16 14:47:42.632 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0f0c1648-5fae-4f52-8a8b-875f9856731d] socks connection closed\n2025-07-16 14:47:42.633 [info] [command][7681e54c-182e-479d-95e2-4e964ad2feb3] Process exited with code 0\n2025-07-16 14:47:42.633 [info] [command][7681e54c-182e-479d-95e2-4e964ad2feb3] Socket close event received\n2025-07-16 14:47:42.656 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64041 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:48:42.635 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:48:42.637 [info] [command][e1bffb54-bca5-4231-9926-3d772b18d384] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""e1bffb54-bca5-4231-9926-3d772b18d384""}\n2025-07-16 14:48:42.638 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][b7f55b8b-f7f5-45a3-a3a4-62c5bc647c25] received connection request\n2025-07-16 14:48:42.639 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:48:42.663 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b7f55b8b-f7f5-45a3-a3a4-62c5bc647c25] socks forwarding established\n2025-07-16 14:48:42.691 [info] [command][e1bffb54-bca5-4231-9926-3d772b18d384] Process exited with code 0\n2025-07-16 14:48:42.691 [info] [command][e1bffb54-bca5-4231-9926-3d772b18d384] Socket close event received\n2025-07-16 14:48:42.715 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b7f55b8b-f7f5-45a3-a3a4-62c5bc647c25] socks connection closed\n2025-07-16 14:48:42.715 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64065 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:49:42.697 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:49:42.700 [info] [command][7d8c7df6-6268-4a32-a583-9e0a8ce6377f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""7d8c7df6-6268-4a32-a583-9e0a8ce6377f""}\n2025-07-16 14:49:42.700 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][22b9cb90-54b0-4999-99c0-fa17a8987ccc] received connection request\n2025-07-16 14:49:42.701 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:49:42.726 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][22b9cb90-54b0-4999-99c0-fa17a8987ccc] socks forwarding established\n2025-07-16 14:49:42.777 [info] [command][7d8c7df6-6268-4a32-a583-9e0a8ce6377f] Process exited with code 0\n2025-07-16 14:49:42.778 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][22b9cb90-54b0-4999-99c0-fa17a8987ccc] socks connection closed\n2025-07-16 14:49:42.778 [info] [command][7d8c7df6-6268-4a32-a583-9e0a8ce6377f] Socket close event received\n2025-07-16 14:49:42.801 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64114 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:50:42.782 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:50:42.784 [info] [command][19ee7f8b-8ffe-4d7b-ace6-52de1329bb1f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""19ee7f8b-8ffe-4d7b-ace6-52de1329bb1f""}\n2025-07-16 14:50:42.784 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][7ff503cf-3def-45c3-9fda-b4ae261a12e4] received connection request\n2025-07-16 14:50:42.785 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:50:42.809 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7ff503cf-3def-45c3-9fda-b4ae261a12e4] socks forwarding established\n2025-07-16 14:50:42.836 [info] [command][19ee7f8b-8ffe-4d7b-ace6-52de1329bb1f] Process exited with code 0\n2025-07-16 14:50:42.837 [info] [command][19ee7f8b-8ffe-4d7b-ace6-52de1329bb1f] Socket close event received\n2025-07-16 14:50:42.860 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7ff503cf-3def-45c3-9fda-b4ae261a12e4] socks connection closed\n2025-07-16 14:50:42.865 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64153 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:51:42.838 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:51:42.840 [info] [command][6082211e-f890-453e-9f08-a2308f84292c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""6082211e-f890-453e-9f08-a2308f84292c""}\n2025-07-16 14:51:42.841 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][f207f265-cd1c-4172-90b9-927a0aebd99c] received connection request\n2025-07-16 14:51:42.841 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:51:42.865 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f207f265-cd1c-4172-90b9-927a0aebd99c] socks forwarding established\n2025-07-16 14:51:42.891 [info] [command][6082211e-f890-453e-9f08-a2308f84292c] Process exited with code 0\n2025-07-16 14:51:42.891 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f207f265-cd1c-4172-90b9-927a0aebd99c] socks connection closed\n2025-07-16 14:51:42.891 [info] [command][6082211e-f890-453e-9f08-a2308f84292c] Socket close event received\n2025-07-16 14:51:42.914 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 
37309, connect from 127.0.0.1 port 64184 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:52:42.893 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:52:42.895 [info] [command][b0f66d84-89fe-4c6c-a7e7-1e0e3c9c6674] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""b0f66d84-89fe-4c6c-a7e7-1e0e3c9c6674""}\n2025-07-16 14:52:42.896 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][01086060-f791-4bf9-9efb-4f9e55ef8071] received connection request\n2025-07-16 14:52:42.896 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:52:42.921 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][01086060-f791-4bf9-9efb-4f9e55ef8071] socks forwarding established\n2025-07-16 14:52:42.950 [info] [command][b0f66d84-89fe-4c6c-a7e7-1e0e3c9c6674] Process exited with code 0\n2025-07-16 14:52:42.950 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][01086060-f791-4bf9-9efb-4f9e55ef8071] socks connection closed\n2025-07-16 14:52:42.950 [info] [command][b0f66d84-89fe-4c6c-a7e7-1e0e3c9c6674] Socket close event received\n2025-07-16 14:52:42.974 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64262 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:53:42.953 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:53:42.955 [info] [command][afa826e4-5241-479d-bed3-24690ca091ef] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""afa826e4-5241-479d-bed3-24690ca091ef""}\n2025-07-16 14:53:42.955 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][776a5be9-d188-4224-9d18-49727e18cbae] received connection request\n2025-07-16 14:53:42.956 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:53:42.980 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][776a5be9-d188-4224-9d18-49727e18cbae] socks forwarding established\n2025-07-16 14:53:43.010 [info] [command][afa826e4-5241-479d-bed3-24690ca091ef] Process exited with code 0\n2025-07-16 14:53:43.011 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][776a5be9-d188-4224-9d18-49727e18cbae] socks connection closed\n2025-07-16 14:53:43.011 [info] [command][afa826e4-5241-479d-bed3-24690ca091ef] Socket close event received\n2025-07-16 14:53:43.033 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64285 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:54:43.016 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:54:43.018 [info] [command][33aef4c5-6faa-4c22-ae0a-d1cd4c5bedc2] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""33aef4c5-6faa-4c22-ae0a-d1cd4c5bedc2""}\n2025-07-16 14:54:43.019 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][50db0b3d-5631-413f-bdda-ea021b292dd6] received connection request\n2025-07-16 14:54:43.019 [info] 
(ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:54:43.044 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][50db0b3d-5631-413f-bdda-ea021b292dd6] socks forwarding established\n2025-07-16 14:54:43.070 [info] [command][33aef4c5-6faa-4c22-ae0a-d1cd4c5bedc2] Process exited with code 0\n2025-07-16 14:54:43.071 [info] [command][33aef4c5-6faa-4c22-ae0a-d1cd4c5bedc2] Socket close event received\n2025-07-16 14:54:43.081 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][50db0b3d-5631-413f-bdda-ea021b292dd6] socks connection closed\n2025-07-16 14:54:43.094 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64341 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:55:43.075 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:55:43.077 [info] [command][33081359-e5b3-4e2f-8033-7a47802e1918] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""33081359-e5b3-4e2f-8033-7a47802e1918""}\n2025-07-16 14:55:43.077 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][1b213920-44bd-4130-aece-afc1f818566a] received connection request\n2025-07-16 14:55:43.078 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:55:43.101 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1b213920-44bd-4130-aece-afc1f818566a] socks forwarding established\n2025-07-16 14:55:43.127 [info] [command][33081359-e5b3-4e2f-8033-7a47802e1918] Process exited with code 0\n2025-07-16 14:55:43.128 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1b213920-44bd-4130-aece-afc1f818566a] socks connection closed\n2025-07-16 14:55:43.128 [info] [command][33081359-e5b3-4e2f-8033-7a47802e1918] Socket close event received\n2025-07-16 14:55:43.150 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64371 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:56:43.133 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:56:43.135 [info] [command][561c4ec2-1dd7-4cc2-b874-27693044ac44] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""561c4ec2-1dd7-4cc2-b874-27693044ac44""}\n2025-07-16 14:56:43.136 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0fdf9c19-cbd1-43a0-99c0-8150e01544d2] received connection request\n2025-07-16 14:56:43.136 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:56:43.159 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0fdf9c19-cbd1-43a0-99c0-8150e01544d2] socks forwarding established\n2025-07-16 14:56:43.187 [info] [command][561c4ec2-1dd7-4cc2-b874-27693044ac44] Process exited with code 0\n2025-07-16 14:56:43.187 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0fdf9c19-cbd1-43a0-99c0-8150e01544d2] 
socks connection closed\n2025-07-16 14:56:43.187 [info] [command][561c4ec2-1dd7-4cc2-b874-27693044ac44] Socket close event received\n2025-07-16 14:56:43.213 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64397 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:57:43.188 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:57:43.191 [info] [command][91754617-262f-415c-8230-0c3a71536824] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""91754617-262f-415c-8230-0c3a71536824""}\n2025-07-16 14:57:43.192 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0e5105df-cdcb-4980-bd9f-48dbdf606279] received connection request\n2025-07-16 14:57:43.192 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:57:43.216 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0e5105df-cdcb-4980-bd9f-48dbdf606279] socks forwarding established\n2025-07-16 14:57:43.262 [info] [command][91754617-262f-415c-8230-0c3a71536824] Process exited with code 0\n2025-07-16 14:57:43.262 [info] [command][91754617-262f-415c-8230-0c3a71536824] Socket close event received\n2025-07-16 14:57:43.263 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0e5105df-cdcb-4980-bd9f-48dbdf606279] socks connection closed\n2025-07-16 14:57:43.287 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64464 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:58:43.267 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:58:43.269 [info] [command][4116dfb0-415c-4d79-ad5c-601cb3391c9f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""4116dfb0-415c-4d79-ad5c-601cb3391c9f""}\n2025-07-16 14:58:43.270 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][30db68d9-8b9e-4f64-a72e-893b5dabcb6d] received connection request\n2025-07-16 14:58:43.271 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:58:43.297 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][30db68d9-8b9e-4f64-a72e-893b5dabcb6d] socks forwarding established\n2025-07-16 14:58:43.324 [info] [command][4116dfb0-415c-4d79-ad5c-601cb3391c9f] Process exited with code 0\n2025-07-16 14:58:43.324 [info] [command][4116dfb0-415c-4d79-ad5c-601cb3391c9f] Socket close event received\n2025-07-16 14:58:43.325 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][30db68d9-8b9e-4f64-a72e-893b5dabcb6d] socks connection closed\n2025-07-16 14:58:43.348 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64490 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 14:59:43.328 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 14:59:43.330 [info] [command][050af5d8-6619-42c5-abe8-4a0729635534] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""050af5d8-6619-42c5-abe8-4a0729635534""}\n2025-07-16 14:59:43.331 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d2efeeb5-ec9f-439f-92c6-81caf3a1ea8f] received connection request\n2025-07-16 14:59:43.331 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 14:59:43.355 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d2efeeb5-ec9f-439f-92c6-81caf3a1ea8f] socks forwarding established\n2025-07-16 14:59:43.383 [info] [command][050af5d8-6619-42c5-abe8-4a0729635534] Process exited with code 0\n2025-07-16 14:59:43.384 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d2efeeb5-ec9f-439f-92c6-81caf3a1ea8f] socks connection closed\n2025-07-16 14:59:43.384 [info] [command][050af5d8-6619-42c5-abe8-4a0729635534] Socket close event received\n2025-07-16 14:59:43.406 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64533 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:00:43.386 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:00:43.388 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][7348382c-6fbf-4cc7-8738-09f015ea90ca] received connection request\n2025-07-16 15:00:43.388 [info] [command][2fbd883c-e0eb-4306-9568-f15498a3adab] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""2fbd883c-e0eb-4306-9568-f15498a3adab""}\n2025-07-16 15:00:43.388 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:00:43.413 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7348382c-6fbf-4cc7-8738-09f015ea90ca] socks forwarding established\n2025-07-16 15:00:43.439 [info] [command][2fbd883c-e0eb-4306-9568-f15498a3adab] Process exited with code 0\n2025-07-16 15:00:43.439 [info] [command][2fbd883c-e0eb-4306-9568-f15498a3adab] Socket close event received\n2025-07-16 15:00:43.440 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7348382c-6fbf-4cc7-8738-09f015ea90ca] socks connection closed\n2025-07-16 15:00:43.462 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64563 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:01:43.443 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:01:43.445 [info] [command][be1f1e9f-c2a9-4434-a23c-9d05118131de] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""be1f1e9f-c2a9-4434-a23c-9d05118131de""}\n2025-07-16 15:01:43.446 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][46b60c6d-64d9-40bf-8845-c919975b177b] received connection request\n2025-07-16 15:01:43.446 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:01:43.470 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][46b60c6d-64d9-40bf-8845-c919975b177b] socks forwarding established\n2025-07-16 15:01:43.498 [info] [command][be1f1e9f-c2a9-4434-a23c-9d05118131de] Process exited with code 0\n2025-07-16 15:01:43.498 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][46b60c6d-64d9-40bf-8845-c919975b177b] socks connection closed\n2025-07-16 15:01:43.498 [info] [command][be1f1e9f-c2a9-4434-a23c-9d05118131de] Socket close event received\n2025-07-16 15:01:43.521 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64589 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:02:43.503 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:02:43.504 [info] [command][0f829029-977f-4ca0-ba22-7fb1050b49ba] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""0f829029-977f-4ca0-ba22-7fb1050b49ba""}\n2025-07-16 15:02:43.504 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][86b4f553-a7e9-4ad0-898a-627973a54352] received connection request\n2025-07-16 15:02:43.505 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:02:43.529 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][86b4f553-a7e9-4ad0-898a-627973a54352] socks forwarding established\n2025-07-16 15:02:43.558 [info] [command][0f829029-977f-4ca0-ba22-7fb1050b49ba] Process exited with code 0\n2025-07-16 15:02:43.558 [info] [command][0f829029-977f-4ca0-ba22-7fb1050b49ba] Socket close event received\n2025-07-16 15:02:43.558 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][86b4f553-a7e9-4ad0-898a-627973a54352] socks connection closed\n2025-07-16 15:02:43.583 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64634 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:03:43.568 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:03:43.570 [info] [command][a918f860-4058-482c-aee4-79d626361a2b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""a918f860-4058-482c-aee4-79d626361a2b""}\n2025-07-16 15:03:43.571 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][1e8a917f-8496-42ad-bfb0-334883dd38e7] received connection request\n2025-07-16 15:03:43.571 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:03:43.595 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1e8a917f-8496-42ad-bfb0-334883dd38e7] socks forwarding established\n2025-07-16 15:03:43.621 [info] [command][a918f860-4058-482c-aee4-79d626361a2b] Process exited with code 0\n2025-07-16 15:03:43.622 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1e8a917f-8496-42ad-bfb0-334883dd38e7] socks connection closed\n2025-07-16 15:03:43.622 [info] [command][a918f860-4058-482c-aee4-79d626361a2b] Socket close event received\n2025-07-16 15:03:43.645 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 
37309, connect from 127.0.0.1 port 64654 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:04:43.631 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:04:43.633 [info] [command][cfd765d6-719c-4031-96e6-1c34e0c92322] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""cfd765d6-719c-4031-96e6-1c34e0c92322""}\n2025-07-16 15:04:43.634 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][8fbf9956-fee3-4f8a-8915-820fde9acb94] received connection request\n2025-07-16 15:04:43.635 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:04:43.658 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8fbf9956-fee3-4f8a-8915-820fde9acb94] socks forwarding established\n2025-07-16 15:04:43.684 [info] [command][cfd765d6-719c-4031-96e6-1c34e0c92322] Process exited with code 0\n2025-07-16 15:04:43.684 [info] [command][cfd765d6-719c-4031-96e6-1c34e0c92322] Socket close event received\n2025-07-16 15:04:43.686 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8fbf9956-fee3-4f8a-8915-820fde9acb94] socks connection closed\n2025-07-16 15:04:43.709 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64694 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:05:43.689 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:05:43.691 [info] [command][c965247c-26f0-414d-8ef4-1a795d0149ab] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""c965247c-26f0-414d-8ef4-1a795d0149ab""}\n2025-07-16 15:05:43.692 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][ea2b17a6-feb9-4ab6-8325-244883656b8f] received connection request\n2025-07-16 15:05:43.693 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:05:43.717 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ea2b17a6-feb9-4ab6-8325-244883656b8f] socks forwarding established\n2025-07-16 15:05:43.745 [info] [command][c965247c-26f0-414d-8ef4-1a795d0149ab] Process exited with code 0\n2025-07-16 15:05:43.745 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ea2b17a6-feb9-4ab6-8325-244883656b8f] socks connection closed\n2025-07-16 15:05:43.745 [info] [command][c965247c-26f0-414d-8ef4-1a795d0149ab] Socket close event received\n2025-07-16 15:05:43.769 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64718 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:06:43.748 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:06:43.749 [info] [command][34a60619-43ab-4786-a844-0dcbbd0485e7] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""34a60619-43ab-4786-a844-0dcbbd0485e7""}\n2025-07-16 15:06:43.750 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][3c665c59-7ca1-4fde-81d7-3168d6a38f36] received connection request\n2025-07-16 15:06:43.750 [info] 
(ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:06:43.775 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3c665c59-7ca1-4fde-81d7-3168d6a38f36] socks forwarding established\n2025-07-16 15:06:43.804 [info] [command][34a60619-43ab-4786-a844-0dcbbd0485e7] Process exited with code 0\n2025-07-16 15:06:43.804 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3c665c59-7ca1-4fde-81d7-3168d6a38f36] socks connection closed\n2025-07-16 15:06:43.804 [info] [command][34a60619-43ab-4786-a844-0dcbbd0485e7] Socket close event received\n2025-07-16 15:06:43.828 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64742 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:07:43.810 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:07:43.812 [info] [command][364f738a-08ae-444a-88d2-48bd03dd658f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""364f738a-08ae-444a-88d2-48bd03dd658f""}\n2025-07-16 15:07:43.813 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0e2571c9-f588-482f-899e-a99c95f09356] received connection request\n2025-07-16 15:07:43.813 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:07:43.838 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0e2571c9-f588-482f-899e-a99c95f09356] socks forwarding established\n2025-07-16 15:07:43.867 [info] [command][364f738a-08ae-444a-88d2-48bd03dd658f] Process exited with code 0\n2025-07-16 15:07:43.868 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0e2571c9-f588-482f-899e-a99c95f09356] socks connection closed\n2025-07-16 15:07:43.868 [info] [command][364f738a-08ae-444a-88d2-48bd03dd658f] Socket close event received\n2025-07-16 15:07:43.891 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64801 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:08:43.873 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:08:43.874 [info] [command][64ecb394-4ba5-46f8-9871-2f1a55fd557d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""64ecb394-4ba5-46f8-9871-2f1a55fd557d""}\n2025-07-16 15:08:43.875 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][a4c68569-2a60-4021-b1fa-f44fe5be04cc] received connection request\n2025-07-16 15:08:43.875 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:08:43.898 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a4c68569-2a60-4021-b1fa-f44fe5be04cc] socks forwarding established\n2025-07-16 15:08:43.925 [info] [command][64ecb394-4ba5-46f8-9871-2f1a55fd557d] Process exited with code 0\n2025-07-16 15:08:43.925 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a4c68569-2a60-4021-b1fa-f44fe5be04cc] 
socks connection closed\n2025-07-16 15:08:43.925 [info] [command][64ecb394-4ba5-46f8-9871-2f1a55fd557d] Socket close event received\n2025-07-16 15:08:43.947 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64837 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:09:43.925 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:09:43.926 [info] [command][9d2c723a-66e2-4ab0-88de-3298dff68cdb] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""9d2c723a-66e2-4ab0-88de-3298dff68cdb""}\n2025-07-16 15:09:43.927 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][e12a791a-4c88-465d-abf6-4058f5f134ce] received connection request\n2025-07-16 15:09:43.927 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:09:43.956 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e12a791a-4c88-465d-abf6-4058f5f134ce] socks forwarding established\n2025-07-16 15:09:44.007 [info] [command][9d2c723a-66e2-4ab0-88de-3298dff68cdb] Process exited with code 0\n2025-07-16 15:09:44.008 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e12a791a-4c88-465d-abf6-4058f5f134ce] socks connection closed\n2025-07-16 15:09:44.008 [info] [command][9d2c723a-66e2-4ab0-88de-3298dff68cdb] Socket close event received\n2025-07-16 15:09:44.081 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64892 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:10:44.013 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:10:44.015 [info] [command][223d6b4f-2463-49f8-959c-d56c5ea17126] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""223d6b4f-2463-49f8-959c-d56c5ea17126""}\n2025-07-16 15:10:44.016 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][a0db2650-6edb-4761-ac7e-cbb1a015a627] received connection request\n2025-07-16 15:10:44.017 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:10:44.042 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a0db2650-6edb-4761-ac7e-cbb1a015a627] socks forwarding established\n2025-07-16 15:10:44.070 [info] [command][223d6b4f-2463-49f8-959c-d56c5ea17126] Process exited with code 0\n2025-07-16 15:10:44.070 [info] [command][223d6b4f-2463-49f8-959c-d56c5ea17126] Socket close event received\n2025-07-16 15:10:44.070 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a0db2650-6edb-4761-ac7e-cbb1a015a627] socks connection closed\n2025-07-16 15:10:44.093 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64925 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:11:44.075 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:11:44.077 [info] [command][d428a1d1-b70a-44ce-9484-99d04921af2f] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d428a1d1-b70a-44ce-9484-99d04921af2f""}\n2025-07-16 15:11:44.078 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][2e2d960c-d390-4d7b-be96-aaf7d20a2147] received connection request\n2025-07-16 15:11:44.079 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:11:44.102 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][2e2d960c-d390-4d7b-be96-aaf7d20a2147] socks forwarding established\n2025-07-16 15:11:44.129 [info] [command][d428a1d1-b70a-44ce-9484-99d04921af2f] Process exited with code 0\n2025-07-16 15:11:44.129 [info] [command][d428a1d1-b70a-44ce-9484-99d04921af2f] Socket close event received\n2025-07-16 15:11:44.130 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][2e2d960c-d390-4d7b-be96-aaf7d20a2147] socks connection closed\n2025-07-16 15:11:44.153 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 64961 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:12:44.133 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:12:44.134 [info] [command][8197be9b-523e-47f2-b0c6-3bc782c1bc36] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""8197be9b-523e-47f2-b0c6-3bc782c1bc36""}\n2025-07-16 15:12:44.134 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][c1f2cc84-0287-4000-963c-be476effae54] received connection request\n2025-07-16 15:12:44.135 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:12:44.159 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c1f2cc84-0287-4000-963c-be476effae54] socks forwarding established\n2025-07-16 15:12:44.187 [info] [command][8197be9b-523e-47f2-b0c6-3bc782c1bc36] Process exited with code 0\n2025-07-16 15:12:44.187 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c1f2cc84-0287-4000-963c-be476effae54] socks connection closed\n2025-07-16 15:12:44.187 [info] [command][8197be9b-523e-47f2-b0c6-3bc782c1bc36] Socket close event received\n2025-07-16 15:12:44.211 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 65013 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:13:44.192 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:13:44.194 [info] [command][81b21a05-15c5-484a-ba75-6aa1c07599fe] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""81b21a05-15c5-484a-ba75-6aa1c07599fe""}\n2025-07-16 15:13:44.195 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][6a6a5e0c-43b7-4265-a572-e0eedcb745ad] received connection request\n2025-07-16 15:13:44.196 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:13:44.220 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][6a6a5e0c-43b7-4265-a572-e0eedcb745ad] socks forwarding established\n2025-07-16 15:13:44.246 [info] [command][81b21a05-15c5-484a-ba75-6aa1c07599fe] Process exited with code 0\n2025-07-16 15:13:44.246 [info] [command][81b21a05-15c5-484a-ba75-6aa1c07599fe] Socket close event received\n2025-07-16 15:13:44.246 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6a6a5e0c-43b7-4265-a572-e0eedcb745ad] socks connection closed\n2025-07-16 15:13:44.269 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 65043 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:14:44.249 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:14:44.251 [info] [command][bb21d218-4e8b-4b33-a5ea-7eb18352f69d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""bb21d218-4e8b-4b33-a5ea-7eb18352f69d""}\n2025-07-16 15:14:44.251 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][db871ae2-cdca-4c10-a836-f3c56d3b59d2] received connection request\n2025-07-16 15:14:44.252 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:14:44.277 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][db871ae2-cdca-4c10-a836-f3c56d3b59d2] socks forwarding established\n2025-07-16 15:14:44.305 [info] [command][bb21d218-4e8b-4b33-a5ea-7eb18352f69d] Process exited with code 0\n2025-07-16 15:14:44.306 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][db871ae2-cdca-4c10-a836-f3c56d3b59d2] socks connection closed\n2025-07-16 15:14:44.306 [info] [command][bb21d218-4e8b-4b33-a5ea-7eb18352f69d] Socket close event received\n2025-07-16 15:14:44.330 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 65101 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:15:44.312 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:15:44.314 [info] [command][5ba3cc04-c82c-4f68-8505-c29a3ea28175] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""5ba3cc04-c82c-4f68-8505-c29a3ea28175""}\n2025-07-16 15:15:44.314 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][3d1d9c7d-50bb-4d9e-a903-8c81b46b26c1] received connection request\n2025-07-16 15:15:44.315 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:15:44.339 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3d1d9c7d-50bb-4d9e-a903-8c81b46b26c1] socks forwarding established\n2025-07-16 15:15:44.389 [info] [command][5ba3cc04-c82c-4f68-8505-c29a3ea28175] Process exited with code 0\n2025-07-16 15:15:44.390 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3d1d9c7d-50bb-4d9e-a903-8c81b46b26c1] socks connection closed\n2025-07-16 15:15:44.390 [info] [command][5ba3cc04-c82c-4f68-8505-c29a3ea28175] Socket close event received\n2025-07-16 15:15:44.413 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 
37309, connect from 127.0.0.1 port 65135 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:16:44.395 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:16:44.396 [info] [command][d1f99d27-28ae-4ad6-b34f-c25474e33782] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d1f99d27-28ae-4ad6-b34f-c25474e33782""}\n2025-07-16 15:16:44.397 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][7a69be06-7721-4908-a03b-39f438d393f8] received connection request\n2025-07-16 15:16:44.397 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:16:44.422 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7a69be06-7721-4908-a03b-39f438d393f8] socks forwarding established\n2025-07-16 15:16:44.448 [info] [command][d1f99d27-28ae-4ad6-b34f-c25474e33782] Process exited with code 0\n2025-07-16 15:16:44.448 [info] [command][d1f99d27-28ae-4ad6-b34f-c25474e33782] Socket close event received\n2025-07-16 15:16:44.470 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7a69be06-7721-4908-a03b-39f438d393f8] socks connection closed\n2025-07-16 15:16:44.470 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 65160 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:17:44.454 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:17:44.455 [info] [command][10d117a1-62be-4b70-a906-7b416d60b147] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""10d117a1-62be-4b70-a906-7b416d60b147""}\n2025-07-16 15:17:44.456 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][49ed5826-44b3-4bcf-9677-3cf3213dfc9a] received connection request\n2025-07-16 15:17:44.457 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:17:44.483 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][49ed5826-44b3-4bcf-9677-3cf3213dfc9a] socks forwarding established\n2025-07-16 15:17:44.510 [info] [command][10d117a1-62be-4b70-a906-7b416d60b147] Process exited with code 0\n2025-07-16 15:17:44.510 [info] [command][10d117a1-62be-4b70-a906-7b416d60b147] Socket close event received\n2025-07-16 15:17:44.532 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][49ed5826-44b3-4bcf-9677-3cf3213dfc9a] socks connection closed\n2025-07-16 15:17:44.533 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 65213 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:18:44.513 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:18:44.515 [info] [command][b785f20f-23d3-45b4-b660-3e480041f8d8] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""b785f20f-23d3-45b4-b660-3e480041f8d8""}\n2025-07-16 15:18:44.516 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d9fdd0a3-ad8f-4d48-92d7-0ed9f0f6df01] received connection request\n2025-07-16 15:18:44.516 [info] 
(ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:18:44.542 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d9fdd0a3-ad8f-4d48-92d7-0ed9f0f6df01] socks forwarding established\n2025-07-16 15:18:44.570 [info] [command][b785f20f-23d3-45b4-b660-3e480041f8d8] Process exited with code 0\n2025-07-16 15:18:44.570 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d9fdd0a3-ad8f-4d48-92d7-0ed9f0f6df01] socks connection closed\n2025-07-16 15:18:44.570 [info] [command][b785f20f-23d3-45b4-b660-3e480041f8d8] Socket close event received\n2025-07-16 15:18:44.594 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 65239 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:19:44.575 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:19:44.577 [info] [command][6a7078c3-cd6c-4839-8cdf-17cd68e1cf47] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""6a7078c3-cd6c-4839-8cdf-17cd68e1cf47""}\n2025-07-16 15:19:44.578 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][b059ba6c-f53e-4941-98fc-2ab62b0d1197] received connection request\n2025-07-16 15:19:44.578 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:19:44.604 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b059ba6c-f53e-4941-98fc-2ab62b0d1197] socks forwarding established\n2025-07-16 15:19:44.632 [info] [command][6a7078c3-cd6c-4839-8cdf-17cd68e1cf47] Process exited with code 0\n2025-07-16 15:19:44.632 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b059ba6c-f53e-4941-98fc-2ab62b0d1197] socks connection closed\n2025-07-16 15:19:44.633 [info] [command][6a7078c3-cd6c-4839-8cdf-17cd68e1cf47] Socket close event received\n2025-07-16 15:19:44.656 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 65294 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:20:44.638 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:20:44.640 [info] [command][3e32f401-e19f-43f3-84b5-598352891cdb] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""3e32f401-e19f-43f3-84b5-598352891cdb""}\n2025-07-16 15:20:44.640 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][b082f0ef-4e0b-43fe-807d-f967abab9462] received connection request\n2025-07-16 15:20:44.641 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:20:44.664 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b082f0ef-4e0b-43fe-807d-f967abab9462] socks forwarding established\n2025-07-16 15:20:44.690 [info] [command][3e32f401-e19f-43f3-84b5-598352891cdb] Process exited with code 0\n2025-07-16 15:20:44.691 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b082f0ef-4e0b-43fe-807d-f967abab9462] 
socks connection closed\n2025-07-16 15:20:44.691 [info] [command][3e32f401-e19f-43f3-84b5-598352891cdb] Socket close event received\n2025-07-16 15:20:44.714 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 65342 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:21:44.693 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:21:44.696 [info] [command][758c59bd-4f3f-46bc-bae9-443af17a137d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""758c59bd-4f3f-46bc-bae9-443af17a137d""}\n2025-07-16 15:21:44.696 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][ffebbf60-4fac-44cc-867f-df7745fd9a7f] received connection request\n2025-07-16 15:21:44.697 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:21:44.721 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ffebbf60-4fac-44cc-867f-df7745fd9a7f] socks forwarding established\n2025-07-16 15:21:44.750 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ffebbf60-4fac-44cc-867f-df7745fd9a7f] socks connection closed\n2025-07-16 15:21:44.750 [info] [command][758c59bd-4f3f-46bc-bae9-443af17a137d] Process exited with code 0\n2025-07-16 15:21:44.751 [info] [command][758c59bd-4f3f-46bc-bae9-443af17a137d] Socket close event received\n2025-07-16 15:21:44.774 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 65367 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:22:44.755 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:22:44.757 [info] [command][b16c568f-07a1-4fab-a3a2-928b1927b6f3] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""b16c568f-07a1-4fab-a3a2-928b1927b6f3""}\n2025-07-16 15:22:44.757 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][1f7b210f-47ea-49d7-958f-bf222eb8c85d] received connection request\n2025-07-16 15:22:44.758 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:22:44.782 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1f7b210f-47ea-49d7-958f-bf222eb8c85d] socks forwarding established\n2025-07-16 15:22:44.808 [info] [command][b16c568f-07a1-4fab-a3a2-928b1927b6f3] Process exited with code 0\n2025-07-16 15:22:44.808 [info] [command][b16c568f-07a1-4fab-a3a2-928b1927b6f3] Socket close event received\n2025-07-16 15:22:44.808 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1f7b210f-47ea-49d7-958f-bf222eb8c85d] socks connection closed\n2025-07-16 15:22:44.840 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 65427 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:23:44.814 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:23:44.816 [info] [command][09847975-c15a-4686-af4a-d6cc8b53c154] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""09847975-c15a-4686-af4a-d6cc8b53c154""}\n2025-07-16 15:23:44.816 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][b04ca92f-4a14-45f6-8b55-5e5a3354a160] received connection request\n2025-07-16 15:23:44.817 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:23:44.842 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b04ca92f-4a14-45f6-8b55-5e5a3354a160] socks forwarding established\n2025-07-16 15:23:44.869 [info] [command][09847975-c15a-4686-af4a-d6cc8b53c154] Process exited with code 0\n2025-07-16 15:23:44.869 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b04ca92f-4a14-45f6-8b55-5e5a3354a160] socks connection closed\n2025-07-16 15:23:44.870 [info] [command][09847975-c15a-4686-af4a-d6cc8b53c154] Socket close event received\n2025-07-16 15:23:44.893 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 65456 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:24:44.874 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:24:44.877 [info] [command][dae8c3ac-3c2f-40eb-af52-caae128e9c9e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""dae8c3ac-3c2f-40eb-af52-caae128e9c9e""}\n2025-07-16 15:24:44.877 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][dcce0a95-26cb-498b-9d37-caee095afe75] received connection request\n2025-07-16 15:24:44.878 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:24:44.904 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][dcce0a95-26cb-498b-9d37-caee095afe75] socks forwarding established\n2025-07-16 15:24:44.933 [info] [command][dae8c3ac-3c2f-40eb-af52-caae128e9c9e] Process exited with code 0\n2025-07-16 15:24:44.933 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][dcce0a95-26cb-498b-9d37-caee095afe75] socks connection closed\n2025-07-16 15:24:44.933 [info] [command][dae8c3ac-3c2f-40eb-af52-caae128e9c9e] Socket close event received\n2025-07-16 15:24:44.957 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 65529 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:25:44.936 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:25:44.938 [info] [command][d9a6f9dd-9d8b-45a8-8a61-e0e1473d5164] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d9a6f9dd-9d8b-45a8-8a61-e0e1473d5164""}\n2025-07-16 15:25:44.939 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][2da3e435-4520-4e77-8544-31d5f6388977] received connection request\n2025-07-16 15:25:44.940 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:25:44.967 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][2da3e435-4520-4e77-8544-31d5f6388977] socks forwarding established\n2025-07-16 15:25:44.993 [info] [command][d9a6f9dd-9d8b-45a8-8a61-e0e1473d5164] Process exited with code 0\n2025-07-16 15:25:44.993 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][2da3e435-4520-4e77-8544-31d5f6388977] socks connection closed\n2025-07-16 15:25:44.993 [info] [command][d9a6f9dd-9d8b-45a8-8a61-e0e1473d5164] Socket close event received\n2025-07-16 15:25:45.019 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 49176 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:26:44.998 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:26:45.000 [info] [command][9a0d2cdd-8f4a-4075-a2fe-b79d4411668f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""9a0d2cdd-8f4a-4075-a2fe-b79d4411668f""}\n2025-07-16 15:26:45.001 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][c9d1bad7-6f21-44d8-8265-a9a25e2d290a] received connection request\n2025-07-16 15:26:45.001 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:26:45.024 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c9d1bad7-6f21-44d8-8265-a9a25e2d290a] socks forwarding established\n2025-07-16 15:26:45.051 [info] [command][9a0d2cdd-8f4a-4075-a2fe-b79d4411668f] Process exited with code 0\n2025-07-16 15:26:45.051 [info] [command][9a0d2cdd-8f4a-4075-a2fe-b79d4411668f] Socket close event received\n2025-07-16 15:26:45.051 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c9d1bad7-6f21-44d8-8265-a9a25e2d290a] socks connection closed\n2025-07-16 15:26:45.073 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 49222 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:27:45.057 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:27:45.059 [info] [command][fa7fe5a7-bf3b-4c69-bc1f-2584fc593638] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""fa7fe5a7-bf3b-4c69-bc1f-2584fc593638""}\n2025-07-16 15:27:45.060 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d766450d-f0fc-4968-9065-a73c07a461ce] received connection request\n2025-07-16 15:27:45.061 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:27:45.085 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d766450d-f0fc-4968-9065-a73c07a461ce] socks forwarding established\n2025-07-16 15:27:45.114 [info] [command][fa7fe5a7-bf3b-4c69-bc1f-2584fc593638] Process exited with code 0\n2025-07-16 15:27:45.114 [info] [command][fa7fe5a7-bf3b-4c69-bc1f-2584fc593638] Socket close event received\n2025-07-16 15:27:45.115 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d766450d-f0fc-4968-9065-a73c07a461ce] socks connection closed\n2025-07-16 15:27:45.139 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 
37309, connect from 127.0.0.1 port 49289 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:28:45.119 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:28:45.122 [info] [command][2f0ac320-00b2-4f0f-916e-dbf5e3e5f110] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""2f0ac320-00b2-4f0f-916e-dbf5e3e5f110""}\n2025-07-16 15:28:45.122 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][23ac3589-e604-4a08-8966-8fdc999aed0a] received connection request\n2025-07-16 15:28:45.123 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:28:45.150 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][23ac3589-e604-4a08-8966-8fdc999aed0a] socks forwarding established\n2025-07-16 15:28:45.176 [info] [command][2f0ac320-00b2-4f0f-916e-dbf5e3e5f110] Process exited with code 0\n2025-07-16 15:28:45.176 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][23ac3589-e604-4a08-8966-8fdc999aed0a] socks connection closed\n2025-07-16 15:28:45.177 [info] [command][2f0ac320-00b2-4f0f-916e-dbf5e3e5f110] Socket close event received\n2025-07-16 15:28:45.200 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 49320 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:29:45.177 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:29:45.179 [info] [command][0f84d409-87d4-40ba-a3fb-e9f7966ec000] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""0f84d409-87d4-40ba-a3fb-e9f7966ec000""}\n2025-07-16 15:29:45.179 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][ad79001d-2c77-4b93-a445-a504fd9c5ce4] received connection request\n2025-07-16 15:29:45.180 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 15:29:45.180 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:29:45.203 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ad79001d-2c77-4b93-a445-a504fd9c5ce4] socks forwarding established\n2025-07-16 15:29:45.230 [info] [command][0f84d409-87d4-40ba-a3fb-e9f7966ec000] Process exited with code 0\n2025-07-16 15:29:45.230 [info] [command][0f84d409-87d4-40ba-a3fb-e9f7966ec000] Socket close event received\n2025-07-16 15:29:45.230 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ad79001d-2c77-4b93-a445-a504fd9c5ce4] socks connection closed\n2025-07-16 15:29:45.252 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 49386 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:30:45.231 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:30:45.234 [info] [command][aee93ea2-f099-47ef-826d-5659be4cb53d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""aee93ea2-f099-47ef-826d-5659be4cb53d""}\n2025-07-16 15:30:45.235 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][49bee78f-5644-47a6-9e64-2ab30551e254] 
received connection request\n2025-07-16 15:30:45.235 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:30:45.260 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][49bee78f-5644-47a6-9e64-2ab30551e254] socks forwarding established\n2025-07-16 15:30:45.290 [info] [command][aee93ea2-f099-47ef-826d-5659be4cb53d] Process exited with code 0\n2025-07-16 15:30:45.290 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][49bee78f-5644-47a6-9e64-2ab30551e254] socks connection closed\n2025-07-16 15:30:45.290 [info] [command][aee93ea2-f099-47ef-826d-5659be4cb53d] Socket close event received\n2025-07-16 15:30:45.314 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 49432 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:31:45.292 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:31:45.294 [info] [command][7ec1c270-4f35-46d9-9843-d9af94f2142a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""7ec1c270-4f35-46d9-9843-d9af94f2142a""}\n2025-07-16 15:31:45.295 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][a52825fe-dc83-4004-8b09-f65d017e7445] received connection request\n2025-07-16 15:31:45.296 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:31:45.320 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a52825fe-dc83-4004-8b09-f65d017e7445] socks forwarding established\n2025-07-16 15:31:45.348 [info] [command][7ec1c270-4f35-46d9-9843-d9af94f2142a] Process exited with code 0\n2025-07-16 15:31:45.348 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a52825fe-dc83-4004-8b09-f65d017e7445] socks connection closed\n2025-07-16 15:31:45.348 [info] [command][7ec1c270-4f35-46d9-9843-d9af94f2142a] Socket close event received\n2025-07-16 15:31:45.374 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 49474 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:32:45.353 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:32:45.353 [info] [command][3e8f5ff0-01d9-49fa-a647-927381de81e7] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""3e8f5ff0-01d9-49fa-a647-927381de81e7""}\n2025-07-16 15:32:45.354 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][19242e24-a288-46db-8469-0c074d4a3858] received connection request\n2025-07-16 15:32:45.354 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:32:45.377 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][19242e24-a288-46db-8469-0c074d4a3858] socks forwarding established\n2025-07-16 15:32:45.402 [info] [command][3e8f5ff0-01d9-49fa-a647-927381de81e7] Process exited with code 0\n2025-07-16 15:32:45.402 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 
-> 127.0.0.1:37309][19242e24-a288-46db-8469-0c074d4a3858] socks connection closed\n2025-07-16 15:32:45.402 [info] [command][3e8f5ff0-01d9-49fa-a647-927381de81e7] Socket close event received\n2025-07-16 15:32:45.425 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 49553 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:33:45.405 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:33:45.406 [info] [command][17581265-9ee2-4394-bf3c-25b3c8729ec2] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""17581265-9ee2-4394-bf3c-25b3c8729ec2""}\n2025-07-16 15:33:45.408 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][fff5b5b4-e545-4250-9750-0ae13522cdc3] received connection request\n2025-07-16 15:33:45.409 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 15:33:45.410 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:33:45.434 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][fff5b5b4-e545-4250-9750-0ae13522cdc3] socks forwarding established\n2025-07-16 15:33:45.462 [info] [command][17581265-9ee2-4394-bf3c-25b3c8729ec2] Process exited with code 0\n2025-07-16 15:33:45.462 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][fff5b5b4-e545-4250-9750-0ae13522cdc3] socks connection closed\n2025-07-16 15:33:45.462 [info] [command][17581265-9ee2-4394-bf3c-25b3c8729ec2] Socket close event received\n2025-07-16 15:33:45.485 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 49584 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:34:45.468 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:34:45.470 [info] [command][dd43c762-d8b6-4db2-88d6-defd2fa624aa] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""dd43c762-d8b6-4db2-88d6-defd2fa624aa""}\n2025-07-16 15:34:45.471 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][6afe291e-aa3d-45f9-b1ea-595c5c62ed6b] received connection request\n2025-07-16 15:34:45.471 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:34:45.496 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6afe291e-aa3d-45f9-b1ea-595c5c62ed6b] socks forwarding established\n2025-07-16 15:34:45.522 [info] [command][dd43c762-d8b6-4db2-88d6-defd2fa624aa] Process exited with code 0\n2025-07-16 15:34:45.522 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6afe291e-aa3d-45f9-b1ea-595c5c62ed6b] socks connection closed\n2025-07-16 15:34:45.522 [info] [command][dd43c762-d8b6-4db2-88d6-defd2fa624aa] Socket close event received\n2025-07-16 15:34:45.546 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 49634 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:35:45.524 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:35:45.525 [info] 
[command][ac90bdb9-35cc-4bc8-9b4a-c7d53bc83a5d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ac90bdb9-35cc-4bc8-9b4a-c7d53bc83a5d""}\n2025-07-16 15:35:45.526 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][853d41e4-464d-4be5-902c-525429c9c30f] received connection request\n2025-07-16 15:35:45.526 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 15:35:45.526 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:35:45.549 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][853d41e4-464d-4be5-902c-525429c9c30f] socks forwarding established\n2025-07-16 15:35:45.574 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][853d41e4-464d-4be5-902c-525429c9c30f] socks connection closed\n2025-07-16 15:35:45.574 [info] [command][ac90bdb9-35cc-4bc8-9b4a-c7d53bc83a5d] Process exited with code 0\n2025-07-16 15:35:45.574 [info] [command][ac90bdb9-35cc-4bc8-9b4a-c7d53bc83a5d] Socket close event received\n2025-07-16 15:35:45.597 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 49675 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:36:45.579 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:36:45.581 [info] [command][ff0f096f-4b4a-41f5-b765-4826edd3fce2] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ff0f096f-4b4a-41f5-b765-4826edd3fce2""}\n2025-07-16 15:36:45.582 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d51a749d-e429-43f1-91e2-1740eb68f3a7] received connection request\n2025-07-16 15:36:45.583 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:36:45.607 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d51a749d-e429-43f1-91e2-1740eb68f3a7] socks forwarding established\n2025-07-16 15:36:45.635 [info] [command][ff0f096f-4b4a-41f5-b765-4826edd3fce2] Process exited with code 0\n2025-07-16 15:36:45.635 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d51a749d-e429-43f1-91e2-1740eb68f3a7] socks connection closed\n2025-07-16 15:36:45.635 [info] [command][ff0f096f-4b4a-41f5-b765-4826edd3fce2] Socket close event received\n2025-07-16 15:36:45.660 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 49724 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:37:45.636 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:37:45.638 [info] [command][9cc766d6-b35f-4127-b13b-23afe4310b09] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""9cc766d6-b35f-4127-b13b-23afe4310b09""}\n2025-07-16 15:37:45.639 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][8bcb6718-98ca-4358-9942-f1adfe3ed603] received connection request\n2025-07-16 15:37:45.639 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip 
[dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:37:45.664 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8bcb6718-98ca-4358-9942-f1adfe3ed603] socks forwarding established\n2025-07-16 15:37:45.690 [info] [command][9cc766d6-b35f-4127-b13b-23afe4310b09] Process exited with code 0\n2025-07-16 15:37:45.690 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8bcb6718-98ca-4358-9942-f1adfe3ed603] socks connection closed\n2025-07-16 15:37:45.690 [info] [command][9cc766d6-b35f-4127-b13b-23afe4310b09] Socket close event received\n2025-07-16 15:37:45.712 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 49775 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:38:45.693 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:38:45.695 [info] [command][58d617e1-09cf-4b8a-bb47-4d2855b28035] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""58d617e1-09cf-4b8a-bb47-4d2855b28035""}\n2025-07-16 15:38:45.696 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d128bfd8-b143-48b1-a016-19636d83a8a8] received connection request\n2025-07-16 15:38:45.696 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:38:45.720 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d128bfd8-b143-48b1-a016-19636d83a8a8] socks forwarding established\n2025-07-16 15:38:45.746 [info] [command][58d617e1-09cf-4b8a-bb47-4d2855b28035] Process exited with code 0\n2025-07-16 15:38:45.746 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d128bfd8-b143-48b1-a016-19636d83a8a8] socks connection closed\n2025-07-16 15:38:45.746 [info] [command][58d617e1-09cf-4b8a-bb47-4d2855b28035] Socket close event received\n2025-07-16 15:38:45.769 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 49801 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:39:45.753 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:39:45.754 [info] [command][c0db0a81-7694-49d2-9cce-759c42a3ab8b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""c0db0a81-7694-49d2-9cce-759c42a3ab8b""}\n2025-07-16 15:39:45.755 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][fb442efb-27a2-43fb-97de-01044e64e44c] received connection request\n2025-07-16 15:39:45.755 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:39:45.781 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][fb442efb-27a2-43fb-97de-01044e64e44c] socks forwarding established\n2025-07-16 15:39:45.808 [info] [command][c0db0a81-7694-49d2-9cce-759c42a3ab8b] Process exited with code 0\n2025-07-16 15:39:45.809 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][fb442efb-27a2-43fb-97de-01044e64e44c] socks connection closed\n2025-07-16 15:39:45.809 [info] [command][c0db0a81-7694-49d2-9cce-759c42a3ab8b] Socket close event 
received\n2025-07-16 15:39:45.831 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 49875 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:40:45.810 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:40:45.812 [info] [command][aa686890-192c-4d3e-8c3e-3545ee549a43] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""aa686890-192c-4d3e-8c3e-3545ee549a43""}\n2025-07-16 15:40:45.812 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][3e707de1-1ba1-4027-a10b-18547890d3b3] received connection request\n2025-07-16 15:40:45.813 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:40:45.839 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3e707de1-1ba1-4027-a10b-18547890d3b3] socks forwarding established\n2025-07-16 15:40:45.867 [info] [command][aa686890-192c-4d3e-8c3e-3545ee549a43] Process exited with code 0\n2025-07-16 15:40:45.867 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3e707de1-1ba1-4027-a10b-18547890d3b3] socks connection closed\n2025-07-16 15:40:45.867 [info] [command][aa686890-192c-4d3e-8c3e-3545ee549a43] Socket close event received\n2025-07-16 15:40:45.891 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 49912 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:41:45.872 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:41:45.874 [info] [command][ed1406ac-d8e8-41f0-b162-625f4599d070] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ed1406ac-d8e8-41f0-b162-625f4599d070""}\n2025-07-16 15:41:45.874 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][4b8c5c0d-0201-4e06-bfdc-cac201a361d0] received connection request\n2025-07-16 15:41:45.875 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:41:45.899 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4b8c5c0d-0201-4e06-bfdc-cac201a361d0] socks forwarding established\n2025-07-16 15:41:45.926 [info] [command][ed1406ac-d8e8-41f0-b162-625f4599d070] Process exited with code 0\n2025-07-16 15:41:45.926 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4b8c5c0d-0201-4e06-bfdc-cac201a361d0] socks connection closed\n2025-07-16 15:41:45.926 [info] [command][ed1406ac-d8e8-41f0-b162-625f4599d070] Socket close event received\n2025-07-16 15:41:45.949 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 49938 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:42:45.931 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:42:45.934 [info] [command][20f739b9-c360-4d89-a90c-fb1c8b554050] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""20f739b9-c360-4d89-a90c-fb1c8b554050""}\n2025-07-16 15:42:45.934 [info] 
[forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][f464204c-e4d0-4528-b7b9-a01b3795a796] received connection request\n2025-07-16 15:42:45.934 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:42:45.958 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f464204c-e4d0-4528-b7b9-a01b3795a796] socks forwarding established\n2025-07-16 15:42:45.984 [info] [command][20f739b9-c360-4d89-a90c-fb1c8b554050] Process exited with code 0\n2025-07-16 15:42:45.985 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f464204c-e4d0-4528-b7b9-a01b3795a796] socks connection closed\n2025-07-16 15:42:45.985 [info] [command][20f739b9-c360-4d89-a90c-fb1c8b554050] Socket close event received\n2025-07-16 15:42:46.008 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 49990 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:43:45.990 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:43:45.994 [info] [command][815521e9-2dc9-4748-9374-8049f1c4ce0b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""815521e9-2dc9-4748-9374-8049f1c4ce0b""}\n2025-07-16 15:43:45.994 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][f1868046-84be-4288-b624-59232dc020f3] received connection request\n2025-07-16 15:43:45.995 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:43:46.019 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f1868046-84be-4288-b624-59232dc020f3] socks forwarding established\n2025-07-16 15:43:46.046 [info] [command][815521e9-2dc9-4748-9374-8049f1c4ce0b] Process exited with code 0\n2025-07-16 15:43:46.047 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f1868046-84be-4288-b624-59232dc020f3] socks connection closed\n2025-07-16 15:43:46.047 [info] [command][815521e9-2dc9-4748-9374-8049f1c4ce0b] Socket close event received\n2025-07-16 15:43:46.070 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50014 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:44:46.048 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:44:46.050 [info] [command][5d830812-4fe5-4d22-9337-9395b3b3a9f2] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""5d830812-4fe5-4d22-9337-9395b3b3a9f2""}\n2025-07-16 15:44:46.051 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][aae7e85c-55ff-4d5c-bf5b-c70e5af03778] received connection request\n2025-07-16 15:44:46.051 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:44:46.076 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][aae7e85c-55ff-4d5c-bf5b-c70e5af03778] socks forwarding established\n2025-07-16 15:44:46.102 [info] [command][5d830812-4fe5-4d22-9337-9395b3b3a9f2] Process exited with 
code 0\n2025-07-16 15:44:46.102 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][aae7e85c-55ff-4d5c-bf5b-c70e5af03778] socks connection closed\n2025-07-16 15:44:46.102 [info] [command][5d830812-4fe5-4d22-9337-9395b3b3a9f2] Socket close event received\n2025-07-16 15:44:46.125 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50058 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:45:46.106 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:45:46.107 [info] [command][d46646da-2ef1-465b-952e-0522ff036ba3] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d46646da-2ef1-465b-952e-0522ff036ba3""}\n2025-07-16 15:45:46.108 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][f08eba75-c939-4ba4-81ae-a740e606c990] received connection request\n2025-07-16 15:45:46.108 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:45:46.132 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f08eba75-c939-4ba4-81ae-a740e606c990] socks forwarding established\n2025-07-16 15:45:46.157 [info] [command][d46646da-2ef1-465b-952e-0522ff036ba3] Process exited with code 0\n2025-07-16 15:45:46.158 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f08eba75-c939-4ba4-81ae-a740e606c990] socks connection closed\n2025-07-16 15:45:46.158 [info] [command][d46646da-2ef1-465b-952e-0522ff036ba3] Socket close event received\n2025-07-16 15:45:46.182 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50099 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:46:46.161 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:46:46.163 [info] [command][1f1c75cb-cd38-4adf-b106-5dcd22ffebba] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""1f1c75cb-cd38-4adf-b106-5dcd22ffebba""}\n2025-07-16 15:46:46.164 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][06c7db7d-0092-479f-81b7-bc94dc29bc6b] received connection request\n2025-07-16 15:46:46.165 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:46:46.190 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][06c7db7d-0092-479f-81b7-bc94dc29bc6b] socks forwarding established\n2025-07-16 15:46:46.243 [info] [command][1f1c75cb-cd38-4adf-b106-5dcd22ffebba] Process exited with code 0\n2025-07-16 15:46:46.243 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][06c7db7d-0092-479f-81b7-bc94dc29bc6b] socks connection closed\n2025-07-16 15:46:46.244 [info] [command][1f1c75cb-cd38-4adf-b106-5dcd22ffebba] Socket close event received\n2025-07-16 15:46:46.267 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50136 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:47:46.246 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:47:46.249 
[info] [command][ae6dbe30-3665-43d0-9bf6-4172591a524f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ae6dbe30-3665-43d0-9bf6-4172591a524f""}\n2025-07-16 15:47:46.249 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][81319aef-840d-4f87-8432-9bf25af28047] received connection request\n2025-07-16 15:47:46.250 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:47:46.274 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][81319aef-840d-4f87-8432-9bf25af28047] socks forwarding established\n2025-07-16 15:47:46.300 [info] [command][ae6dbe30-3665-43d0-9bf6-4172591a524f] Process exited with code 0\n2025-07-16 15:47:46.301 [info] [command][ae6dbe30-3665-43d0-9bf6-4172591a524f] Socket close event received\n2025-07-16 15:47:46.324 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][81319aef-840d-4f87-8432-9bf25af28047] socks connection closed\n2025-07-16 15:47:46.325 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50186 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:48:46.306 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:48:46.308 [info] [command][c3d6b345-2267-4a32-8bfb-1c00cddc9b5f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""c3d6b345-2267-4a32-8bfb-1c00cddc9b5f""}\n2025-07-16 15:48:46.308 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][5e11d905-d1d9-42a4-abfd-5ac222878a80] received connection request\n2025-07-16 15:48:46.308 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:48:46.332 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5e11d905-d1d9-42a4-abfd-5ac222878a80] socks forwarding established\n2025-07-16 15:48:46.358 [info] [command][c3d6b345-2267-4a32-8bfb-1c00cddc9b5f] Process exited with code 0\n2025-07-16 15:48:46.358 [info] [command][c3d6b345-2267-4a32-8bfb-1c00cddc9b5f] Socket close event received\n2025-07-16 15:48:46.380 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5e11d905-d1d9-42a4-abfd-5ac222878a80] socks connection closed\n2025-07-16 15:48:46.382 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50215 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:49:46.359 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:49:46.361 [info] [command][2a62772a-11f8-495e-b233-9e6af9c8223d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""2a62772a-11f8-495e-b233-9e6af9c8223d""}\n2025-07-16 15:49:46.362 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][54546088-3705-42a1-a7f5-481770c8ed75] received connection request\n2025-07-16 15:49:46.362 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 
15:49:46.386 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][54546088-3705-42a1-a7f5-481770c8ed75] socks forwarding established\n2025-07-16 15:49:46.412 [info] [command][2a62772a-11f8-495e-b233-9e6af9c8223d] Process exited with code 0\n2025-07-16 15:49:46.412 [info] [command][2a62772a-11f8-495e-b233-9e6af9c8223d] Socket close event received\n2025-07-16 15:49:46.413 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][54546088-3705-42a1-a7f5-481770c8ed75] socks connection closed\n2025-07-16 15:49:46.437 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50262 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:50:46.415 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:50:46.418 [info] [command][1a2b3ce3-a073-4a12-99e6-15470db36dcb] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""1a2b3ce3-a073-4a12-99e6-15470db36dcb""}\n2025-07-16 15:50:46.418 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][4f8c2177-ee17-49fa-b324-0da523faedef] received connection request\n2025-07-16 15:50:46.419 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:50:46.576 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4f8c2177-ee17-49fa-b324-0da523faedef] socks forwarding established\n2025-07-16 15:50:46.737 [info] [command][1a2b3ce3-a073-4a12-99e6-15470db36dcb] Process exited with code 0\n2025-07-16 15:50:46.737 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4f8c2177-ee17-49fa-b324-0da523faedef] socks connection closed\n2025-07-16 15:50:46.738 [info] [command][1a2b3ce3-a073-4a12-99e6-15470db36dcb] Socket close event received\n2025-07-16 15:50:46.763 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50309 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:51:46.742 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:51:46.744 [info] [command][5eec7fba-5433-46fd-bd86-8ae0e2c9f7c6] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""5eec7fba-5433-46fd-bd86-8ae0e2c9f7c6""}\n2025-07-16 15:51:46.745 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][f47698f7-d478-432a-ba56-51b091ec8b5d] received connection request\n2025-07-16 15:51:46.745 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:51:46.792 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f47698f7-d478-432a-ba56-51b091ec8b5d] socks forwarding established\n2025-07-16 15:51:46.902 [info] [command][5eec7fba-5433-46fd-bd86-8ae0e2c9f7c6] Process exited with code 0\n2025-07-16 15:51:46.903 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f47698f7-d478-432a-ba56-51b091ec8b5d] socks connection closed\n2025-07-16 15:51:46.903 [info] [command][5eec7fba-5433-46fd-bd86-8ae0e2c9f7c6] Socket close event received\n2025-07-16 15:51:46.932 [info] (ssh_tunnel) stderr: 
debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50343 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:52:46.906 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:52:46.908 [info] [command][d80c5aba-bff1-4c7c-901c-ae9864e86c29] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d80c5aba-bff1-4c7c-901c-ae9864e86c29""}\n2025-07-16 15:52:46.908 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][8dec49b0-b3f4-4ca3-84a9-2c988c66f53d] received connection request\n2025-07-16 15:52:46.909 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:52:46.979 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8dec49b0-b3f4-4ca3-84a9-2c988c66f53d] socks forwarding established\n2025-07-16 15:52:47.086 [info] [command][d80c5aba-bff1-4c7c-901c-ae9864e86c29] Process exited with code 0\n2025-07-16 15:52:47.086 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8dec49b0-b3f4-4ca3-84a9-2c988c66f53d] socks connection closed\n2025-07-16 15:52:47.087 [info] [command][d80c5aba-bff1-4c7c-901c-ae9864e86c29] Socket close event received\n2025-07-16 15:52:47.110 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50395 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:53:47.087 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:53:47.089 [info] [command][3ff56e6b-3b94-4d0c-a61b-bf09396d0c0c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""3ff56e6b-3b94-4d0c-a61b-bf09396d0c0c""}\n2025-07-16 15:53:47.090 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][cd22af66-928f-4a16-880c-1e1924b7558b] received connection request\n2025-07-16 15:53:47.090 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:53:47.115 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][cd22af66-928f-4a16-880c-1e1924b7558b] socks forwarding established\n2025-07-16 15:53:47.142 [info] [command][3ff56e6b-3b94-4d0c-a61b-bf09396d0c0c] Process exited with code 0\n2025-07-16 15:53:47.143 [info] [command][3ff56e6b-3b94-4d0c-a61b-bf09396d0c0c] Socket close event received\n2025-07-16 15:53:47.143 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][cd22af66-928f-4a16-880c-1e1924b7558b] socks connection closed\n2025-07-16 15:53:47.291 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50421 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:54:47.150 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:54:47.152 [info] [command][50cb9db7-b4b0-4154-9514-773e8e4c4494] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""50cb9db7-b4b0-4154-9514-773e8e4c4494""}\n2025-07-16 15:54:47.153 [info] [forwarding][multiplex][127.0.0.1:57978 -> 
127.0.0.1:37309][ecb9c209-f075-490d-b637-00c880519523] received connection request\n2025-07-16 15:54:47.154 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:54:47.296 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ecb9c209-f075-490d-b637-00c880519523] socks forwarding established\n2025-07-16 15:54:47.330 [info] [command][50cb9db7-b4b0-4154-9514-773e8e4c4494] Process exited with code 0\n2025-07-16 15:54:47.330 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ecb9c209-f075-490d-b637-00c880519523] socks connection closed\n2025-07-16 15:54:47.330 [info] [command][50cb9db7-b4b0-4154-9514-773e8e4c4494] Socket close event received\n2025-07-16 15:54:47.356 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50471 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:55:47.331 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:55:47.334 [info] [command][cf92ecb2-f60d-41ed-be47-7f25ebc50211] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""cf92ecb2-f60d-41ed-be47-7f25ebc50211""}\n2025-07-16 15:55:47.335 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][abd0c8ec-5004-4b59-b53f-24c32cacc3af] received connection request\n2025-07-16 15:55:47.335 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:55:47.360 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][abd0c8ec-5004-4b59-b53f-24c32cacc3af] socks forwarding established\n2025-07-16 15:55:47.392 [info] [command][cf92ecb2-f60d-41ed-be47-7f25ebc50211] Process exited with code 0\n2025-07-16 15:55:47.392 [info] [command][cf92ecb2-f60d-41ed-be47-7f25ebc50211] Socket close event received\n2025-07-16 15:55:47.419 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50502 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:55:47.419 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][abd0c8ec-5004-4b59-b53f-24c32cacc3af] socks connection closed\n2025-07-16 15:56:47.396 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:56:47.399 [info] [command][6fb35a12-b462-46e1-9711-463dd0d472dc] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""6fb35a12-b462-46e1-9711-463dd0d472dc""}\n2025-07-16 15:56:47.399 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][f33b6715-5eed-4066-b8b7-5878ad528c48] received connection request\n2025-07-16 15:56:47.400 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:56:47.423 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f33b6715-5eed-4066-b8b7-5878ad528c48] socks forwarding established\n2025-07-16 15:56:47.449 [info] [command][6fb35a12-b462-46e1-9711-463dd0d472dc] Process exited with code 0\n2025-07-16 15:56:47.449 [info] 
[forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f33b6715-5eed-4066-b8b7-5878ad528c48] socks connection closed\n2025-07-16 15:56:47.449 [info] [command][6fb35a12-b462-46e1-9711-463dd0d472dc] Socket close event received\n2025-07-16 15:56:47.473 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50528 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:57:47.451 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:57:47.453 [info] [command][79423cf7-3c5e-4c8b-a4e1-a0ad326e4efa] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""79423cf7-3c5e-4c8b-a4e1-a0ad326e4efa""}\n2025-07-16 15:57:47.453 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][8a5050ca-e8a4-43f7-9540-39ec27921a9d] received connection request\n2025-07-16 15:57:47.454 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:57:47.478 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8a5050ca-e8a4-43f7-9540-39ec27921a9d] socks forwarding established\n2025-07-16 15:57:47.503 [info] [command][79423cf7-3c5e-4c8b-a4e1-a0ad326e4efa] Process exited with code 0\n2025-07-16 15:57:47.503 [info] [command][79423cf7-3c5e-4c8b-a4e1-a0ad326e4efa] Socket close event received\n2025-07-16 15:57:47.504 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8a5050ca-e8a4-43f7-9540-39ec27921a9d] socks connection closed\n2025-07-16 15:57:47.527 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50585 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:58:47.506 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:58:47.509 [info] [command][d8dbda32-e939-4c18-9496-8375a0d01723] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d8dbda32-e939-4c18-9496-8375a0d01723""}\n2025-07-16 15:58:47.509 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][6b8156ca-1eb9-445c-a659-8d94c5b8cee3] received connection request\n2025-07-16 15:58:47.510 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:58:47.537 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6b8156ca-1eb9-445c-a659-8d94c5b8cee3] socks forwarding established\n2025-07-16 15:58:47.564 [info] [command][d8dbda32-e939-4c18-9496-8375a0d01723] Process exited with code 0\n2025-07-16 15:58:47.565 [info] [command][d8dbda32-e939-4c18-9496-8375a0d01723] Socket close event received\n2025-07-16 15:58:47.565 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6b8156ca-1eb9-445c-a659-8d94c5b8cee3] socks connection closed\n2025-07-16 15:58:47.587 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50607 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 15:59:47.568 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 15:59:47.571 [info] 
[command][a9ef7340-7723-41d0-948f-fba209361cd0] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""a9ef7340-7723-41d0-948f-fba209361cd0""}\n2025-07-16 15:59:47.572 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][806be239-2151-4bec-868f-e413e23124a3] received connection request\n2025-07-16 15:59:47.572 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 15:59:47.597 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][806be239-2151-4bec-868f-e413e23124a3] socks forwarding established\n2025-07-16 15:59:47.648 [info] [command][a9ef7340-7723-41d0-948f-fba209361cd0] Process exited with code 0\n2025-07-16 15:59:47.648 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][806be239-2151-4bec-868f-e413e23124a3] socks connection closed\n2025-07-16 15:59:47.649 [info] [command][a9ef7340-7723-41d0-948f-fba209361cd0] Socket close event received\n2025-07-16 15:59:47.673 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50656 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:00:47.654 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:00:47.655 [info] [command][9dac38a7-391c-4250-9de1-392db9384ddb] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""9dac38a7-391c-4250-9de1-392db9384ddb""}\n2025-07-16 16:00:47.656 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][106835bf-26ba-4d11-b16d-48d0f74860b4] received connection request\n2025-07-16 16:00:47.657 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:00:47.717 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][106835bf-26ba-4d11-b16d-48d0f74860b4] socks forwarding established\n2025-07-16 16:00:47.777 [info] [command][9dac38a7-391c-4250-9de1-392db9384ddb] Process exited with code 0\n2025-07-16 16:00:47.778 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][106835bf-26ba-4d11-b16d-48d0f74860b4] socks connection closed\n2025-07-16 16:00:47.778 [info] [command][9dac38a7-391c-4250-9de1-392db9384ddb] Socket close event received\n2025-07-16 16:00:47.851 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50686 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:01:47.783 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:01:47.785 [info] [command][6acafaa2-41d4-40cf-a997-e2b4ef7f2591] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""6acafaa2-41d4-40cf-a997-e2b4ef7f2591""}\n2025-07-16 16:01:47.785 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][5f600aba-1f4e-429b-8218-117b812f090e] received connection request\n2025-07-16 16:01:47.785 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 
16:01:47.867 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5f600aba-1f4e-429b-8218-117b812f090e] socks forwarding established\n2025-07-16 16:01:47.986 [info] [command][6acafaa2-41d4-40cf-a997-e2b4ef7f2591] Process exited with code 0\n2025-07-16 16:01:47.986 [info] [command][6acafaa2-41d4-40cf-a997-e2b4ef7f2591] Socket close event received\n2025-07-16 16:01:48.139 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50717 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:01:48.139 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5f600aba-1f4e-429b-8218-117b812f090e] socks connection closed\n2025-07-16 16:02:47.992 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:02:47.994 [info] [command][f7ba2744-31c8-46f7-8c1b-c6162d702de3] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""f7ba2744-31c8-46f7-8c1b-c6162d702de3""}\n2025-07-16 16:02:47.995 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][26f2a010-3000-4d27-b928-8021cd0c011d] received connection request\n2025-07-16 16:02:47.995 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:02:48.018 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][26f2a010-3000-4d27-b928-8021cd0c011d] socks forwarding established\n2025-07-16 16:02:48.046 [info] [command][f7ba2744-31c8-46f7-8c1b-c6162d702de3] Process exited with code 0\n2025-07-16 16:02:48.046 [info] [command][f7ba2744-31c8-46f7-8c1b-c6162d702de3] Socket close event received\n2025-07-16 16:02:48.067 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][26f2a010-3000-4d27-b928-8021cd0c011d] socks connection closed\n2025-07-16 16:02:48.068 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50772 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:03:48.051 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:03:48.054 [info] [command][02a36eea-be18-40b0-b5b0-48d2b8927824] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""02a36eea-be18-40b0-b5b0-48d2b8927824""}\n2025-07-16 16:03:48.055 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][10bee56f-f718-4ebf-9442-4cdb163f0052] received connection request\n2025-07-16 16:03:48.055 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:03:48.236 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][10bee56f-f718-4ebf-9442-4cdb163f0052] socks forwarding established\n2025-07-16 16:03:48.313 [info] [command][02a36eea-be18-40b0-b5b0-48d2b8927824] Process exited with code 0\n2025-07-16 16:03:48.313 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][10bee56f-f718-4ebf-9442-4cdb163f0052] socks connection closed\n2025-07-16 16:03:48.313 [info] [command][02a36eea-be18-40b0-b5b0-48d2b8927824] Socket close event received\n2025-07-16 16:03:48.336 [info] (ssh_tunnel) stderr: 
debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50797 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:04:48.316 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:04:48.317 [info] [command][b8dab3df-082c-4885-9288-e4cb7aad67d0] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""b8dab3df-082c-4885-9288-e4cb7aad67d0""}\n2025-07-16 16:04:48.318 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][78f523ae-52fa-45dd-b39a-3b7e71f4d483] received connection request\n2025-07-16 16:04:48.318 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:04:48.342 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][78f523ae-52fa-45dd-b39a-3b7e71f4d483] socks forwarding established\n2025-07-16 16:04:48.493 [info] [command][b8dab3df-082c-4885-9288-e4cb7aad67d0] Process exited with code 0\n2025-07-16 16:04:48.493 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][78f523ae-52fa-45dd-b39a-3b7e71f4d483] socks connection closed\n2025-07-16 16:04:48.493 [info] [command][b8dab3df-082c-4885-9288-e4cb7aad67d0] Socket close event received\n2025-07-16 16:04:48.519 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50864 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:05:48.498 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:05:48.500 [info] [command][c8d61d67-82d4-47f8-bdb2-bfc57dea2130] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""c8d61d67-82d4-47f8-bdb2-bfc57dea2130""}\n2025-07-16 16:05:48.501 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][7ebdb160-3fa7-46da-b662-b0356776043a] received connection request\n2025-07-16 16:05:48.501 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:05:48.547 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7ebdb160-3fa7-46da-b662-b0356776043a] socks forwarding established\n2025-07-16 16:05:48.706 [info] [command][c8d61d67-82d4-47f8-bdb2-bfc57dea2130] Process exited with code 0\n2025-07-16 16:05:48.706 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7ebdb160-3fa7-46da-b662-b0356776043a] socks connection closed\n2025-07-16 16:05:48.706 [info] [command][c8d61d67-82d4-47f8-bdb2-bfc57dea2130] Socket close event received\n2025-07-16 16:05:48.729 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50888 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:06:48.708 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:06:48.710 [info] [command][f1aefe46-7470-4d8f-91f0-7e75938f4659] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""f1aefe46-7470-4d8f-91f0-7e75938f4659""}\n2025-07-16 16:06:48.711 [info] [forwarding][multiplex][127.0.0.1:57978 -> 
127.0.0.1:37309][5cbe2485-f2a7-4a7e-86c4-43fa791a3cc5] received connection request\n2025-07-16 16:06:48.711 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:06:48.736 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5cbe2485-f2a7-4a7e-86c4-43fa791a3cc5] socks forwarding established\n2025-07-16 16:06:48.764 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5cbe2485-f2a7-4a7e-86c4-43fa791a3cc5] socks connection closed\n2025-07-16 16:06:48.764 [info] [command][f1aefe46-7470-4d8f-91f0-7e75938f4659] Process exited with code 0\n2025-07-16 16:06:48.764 [info] [command][f1aefe46-7470-4d8f-91f0-7e75938f4659] Socket close event received\n2025-07-16 16:06:48.919 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50927 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:07:48.769 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:07:48.772 [info] [command][7ee888d8-300d-4a65-b316-20a10b072c2e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""7ee888d8-300d-4a65-b316-20a10b072c2e""}\n2025-07-16 16:07:48.772 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][1f2e6c3d-6fc9-4848-93b4-3037e8c624a6] received connection request\n2025-07-16 16:07:48.773 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:07:48.796 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1f2e6c3d-6fc9-4848-93b4-3037e8c624a6] socks forwarding established\n2025-07-16 16:07:48.823 [info] [command][7ee888d8-300d-4a65-b316-20a10b072c2e] Process exited with code 0\n2025-07-16 16:07:48.824 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1f2e6c3d-6fc9-4848-93b4-3037e8c624a6] socks connection closed\n2025-07-16 16:07:48.824 [info] [command][7ee888d8-300d-4a65-b316-20a10b072c2e] Socket close event received\n2025-07-16 16:07:48.846 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 50997 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:08:48.826 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:08:48.829 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][54a7d78d-db02-4d50-ae07-6f5b9363d1c7] received connection request\n2025-07-16 16:08:48.830 [info] [command][b6061ae9-4f41-4598-8168-8deb6b63dd5f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""b6061ae9-4f41-4598-8168-8deb6b63dd5f""}\n2025-07-16 16:08:48.830 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 16:08:48.830 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:08:48.857 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][54a7d78d-db02-4d50-ae07-6f5b9363d1c7] socks forwarding established\n2025-07-16 16:08:48.982 [info] [command][b6061ae9-4f41-4598-8168-8deb6b63dd5f] Process 
exited with code 0\n2025-07-16 16:08:48.982 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][54a7d78d-db02-4d50-ae07-6f5b9363d1c7] socks connection closed\n2025-07-16 16:08:48.982 [info] [command][b6061ae9-4f41-4598-8168-8deb6b63dd5f] Socket close event received\n2025-07-16 16:08:49.007 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51045 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:09:48.983 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:09:48.985 [info] [command][6d252782-eabc-488c-8903-8bf59410a41b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""6d252782-eabc-488c-8903-8bf59410a41b""}\n2025-07-16 16:09:48.986 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][a825fdb1-853f-4cdb-882c-2cfc5b7ec64a] received connection request\n2025-07-16 16:09:48.987 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:09:49.010 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a825fdb1-853f-4cdb-882c-2cfc5b7ec64a] socks forwarding established\n2025-07-16 16:09:49.152 [info] [command][6d252782-eabc-488c-8903-8bf59410a41b] Process exited with code 0\n2025-07-16 16:09:49.153 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a825fdb1-853f-4cdb-882c-2cfc5b7ec64a] socks connection closed\n2025-07-16 16:09:49.153 [info] [command][6d252782-eabc-488c-8903-8bf59410a41b] Socket close event received\n2025-07-16 16:09:49.176 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51123 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:10:49.158 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:10:49.161 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0d213950-470c-43ce-8347-970ebc82e62e] received connection request\n2025-07-16 16:10:49.161 [info] [command][bdbfd786-2dd5-473b-ac7a-5a5ea4daf79a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""bdbfd786-2dd5-473b-ac7a-5a5ea4daf79a""}\n2025-07-16 16:10:49.162 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:10:49.189 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0d213950-470c-43ce-8347-970ebc82e62e] socks forwarding established\n2025-07-16 16:10:49.218 [info] [command][bdbfd786-2dd5-473b-ac7a-5a5ea4daf79a] Process exited with code 0\n2025-07-16 16:10:49.218 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0d213950-470c-43ce-8347-970ebc82e62e] socks connection closed\n2025-07-16 16:10:49.219 [info] [command][bdbfd786-2dd5-473b-ac7a-5a5ea4daf79a] Socket close event received\n2025-07-16 16:10:49.363 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51157 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:11:49.220 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 
16:11:49.222 [info] [command][ced5c857-79f4-4a02-8c0c-4e0e67ab2c70] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ced5c857-79f4-4a02-8c0c-4e0e67ab2c70""}\n2025-07-16 16:11:49.222 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][c6a0a90c-e4a4-466a-bd9a-8cd5de5cdd1d] received connection request\n2025-07-16 16:11:49.223 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:11:49.247 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c6a0a90c-e4a4-466a-bd9a-8cd5de5cdd1d] socks forwarding established\n2025-07-16 16:11:49.276 [info] [command][ced5c857-79f4-4a02-8c0c-4e0e67ab2c70] Process exited with code 0\n2025-07-16 16:11:49.276 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c6a0a90c-e4a4-466a-bd9a-8cd5de5cdd1d] socks connection closed\n2025-07-16 16:11:49.276 [info] [command][ced5c857-79f4-4a02-8c0c-4e0e67ab2c70] Socket close event received\n2025-07-16 16:11:49.301 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51185 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:12:49.277 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:12:49.279 [info] [command][117521b3-32b9-4b72-9d8e-42007f3be017] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""117521b3-32b9-4b72-9d8e-42007f3be017""}\n2025-07-16 16:12:49.280 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d29e5ff5-18da-4b75-81ef-e7e27d03406c] received connection request\n2025-07-16 16:12:49.282 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:12:49.306 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d29e5ff5-18da-4b75-81ef-e7e27d03406c] socks forwarding established\n2025-07-16 16:12:49.333 [info] [command][117521b3-32b9-4b72-9d8e-42007f3be017] Process exited with code 0\n2025-07-16 16:12:49.333 [info] [command][117521b3-32b9-4b72-9d8e-42007f3be017] Socket close event received\n2025-07-16 16:12:49.334 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d29e5ff5-18da-4b75-81ef-e7e27d03406c] socks connection closed\n2025-07-16 16:12:49.357 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51261 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:13:49.335 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:13:49.337 [info] [command][07227f46-b433-49d8-8a8a-e2bd692fc345] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""07227f46-b433-49d8-8a8a-e2bd692fc345""}\n2025-07-16 16:13:49.338 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][07a854fe-6630-4a18-a23a-73efa14cc9d0] received connection request\n2025-07-16 16:13:49.338 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 
0)\n\n2025-07-16 16:13:49.363 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][07a854fe-6630-4a18-a23a-73efa14cc9d0] socks forwarding established\n2025-07-16 16:13:49.392 [info] [command][07227f46-b433-49d8-8a8a-e2bd692fc345] Process exited with code 0\n2025-07-16 16:13:49.392 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][07a854fe-6630-4a18-a23a-73efa14cc9d0] socks connection closed\n2025-07-16 16:13:49.392 [info] [command][07227f46-b433-49d8-8a8a-e2bd692fc345] Socket close event received\n2025-07-16 16:13:49.417 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51312 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:14:49.398 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:14:49.400 [info] [command][90fd7736-b5e2-465d-a6a6-e586ca035499] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""90fd7736-b5e2-465d-a6a6-e586ca035499""}\n2025-07-16 16:14:49.400 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0ff461ac-1ab0-4865-a090-52fee2022551] received connection request\n2025-07-16 16:14:49.401 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:14:49.424 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0ff461ac-1ab0-4865-a090-52fee2022551] socks forwarding established\n2025-07-16 16:14:49.451 [info] [command][90fd7736-b5e2-465d-a6a6-e586ca035499] Process exited with code 0\n2025-07-16 16:14:49.451 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0ff461ac-1ab0-4865-a090-52fee2022551] socks connection closed\n2025-07-16 16:14:49.451 [info] [command][90fd7736-b5e2-465d-a6a6-e586ca035499] Socket close event received\n2025-07-16 16:14:49.474 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51378 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:15:49.456 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:15:49.457 [info] [command][c319a4df-c058-4c8f-8c83-629aa3fb7d52] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""c319a4df-c058-4c8f-8c83-629aa3fb7d52""}\n2025-07-16 16:15:49.458 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][9a85ce75-5d11-49e0-a9a6-7f89bf35acd0] received connection request\n2025-07-16 16:15:49.459 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:15:49.482 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][9a85ce75-5d11-49e0-a9a6-7f89bf35acd0] socks forwarding established\n2025-07-16 16:15:49.508 [info] [command][c319a4df-c058-4c8f-8c83-629aa3fb7d52] Process exited with code 0\n2025-07-16 16:15:49.509 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][9a85ce75-5d11-49e0-a9a6-7f89bf35acd0] socks connection closed\n2025-07-16 16:15:49.509 [info] [command][c319a4df-c058-4c8f-8c83-629aa3fb7d52] Socket close event received\n2025-07-16 16:15:49.532 [info] 
(ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51408 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:16:49.509 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:16:49.511 [info] [command][9a3784f9-42cf-4132-9688-e4bbb8bf7915] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""9a3784f9-42cf-4132-9688-e4bbb8bf7915""}\n2025-07-16 16:16:49.512 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][880b1b62-a5ac-44d7-a0b9-3bdaef59cb88] received connection request\n2025-07-16 16:16:49.513 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:16:49.537 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][880b1b62-a5ac-44d7-a0b9-3bdaef59cb88] socks forwarding established\n2025-07-16 16:16:49.566 [info] [command][9a3784f9-42cf-4132-9688-e4bbb8bf7915] Process exited with code 0\n2025-07-16 16:16:49.566 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][880b1b62-a5ac-44d7-a0b9-3bdaef59cb88] socks connection closed\n2025-07-16 16:16:49.567 [info] [command][9a3784f9-42cf-4132-9688-e4bbb8bf7915] Socket close event received\n2025-07-16 16:16:49.591 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51439 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:17:49.571 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:17:49.573 [info] [command][91603ba9-411c-4f9d-b8c6-d71b88fe4ea9] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""91603ba9-411c-4f9d-b8c6-d71b88fe4ea9""}\n2025-07-16 16:17:49.573 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][7f5bd52e-92c4-4467-85cd-938725dde0c1] received connection request\n2025-07-16 16:17:49.573 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:17:49.597 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7f5bd52e-92c4-4467-85cd-938725dde0c1] socks forwarding established\n2025-07-16 16:17:49.623 [info] [command][91603ba9-411c-4f9d-b8c6-d71b88fe4ea9] Process exited with code 0\n2025-07-16 16:17:49.623 [info] [command][91603ba9-411c-4f9d-b8c6-d71b88fe4ea9] Socket close event received\n2025-07-16 16:17:49.645 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7f5bd52e-92c4-4467-85cd-938725dde0c1] socks connection closed\n2025-07-16 16:17:49.645 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51524 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:18:49.623 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:18:49.624 [info] [command][ae400f3e-59a7-4fb6-a90c-e2db99441a30] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ae400f3e-59a7-4fb6-a90c-e2db99441a30""}\n2025-07-16 16:18:49.625 [info] [forwarding][multiplex][127.0.0.1:57978 -> 
127.0.0.1:37309][fcf3cdbd-c856-4257-bdba-b7d140be31e8] received connection request\n2025-07-16 16:18:49.625 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:18:49.648 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][fcf3cdbd-c856-4257-bdba-b7d140be31e8] socks forwarding established\n2025-07-16 16:18:49.696 [info] [command][ae400f3e-59a7-4fb6-a90c-e2db99441a30] Process exited with code 0\n2025-07-16 16:18:49.696 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][fcf3cdbd-c856-4257-bdba-b7d140be31e8] socks connection closed\n2025-07-16 16:18:49.696 [info] [command][ae400f3e-59a7-4fb6-a90c-e2db99441a30] Socket close event received\n2025-07-16 16:18:49.718 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51582 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:19:49.703 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:19:49.703 [info] [command][3d0f38c0-9e97-451c-be03-ea59775a5c1f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""3d0f38c0-9e97-451c-be03-ea59775a5c1f""}\n2025-07-16 16:19:49.704 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][b6457057-689f-40ad-bbff-a1f2ac357c2e] received connection request\n2025-07-16 16:19:49.704 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:19:49.730 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b6457057-689f-40ad-bbff-a1f2ac357c2e] socks forwarding established\n2025-07-16 16:19:49.781 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b6457057-689f-40ad-bbff-a1f2ac357c2e] socks connection closed\n2025-07-16 16:19:49.781 [info] [command][3d0f38c0-9e97-451c-be03-ea59775a5c1f] Process exited with code 0\n2025-07-16 16:19:49.781 [info] [command][3d0f38c0-9e97-451c-be03-ea59775a5c1f] Socket close event received\n2025-07-16 16:19:49.807 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51626 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:20:49.783 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:20:49.785 [info] [command][880c0236-c546-4dbf-86f0-e110919ec118] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""880c0236-c546-4dbf-86f0-e110919ec118""}\n2025-07-16 16:20:49.786 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][3e6c87d1-a260-4bfd-8f92-44b83550962e] received connection request\n2025-07-16 16:20:49.786 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:20:49.810 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3e6c87d1-a260-4bfd-8f92-44b83550962e] socks forwarding established\n2025-07-16 16:20:49.960 [info] [command][880c0236-c546-4dbf-86f0-e110919ec118] Process exited with code 0\n2025-07-16 16:20:49.961 [info] 
[forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3e6c87d1-a260-4bfd-8f92-44b83550962e] socks connection closed\n2025-07-16 16:20:49.961 [info] [command][880c0236-c546-4dbf-86f0-e110919ec118] Socket close event received\n2025-07-16 16:20:49.983 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51654 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:21:49.965 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:21:49.967 [info] [command][e1480aa2-885e-4067-a3b5-668a41f2f81b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""e1480aa2-885e-4067-a3b5-668a41f2f81b""}\n2025-07-16 16:21:49.967 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][03969058-7bed-453c-ae4e-f6e7047cc82f] received connection request\n2025-07-16 16:21:49.968 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:21:50.107 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][03969058-7bed-453c-ae4e-f6e7047cc82f] socks forwarding established\n2025-07-16 16:21:50.250 [info] [command][e1480aa2-885e-4067-a3b5-668a41f2f81b] Process exited with code 0\n2025-07-16 16:21:50.251 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][03969058-7bed-453c-ae4e-f6e7047cc82f] socks connection closed\n2025-07-16 16:21:50.251 [info] [command][e1480aa2-885e-4067-a3b5-668a41f2f81b] Socket close event received\n2025-07-16 16:21:50.274 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51675 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:22:50.256 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:22:50.258 [info] [command][e26498e8-cf69-4c8a-90a2-afe49cef4f0e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""e26498e8-cf69-4c8a-90a2-afe49cef4f0e""}\n2025-07-16 16:22:50.259 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][54eb1dc6-3326-4561-ac5c-dba267c22bb7] received connection request\n2025-07-16 16:22:50.259 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:22:50.307 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][54eb1dc6-3326-4561-ac5c-dba267c22bb7] socks forwarding established\n2025-07-16 16:22:50.468 [info] [command][e26498e8-cf69-4c8a-90a2-afe49cef4f0e] Process exited with code 0\n2025-07-16 16:22:50.470 [info] [command][e26498e8-cf69-4c8a-90a2-afe49cef4f0e] Socket close event received\n2025-07-16 16:22:50.474 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][54eb1dc6-3326-4561-ac5c-dba267c22bb7] socks connection closed\n2025-07-16 16:22:50.494 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51742 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:23:50.474 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:23:50.477 [info] 
[command][5cbcbf54-3afc-4cc6-a821-ed44961b751d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""5cbcbf54-3afc-4cc6-a821-ed44961b751d""}\n2025-07-16 16:23:50.478 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][2a75a2aa-b329-4474-9792-6b03a0bec6ff] received connection request\n2025-07-16 16:23:50.478 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:23:50.536 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][2a75a2aa-b329-4474-9792-6b03a0bec6ff] socks forwarding established\n2025-07-16 16:23:50.562 [info] [command][5cbcbf54-3afc-4cc6-a821-ed44961b751d] Process exited with code 0\n2025-07-16 16:23:50.562 [info] [command][5cbcbf54-3afc-4cc6-a821-ed44961b751d] Socket close event received\n2025-07-16 16:23:50.562 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][2a75a2aa-b329-4474-9792-6b03a0bec6ff] socks connection closed\n2025-07-16 16:23:50.585 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51765 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:24:50.565 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:24:50.567 [info] [command][7cbea0b0-3fd3-47c8-8ea2-a9eac0b0d4e7] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""7cbea0b0-3fd3-47c8-8ea2-a9eac0b0d4e7""}\n2025-07-16 16:24:50.567 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][204cf5d8-7d3d-4e4d-9916-6e5e559258aa] received connection request\n2025-07-16 16:24:50.568 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:24:50.592 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][204cf5d8-7d3d-4e4d-9916-6e5e559258aa] socks forwarding established\n2025-07-16 16:24:50.753 [info] [command][7cbea0b0-3fd3-47c8-8ea2-a9eac0b0d4e7] Process exited with code 0\n2025-07-16 16:24:50.753 [info] [command][7cbea0b0-3fd3-47c8-8ea2-a9eac0b0d4e7] Socket close event received\n2025-07-16 16:24:50.775 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][204cf5d8-7d3d-4e4d-9916-6e5e559258aa] socks connection closed\n2025-07-16 16:24:50.780 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51805 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:25:50.758 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:25:50.760 [info] [command][9e705f5b-37c8-46f8-b838-32c0f25fa244] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""9e705f5b-37c8-46f8-b838-32c0f25fa244""}\n2025-07-16 16:25:50.762 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d8563fb0-d4e1-48b4-ba79-ec633a2dc95b] received connection request\n2025-07-16 16:25:50.762 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 
16:25:50.866 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d8563fb0-d4e1-48b4-ba79-ec633a2dc95b] socks forwarding established\n2025-07-16 16:25:51.025 [info] [command][9e705f5b-37c8-46f8-b838-32c0f25fa244] Process exited with code 0\n2025-07-16 16:25:51.025 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d8563fb0-d4e1-48b4-ba79-ec633a2dc95b] socks connection closed\n2025-07-16 16:25:51.025 [info] [command][9e705f5b-37c8-46f8-b838-32c0f25fa244] Socket close event received\n2025-07-16 16:25:51.049 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51835 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:26:51.026 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:26:51.028 [info] [command][574ef925-23b6-4563-9e2d-52f38e3624dc] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""574ef925-23b6-4563-9e2d-52f38e3624dc""}\n2025-07-16 16:26:51.029 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][34d92404-5bb6-4886-9bd0-ac49062c248c] received connection request\n2025-07-16 16:26:51.029 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:26:51.057 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][34d92404-5bb6-4886-9bd0-ac49062c248c] socks forwarding established\n2025-07-16 16:26:51.085 [info] [command][574ef925-23b6-4563-9e2d-52f38e3624dc] Process exited with code 0\n2025-07-16 16:26:51.085 [info] [command][574ef925-23b6-4563-9e2d-52f38e3624dc] Socket close event received\n2025-07-16 16:26:51.107 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51862 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:26:51.108 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][34d92404-5bb6-4886-9bd0-ac49062c248c] socks connection closed\n2025-07-16 16:27:51.091 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:27:51.093 [info] [command][ce62f74d-24ad-4c63-9eb1-ec8db50b4cc5] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ce62f74d-24ad-4c63-9eb1-ec8db50b4cc5""}\n2025-07-16 16:27:51.094 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][badb91b8-5366-4882-ae2f-e07567a74371] received connection request\n2025-07-16 16:27:51.094 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:27:51.118 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][badb91b8-5366-4882-ae2f-e07567a74371] socks forwarding established\n2025-07-16 16:27:51.144 [info] [command][ce62f74d-24ad-4c63-9eb1-ec8db50b4cc5] Process exited with code 0\n2025-07-16 16:27:51.144 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][badb91b8-5366-4882-ae2f-e07567a74371] socks connection closed\n2025-07-16 16:27:51.144 [info] [command][ce62f74d-24ad-4c63-9eb1-ec8db50b4cc5] Socket close event received\n2025-07-16 16:27:51.167 [info] (ssh_tunnel) stderr: 
debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51917 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:28:51.150 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:28:51.152 [info] [command][9d898eb0-3385-4069-b0e4-496d81da1c90] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""9d898eb0-3385-4069-b0e4-496d81da1c90""}\n2025-07-16 16:28:51.153 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][85efc205-1ab1-48bf-b44e-bcf2b2547ad0] received connection request\n2025-07-16 16:28:51.154 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:28:51.181 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][85efc205-1ab1-48bf-b44e-bcf2b2547ad0] socks forwarding established\n2025-07-16 16:28:51.208 [info] [command][9d898eb0-3385-4069-b0e4-496d81da1c90] Process exited with code 0\n2025-07-16 16:28:51.208 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][85efc205-1ab1-48bf-b44e-bcf2b2547ad0] socks connection closed\n2025-07-16 16:28:51.208 [info] [command][9d898eb0-3385-4069-b0e4-496d81da1c90] Socket close event received\n2025-07-16 16:28:51.230 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 51946 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:29:51.210 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:29:51.212 [info] [command][c27cdbc9-f9af-4274-befb-f94da455347c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""c27cdbc9-f9af-4274-befb-f94da455347c""}\n2025-07-16 16:29:51.213 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][124519e4-b2fe-4985-89a8-d48947e96510] received connection request\n2025-07-16 16:29:51.213 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:29:51.325 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][124519e4-b2fe-4985-89a8-d48947e96510] socks forwarding established\n2025-07-16 16:29:51.353 [info] [command][c27cdbc9-f9af-4274-befb-f94da455347c] Process exited with code 0\n2025-07-16 16:29:51.353 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][124519e4-b2fe-4985-89a8-d48947e96510] socks connection closed\n2025-07-16 16:29:51.353 [info] [command][c27cdbc9-f9af-4274-befb-f94da455347c] Socket close event received\n2025-07-16 16:29:51.503 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52002 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:30:51.358 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:30:51.360 [info] [command][684ea35b-3bb6-40d4-9b5f-4b7008986a4b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""684ea35b-3bb6-40d4-9b5f-4b7008986a4b""}\n2025-07-16 16:30:51.361 [info] [forwarding][multiplex][127.0.0.1:57978 -> 
127.0.0.1:37309][137028ef-d9c5-491b-9a54-b7ebc747710f] received connection request\n2025-07-16 16:30:51.362 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:30:51.402 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][137028ef-d9c5-491b-9a54-b7ebc747710f] socks forwarding established\n2025-07-16 16:30:51.562 [info] [command][684ea35b-3bb6-40d4-9b5f-4b7008986a4b] Process exited with code 0\n2025-07-16 16:30:51.563 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][137028ef-d9c5-491b-9a54-b7ebc747710f] socks connection closed\n2025-07-16 16:30:51.563 [info] [command][684ea35b-3bb6-40d4-9b5f-4b7008986a4b] Socket close event received\n2025-07-16 16:30:51.585 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52032 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:31:51.573 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:31:51.576 [info] [command][191263aa-1381-4f0b-8f95-8572df8e5e3e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""191263aa-1381-4f0b-8f95-8572df8e5e3e""}\n2025-07-16 16:31:51.576 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][7a49490c-2b8f-45a5-97ad-eb9e19ccad2d] received connection request\n2025-07-16 16:31:51.577 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:31:52.093 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7a49490c-2b8f-45a5-97ad-eb9e19ccad2d] socks forwarding established\n2025-07-16 16:31:52.124 [info] [command][191263aa-1381-4f0b-8f95-8572df8e5e3e] Process exited with code 0\n2025-07-16 16:31:52.124 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7a49490c-2b8f-45a5-97ad-eb9e19ccad2d] socks connection closed\n2025-07-16 16:31:52.124 [info] [command][191263aa-1381-4f0b-8f95-8572df8e5e3e] Socket close event received\n2025-07-16 16:31:52.174 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52065 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:32:52.131 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:32:52.132 [info] [command][e6f6cb42-4f46-4b54-b07f-9de987c3cb3e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""e6f6cb42-4f46-4b54-b07f-9de987c3cb3e""}\n2025-07-16 16:32:52.133 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][907048a8-06a5-4c2a-9e14-969d2c8c39fe] received connection request\n2025-07-16 16:32:52.134 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:32:52.157 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][907048a8-06a5-4c2a-9e14-969d2c8c39fe] socks forwarding established\n2025-07-16 16:32:52.183 [info] [command][e6f6cb42-4f46-4b54-b07f-9de987c3cb3e] Process exited with code 0\n2025-07-16 16:32:52.183 [info] 
[forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][907048a8-06a5-4c2a-9e14-969d2c8c39fe] socks connection closed\n2025-07-16 16:32:52.183 [info] [command][e6f6cb42-4f46-4b54-b07f-9de987c3cb3e] Socket close event received\n2025-07-16 16:32:52.207 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52132 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:33:52.193 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:33:52.195 [info] [command][3694cbb2-287f-4a47-a41e-b3190a2f869d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""3694cbb2-287f-4a47-a41e-b3190a2f869d""}\n2025-07-16 16:33:52.197 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][a9b9450d-e830-4768-aa7f-1f3027055a53] received connection request\n2025-07-16 16:33:52.199 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:33:52.223 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a9b9450d-e830-4768-aa7f-1f3027055a53] socks forwarding established\n2025-07-16 16:33:52.249 [info] [command][3694cbb2-287f-4a47-a41e-b3190a2f869d] Process exited with code 0\n2025-07-16 16:33:52.250 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a9b9450d-e830-4768-aa7f-1f3027055a53] socks connection closed\n2025-07-16 16:33:52.250 [info] [command][3694cbb2-287f-4a47-a41e-b3190a2f869d] Socket close event received\n2025-07-16 16:33:52.272 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52154 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:34:52.259 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:34:52.261 [info] [command][beb9d9d5-6892-4046-9338-0a72ded7cc49] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""beb9d9d5-6892-4046-9338-0a72ded7cc49""}\n2025-07-16 16:34:52.261 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][466ca445-4f0d-4592-9c1f-ec2a227ada3a] received connection request\n2025-07-16 16:34:52.262 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:34:52.286 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][466ca445-4f0d-4592-9c1f-ec2a227ada3a] socks forwarding established\n2025-07-16 16:34:52.314 [info] [command][beb9d9d5-6892-4046-9338-0a72ded7cc49] Process exited with code 0\n2025-07-16 16:34:52.315 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][466ca445-4f0d-4592-9c1f-ec2a227ada3a] socks connection closed\n2025-07-16 16:34:52.315 [info] [command][beb9d9d5-6892-4046-9338-0a72ded7cc49] Socket close event received\n2025-07-16 16:34:52.340 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52203 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:35:52.317 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:35:52.319 [info] 
[command][48e030f7-e6ec-46a5-882a-2793d347a739] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""48e030f7-e6ec-46a5-882a-2793d347a739""}\n2025-07-16 16:35:52.319 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][193d6bfb-1d76-499c-a02e-daba44d9cc71] received connection request\n2025-07-16 16:35:52.320 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:35:52.343 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][193d6bfb-1d76-499c-a02e-daba44d9cc71] socks forwarding established\n2025-07-16 16:35:52.369 [info] [command][48e030f7-e6ec-46a5-882a-2793d347a739] Process exited with code 0\n2025-07-16 16:35:52.370 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][193d6bfb-1d76-499c-a02e-daba44d9cc71] socks connection closed\n2025-07-16 16:35:52.370 [info] [command][48e030f7-e6ec-46a5-882a-2793d347a739] Socket close event received\n2025-07-16 16:35:52.392 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52226 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:36:52.380 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:36:52.382 [info] [command][32a82262-0610-4ee9-9ed9-1d648cc57524] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""32a82262-0610-4ee9-9ed9-1d648cc57524""}\n2025-07-16 16:36:52.383 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][97dbdc1d-b39f-4a47-a3e4-4899850ae55e] received connection request\n2025-07-16 16:36:52.384 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:36:52.408 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][97dbdc1d-b39f-4a47-a3e4-4899850ae55e] socks forwarding established\n2025-07-16 16:36:52.434 [info] [command][32a82262-0610-4ee9-9ed9-1d648cc57524] Process exited with code 0\n2025-07-16 16:36:52.435 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][97dbdc1d-b39f-4a47-a3e4-4899850ae55e] socks connection closed\n2025-07-16 16:36:52.435 [info] [command][32a82262-0610-4ee9-9ed9-1d648cc57524] Socket close event received\n2025-07-16 16:36:52.460 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52251 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:37:52.442 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:37:52.444 [info] [command][e37b1005-8dda-4d36-9122-542a2f8e965e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""e37b1005-8dda-4d36-9122-542a2f8e965e""}\n2025-07-16 16:37:52.444 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][b0f1ddf9-e65f-4749-bc01-497d5399493a] received connection request\n2025-07-16 16:37:52.445 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 
16:37:52.470 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b0f1ddf9-e65f-4749-bc01-497d5399493a] socks forwarding established\n2025-07-16 16:37:52.495 [info] [command][e37b1005-8dda-4d36-9122-542a2f8e965e] Process exited with code 0\n2025-07-16 16:37:52.496 [info] [command][e37b1005-8dda-4d36-9122-542a2f8e965e] Socket close event received\n2025-07-16 16:37:52.496 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b0f1ddf9-e65f-4749-bc01-497d5399493a] socks connection closed\n2025-07-16 16:37:52.519 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52301 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:38:52.506 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:38:52.508 [info] [command][884e28e5-2b04-4f8b-8099-363335c734c1] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""884e28e5-2b04-4f8b-8099-363335c734c1""}\n2025-07-16 16:38:52.509 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][5f03381c-6137-4cc9-ae08-bbdcc6c0380b] received connection request\n2025-07-16 16:38:52.510 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:38:52.534 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5f03381c-6137-4cc9-ae08-bbdcc6c0380b] socks forwarding established\n2025-07-16 16:38:52.561 [info] [command][884e28e5-2b04-4f8b-8099-363335c734c1] Process exited with code 0\n2025-07-16 16:38:52.561 [info] [command][884e28e5-2b04-4f8b-8099-363335c734c1] Socket close event received\n2025-07-16 16:38:52.561 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5f03381c-6137-4cc9-ae08-bbdcc6c0380b] socks connection closed\n2025-07-16 16:38:52.585 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52323 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:39:52.561 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:39:52.563 [info] [command][50f15996-9f95-4fbc-b0d0-adff996ebc6a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""50f15996-9f95-4fbc-b0d0-adff996ebc6a""}\n2025-07-16 16:39:52.563 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][b8017a37-f892-4d6d-bb86-638acf3276d8] received connection request\n2025-07-16 16:39:52.563 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:39:52.589 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b8017a37-f892-4d6d-bb86-638acf3276d8] socks forwarding established\n2025-07-16 16:39:52.639 [info] [command][50f15996-9f95-4fbc-b0d0-adff996ebc6a] Process exited with code 0\n2025-07-16 16:39:52.640 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b8017a37-f892-4d6d-bb86-638acf3276d8] socks connection closed\n2025-07-16 16:39:52.640 [info] [command][50f15996-9f95-4fbc-b0d0-adff996ebc6a] Socket close event received\n2025-07-16 16:39:52.665 [info] (ssh_tunnel) stderr: 
debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52387 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:40:52.645 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:40:52.646 [info] [command][57a9c22c-2cf8-476f-96da-7e36d502a46a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""57a9c22c-2cf8-476f-96da-7e36d502a46a""}\n2025-07-16 16:40:52.646 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][6a3ee367-662a-4e44-96a8-d23df73e783a] received connection request\n2025-07-16 16:40:52.647 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 16:40:52.647 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:40:52.706 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6a3ee367-662a-4e44-96a8-d23df73e783a] socks forwarding established\n2025-07-16 16:40:52.733 [info] [command][57a9c22c-2cf8-476f-96da-7e36d502a46a] Process exited with code 0\n2025-07-16 16:40:52.733 [info] [command][57a9c22c-2cf8-476f-96da-7e36d502a46a] Socket close event received\n2025-07-16 16:40:52.756 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52439 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:40:52.756 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6a3ee367-662a-4e44-96a8-d23df73e783a] socks connection closed\n2025-07-16 16:41:52.738 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:41:52.740 [info] [command][dddff7ab-8e38-4af4-8a34-9c3a89c2df29] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""dddff7ab-8e38-4af4-8a34-9c3a89c2df29""}\n2025-07-16 16:41:52.740 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][9909e9b5-e739-4fb1-aa86-910766f4575f] received connection request\n2025-07-16 16:41:52.741 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 16:41:52.741 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:41:52.764 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][9909e9b5-e739-4fb1-aa86-910766f4575f] socks forwarding established\n2025-07-16 16:41:52.790 [info] [command][dddff7ab-8e38-4af4-8a34-9c3a89c2df29] Process exited with code 0\n2025-07-16 16:41:52.790 [info] [command][dddff7ab-8e38-4af4-8a34-9c3a89c2df29] Socket close event received\n2025-07-16 16:41:52.813 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][9909e9b5-e739-4fb1-aa86-910766f4575f] socks connection closed\n2025-07-16 16:41:52.814 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52503 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:42:52.795 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:42:52.797 [info] [command][c9455f15-3fb6-46cb-a1e0-d04cbd8735c5] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""c9455f15-3fb6-46cb-a1e0-d04cbd8735c5""}\n2025-07-16 16:42:52.798 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][6e761a4e-389a-473e-959d-7d4ffe408c89] received connection request\n2025-07-16 16:42:52.798 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 16:42:52.798 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:42:52.825 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6e761a4e-389a-473e-959d-7d4ffe408c89] socks forwarding established\n2025-07-16 16:42:52.853 [info] [command][c9455f15-3fb6-46cb-a1e0-d04cbd8735c5] Process exited with code 0\n2025-07-16 16:42:52.854 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6e761a4e-389a-473e-959d-7d4ffe408c89] socks connection closed\n2025-07-16 16:42:52.854 [info] [command][c9455f15-3fb6-46cb-a1e0-d04cbd8735c5] Socket close event received\n2025-07-16 16:42:52.880 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52591 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:43:52.859 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:43:52.862 [info] [command][cc5fc2b1-4752-4662-945c-6cd853597ffa] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""cc5fc2b1-4752-4662-945c-6cd853597ffa""}\n2025-07-16 16:43:52.862 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][42183fb3-7f9b-4679-a564-87408be4c492] received connection request\n2025-07-16 16:43:52.863 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:43:52.890 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][42183fb3-7f9b-4679-a564-87408be4c492] socks forwarding established\n2025-07-16 16:43:52.918 [info] [command][cc5fc2b1-4752-4662-945c-6cd853597ffa] Process exited with code 0\n2025-07-16 16:43:52.919 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][42183fb3-7f9b-4679-a564-87408be4c492] socks connection closed\n2025-07-16 16:43:52.919 [info] [command][cc5fc2b1-4752-4662-945c-6cd853597ffa] Socket close event received\n2025-07-16 16:43:52.945 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52626 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:44:52.922 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:44:52.924 [info] [command][ede5655a-c412-4e81-a79f-882b164dcba9] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ede5655a-c412-4e81-a79f-882b164dcba9""}\n2025-07-16 16:44:52.924 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][dcbc5549-45e2-4e85-818f-d3146e6034ca] received connection request\n2025-07-16 16:44:52.924 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:44:52.948 [info] 
[forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][dcbc5549-45e2-4e85-818f-d3146e6034ca] socks forwarding established\n2025-07-16 16:44:52.974 [info] [command][ede5655a-c412-4e81-a79f-882b164dcba9] Process exited with code 0\n2025-07-16 16:44:52.974 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][dcbc5549-45e2-4e85-818f-d3146e6034ca] socks connection closed\n2025-07-16 16:44:52.974 [info] [command][ede5655a-c412-4e81-a79f-882b164dcba9] Socket close event received\n2025-07-16 16:44:52.998 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52681 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:45:52.975 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:45:52.977 [info] [command][69721142-b860-4c67-a03c-902719a0cf34] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""69721142-b860-4c67-a03c-902719a0cf34""}\n2025-07-16 16:45:52.977 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][eff848b8-92bc-4b11-8e91-8a6fe145d233] received connection request\n2025-07-16 16:45:52.978 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:45:53.007 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][eff848b8-92bc-4b11-8e91-8a6fe145d233] socks forwarding established\n2025-07-16 16:45:53.036 [info] [command][69721142-b860-4c67-a03c-902719a0cf34] Process exited with code 0\n2025-07-16 16:45:53.037 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][eff848b8-92bc-4b11-8e91-8a6fe145d233] socks connection closed\n2025-07-16 16:45:53.037 [info] [command][69721142-b860-4c67-a03c-902719a0cf34] Socket close event received\n2025-07-16 16:45:53.060 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52734 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:46:53.037 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:46:53.038 [info] [command][50938034-25f9-42e7-a0a9-31967c67d54c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""50938034-25f9-42e7-a0a9-31967c67d54c""}\n2025-07-16 16:46:53.039 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][962b0ec9-1858-4055-afaa-e92ba79eae71] received connection request\n2025-07-16 16:46:53.039 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:46:53.063 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][962b0ec9-1858-4055-afaa-e92ba79eae71] socks forwarding established\n2025-07-16 16:46:53.155 [info] [command][50938034-25f9-42e7-a0a9-31967c67d54c] Process exited with code 0\n2025-07-16 16:46:53.155 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][962b0ec9-1858-4055-afaa-e92ba79eae71] socks connection closed\n2025-07-16 16:46:53.155 [info] [command][50938034-25f9-42e7-a0a9-31967c67d54c] Socket close event received\n2025-07-16 16:46:53.223 [info] (ssh_tunnel) stderr: debug1: channel 4: 
free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52779 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:47:53.161 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:47:53.163 [info] [command][24bdfdd9-38ad-4e20-b8f0-4fabc59dd404] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""24bdfdd9-38ad-4e20-b8f0-4fabc59dd404""}\n2025-07-16 16:47:53.163 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][3f7a1a52-4794-4b4c-8d61-d244f65b719b] received connection request\n2025-07-16 16:47:53.163 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 16:47:53.163 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:47:53.253 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3f7a1a52-4794-4b4c-8d61-d244f65b719b] socks forwarding established\n2025-07-16 16:47:53.411 [info] [command][24bdfdd9-38ad-4e20-b8f0-4fabc59dd404] Process exited with code 0\n2025-07-16 16:47:53.412 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3f7a1a52-4794-4b4c-8d61-d244f65b719b] socks connection closed\n2025-07-16 16:47:53.412 [info] [command][24bdfdd9-38ad-4e20-b8f0-4fabc59dd404] Socket close event received\n2025-07-16 16:47:53.435 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52855 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:48:53.417 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:48:53.418 [info] [command][5e195185-191b-4c3d-99b9-422f6a652844] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""5e195185-191b-4c3d-99b9-422f6a652844""}\n2025-07-16 16:48:53.419 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][75944878-fa7c-47be-bd70-4c7d817c3871] received connection request\n2025-07-16 16:48:53.421 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:48:53.457 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][75944878-fa7c-47be-bd70-4c7d817c3871] socks forwarding established\n2025-07-16 16:48:53.599 [info] [command][5e195185-191b-4c3d-99b9-422f6a652844] Process exited with code 0\n2025-07-16 16:48:53.600 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][75944878-fa7c-47be-bd70-4c7d817c3871] socks connection closed\n2025-07-16 16:48:53.600 [info] [command][5e195185-191b-4c3d-99b9-422f6a652844] Socket close event received\n2025-07-16 16:48:53.624 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52880 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:49:53.603 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:49:53.604 [info] [command][9796ec67-b8bc-4f7c-b5d4-2970beac7b2f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""9796ec67-b8bc-4f7c-b5d4-2970beac7b2f""}\n2025-07-16 16:49:53.604 [info] [forwarding][multiplex][127.0.0.1:57978 -> 
127.0.0.1:37309][24f92fc3-339b-47f8-b770-c024746b769b] received connection request\n2025-07-16 16:49:53.604 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 16:49:53.604 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:49:53.802 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][24f92fc3-339b-47f8-b770-c024746b769b] socks forwarding established\n2025-07-16 16:49:53.831 [info] [command][9796ec67-b8bc-4f7c-b5d4-2970beac7b2f] Process exited with code 0\n2025-07-16 16:49:53.832 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][24f92fc3-339b-47f8-b770-c024746b769b] socks connection closed\n2025-07-16 16:49:53.832 [info] [command][9796ec67-b8bc-4f7c-b5d4-2970beac7b2f] Socket close event received\n2025-07-16 16:49:53.978 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52922 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:50:53.836 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:50:53.838 [info] [command][d6470dc0-593c-49d9-b2a4-70ddfde4412a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d6470dc0-593c-49d9-b2a4-70ddfde4412a""}\n2025-07-16 16:50:53.839 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][173bb699-7d2c-4136-89fb-b63671e50e19] received connection request\n2025-07-16 16:50:53.840 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:50:53.892 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][173bb699-7d2c-4136-89fb-b63671e50e19] socks forwarding established\n2025-07-16 16:50:54.022 [info] [command][d6470dc0-593c-49d9-b2a4-70ddfde4412a] Process exited with code 0\n2025-07-16 16:50:54.023 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][173bb699-7d2c-4136-89fb-b63671e50e19] socks connection closed\n2025-07-16 16:50:54.023 [info] [command][d6470dc0-593c-49d9-b2a4-70ddfde4412a] Socket close event received\n2025-07-16 16:50:54.048 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 52998 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:51:54.029 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:51:54.031 [info] [command][35611e03-e2a6-4e23-a095-708c835f1f65] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""35611e03-e2a6-4e23-a095-708c835f1f65""}\n2025-07-16 16:51:54.032 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d196c137-abad-4557-be06-0486b5d8776f] received connection request\n2025-07-16 16:51:54.032 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:51:54.122 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d196c137-abad-4557-be06-0486b5d8776f] socks forwarding established\n2025-07-16 16:51:54.327 [info] [command][35611e03-e2a6-4e23-a095-708c835f1f65] Process 
exited with code 0\n2025-07-16 16:51:54.327 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d196c137-abad-4557-be06-0486b5d8776f] socks connection closed\n2025-07-16 16:51:54.327 [info] [command][35611e03-e2a6-4e23-a095-708c835f1f65] Socket close event received\n2025-07-16 16:51:54.354 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 53065 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:52:54.332 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:52:54.334 [info] [command][fdd264fe-2d5b-4ddd-9551-ad63d582b45a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""fdd264fe-2d5b-4ddd-9551-ad63d582b45a""}\n2025-07-16 16:52:54.335 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0629e117-040c-481e-987d-2ab78bec11ed] received connection request\n2025-07-16 16:52:54.336 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:52:54.360 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0629e117-040c-481e-987d-2ab78bec11ed] socks forwarding established\n2025-07-16 16:52:54.388 [info] [command][fdd264fe-2d5b-4ddd-9551-ad63d582b45a] Process exited with code 0\n2025-07-16 16:52:54.389 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0629e117-040c-481e-987d-2ab78bec11ed] socks connection closed\n2025-07-16 16:52:54.389 [info] [command][fdd264fe-2d5b-4ddd-9551-ad63d582b45a] Socket close event received\n2025-07-16 16:52:54.414 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 53121 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:53:54.391 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:53:54.392 [info] [command][a7701284-fc87-4fb0-ac4b-050a364d788d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""a7701284-fc87-4fb0-ac4b-050a364d788d""}\n2025-07-16 16:53:54.393 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][80101a90-4ca0-445d-a3ed-dd654f708a3e] received connection request\n2025-07-16 16:53:54.393 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:53:54.417 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][80101a90-4ca0-445d-a3ed-dd654f708a3e] socks forwarding established\n2025-07-16 16:53:54.817 [info] [command][a7701284-fc87-4fb0-ac4b-050a364d788d] Process exited with code 0\n2025-07-16 16:53:54.817 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][80101a90-4ca0-445d-a3ed-dd654f708a3e] socks connection closed\n2025-07-16 16:53:54.817 [info] [command][a7701284-fc87-4fb0-ac4b-050a364d788d] Socket close event received\n2025-07-16 16:53:54.845 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 53182 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:54:54.822 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 
16:54:54.825 [info] [command][b948853c-68f3-47ca-97e9-be2d6dab739d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""b948853c-68f3-47ca-97e9-be2d6dab739d""}\n2025-07-16 16:54:54.826 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][f4ee1bbf-9434-4092-a1d2-6d01e1c85d70] received connection request\n2025-07-16 16:54:54.827 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:54:54.852 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f4ee1bbf-9434-4092-a1d2-6d01e1c85d70] socks forwarding established\n2025-07-16 16:54:54.979 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f4ee1bbf-9434-4092-a1d2-6d01e1c85d70] socks connection closed\n2025-07-16 16:54:54.979 [info] [command][b948853c-68f3-47ca-97e9-be2d6dab739d] Process exited with code 0\n2025-07-16 16:54:54.980 [info] [command][b948853c-68f3-47ca-97e9-be2d6dab739d] Socket close event received\n2025-07-16 16:54:55.053 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 53271 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:55:54.985 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:55:54.987 [info] [command][c312051d-025c-408b-b12d-cde6423cdfd3] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""c312051d-025c-408b-b12d-cde6423cdfd3""}\n2025-07-16 16:55:54.988 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][4e06d156-0656-421b-a98f-4a48fe4272cf] received connection request\n2025-07-16 16:55:54.989 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:55:55.015 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4e06d156-0656-421b-a98f-4a48fe4272cf] socks forwarding established\n2025-07-16 16:55:55.045 [info] [command][c312051d-025c-408b-b12d-cde6423cdfd3] Process exited with code 0\n2025-07-16 16:55:55.045 [info] [command][c312051d-025c-408b-b12d-cde6423cdfd3] Socket close event received\n2025-07-16 16:55:55.179 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4e06d156-0656-421b-a98f-4a48fe4272cf] socks connection closed\n2025-07-16 16:55:55.212 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 53311 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:56:55.051 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:56:55.053 [info] [command][997bffd6-760a-4ef2-9be3-a97937dd580a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""997bffd6-760a-4ef2-9be3-a97937dd580a""}\n2025-07-16 16:56:55.054 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][7fa48313-b594-49af-aba8-13f4f9836eb3] received connection request\n2025-07-16 16:56:55.055 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 
0)\n\n2025-07-16 16:56:55.094 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7fa48313-b594-49af-aba8-13f4f9836eb3] socks forwarding established\n2025-07-16 16:56:55.252 [info] [command][997bffd6-760a-4ef2-9be3-a97937dd580a] Process exited with code 0\n2025-07-16 16:56:55.252 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7fa48313-b594-49af-aba8-13f4f9836eb3] socks connection closed\n2025-07-16 16:56:55.252 [info] [command][997bffd6-760a-4ef2-9be3-a97937dd580a] Socket close event received\n2025-07-16 16:56:55.312 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 53364 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:57:55.256 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:57:55.257 [info] [command][297b1098-fc4f-4c4c-851d-093a50842757] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""297b1098-fc4f-4c4c-851d-093a50842757""}\n2025-07-16 16:57:55.258 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][2208ce16-b007-4819-9096-f96805525ee5] received connection request\n2025-07-16 16:57:55.258 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:57:55.284 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][2208ce16-b007-4819-9096-f96805525ee5] socks forwarding established\n2025-07-16 16:57:55.325 [info] [command][297b1098-fc4f-4c4c-851d-093a50842757] Process exited with code 0\n2025-07-16 16:57:55.325 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][2208ce16-b007-4819-9096-f96805525ee5] socks connection closed\n2025-07-16 16:57:55.325 [info] [command][297b1098-fc4f-4c4c-851d-093a50842757] Socket close event received\n2025-07-16 16:57:55.350 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 53460 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 16:58:55.330 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:58:55.331 [info] [command][cfa33280-730e-4356-843d-ba2085fb588c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""cfa33280-730e-4356-843d-ba2085fb588c""}\n2025-07-16 16:58:55.332 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][50c095f4-1bcc-47b5-962f-ff9bb10bc255] received connection request\n2025-07-16 16:58:55.332 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:58:55.415 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][50c095f4-1bcc-47b5-962f-ff9bb10bc255] socks forwarding established\n2025-07-16 16:58:55.564 [info] [command][cfa33280-730e-4356-843d-ba2085fb588c] Process exited with code 0\n2025-07-16 16:58:55.564 [info] [command][cfa33280-730e-4356-843d-ba2085fb588c] Socket close event received\n2025-07-16 16:58:55.588 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 53523 to 127.0.0.1 port 57974, nchannels 
6\n\n2025-07-16 16:58:55.588 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][50c095f4-1bcc-47b5-962f-ff9bb10bc255] socks connection closed\n2025-07-16 16:59:55.569 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 16:59:55.572 [info] [command][964c9510-94f9-484b-92a0-da3f98ba08f3] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""964c9510-94f9-484b-92a0-da3f98ba08f3""}\n2025-07-16 16:59:55.573 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][96fdc099-1ef8-4659-b38e-7c2366d45add] received connection request\n2025-07-16 16:59:55.574 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 16:59:55.600 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][96fdc099-1ef8-4659-b38e-7c2366d45add] socks forwarding established\n2025-07-16 16:59:55.659 [info] [command][964c9510-94f9-484b-92a0-da3f98ba08f3] Process exited with code 0\n2025-07-16 16:59:55.659 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][96fdc099-1ef8-4659-b38e-7c2366d45add] socks connection closed\n2025-07-16 16:59:55.659 [info] [command][964c9510-94f9-484b-92a0-da3f98ba08f3] Socket close event received\n2025-07-16 16:59:55.685 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 53597 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:00:55.665 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:00:55.668 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][b3003370-fe7e-48fb-aa50-224e1f217cf9] received connection request\n2025-07-16 17:00:55.669 [info] [command][ad5f86a6-4e50-4d69-8e64-005fa53a8f18] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ad5f86a6-4e50-4d69-8e64-005fa53a8f18""}\n2025-07-16 17:00:55.669 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:00:55.708 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b3003370-fe7e-48fb-aa50-224e1f217cf9] socks forwarding established\n2025-07-16 17:00:55.740 [info] [command][ad5f86a6-4e50-4d69-8e64-005fa53a8f18] Process exited with code 0\n2025-07-16 17:00:55.741 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b3003370-fe7e-48fb-aa50-224e1f217cf9] socks connection closed\n2025-07-16 17:00:55.741 [info] [command][ad5f86a6-4e50-4d69-8e64-005fa53a8f18] Socket close event received\n2025-07-16 17:00:55.828 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 53671 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:01:55.743 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:01:55.744 [info] [command][6ce8495d-4cfa-4ba2-9975-f4446ad0a702] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""6ce8495d-4cfa-4ba2-9975-f4446ad0a702""}\n2025-07-16 17:01:55.744 [info] [forwarding][multiplex][127.0.0.1:57978 -> 
127.0.0.1:37309][4b39069c-da8a-43ba-abdc-1ca211020b50] received connection request\n2025-07-16 17:01:55.744 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:01:55.880 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4b39069c-da8a-43ba-abdc-1ca211020b50] socks forwarding established\n2025-07-16 17:01:56.038 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4b39069c-da8a-43ba-abdc-1ca211020b50] socks connection closed\n2025-07-16 17:01:56.038 [info] [command][6ce8495d-4cfa-4ba2-9975-f4446ad0a702] Process exited with code 0\n2025-07-16 17:01:56.038 [info] [command][6ce8495d-4cfa-4ba2-9975-f4446ad0a702] Socket close event received\n2025-07-16 17:01:56.062 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 53744 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:02:56.044 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:02:56.045 [info] [command][9d931048-9550-4288-a87f-c19709414b30] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""9d931048-9550-4288-a87f-c19709414b30""}\n2025-07-16 17:02:56.046 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][cd11dac6-55a3-4bd5-b93f-2b058b944c55] received connection request\n2025-07-16 17:02:56.046 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:02:56.124 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][cd11dac6-55a3-4bd5-b93f-2b058b944c55] socks forwarding established\n2025-07-16 17:02:56.283 [info] [command][9d931048-9550-4288-a87f-c19709414b30] Process exited with code 0\n2025-07-16 17:02:56.283 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][cd11dac6-55a3-4bd5-b93f-2b058b944c55] socks connection closed\n2025-07-16 17:02:56.284 [info] [command][9d931048-9550-4288-a87f-c19709414b30] Socket close event received\n2025-07-16 17:02:56.307 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 53830 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:03:56.289 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:03:56.291 [info] [command][47b50ee3-82f8-4c0b-aa87-781c4899535f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""47b50ee3-82f8-4c0b-aa87-781c4899535f""}\n2025-07-16 17:03:56.292 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][e2bbaed4-8076-4684-8cd0-add3fec8750a] received connection request\n2025-07-16 17:03:56.292 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:03:56.326 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e2bbaed4-8076-4684-8cd0-add3fec8750a] socks forwarding established\n2025-07-16 17:03:56.353 [info] [command][47b50ee3-82f8-4c0b-aa87-781c4899535f] Process exited with code 0\n2025-07-16 17:03:56.353 [info] 
[command][47b50ee3-82f8-4c0b-aa87-781c4899535f] Socket close event received\n2025-07-16 17:03:56.374 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e2bbaed4-8076-4684-8cd0-add3fec8750a] socks connection closed\n2025-07-16 17:03:56.378 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 53856 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:04:56.359 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:04:56.360 [info] [command][e53cd641-0d95-40c3-bf30-395c4fd12c93] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""e53cd641-0d95-40c3-bf30-395c4fd12c93""}\n2025-07-16 17:04:56.361 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][8c4af8b7-3478-4dee-ac70-95fe00238e90] received connection request\n2025-07-16 17:04:56.361 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:04:56.411 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8c4af8b7-3478-4dee-ac70-95fe00238e90] socks forwarding established\n2025-07-16 17:04:56.486 [info] [command][e53cd641-0d95-40c3-bf30-395c4fd12c93] Process exited with code 0\n2025-07-16 17:04:56.486 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8c4af8b7-3478-4dee-ac70-95fe00238e90] socks connection closed\n2025-07-16 17:04:56.486 [info] [command][e53cd641-0d95-40c3-bf30-395c4fd12c93] Socket close event received\n2025-07-16 17:04:56.511 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 53923 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:05:56.487 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:05:56.489 [info] [command][1d290ce3-134f-4680-b0f9-cd92a382dc6b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""1d290ce3-134f-4680-b0f9-cd92a382dc6b""}\n2025-07-16 17:05:56.489 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][b2e27140-128e-4b90-a2d8-2d43f6a6973d] received connection request\n2025-07-16 17:05:56.490 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:05:56.514 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b2e27140-128e-4b90-a2d8-2d43f6a6973d] socks forwarding established\n2025-07-16 17:05:56.542 [info] [command][1d290ce3-134f-4680-b0f9-cd92a382dc6b] Process exited with code 0\n2025-07-16 17:05:56.542 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b2e27140-128e-4b90-a2d8-2d43f6a6973d] socks connection closed\n2025-07-16 17:05:56.542 [info] [command][1d290ce3-134f-4680-b0f9-cd92a382dc6b] Socket close event received\n2025-07-16 17:05:56.574 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 53975 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:06:56.548 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:06:56.551 [info] 
[command][d915bc7d-4b62-4b8c-9b68-4994d467b9f4] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d915bc7d-4b62-4b8c-9b68-4994d467b9f4""}\n2025-07-16 17:06:56.552 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d0ee58b8-5173-4f56-8f4c-3a7c4773eb8f] received connection request\n2025-07-16 17:06:56.552 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:06:56.582 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d0ee58b8-5173-4f56-8f4c-3a7c4773eb8f] socks forwarding established\n2025-07-16 17:06:56.665 [info] [command][d915bc7d-4b62-4b8c-9b68-4994d467b9f4] Process exited with code 0\n2025-07-16 17:06:56.665 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d0ee58b8-5173-4f56-8f4c-3a7c4773eb8f] socks connection closed\n2025-07-16 17:06:56.665 [info] [command][d915bc7d-4b62-4b8c-9b68-4994d467b9f4] Socket close event received\n2025-07-16 17:06:56.688 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54013 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:07:56.670 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:07:56.673 [info] [command][3e2dcc5f-f008-4fe0-8f9a-2e61b17dc602] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""3e2dcc5f-f008-4fe0-8f9a-2e61b17dc602""}\n2025-07-16 17:07:56.673 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][01d57781-9fb1-4fc1-b135-8190b912cde3] received connection request\n2025-07-16 17:07:56.674 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:07:56.698 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][01d57781-9fb1-4fc1-b135-8190b912cde3] socks forwarding established\n2025-07-16 17:07:56.724 [info] [command][3e2dcc5f-f008-4fe0-8f9a-2e61b17dc602] Process exited with code 0\n2025-07-16 17:07:56.724 [info] [command][3e2dcc5f-f008-4fe0-8f9a-2e61b17dc602] Socket close event received\n2025-07-16 17:07:56.725 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][01d57781-9fb1-4fc1-b135-8190b912cde3] socks connection closed\n2025-07-16 17:07:56.748 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54052 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:08:56.730 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:08:56.732 [info] [command][205ef38d-31c4-4a7f-aad2-b4acd0b19425] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""205ef38d-31c4-4a7f-aad2-b4acd0b19425""}\n2025-07-16 17:08:56.733 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][aa4ca2a0-91d4-4051-b65f-b98bf4fbb7e5] received connection request\n2025-07-16 17:08:56.734 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 
17:08:56.759 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][aa4ca2a0-91d4-4051-b65f-b98bf4fbb7e5] socks forwarding established\n2025-07-16 17:08:56.786 [info] [command][205ef38d-31c4-4a7f-aad2-b4acd0b19425] Process exited with code 0\n2025-07-16 17:08:56.786 [info] [command][205ef38d-31c4-4a7f-aad2-b4acd0b19425] Socket close event received\n2025-07-16 17:08:56.787 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][aa4ca2a0-91d4-4051-b65f-b98bf4fbb7e5] socks connection closed\n2025-07-16 17:08:56.813 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54082 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:09:56.792 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:09:56.794 [info] [command][d6f50049-8488-463d-b573-6e6519f796bb] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d6f50049-8488-463d-b573-6e6519f796bb""}\n2025-07-16 17:09:56.795 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0006f138-402b-4bb9-9c94-b1613ae3f3cb] received connection request\n2025-07-16 17:09:56.795 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:09:56.825 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0006f138-402b-4bb9-9c94-b1613ae3f3cb] socks forwarding established\n2025-07-16 17:09:56.853 [info] [command][d6f50049-8488-463d-b573-6e6519f796bb] Process exited with code 0\n2025-07-16 17:09:56.854 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0006f138-402b-4bb9-9c94-b1613ae3f3cb] socks connection closed\n2025-07-16 17:09:56.854 [info] [command][d6f50049-8488-463d-b573-6e6519f796bb] Socket close event received\n2025-07-16 17:09:56.879 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54122 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:10:56.856 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:10:56.858 [info] [command][6ffc8bc9-823a-4855-b1b2-f5b7ad1a3756] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""6ffc8bc9-823a-4855-b1b2-f5b7ad1a3756""}\n2025-07-16 17:10:56.859 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][104f7de0-1d47-43c5-a768-cae6ee2cd60a] received connection request\n2025-07-16 17:10:56.859 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:10:56.883 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][104f7de0-1d47-43c5-a768-cae6ee2cd60a] socks forwarding established\n2025-07-16 17:10:57.025 [info] [command][6ffc8bc9-823a-4855-b1b2-f5b7ad1a3756] Process exited with code 0\n2025-07-16 17:10:57.025 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][104f7de0-1d47-43c5-a768-cae6ee2cd60a] socks connection closed\n2025-07-16 17:10:57.025 [info] [command][6ffc8bc9-823a-4855-b1b2-f5b7ad1a3756] Socket close event received\n2025-07-16 17:10:57.049 [info] (ssh_tunnel) stderr: 
debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54156 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:11:57.026 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:11:57.029 [info] [command][9cb54f11-6c09-4931-a5b8-8eb9e7a18602] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""9cb54f11-6c09-4931-a5b8-8eb9e7a18602""}\n2025-07-16 17:11:57.029 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][eee72e79-d67b-4d6c-b5dd-ac5703274b4b] received connection request\n2025-07-16 17:11:57.030 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:11:57.083 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][eee72e79-d67b-4d6c-b5dd-ac5703274b4b] socks forwarding established\n2025-07-16 17:11:57.257 [info] [command][9cb54f11-6c09-4931-a5b8-8eb9e7a18602] Process exited with code 0\n2025-07-16 17:11:57.257 [info] [command][9cb54f11-6c09-4931-a5b8-8eb9e7a18602] Socket close event received\n2025-07-16 17:11:57.258 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][eee72e79-d67b-4d6c-b5dd-ac5703274b4b] socks connection closed\n2025-07-16 17:11:57.412 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54195 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:12:57.262 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:12:57.265 [info] [command][b3d32834-2942-4677-8962-43405446dab6] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""b3d32834-2942-4677-8962-43405446dab6""}\n2025-07-16 17:12:57.265 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][5fc4c360-9bc9-4f6a-8425-92356f9f5ff1] received connection request\n2025-07-16 17:12:57.266 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:12:57.373 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5fc4c360-9bc9-4f6a-8425-92356f9f5ff1] socks forwarding established\n2025-07-16 17:12:57.402 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5fc4c360-9bc9-4f6a-8425-92356f9f5ff1] socks connection closed\n2025-07-16 17:12:57.402 [info] [command][b3d32834-2942-4677-8962-43405446dab6] Process exited with code 0\n2025-07-16 17:12:57.403 [info] [command][b3d32834-2942-4677-8962-43405446dab6] Socket close event received\n2025-07-16 17:12:57.426 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54245 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:13:57.407 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:13:57.409 [info] [command][d8625194-ca95-4578-b83f-cb33dd1ad7d0] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d8625194-ca95-4578-b83f-cb33dd1ad7d0""}\n2025-07-16 17:13:57.410 [info] [forwarding][multiplex][127.0.0.1:57978 -> 
127.0.0.1:37309][5f033554-fcb6-41d4-a2c7-c2e063a5e808] received connection request\n2025-07-16 17:13:57.410 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:13:57.437 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5f033554-fcb6-41d4-a2c7-c2e063a5e808] socks forwarding established\n2025-07-16 17:13:57.595 [info] [command][d8625194-ca95-4578-b83f-cb33dd1ad7d0] Process exited with code 0\n2025-07-16 17:13:57.596 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5f033554-fcb6-41d4-a2c7-c2e063a5e808] socks connection closed\n2025-07-16 17:13:57.596 [info] [command][d8625194-ca95-4578-b83f-cb33dd1ad7d0] Socket close event received\n2025-07-16 17:13:57.624 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54271 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:14:57.600 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:14:57.602 [info] [command][9e715260-a8a9-4d85-9bd4-e81109c4bb47] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""9e715260-a8a9-4d85-9bd4-e81109c4bb47""}\n2025-07-16 17:14:57.603 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][5df0a80c-e662-4552-899b-d47f9f291740] received connection request\n2025-07-16 17:14:57.603 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:14:57.633 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5df0a80c-e662-4552-899b-d47f9f291740] socks forwarding established\n2025-07-16 17:14:57.659 [info] [command][9e715260-a8a9-4d85-9bd4-e81109c4bb47] Process exited with code 0\n2025-07-16 17:14:57.659 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5df0a80c-e662-4552-899b-d47f9f291740] socks connection closed\n2025-07-16 17:14:57.659 [info] [command][9e715260-a8a9-4d85-9bd4-e81109c4bb47] Socket close event received\n2025-07-16 17:14:57.683 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54316 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:15:57.663 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:15:57.665 [info] [command][b15ab25a-1a9c-4d85-88cb-458fdc4330f0] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""b15ab25a-1a9c-4d85-88cb-458fdc4330f0""}\n2025-07-16 17:15:57.665 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][342a4ae0-9d5e-46d2-9155-92da7108cd65] received connection request\n2025-07-16 17:15:57.665 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:15:57.704 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][342a4ae0-9d5e-46d2-9155-92da7108cd65] socks forwarding established\n2025-07-16 17:15:57.733 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][342a4ae0-9d5e-46d2-9155-92da7108cd65] socks connection closed\n2025-07-16 17:15:57.733 [info] [command][b15ab25a-1a9c-4d85-88cb-458fdc4330f0] Process exited with code 0\n2025-07-16 17:15:57.733 [info] [command][b15ab25a-1a9c-4d85-88cb-458fdc4330f0] Socket close event received\n2025-07-16 17:15:57.891 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54340 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:16:57.735 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:16:57.737 [info] [command][d8c92783-8c9c-4205-a94b-0381a8680b63] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d8c92783-8c9c-4205-a94b-0381a8680b63""}\n2025-07-16 17:16:57.738 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][149c1045-8279-4820-b8ef-e7f3621afe7c] received connection request\n2025-07-16 17:16:57.738 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:16:57.765 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][149c1045-8279-4820-b8ef-e7f3621afe7c] socks forwarding established\n2025-07-16 17:16:57.923 [info] [command][d8c92783-8c9c-4205-a94b-0381a8680b63] Process exited with code 0\n2025-07-16 17:16:57.923 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][149c1045-8279-4820-b8ef-e7f3621afe7c] socks connection closed\n2025-07-16 17:16:57.923 [info] [command][d8c92783-8c9c-4205-a94b-0381a8680b63] Socket close event received\n2025-07-16 17:16:57.946 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54382 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:17:57.927 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:17:57.929 [info] [command][1c8c1615-5e3b-4471-99b8-c09952575c2f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""1c8c1615-5e3b-4471-99b8-c09952575c2f""}\n2025-07-16 17:17:57.930 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][6fea7d65-cdba-4430-970f-6711874471cf] received connection request\n2025-07-16 17:17:57.931 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:17:57.996 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6fea7d65-cdba-4430-970f-6711874471cf] socks forwarding established\n2025-07-16 17:17:58.156 [info] [command][1c8c1615-5e3b-4471-99b8-c09952575c2f] Process exited with code 0\n2025-07-16 17:17:58.156 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6fea7d65-cdba-4430-970f-6711874471cf] socks connection closed\n2025-07-16 17:17:58.157 [info] [command][1c8c1615-5e3b-4471-99b8-c09952575c2f] Socket close event received\n2025-07-16 17:17:58.181 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54428 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:18:58.161 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 
17:18:58.164 [info] [command][82a5a51d-dbd9-4b1e-8986-503c502b080e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""82a5a51d-dbd9-4b1e-8986-503c502b080e""}\n2025-07-16 17:18:58.164 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][f69447c5-fa44-4607-aa9b-7b3729444a70] received connection request\n2025-07-16 17:18:58.165 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:18:58.277 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f69447c5-fa44-4607-aa9b-7b3729444a70] socks forwarding established\n2025-07-16 17:18:58.304 [info] [command][82a5a51d-dbd9-4b1e-8986-503c502b080e] Process exited with code 0\n2025-07-16 17:18:58.305 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f69447c5-fa44-4607-aa9b-7b3729444a70] socks connection closed\n2025-07-16 17:18:58.305 [info] [command][82a5a51d-dbd9-4b1e-8986-503c502b080e] Socket close event received\n2025-07-16 17:18:58.331 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54451 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:19:58.308 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:19:58.310 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][fadc6253-dcbe-4504-a1cb-066545be415f] received connection request\n2025-07-16 17:19:58.310 [info] [command][924c97c4-791b-41a3-b5e4-15a2bbadad5f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""924c97c4-791b-41a3-b5e4-15a2bbadad5f""}\n2025-07-16 17:19:58.310 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:19:58.511 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][fadc6253-dcbe-4504-a1cb-066545be415f] socks forwarding established\n2025-07-16 17:19:58.586 [info] [command][924c97c4-791b-41a3-b5e4-15a2bbadad5f] Process exited with code 0\n2025-07-16 17:19:58.587 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][fadc6253-dcbe-4504-a1cb-066545be415f] socks connection closed\n2025-07-16 17:19:58.587 [info] [command][924c97c4-791b-41a3-b5e4-15a2bbadad5f] Socket close event received\n2025-07-16 17:19:58.703 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54488 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:20:58.590 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:20:58.591 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][402d6f92-d7ca-4901-aa14-18e687c02327] received connection request\n2025-07-16 17:20:58.592 [info] [command][94c09aac-1783-4c40-83b8-87f8e5681efe] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""94c09aac-1783-4c40-83b8-87f8e5681efe""}\n2025-07-16 17:20:58.593 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 
0)\n\n2025-07-16 17:20:58.656 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][402d6f92-d7ca-4901-aa14-18e687c02327] socks forwarding established\n2025-07-16 17:20:58.764 [info] [command][94c09aac-1783-4c40-83b8-87f8e5681efe] Process exited with code 0\n2025-07-16 17:20:58.764 [info] [command][94c09aac-1783-4c40-83b8-87f8e5681efe] Socket close event received\n2025-07-16 17:20:58.764 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][402d6f92-d7ca-4901-aa14-18e687c02327] socks connection closed\n2025-07-16 17:20:58.865 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54515 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:21:58.767 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:21:58.770 [info] [command][d5092d3b-1c57-449a-92d4-3ba5149b289e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d5092d3b-1c57-449a-92d4-3ba5149b289e""}\n2025-07-16 17:21:58.771 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][b257cf9d-6d88-4f16-867f-dd6ab645ca35] received connection request\n2025-07-16 17:21:58.771 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:21:58.799 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b257cf9d-6d88-4f16-867f-dd6ab645ca35] socks forwarding established\n2025-07-16 17:21:58.829 [info] [command][d5092d3b-1c57-449a-92d4-3ba5149b289e] Process exited with code 0\n2025-07-16 17:21:58.830 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b257cf9d-6d88-4f16-867f-dd6ab645ca35] socks connection closed\n2025-07-16 17:21:58.830 [info] [command][d5092d3b-1c57-449a-92d4-3ba5149b289e] Socket close event received\n2025-07-16 17:21:58.854 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54563 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:22:58.834 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:22:58.835 [info] [command][3dcd7538-f615-4a56-8fd5-d58e5b5bb3f6] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""3dcd7538-f615-4a56-8fd5-d58e5b5bb3f6""}\n2025-07-16 17:22:58.836 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][5e7f81a5-be24-4a76-b5c0-1bb451cf048f] received connection request\n2025-07-16 17:22:58.836 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:22:58.862 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5e7f81a5-be24-4a76-b5c0-1bb451cf048f] socks forwarding established\n2025-07-16 17:22:58.887 [info] [command][3dcd7538-f615-4a56-8fd5-d58e5b5bb3f6] Process exited with code 0\n2025-07-16 17:22:58.888 [info] [command][3dcd7538-f615-4a56-8fd5-d58e5b5bb3f6] Socket close event received\n2025-07-16 17:22:58.888 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5e7f81a5-be24-4a76-b5c0-1bb451cf048f] socks connection closed\n2025-07-16 17:22:58.925 [info] 
(ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54598 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:23:58.893 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:23:58.895 [info] [command][e9106596-9b29-4aaa-9ed0-947b4635666c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""e9106596-9b29-4aaa-9ed0-947b4635666c""}\n2025-07-16 17:23:58.896 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][6667c852-4913-40d9-9693-d6c5e8eb277d] received connection request\n2025-07-16 17:23:58.897 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:23:58.921 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6667c852-4913-40d9-9693-d6c5e8eb277d] socks forwarding established\n2025-07-16 17:23:58.948 [info] [command][e9106596-9b29-4aaa-9ed0-947b4635666c] Process exited with code 0\n2025-07-16 17:23:58.948 [info] [command][e9106596-9b29-4aaa-9ed0-947b4635666c] Socket close event received\n2025-07-16 17:23:58.949 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6667c852-4913-40d9-9693-d6c5e8eb277d] socks connection closed\n2025-07-16 17:23:58.971 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54622 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:24:58.950 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:24:58.951 [info] [command][55780904-22b1-4273-8677-fa5a51102d11] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""55780904-22b1-4273-8677-fa5a51102d11""}\n2025-07-16 17:24:58.952 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][e9f459ba-586f-4cd5-b851-5f37a259210a] received connection request\n2025-07-16 17:24:58.952 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:24:58.977 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e9f459ba-586f-4cd5-b851-5f37a259210a] socks forwarding established\n2025-07-16 17:24:59.006 [info] [command][55780904-22b1-4273-8677-fa5a51102d11] Process exited with code 0\n2025-07-16 17:24:59.006 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e9f459ba-586f-4cd5-b851-5f37a259210a] socks connection closed\n2025-07-16 17:24:59.006 [info] [command][55780904-22b1-4273-8677-fa5a51102d11] Socket close event received\n2025-07-16 17:24:59.030 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54673 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:25:59.011 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:25:59.012 [info] [command][5db6896b-947a-441a-bfa7-39e2e1001406] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""5db6896b-947a-441a-bfa7-39e2e1001406""}\n2025-07-16 17:25:59.013 [info] [forwarding][multiplex][127.0.0.1:57978 -> 
127.0.0.1:37309][276419f8-8267-496a-9e51-5c0f1d205d25] received connection request\n2025-07-16 17:25:59.013 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:25:59.042 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][276419f8-8267-496a-9e51-5c0f1d205d25] socks forwarding established\n2025-07-16 17:25:59.069 [info] [command][5db6896b-947a-441a-bfa7-39e2e1001406] Process exited with code 0\n2025-07-16 17:25:59.069 [info] [command][5db6896b-947a-441a-bfa7-39e2e1001406] Socket close event received\n2025-07-16 17:25:59.083 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][276419f8-8267-496a-9e51-5c0f1d205d25] socks connection closed\n2025-07-16 17:25:59.095 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54735 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:26:59.072 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:26:59.073 [info] [command][ab45d2ac-6bb1-4e27-98e1-acaaa6ec3121] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ab45d2ac-6bb1-4e27-98e1-acaaa6ec3121""}\n2025-07-16 17:26:59.074 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][5c677088-e116-4dcb-9eb5-df0f05197753] received connection request\n2025-07-16 17:26:59.074 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:26:59.099 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5c677088-e116-4dcb-9eb5-df0f05197753] socks forwarding established\n2025-07-16 17:26:59.125 [info] [command][ab45d2ac-6bb1-4e27-98e1-acaaa6ec3121] Process exited with code 0\n2025-07-16 17:26:59.125 [info] [command][ab45d2ac-6bb1-4e27-98e1-acaaa6ec3121] Socket close event received\n2025-07-16 17:26:59.125 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5c677088-e116-4dcb-9eb5-df0f05197753] socks connection closed\n2025-07-16 17:26:59.149 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54804 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:27:59.127 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:27:59.129 [info] [command][b289b0d3-76b4-43d7-8c07-55e68d923593] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""b289b0d3-76b4-43d7-8c07-55e68d923593""}\n2025-07-16 17:27:59.129 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d4a128fb-2bd8-4248-bd19-fbf9b990a08e] received connection request\n2025-07-16 17:27:59.129 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:27:59.154 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d4a128fb-2bd8-4248-bd19-fbf9b990a08e] socks forwarding established\n2025-07-16 17:27:59.182 [info] [command][b289b0d3-76b4-43d7-8c07-55e68d923593] Process exited with code 0\n2025-07-16 17:27:59.182 [info] 
[forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d4a128fb-2bd8-4248-bd19-fbf9b990a08e] socks connection closed\n2025-07-16 17:27:59.182 [info] [command][b289b0d3-76b4-43d7-8c07-55e68d923593] Socket close event received\n2025-07-16 17:27:59.207 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54857 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:28:59.188 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:28:59.189 [info] [command][eb869105-2ccb-4f2b-9738-6bf45f7d89d7] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""eb869105-2ccb-4f2b-9738-6bf45f7d89d7""}\n2025-07-16 17:28:59.189 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][6f00a359-f86a-4d28-9335-f6114c0daa74] received connection request\n2025-07-16 17:28:59.189 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:28:59.213 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6f00a359-f86a-4d28-9335-f6114c0daa74] socks forwarding established\n2025-07-16 17:28:59.238 [info] [command][eb869105-2ccb-4f2b-9738-6bf45f7d89d7] Process exited with code 0\n2025-07-16 17:28:59.239 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6f00a359-f86a-4d28-9335-f6114c0daa74] socks connection closed\n2025-07-16 17:28:59.239 [info] [command][eb869105-2ccb-4f2b-9738-6bf45f7d89d7] Socket close event received\n2025-07-16 17:28:59.263 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54898 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:29:59.241 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:29:59.243 [info] [command][8bed8e09-6765-48be-8712-bb1af006a694] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""8bed8e09-6765-48be-8712-bb1af006a694""}\n2025-07-16 17:29:59.244 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][34b67351-226d-4220-a192-822e2e7440f3] received connection request\n2025-07-16 17:29:59.244 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:29:59.292 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][34b67351-226d-4220-a192-822e2e7440f3] socks forwarding established\n2025-07-16 17:29:59.354 [info] [command][8bed8e09-6765-48be-8712-bb1af006a694] Process exited with code 0\n2025-07-16 17:29:59.354 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][34b67351-226d-4220-a192-822e2e7440f3] socks connection closed\n2025-07-16 17:29:59.354 [info] [command][8bed8e09-6765-48be-8712-bb1af006a694] Socket close event received\n2025-07-16 17:29:59.430 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54941 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:30:59.356 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:30:59.358 [info] 
[command][52a9dc7b-a247-4749-adf2-36e1acf3797e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""52a9dc7b-a247-4749-adf2-36e1acf3797e""}\n2025-07-16 17:30:59.358 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][dd530a02-1b1f-401c-93e2-141aeefa4139] received connection request\n2025-07-16 17:30:59.359 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:30:59.382 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][dd530a02-1b1f-401c-93e2-141aeefa4139] socks forwarding established\n2025-07-16 17:30:59.412 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][dd530a02-1b1f-401c-93e2-141aeefa4139] socks connection closed\n2025-07-16 17:30:59.413 [info] [command][52a9dc7b-a247-4749-adf2-36e1acf3797e] Process exited with code 0\n2025-07-16 17:30:59.413 [info] [command][52a9dc7b-a247-4749-adf2-36e1acf3797e] Socket close event received\n2025-07-16 17:30:59.438 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 54979 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:31:59.416 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:31:59.418 [info] [command][c485cc6b-7841-40da-a510-9538e08b94bb] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""c485cc6b-7841-40da-a510-9538e08b94bb""}\n2025-07-16 17:31:59.419 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][69d9fb88-422f-43a9-a21a-1436d0453d9e] received connection request\n2025-07-16 17:31:59.419 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:31:59.443 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][69d9fb88-422f-43a9-a21a-1436d0453d9e] socks forwarding established\n2025-07-16 17:31:59.471 [info] [command][c485cc6b-7841-40da-a510-9538e08b94bb] Process exited with code 0\n2025-07-16 17:31:59.471 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][69d9fb88-422f-43a9-a21a-1436d0453d9e] socks connection closed\n2025-07-16 17:31:59.471 [info] [command][c485cc6b-7841-40da-a510-9538e08b94bb] Socket close event received\n2025-07-16 17:31:59.496 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55045 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:32:59.472 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:32:59.474 [info] [command][7dc65128-f43d-4fa2-984b-6ac252bf782e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""7dc65128-f43d-4fa2-984b-6ac252bf782e""}\n2025-07-16 17:32:59.475 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][246a1781-f740-425f-b633-25eb312849fd] received connection request\n2025-07-16 17:32:59.475 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 
17:32:59.500 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][246a1781-f740-425f-b633-25eb312849fd] socks forwarding established\n2025-07-16 17:32:59.527 [info] [command][7dc65128-f43d-4fa2-984b-6ac252bf782e] Process exited with code 0\n2025-07-16 17:32:59.528 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][246a1781-f740-425f-b633-25eb312849fd] socks connection closed\n2025-07-16 17:32:59.528 [info] [command][7dc65128-f43d-4fa2-984b-6ac252bf782e] Socket close event received\n2025-07-16 17:32:59.551 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55102 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:33:59.532 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:33:59.533 [info] [command][aa2e0796-cb42-4cd6-aea1-a044fea05ed8] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""aa2e0796-cb42-4cd6-aea1-a044fea05ed8""}\n2025-07-16 17:33:59.534 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][94610996-2231-44ff-bd45-f0dd573066dc] received connection request\n2025-07-16 17:33:59.535 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:33:59.560 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][94610996-2231-44ff-bd45-f0dd573066dc] socks forwarding established\n2025-07-16 17:33:59.588 [info] [command][aa2e0796-cb42-4cd6-aea1-a044fea05ed8] Process exited with code 0\n2025-07-16 17:33:59.588 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][94610996-2231-44ff-bd45-f0dd573066dc] socks connection closed\n2025-07-16 17:33:59.588 [info] [command][aa2e0796-cb42-4cd6-aea1-a044fea05ed8] Socket close event received\n2025-07-16 17:33:59.612 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55130 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:34:59.593 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:34:59.595 [info] [command][2a68b897-86b8-4c06-8f1d-67fc34a662e9] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""2a68b897-86b8-4c06-8f1d-67fc34a662e9""}\n2025-07-16 17:34:59.596 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][40487a3b-a04e-4490-8d28-39b527c56294] received connection request\n2025-07-16 17:34:59.597 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:34:59.621 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][40487a3b-a04e-4490-8d28-39b527c56294] socks forwarding established\n2025-07-16 17:34:59.647 [info] [command][2a68b897-86b8-4c06-8f1d-67fc34a662e9] Process exited with code 0\n2025-07-16 17:34:59.647 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][40487a3b-a04e-4490-8d28-39b527c56294] socks connection closed\n2025-07-16 17:34:59.647 [info] [command][2a68b897-86b8-4c06-8f1d-67fc34a662e9] Socket close event received\n2025-07-16 17:34:59.671 [info] (ssh_tunnel) stderr: 
debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55173 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:35:59.648 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:35:59.651 [info] [command][f8664e7c-42b1-400d-ad16-a2c72f15625d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""f8664e7c-42b1-400d-ad16-a2c72f15625d""}\n2025-07-16 17:35:59.652 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][cb7e3b72-a969-4fc2-82b1-d24aa9b48579] received connection request\n2025-07-16 17:35:59.652 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:35:59.680 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][cb7e3b72-a969-4fc2-82b1-d24aa9b48579] socks forwarding established\n2025-07-16 17:35:59.716 [info] [command][f8664e7c-42b1-400d-ad16-a2c72f15625d] Process exited with code 0\n2025-07-16 17:35:59.716 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][cb7e3b72-a969-4fc2-82b1-d24aa9b48579] socks connection closed\n2025-07-16 17:35:59.717 [info] [command][f8664e7c-42b1-400d-ad16-a2c72f15625d] Socket close event received\n2025-07-16 17:35:59.740 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55203 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:36:59.720 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:36:59.721 [info] [command][2fd07de6-d8b7-4a51-8bd1-357ce1351ad3] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""2fd07de6-d8b7-4a51-8bd1-357ce1351ad3""}\n2025-07-16 17:36:59.721 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][55a91ce7-b68b-4c52-9d30-f6499d0a67ea] received connection request\n2025-07-16 17:36:59.721 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:36:59.745 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][55a91ce7-b68b-4c52-9d30-f6499d0a67ea] socks forwarding established\n2025-07-16 17:36:59.794 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][55a91ce7-b68b-4c52-9d30-f6499d0a67ea] socks connection closed\n2025-07-16 17:36:59.794 [info] [command][2fd07de6-d8b7-4a51-8bd1-357ce1351ad3] Process exited with code 0\n2025-07-16 17:36:59.794 [info] [command][2fd07de6-d8b7-4a51-8bd1-357ce1351ad3] Socket close event received\n2025-07-16 17:36:59.817 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55258 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:37:59.799 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:37:59.802 [info] [command][8f89df9c-3aa9-4033-9877-e4f8083b2408] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""8f89df9c-3aa9-4033-9877-e4f8083b2408""}\n2025-07-16 17:37:59.802 [info] [forwarding][multiplex][127.0.0.1:57978 -> 
127.0.0.1:37309][1d0bc87e-434b-425f-a4d4-819cdbdfc6c4] received connection request\n2025-07-16 17:37:59.803 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:37:59.851 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1d0bc87e-434b-425f-a4d4-819cdbdfc6c4] socks forwarding established\n2025-07-16 17:37:59.877 [info] [command][8f89df9c-3aa9-4033-9877-e4f8083b2408] Process exited with code 0\n2025-07-16 17:37:59.878 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1d0bc87e-434b-425f-a4d4-819cdbdfc6c4] socks connection closed\n2025-07-16 17:37:59.878 [info] [command][8f89df9c-3aa9-4033-9877-e4f8083b2408] Socket close event received\n2025-07-16 17:37:59.901 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55302 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:38:59.880 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:38:59.883 [info] [command][7c3cfd4f-4366-47f0-999d-3e6ef22be9c3] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""7c3cfd4f-4366-47f0-999d-3e6ef22be9c3""}\n2025-07-16 17:38:59.883 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0bbefe09-bdab-4402-bfca-c6c45c2dfa76] received connection request\n2025-07-16 17:38:59.884 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:38:59.908 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0bbefe09-bdab-4402-bfca-c6c45c2dfa76] socks forwarding established\n2025-07-16 17:38:59.959 [info] [command][7c3cfd4f-4366-47f0-999d-3e6ef22be9c3] Process exited with code 0\n2025-07-16 17:38:59.959 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0bbefe09-bdab-4402-bfca-c6c45c2dfa76] socks connection closed\n2025-07-16 17:38:59.960 [info] [command][7c3cfd4f-4366-47f0-999d-3e6ef22be9c3] Socket close event received\n2025-07-16 17:38:59.984 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55340 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:39:59.965 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:39:59.967 [info] [command][ac2d4b05-77d4-4571-b576-8efee7efab29] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ac2d4b05-77d4-4571-b576-8efee7efab29""}\n2025-07-16 17:39:59.967 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][df1817bf-ebc9-4322-a8b9-094b04f89d94] received connection request\n2025-07-16 17:39:59.968 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:39:59.999 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][df1817bf-ebc9-4322-a8b9-094b04f89d94] socks forwarding established\n2025-07-16 17:40:00.039 [info] [command][ac2d4b05-77d4-4571-b576-8efee7efab29] Process exited with code 0\n2025-07-16 17:40:00.039 [info] 
[forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][df1817bf-ebc9-4322-a8b9-094b04f89d94] socks connection closed\n2025-07-16 17:40:00.039 [info] [command][ac2d4b05-77d4-4571-b576-8efee7efab29] Socket close event received\n2025-07-16 17:40:00.062 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55401 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:41:00.045 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:41:00.046 [info] [command][391a5c25-5ef8-4874-867c-365d53f20cae] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""391a5c25-5ef8-4874-867c-365d53f20cae""}\n2025-07-16 17:41:00.046 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][238b6a0b-d2d9-4621-9bff-fd5df5d8a6a6] received connection request\n2025-07-16 17:41:00.047 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:41:00.071 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][238b6a0b-d2d9-4621-9bff-fd5df5d8a6a6] socks forwarding established\n2025-07-16 17:41:00.096 [info] [command][391a5c25-5ef8-4874-867c-365d53f20cae] Process exited with code 0\n2025-07-16 17:41:00.097 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][238b6a0b-d2d9-4621-9bff-fd5df5d8a6a6] socks connection closed\n2025-07-16 17:41:00.097 [info] [command][391a5c25-5ef8-4874-867c-365d53f20cae] Socket close event received\n2025-07-16 17:41:00.122 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55426 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:42:00.100 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:42:00.102 [info] [command][1c59febb-cd79-4f1f-9a93-be8d2276812e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""1c59febb-cd79-4f1f-9a93-be8d2276812e""}\n2025-07-16 17:42:00.102 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][a3b695be-c2d2-4410-8a0a-e1b0f7ae27bf] received connection request\n2025-07-16 17:42:00.103 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:42:00.129 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a3b695be-c2d2-4410-8a0a-e1b0f7ae27bf] socks forwarding established\n2025-07-16 17:42:00.175 [info] [command][1c59febb-cd79-4f1f-9a93-be8d2276812e] Process exited with code 0\n2025-07-16 17:42:00.176 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a3b695be-c2d2-4410-8a0a-e1b0f7ae27bf] socks connection closed\n2025-07-16 17:42:00.176 [info] [command][1c59febb-cd79-4f1f-9a93-be8d2276812e] Socket close event received\n2025-07-16 17:42:00.199 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55465 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:43:00.179 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:43:00.182 [info] 
[command][3c4cf87e-b9f2-45dc-a81a-3ae99f5f756c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""3c4cf87e-b9f2-45dc-a81a-3ae99f5f756c""}\n2025-07-16 17:43:00.183 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][996a49b1-e514-44f9-b480-3f22f92c9cfa] received connection request\n2025-07-16 17:43:00.185 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:43:00.281 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][996a49b1-e514-44f9-b480-3f22f92c9cfa] socks forwarding established\n2025-07-16 17:43:00.310 [info] [command][3c4cf87e-b9f2-45dc-a81a-3ae99f5f756c] Process exited with code 0\n2025-07-16 17:43:00.311 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][996a49b1-e514-44f9-b480-3f22f92c9cfa] socks connection closed\n2025-07-16 17:43:00.312 [info] [command][3c4cf87e-b9f2-45dc-a81a-3ae99f5f756c] Socket close event received\n2025-07-16 17:43:00.337 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55505 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:44:00.316 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:44:00.317 [info] [command][88ae8cdc-b48d-4987-a121-e373898b8b67] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""88ae8cdc-b48d-4987-a121-e373898b8b67""}\n2025-07-16 17:44:00.317 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][c23f0b3b-fd91-4bd1-95d5-f07071350f67] received connection request\n2025-07-16 17:44:00.317 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:44:00.367 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c23f0b3b-fd91-4bd1-95d5-f07071350f67] socks forwarding established\n2025-07-16 17:44:00.416 [info] [command][88ae8cdc-b48d-4987-a121-e373898b8b67] Process exited with code 0\n2025-07-16 17:44:00.416 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c23f0b3b-fd91-4bd1-95d5-f07071350f67] socks connection closed\n2025-07-16 17:44:00.416 [info] [command][88ae8cdc-b48d-4987-a121-e373898b8b67] Socket close event received\n2025-07-16 17:44:00.438 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55547 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:45:00.422 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:45:00.424 [info] [command][a4959a7d-e2b9-4621-b94c-8c8e46f5cb9b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""a4959a7d-e2b9-4621-b94c-8c8e46f5cb9b""}\n2025-07-16 17:45:00.424 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][3c2a9e61-592a-44fd-97e0-3755c1d41521] received connection request\n2025-07-16 17:45:00.425 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 
17:45:00.467 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3c2a9e61-592a-44fd-97e0-3755c1d41521] socks forwarding established\n2025-07-16 17:45:00.494 [info] [command][a4959a7d-e2b9-4621-b94c-8c8e46f5cb9b] Process exited with code 0\n2025-07-16 17:45:00.494 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3c2a9e61-592a-44fd-97e0-3755c1d41521] socks connection closed\n2025-07-16 17:45:00.494 [info] [command][a4959a7d-e2b9-4621-b94c-8c8e46f5cb9b] Socket close event received\n2025-07-16 17:45:00.519 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55597 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:46:00.500 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:46:00.501 [info] [command][914c9265-d1d3-4342-93fe-c248957347d4] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""914c9265-d1d3-4342-93fe-c248957347d4""}\n2025-07-16 17:46:00.502 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][96a70a04-f5d7-4e67-aabe-e3c561f1f246] received connection request\n2025-07-16 17:46:00.502 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:46:00.528 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][96a70a04-f5d7-4e67-aabe-e3c561f1f246] socks forwarding established\n2025-07-16 17:46:00.578 [info] [command][914c9265-d1d3-4342-93fe-c248957347d4] Process exited with code 0\n2025-07-16 17:46:00.578 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][96a70a04-f5d7-4e67-aabe-e3c561f1f246] socks connection closed\n2025-07-16 17:46:00.579 [info] [command][914c9265-d1d3-4342-93fe-c248957347d4] Socket close event received\n2025-07-16 17:46:00.604 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55628 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:47:00.584 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:47:00.586 [info] [command][c1e38575-3b6b-4ecd-a67f-f4a07aaa139b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""c1e38575-3b6b-4ecd-a67f-f4a07aaa139b""}\n2025-07-16 17:47:00.587 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][cfff69df-c67f-4165-9e19-e930e05f174f] received connection request\n2025-07-16 17:47:00.588 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:47:00.613 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][cfff69df-c67f-4165-9e19-e930e05f174f] socks forwarding established\n2025-07-16 17:47:00.640 [info] [command][c1e38575-3b6b-4ecd-a67f-f4a07aaa139b] Process exited with code 0\n2025-07-16 17:47:00.640 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][cfff69df-c67f-4165-9e19-e930e05f174f] socks connection closed\n2025-07-16 17:47:00.641 [info] [command][c1e38575-3b6b-4ecd-a67f-f4a07aaa139b] Socket close event received\n2025-07-16 17:47:00.663 [info] (ssh_tunnel) stderr: 
debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55671 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:48:00.645 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:48:00.646 [info] [command][eb2cdff4-a037-448a-8324-29fcc499b3aa] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""eb2cdff4-a037-448a-8324-29fcc499b3aa""}\n2025-07-16 17:48:00.647 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][356b0dd8-3a74-442d-9952-41a83cd352f3] received connection request\n2025-07-16 17:48:00.648 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:48:00.672 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][356b0dd8-3a74-442d-9952-41a83cd352f3] socks forwarding established\n2025-07-16 17:48:00.698 [info] [command][eb2cdff4-a037-448a-8324-29fcc499b3aa] Process exited with code 0\n2025-07-16 17:48:00.698 [info] [command][eb2cdff4-a037-448a-8324-29fcc499b3aa] Socket close event received\n2025-07-16 17:48:00.699 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][356b0dd8-3a74-442d-9952-41a83cd352f3] socks connection closed\n2025-07-16 17:48:00.723 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55713 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:49:00.704 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:49:00.705 [info] [command][3bce143c-bb77-4f9e-a912-3ceefae06896] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""3bce143c-bb77-4f9e-a912-3ceefae06896""}\n2025-07-16 17:49:00.705 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][2a17d9a5-ec88-49d4-9c19-5ff3c9ed25ac] received connection request\n2025-07-16 17:49:00.706 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 17:49:00.706 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:49:00.729 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][2a17d9a5-ec88-49d4-9c19-5ff3c9ed25ac] socks forwarding established\n2025-07-16 17:49:00.757 [info] [command][3bce143c-bb77-4f9e-a912-3ceefae06896] Process exited with code 0\n2025-07-16 17:49:00.757 [info] [command][3bce143c-bb77-4f9e-a912-3ceefae06896] Socket close event received\n2025-07-16 17:49:00.780 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55739 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:49:00.780 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][2a17d9a5-ec88-49d4-9c19-5ff3c9ed25ac] socks connection closed\n2025-07-16 17:50:00.758 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:50:00.759 [info] [command][6665650e-601e-41cc-9981-85a47fa23486] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""6665650e-601e-41cc-9981-85a47fa23486""}\n2025-07-16 17:50:00.760 [info] 
[forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][c51bd02e-f652-4f26-bcd7-f878e59afa39] received connection request\n2025-07-16 17:50:00.760 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 17:50:00.760 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:50:00.785 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c51bd02e-f652-4f26-bcd7-f878e59afa39] socks forwarding established\n2025-07-16 17:50:00.833 [info] [command][6665650e-601e-41cc-9981-85a47fa23486] Process exited with code 0\n2025-07-16 17:50:00.833 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c51bd02e-f652-4f26-bcd7-f878e59afa39] socks connection closed\n2025-07-16 17:50:00.833 [info] [command][6665650e-601e-41cc-9981-85a47fa23486] Socket close event received\n2025-07-16 17:50:00.857 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55783 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:51:00.838 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:51:00.840 [info] [command][d379ceea-6702-4ade-8e98-8e9e7b5dff33] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d379ceea-6702-4ade-8e98-8e9e7b5dff33""}\n2025-07-16 17:51:00.841 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][7ccf8ced-dd0a-4e63-9be1-098856c793f1] received connection request\n2025-07-16 17:51:00.842 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:51:00.869 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7ccf8ced-dd0a-4e63-9be1-098856c793f1] socks forwarding established\n2025-07-16 17:51:00.918 [info] [command][d379ceea-6702-4ade-8e98-8e9e7b5dff33] Process exited with code 0\n2025-07-16 17:51:00.918 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][7ccf8ced-dd0a-4e63-9be1-098856c793f1] socks connection closed\n2025-07-16 17:51:00.919 [info] [command][d379ceea-6702-4ade-8e98-8e9e7b5dff33] Socket close event received\n2025-07-16 17:51:00.943 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55812 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:52:00.921 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:52:00.922 [info] [command][68b1e2f5-7b04-42f4-9434-9735c229f42e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""68b1e2f5-7b04-42f4-9434-9735c229f42e""}\n2025-07-16 17:52:00.922 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][9585a937-16cf-4b15-82b0-2808c68cdc2e] received connection request\n2025-07-16 17:52:00.923 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:52:00.947 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][9585a937-16cf-4b15-82b0-2808c68cdc2e] socks forwarding established\n2025-07-16 17:52:00.995 [info] 
[command][68b1e2f5-7b04-42f4-9434-9735c229f42e] Process exited with code 0\n2025-07-16 17:52:00.995 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][9585a937-16cf-4b15-82b0-2808c68cdc2e] socks connection closed\n2025-07-16 17:52:00.996 [info] [command][68b1e2f5-7b04-42f4-9434-9735c229f42e] Socket close event received\n2025-07-16 17:52:01.020 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55855 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:53:01.001 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:53:01.003 [info] [command][51716921-f06e-4fed-9894-0228a3e05928] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""51716921-f06e-4fed-9894-0228a3e05928""}\n2025-07-16 17:53:01.004 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][9b744b71-cbe7-4382-85e6-b522f4b5c1fc] received connection request\n2025-07-16 17:53:01.004 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:53:01.038 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][9b744b71-cbe7-4382-85e6-b522f4b5c1fc] socks forwarding established\n2025-07-16 17:53:01.196 [info] [command][51716921-f06e-4fed-9894-0228a3e05928] Process exited with code 0\n2025-07-16 17:53:01.197 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][9b744b71-cbe7-4382-85e6-b522f4b5c1fc] socks connection closed\n2025-07-16 17:53:01.197 [info] [command][51716921-f06e-4fed-9894-0228a3e05928] Socket close event received\n2025-07-16 17:53:01.220 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55920 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:54:01.211 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:54:01.213 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0bf4b653-be30-4670-a12a-3e5347975e7b] received connection request\n2025-07-16 17:54:01.213 [info] [command][5724c853-d459-4c42-a018-aa3d772c745a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""5724c853-d459-4c42-a018-aa3d772c745a""}\n2025-07-16 17:54:01.213 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:54:01.251 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0bf4b653-be30-4670-a12a-3e5347975e7b] socks forwarding established\n2025-07-16 17:54:01.285 [info] [command][5724c853-d459-4c42-a018-aa3d772c745a] Process exited with code 0\n2025-07-16 17:54:01.285 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0bf4b653-be30-4670-a12a-3e5347975e7b] socks connection closed\n2025-07-16 17:54:01.285 [info] [command][5724c853-d459-4c42-a018-aa3d772c745a] Socket close event received\n2025-07-16 17:54:01.312 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55952 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:55:01.287 [info] 
[remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:55:01.290 [info] [command][04db1326-a442-4b12-adfe-01c3dab3e9dc] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""04db1326-a442-4b12-adfe-01c3dab3e9dc""}\n2025-07-16 17:55:01.290 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][769dffad-1b48-41f5-8230-477fcbb5376e] received connection request\n2025-07-16 17:55:01.291 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:55:01.434 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][769dffad-1b48-41f5-8230-477fcbb5376e] socks forwarding established\n2025-07-16 17:55:01.463 [info] [command][04db1326-a442-4b12-adfe-01c3dab3e9dc] Process exited with code 0\n2025-07-16 17:55:01.463 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][769dffad-1b48-41f5-8230-477fcbb5376e] socks connection closed\n2025-07-16 17:55:01.463 [info] [command][04db1326-a442-4b12-adfe-01c3dab3e9dc] Socket close event received\n2025-07-16 17:55:01.597 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 55998 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:56:01.469 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:56:01.472 [info] [command][ad5e654e-9269-4bc2-88dc-9fb84d586e4c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ad5e654e-9269-4bc2-88dc-9fb84d586e4c""}\n2025-07-16 17:56:01.473 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][adc9a3d6-2384-4c29-9a93-66e53e8ab111] received connection request\n2025-07-16 17:56:01.473 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:56:01.499 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][adc9a3d6-2384-4c29-9a93-66e53e8ab111] socks forwarding established\n2025-07-16 17:56:01.659 [info] [command][ad5e654e-9269-4bc2-88dc-9fb84d586e4c] Process exited with code 0\n2025-07-16 17:56:01.659 [info] [command][ad5e654e-9269-4bc2-88dc-9fb84d586e4c] Socket close event received\n2025-07-16 17:56:01.660 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][adc9a3d6-2384-4c29-9a93-66e53e8ab111] socks connection closed\n2025-07-16 17:56:01.814 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56030 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:57:01.665 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:57:01.666 [info] [command][a345a99a-67de-4bbd-8df3-b152b847d792] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""a345a99a-67de-4bbd-8df3-b152b847d792""}\n2025-07-16 17:57:01.667 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][899bf684-3c96-456c-b832-e3d0f3717bff] received connection request\n2025-07-16 17:57:01.668 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: 
new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:57:01.709 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][899bf684-3c96-456c-b832-e3d0f3717bff] socks forwarding established\n2025-07-16 17:57:01.870 [info] [command][a345a99a-67de-4bbd-8df3-b152b847d792] Process exited with code 0\n2025-07-16 17:57:01.870 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][899bf684-3c96-456c-b832-e3d0f3717bff] socks connection closed\n2025-07-16 17:57:01.871 [info] [command][a345a99a-67de-4bbd-8df3-b152b847d792] Socket close event received\n2025-07-16 17:57:01.900 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56068 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:58:01.876 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:58:01.877 [info] [command][a22f99bd-3061-44fe-9b59-4c5ab3526c82] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""a22f99bd-3061-44fe-9b59-4c5ab3526c82""}\n2025-07-16 17:58:01.877 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][31c3aba8-97f1-452c-8492-a08ef226066c] received connection request\n2025-07-16 17:58:01.878 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 17:58:01.878 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:58:01.905 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][31c3aba8-97f1-452c-8492-a08ef226066c] socks forwarding established\n2025-07-16 17:58:02.025 [info] [command][a22f99bd-3061-44fe-9b59-4c5ab3526c82] Process exited with code 0\n2025-07-16 17:58:02.026 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][31c3aba8-97f1-452c-8492-a08ef226066c] socks connection closed\n2025-07-16 17:58:02.026 [info] [command][a22f99bd-3061-44fe-9b59-4c5ab3526c82] Socket close event received\n2025-07-16 17:58:02.051 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56116 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 17:59:02.031 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 17:59:02.033 [info] [command][d7d50712-ecdb-47db-b861-827812ca23fb] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d7d50712-ecdb-47db-b861-827812ca23fb""}\n2025-07-16 17:59:02.034 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d6591a2d-51af-4dfb-9366-538b877e5cbf] received connection request\n2025-07-16 17:59:02.036 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 17:59:02.061 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d6591a2d-51af-4dfb-9366-538b877e5cbf] socks forwarding established\n2025-07-16 17:59:02.087 [info] [command][d7d50712-ecdb-47db-b861-827812ca23fb] Process exited with code 0\n2025-07-16 17:59:02.088 [info] [command][d7d50712-ecdb-47db-b861-827812ca23fb] Socket close event received\n2025-07-16 17:59:02.088 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][d6591a2d-51af-4dfb-9366-538b877e5cbf] socks connection closed\n2025-07-16 17:59:02.112 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56148 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:00:02.092 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:00:02.094 [info] [command][4953be61-156c-419b-a949-d0736a258fb0] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""4953be61-156c-419b-a949-d0736a258fb0""}\n2025-07-16 18:00:02.095 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][f9d88927-8c64-4512-9f6d-1c7f4aabc2fe] received connection request\n2025-07-16 18:00:02.096 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:00:02.143 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f9d88927-8c64-4512-9f6d-1c7f4aabc2fe] socks forwarding established\n2025-07-16 18:00:02.303 [info] [command][4953be61-156c-419b-a949-d0736a258fb0] Process exited with code 0\n2025-07-16 18:00:02.304 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][f9d88927-8c64-4512-9f6d-1c7f4aabc2fe] socks connection closed\n2025-07-16 18:00:02.304 [info] [command][4953be61-156c-419b-a949-d0736a258fb0] Socket close event received\n2025-07-16 18:00:02.326 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56198 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:01:02.309 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:01:02.311 [info] [command][ec77c1da-7a15-4a2d-bd26-cd70c2a0a754] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ec77c1da-7a15-4a2d-bd26-cd70c2a0a754""}\n2025-07-16 18:01:02.312 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][037381ab-d408-42b2-9eef-7c2f452ba361] received connection request\n2025-07-16 18:01:02.312 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:01:02.337 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][037381ab-d408-42b2-9eef-7c2f452ba361] socks forwarding established\n2025-07-16 18:01:02.366 [info] [command][ec77c1da-7a15-4a2d-bd26-cd70c2a0a754] Process exited with code 0\n2025-07-16 18:01:02.366 [info] [command][ec77c1da-7a15-4a2d-bd26-cd70c2a0a754] Socket close event received\n2025-07-16 18:01:02.366 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][037381ab-d408-42b2-9eef-7c2f452ba361] socks connection closed\n2025-07-16 18:01:02.389 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56254 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:02:02.369 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:02:02.371 [info] [command][605b6d04-bb87-4c4d-a3d6-9e3843234c47] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""605b6d04-bb87-4c4d-a3d6-9e3843234c47""}\n2025-07-16 18:02:02.371 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][86f6c321-d1a8-4b35-a894-4aa6c6a057d7] received connection request\n2025-07-16 18:02:02.371 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 18:02:02.371 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:02:02.397 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][86f6c321-d1a8-4b35-a894-4aa6c6a057d7] socks forwarding established\n2025-07-16 18:02:02.423 [info] [command][605b6d04-bb87-4c4d-a3d6-9e3843234c47] Process exited with code 0\n2025-07-16 18:02:02.423 [info] [command][605b6d04-bb87-4c4d-a3d6-9e3843234c47] Socket close event received\n2025-07-16 18:02:02.423 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][86f6c321-d1a8-4b35-a894-4aa6c6a057d7] socks connection closed\n2025-07-16 18:02:02.446 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56333 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:03:02.424 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:03:02.427 [info] [command][2f305572-87eb-4f80-a4f4-74b3d89114a7] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""2f305572-87eb-4f80-a4f4-74b3d89114a7""}\n2025-07-16 18:03:02.427 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][295792a8-71d5-4949-ae48-19b796d35d25] received connection request\n2025-07-16 18:03:02.428 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:03:02.451 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][295792a8-71d5-4949-ae48-19b796d35d25] socks forwarding established\n2025-07-16 18:03:02.588 [info] [command][2f305572-87eb-4f80-a4f4-74b3d89114a7] Process exited with code 0\n2025-07-16 18:03:02.589 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][295792a8-71d5-4949-ae48-19b796d35d25] socks connection closed\n2025-07-16 18:03:02.589 [info] [command][2f305572-87eb-4f80-a4f4-74b3d89114a7] Socket close event received\n2025-07-16 18:03:02.615 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56397 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:04:02.589 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:04:02.591 [info] [command][af46dfb0-4292-4c2f-bbff-d38233abe64b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""af46dfb0-4292-4c2f-bbff-d38233abe64b""}\n2025-07-16 18:04:02.592 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][04c0ef83-4b72-4dac-b5b4-9c3e8ab8aed4] received connection request\n2025-07-16 18:04:02.593 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:04:02.618 [info] 
[forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][04c0ef83-4b72-4dac-b5b4-9c3e8ab8aed4] socks forwarding established\n2025-07-16 18:04:02.645 [info] [command][af46dfb0-4292-4c2f-bbff-d38233abe64b] Process exited with code 0\n2025-07-16 18:04:02.646 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][04c0ef83-4b72-4dac-b5b4-9c3e8ab8aed4] socks connection closed\n2025-07-16 18:04:02.646 [info] [command][af46dfb0-4292-4c2f-bbff-d38233abe64b] Socket close event received\n2025-07-16 18:04:02.668 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56427 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:05:02.648 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:05:02.650 [info] [command][8d170b7d-92f8-4c90-a978-d32c486d5852] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""8d170b7d-92f8-4c90-a978-d32c486d5852""}\n2025-07-16 18:05:02.650 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][e3b54e49-5a8d-4b76-a122-0cf1debc8d0e] received connection request\n2025-07-16 18:05:02.651 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:05:02.677 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e3b54e49-5a8d-4b76-a122-0cf1debc8d0e] socks forwarding established\n2025-07-16 18:05:02.707 [info] [command][8d170b7d-92f8-4c90-a978-d32c486d5852] Process exited with code 0\n2025-07-16 18:05:02.707 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e3b54e49-5a8d-4b76-a122-0cf1debc8d0e] socks connection closed\n2025-07-16 18:05:02.707 [info] [command][8d170b7d-92f8-4c90-a978-d32c486d5852] Socket close event received\n2025-07-16 18:05:02.732 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56474 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:06:02.711 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:06:02.713 [info] [command][0218de3e-ca49-4a48-bdad-605cf766876d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""0218de3e-ca49-4a48-bdad-605cf766876d""}\n2025-07-16 18:06:02.713 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][a5db0a32-3d6f-4f90-873f-a0ae022dd8e2] received connection request\n2025-07-16 18:06:02.714 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:06:02.742 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a5db0a32-3d6f-4f90-873f-a0ae022dd8e2] socks forwarding established\n2025-07-16 18:06:02.775 [info] [command][0218de3e-ca49-4a48-bdad-605cf766876d] Process exited with code 0\n2025-07-16 18:06:02.775 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a5db0a32-3d6f-4f90-873f-a0ae022dd8e2] socks connection closed\n2025-07-16 18:06:02.775 [info] [command][0218de3e-ca49-4a48-bdad-605cf766876d] Socket close event received\n2025-07-16 18:06:02.801 [info] (ssh_tunnel) stderr: debug1: channel 4: 
free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56501 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:07:02.780 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:07:02.783 [info] [command][7bf7b272-84d3-4008-996d-5f02df4e680e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""7bf7b272-84d3-4008-996d-5f02df4e680e""}\n2025-07-16 18:07:02.784 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][777c1c8b-6d06-47fe-9dc5-e2e9b186e819] received connection request\n2025-07-16 18:07:02.784 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:07:02.813 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][777c1c8b-6d06-47fe-9dc5-e2e9b186e819] socks forwarding established\n2025-07-16 18:07:02.840 [info] [command][7bf7b272-84d3-4008-996d-5f02df4e680e] Process exited with code 0\n2025-07-16 18:07:02.841 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][777c1c8b-6d06-47fe-9dc5-e2e9b186e819] socks connection closed\n2025-07-16 18:07:02.841 [info] [command][7bf7b272-84d3-4008-996d-5f02df4e680e] Socket close event received\n2025-07-16 18:07:02.863 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56543 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:08:02.845 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:08:02.845 [info] [command][a9f87c3d-1baf-4ea3-9108-acc316f11c06] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""a9f87c3d-1baf-4ea3-9108-acc316f11c06""}\n2025-07-16 18:08:02.846 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][8c95ea1c-55bf-47b0-81dc-5f85041e3739] received connection request\n2025-07-16 18:08:02.846 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:08:02.871 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8c95ea1c-55bf-47b0-81dc-5f85041e3739] socks forwarding established\n2025-07-16 18:08:02.900 [info] [command][a9f87c3d-1baf-4ea3-9108-acc316f11c06] Process exited with code 0\n2025-07-16 18:08:02.901 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8c95ea1c-55bf-47b0-81dc-5f85041e3739] socks connection closed\n2025-07-16 18:08:02.901 [info] [command][a9f87c3d-1baf-4ea3-9108-acc316f11c06] Socket close event received\n2025-07-16 18:08:02.925 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56584 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:09:02.902 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:09:02.904 [info] [command][10e016bd-e5ee-4d3a-9e97-4068bbd95c80] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""10e016bd-e5ee-4d3a-9e97-4068bbd95c80""}\n2025-07-16 18:09:02.905 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][37913b0b-97c7-4c85-90fa-5b6f47bc2d3d] 
received connection request\n2025-07-16 18:09:02.906 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:09:05.603 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][37913b0b-97c7-4c85-90fa-5b6f47bc2d3d] socks forwarding established\n2025-07-16 18:09:05.662 [info] [command][10e016bd-e5ee-4d3a-9e97-4068bbd95c80] Process exited with code 0\n2025-07-16 18:09:05.663 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][37913b0b-97c7-4c85-90fa-5b6f47bc2d3d] socks connection closed\n2025-07-16 18:09:05.663 [info] [command][10e016bd-e5ee-4d3a-9e97-4068bbd95c80] Socket close event received\n2025-07-16 18:09:05.688 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56619 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:10:05.668 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:10:05.669 [info] [command][c88ecf13-2f4c-49db-a22d-daba723d35b3] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""c88ecf13-2f4c-49db-a22d-daba723d35b3""}\n2025-07-16 18:10:05.669 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][96511d53-3059-498a-b8a2-26881f2cc06e] received connection request\n2025-07-16 18:10:05.669 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:10:06.075 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][96511d53-3059-498a-b8a2-26881f2cc06e] socks forwarding established\n2025-07-16 18:10:06.129 [info] [command][c88ecf13-2f4c-49db-a22d-daba723d35b3] Process exited with code 0\n2025-07-16 18:10:06.129 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][96511d53-3059-498a-b8a2-26881f2cc06e] socks connection closed\n2025-07-16 18:10:06.129 [info] [command][c88ecf13-2f4c-49db-a22d-daba723d35b3] Socket close event received\n2025-07-16 18:10:06.156 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56675 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:11:06.132 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:11:06.134 [info] [command][1b28b273-31d5-4b95-a05e-1da61ef63ff7] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""1b28b273-31d5-4b95-a05e-1da61ef63ff7""}\n2025-07-16 18:11:06.135 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d8e58008-f676-472e-80f1-eeb7f9b4dfa2] received connection request\n2025-07-16 18:11:06.135 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:11:06.161 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d8e58008-f676-472e-80f1-eeb7f9b4dfa2] socks forwarding established\n2025-07-16 18:11:06.189 [info] [command][1b28b273-31d5-4b95-a05e-1da61ef63ff7] Process exited with code 0\n2025-07-16 18:11:06.189 [info] [command][1b28b273-31d5-4b95-a05e-1da61ef63ff7] Socket 
close event received\n2025-07-16 18:11:06.212 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d8e58008-f676-472e-80f1-eeb7f9b4dfa2] socks connection closed\n2025-07-16 18:11:06.213 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56699 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:12:06.194 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:12:06.196 [info] [command][51abe081-4f28-4019-8729-0ff7b3764735] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""51abe081-4f28-4019-8729-0ff7b3764735""}\n2025-07-16 18:12:06.196 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][c8c303d6-2dca-4cdf-9e7c-dfd9465d2255] received connection request\n2025-07-16 18:12:06.197 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:12:06.222 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c8c303d6-2dca-4cdf-9e7c-dfd9465d2255] socks forwarding established\n2025-07-16 18:12:06.251 [info] [command][51abe081-4f28-4019-8729-0ff7b3764735] Process exited with code 0\n2025-07-16 18:12:06.251 [info] [command][51abe081-4f28-4019-8729-0ff7b3764735] Socket close event received\n2025-07-16 18:12:06.273 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][c8c303d6-2dca-4cdf-9e7c-dfd9465d2255] socks connection closed\n2025-07-16 18:12:06.277 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56749 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:13:06.256 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:13:06.259 [info] [command][51db9fd9-0d06-4e02-a4c7-ee128dfca8c4] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""51db9fd9-0d06-4e02-a4c7-ee128dfca8c4""}\n2025-07-16 18:13:06.260 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][a376e225-46fe-4b83-8fbd-f873c16d518e] received connection request\n2025-07-16 18:13:06.260 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:13:06.290 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a376e225-46fe-4b83-8fbd-f873c16d518e] socks forwarding established\n2025-07-16 18:13:06.324 [info] [command][51db9fd9-0d06-4e02-a4c7-ee128dfca8c4] Process exited with code 0\n2025-07-16 18:13:06.325 [info] [command][51db9fd9-0d06-4e02-a4c7-ee128dfca8c4] Socket close event received\n2025-07-16 18:13:06.348 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][a376e225-46fe-4b83-8fbd-f873c16d518e] socks connection closed\n2025-07-16 18:13:06.349 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56786 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:14:06.329 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:14:06.331 [info] [command][5ceb1b83-52b9-438f-baee-be4a39cd36b9] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""5ceb1b83-52b9-438f-baee-be4a39cd36b9""}\n2025-07-16 18:14:06.331 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][178ccfc3-dc89-407a-8c04-06faaf08f2cd] received connection request\n2025-07-16 18:14:06.331 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:14:06.355 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][178ccfc3-dc89-407a-8c04-06faaf08f2cd] socks forwarding established\n2025-07-16 18:14:06.383 [info] [command][5ceb1b83-52b9-438f-baee-be4a39cd36b9] Process exited with code 0\n2025-07-16 18:14:06.383 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][178ccfc3-dc89-407a-8c04-06faaf08f2cd] socks connection closed\n2025-07-16 18:14:06.383 [info] [command][5ceb1b83-52b9-438f-baee-be4a39cd36b9] Socket close event received\n2025-07-16 18:14:06.408 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56811 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:15:06.389 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:15:06.391 [info] [command][9fa8431a-bd46-4c61-9dbe-ed5bdbc204fa] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""9fa8431a-bd46-4c61-9dbe-ed5bdbc204fa""}\n2025-07-16 18:15:06.391 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][bfba70d3-6858-4d52-b031-70bada922d51] received connection request\n2025-07-16 18:15:06.391 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:15:06.416 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][bfba70d3-6858-4d52-b031-70bada922d51] socks forwarding established\n2025-07-16 18:15:06.441 [info] [command][9fa8431a-bd46-4c61-9dbe-ed5bdbc204fa] Process exited with code 0\n2025-07-16 18:15:06.441 [info] [command][9fa8431a-bd46-4c61-9dbe-ed5bdbc204fa] Socket close event received\n2025-07-16 18:15:06.463 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][bfba70d3-6858-4d52-b031-70bada922d51] socks connection closed\n2025-07-16 18:15:06.465 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56868 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:16:06.445 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:16:06.448 [info] [command][920f779d-4dd8-4039-aff2-6c699a2014c4] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""920f779d-4dd8-4039-aff2-6c699a2014c4""}\n2025-07-16 18:16:06.449 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][47a07665-74a5-4248-81a6-f20d1d20f147] received connection request\n2025-07-16 18:16:06.449 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:16:06.473 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][47a07665-74a5-4248-81a6-f20d1d20f147] socks forwarding established\n2025-07-16 18:16:06.502 [info] [command][920f779d-4dd8-4039-aff2-6c699a2014c4] Process exited with code 0\n2025-07-16 18:16:06.502 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][47a07665-74a5-4248-81a6-f20d1d20f147] socks connection closed\n2025-07-16 18:16:06.502 [info] [command][920f779d-4dd8-4039-aff2-6c699a2014c4] Socket close event received\n2025-07-16 18:16:06.526 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56898 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:17:06.508 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:17:06.510 [info] [command][4f510301-59f8-40dc-9465-5e4dd8eff8b2] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""4f510301-59f8-40dc-9465-5e4dd8eff8b2""}\n2025-07-16 18:17:06.511 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][820f34f1-8f9d-4b79-a058-10117f7f5261] received connection request\n2025-07-16 18:17:06.511 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:17:06.538 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][820f34f1-8f9d-4b79-a058-10117f7f5261] socks forwarding established\n2025-07-16 18:17:06.566 [info] [command][4f510301-59f8-40dc-9465-5e4dd8eff8b2] Process exited with code 0\n2025-07-16 18:17:06.566 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][820f34f1-8f9d-4b79-a058-10117f7f5261] socks connection closed\n2025-07-16 18:17:06.567 [info] [command][4f510301-59f8-40dc-9465-5e4dd8eff8b2] Socket close event received\n2025-07-16 18:17:06.590 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 56952 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:18:06.571 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:18:06.572 [info] [command][18383533-4f93-47af-b2c9-897dc49e1855] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""18383533-4f93-47af-b2c9-897dc49e1855""}\n2025-07-16 18:18:06.572 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][1c2421cd-1824-4bc6-bae4-5569eb6cec41] received connection request\n2025-07-16 18:18:06.572 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:18:06.595 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1c2421cd-1824-4bc6-bae4-5569eb6cec41] socks forwarding established\n2025-07-16 18:18:06.620 [info] [command][18383533-4f93-47af-b2c9-897dc49e1855] Process exited with code 0\n2025-07-16 18:18:06.620 [info] [command][18383533-4f93-47af-b2c9-897dc49e1855] Socket close event received\n2025-07-16 18:18:06.620 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1c2421cd-1824-4bc6-bae4-5569eb6cec41] socks connection closed\n2025-07-16 18:18:06.644 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 
37309, connect from 127.0.0.1 port 57003 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:19:06.625 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:19:06.627 [info] [command][880a641b-c54c-4b0a-8973-c71dae81813b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""880a641b-c54c-4b0a-8973-c71dae81813b""}\n2025-07-16 18:19:06.628 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][4baeda98-7457-44f8-a5e7-fe7119ca09eb] received connection request\n2025-07-16 18:19:06.628 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:19:06.653 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4baeda98-7457-44f8-a5e7-fe7119ca09eb] socks forwarding established\n2025-07-16 18:19:06.679 [info] [command][880a641b-c54c-4b0a-8973-c71dae81813b] Process exited with code 0\n2025-07-16 18:19:06.680 [info] [command][880a641b-c54c-4b0a-8973-c71dae81813b] Socket close event received\n2025-07-16 18:19:06.680 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4baeda98-7457-44f8-a5e7-fe7119ca09eb] socks connection closed\n2025-07-16 18:19:06.703 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57049 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:20:06.685 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:20:06.688 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][8acb2c78-9e46-4744-b0a9-252e66f07081] received connection request\n2025-07-16 18:20:06.688 [info] [command][2e6daa8b-4c9b-4abf-a15f-35d1d900bfeb] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""2e6daa8b-4c9b-4abf-a15f-35d1d900bfeb""}\n2025-07-16 18:20:06.689 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:20:06.715 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8acb2c78-9e46-4744-b0a9-252e66f07081] socks forwarding established\n2025-07-16 18:20:06.742 [info] [command][2e6daa8b-4c9b-4abf-a15f-35d1d900bfeb] Process exited with code 0\n2025-07-16 18:20:06.742 [info] [command][2e6daa8b-4c9b-4abf-a15f-35d1d900bfeb] Socket close event received\n2025-07-16 18:20:06.743 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8acb2c78-9e46-4744-b0a9-252e66f07081] socks connection closed\n2025-07-16 18:20:06.767 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57114 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:21:06.748 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:21:06.751 [info] [command][9b6a5090-11fb-43fa-b0f4-398a748b6a81] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""9b6a5090-11fb-43fa-b0f4-398a748b6a81""}\n2025-07-16 18:21:06.751 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][e4c9632f-0539-4ebe-bfc7-692b07a7e95f] received connection request\n2025-07-16 18:21:06.752 [info] 
(ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:21:06.898 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e4c9632f-0539-4ebe-bfc7-692b07a7e95f] socks forwarding established\n2025-07-16 18:21:07.005 [info] [command][9b6a5090-11fb-43fa-b0f4-398a748b6a81] Process exited with code 0\n2025-07-16 18:21:07.005 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e4c9632f-0539-4ebe-bfc7-692b07a7e95f] socks connection closed\n2025-07-16 18:21:07.006 [info] [command][9b6a5090-11fb-43fa-b0f4-398a748b6a81] Socket close event received\n2025-07-16 18:21:07.028 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57145 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:22:07.009 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:22:07.010 [info] [command][3aff3c68-b592-4a89-9cb0-b737d7e16e53] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""3aff3c68-b592-4a89-9cb0-b737d7e16e53""}\n2025-07-16 18:22:07.011 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0f0097b6-2e2f-4b1f-8b68-78d5e1f0507c] received connection request\n2025-07-16 18:22:07.011 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:22:07.164 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0f0097b6-2e2f-4b1f-8b68-78d5e1f0507c] socks forwarding established\n2025-07-16 18:22:07.190 [info] [command][3aff3c68-b592-4a89-9cb0-b737d7e16e53] Process exited with code 0\n2025-07-16 18:22:07.190 [info] [command][3aff3c68-b592-4a89-9cb0-b737d7e16e53] Socket close event received\n2025-07-16 18:22:07.191 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0f0097b6-2e2f-4b1f-8b68-78d5e1f0507c] socks connection closed\n2025-07-16 18:22:07.343 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57191 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:23:07.194 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:23:07.196 [info] [command][2a595a24-f984-4d54-8849-3887766893a6] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""2a595a24-f984-4d54-8849-3887766893a6""}\n2025-07-16 18:23:07.197 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][b2d1477c-b2de-4953-8ee2-2448a1b60022] received connection request\n2025-07-16 18:23:07.197 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:23:07.319 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b2d1477c-b2de-4953-8ee2-2448a1b60022] socks forwarding established\n2025-07-16 18:23:07.355 [info] [command][2a595a24-f984-4d54-8849-3887766893a6] Process exited with code 0\n2025-07-16 18:23:07.355 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][b2d1477c-b2de-4953-8ee2-2448a1b60022] 
socks connection closed\n2025-07-16 18:23:07.355 [info] [command][2a595a24-f984-4d54-8849-3887766893a6] Socket close event received\n2025-07-16 18:23:07.379 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57226 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:24:07.358 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:24:07.360 [info] [command][b7001eef-f2a4-4460-a602-923fb33d4e04] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""b7001eef-f2a4-4460-a602-923fb33d4e04""}\n2025-07-16 18:24:07.361 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][4f1d2d55-1817-4d23-92a6-dafb5bd8a7c3] received connection request\n2025-07-16 18:24:07.362 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:24:07.414 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4f1d2d55-1817-4d23-92a6-dafb5bd8a7c3] socks forwarding established\n2025-07-16 18:24:07.502 [info] [command][b7001eef-f2a4-4460-a602-923fb33d4e04] Process exited with code 0\n2025-07-16 18:24:07.502 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4f1d2d55-1817-4d23-92a6-dafb5bd8a7c3] socks connection closed\n2025-07-16 18:24:07.502 [info] [command][b7001eef-f2a4-4460-a602-923fb33d4e04] Socket close event received\n2025-07-16 18:24:07.526 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57263 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:25:07.507 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:25:07.508 [info] [command][fa6f01d5-35bc-4368-a212-168d03690bae] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""fa6f01d5-35bc-4368-a212-168d03690bae""}\n2025-07-16 18:25:07.509 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][e8f8904e-2fc6-4803-921f-a7d247531b34] received connection request\n2025-07-16 18:25:07.509 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:25:07.668 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e8f8904e-2fc6-4803-921f-a7d247531b34] socks forwarding established\n2025-07-16 18:25:07.695 [info] [command][fa6f01d5-35bc-4368-a212-168d03690bae] Process exited with code 0\n2025-07-16 18:25:07.695 [info] [command][fa6f01d5-35bc-4368-a212-168d03690bae] Socket close event received\n2025-07-16 18:25:07.845 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e8f8904e-2fc6-4803-921f-a7d247531b34] socks connection closed\n2025-07-16 18:25:07.845 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57311 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:26:07.696 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:26:07.698 [info] [command][ca5b8080-ba2f-42fa-a00c-5b74b1ab0f3d] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ca5b8080-ba2f-42fa-a00c-5b74b1ab0f3d""}\n2025-07-16 18:26:07.699 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][51fa499d-d6d3-4f63-a373-bb7199aa1a52] received connection request\n2025-07-16 18:26:07.700 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:26:07.831 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][51fa499d-d6d3-4f63-a373-bb7199aa1a52] socks forwarding established\n2025-07-16 18:26:07.991 [info] [command][ca5b8080-ba2f-42fa-a00c-5b74b1ab0f3d] Process exited with code 0\n2025-07-16 18:26:07.991 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][51fa499d-d6d3-4f63-a373-bb7199aa1a52] socks connection closed\n2025-07-16 18:26:07.992 [info] [command][ca5b8080-ba2f-42fa-a00c-5b74b1ab0f3d] Socket close event received\n2025-07-16 18:26:08.017 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57351 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:27:07.997 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:27:07.999 [info] [command][f7515a7c-73bf-4d3b-9559-e6e78abecd89] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""f7515a7c-73bf-4d3b-9559-e6e78abecd89""}\n2025-07-16 18:27:07.999 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][223992d3-5d18-4bb7-b08b-a351a8a8199f] received connection request\n2025-07-16 18:27:07.999 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 18:27:07.999 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:27:08.024 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][223992d3-5d18-4bb7-b08b-a351a8a8199f] socks forwarding established\n2025-07-16 18:27:08.050 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][223992d3-5d18-4bb7-b08b-a351a8a8199f] socks connection closed\n2025-07-16 18:27:08.050 [info] [command][f7515a7c-73bf-4d3b-9559-e6e78abecd89] Process exited with code 0\n2025-07-16 18:27:08.050 [info] [command][f7515a7c-73bf-4d3b-9559-e6e78abecd89] Socket close event received\n2025-07-16 18:27:08.075 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57409 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:28:08.053 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:28:08.056 [info] [command][bfd4e9fc-f508-461b-bd3b-c37b66ccfb50] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""bfd4e9fc-f508-461b-bd3b-c37b66ccfb50""}\n2025-07-16 18:28:08.056 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][fb91b3fa-2cfd-4c7c-a0fb-eec3cd71a6d6] received connection request\n2025-07-16 18:28:08.057 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:28:08.080 [info] 
[forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][fb91b3fa-2cfd-4c7c-a0fb-eec3cd71a6d6] socks forwarding established\n2025-07-16 18:28:08.109 [info] [command][bfd4e9fc-f508-461b-bd3b-c37b66ccfb50] Process exited with code 0\n2025-07-16 18:28:08.109 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][fb91b3fa-2cfd-4c7c-a0fb-eec3cd71a6d6] socks connection closed\n2025-07-16 18:28:08.109 [info] [command][bfd4e9fc-f508-461b-bd3b-c37b66ccfb50] Socket close event received\n2025-07-16 18:28:08.134 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57462 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:29:08.110 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:29:08.112 [info] [command][debfdf1c-f542-43c4-b156-dd1dc5aa15f3] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""debfdf1c-f542-43c4-b156-dd1dc5aa15f3""}\n2025-07-16 18:29:08.112 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0ae41754-5108-4211-b69c-b11df674d346] received connection request\n2025-07-16 18:29:08.113 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:29:08.137 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0ae41754-5108-4211-b69c-b11df674d346] socks forwarding established\n2025-07-16 18:29:08.165 [info] [command][debfdf1c-f542-43c4-b156-dd1dc5aa15f3] Process exited with code 0\n2025-07-16 18:29:08.165 [info] [command][debfdf1c-f542-43c4-b156-dd1dc5aa15f3] Socket close event received\n2025-07-16 18:29:08.189 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57487 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:29:08.189 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0ae41754-5108-4211-b69c-b11df674d346] socks connection closed\n2025-07-16 18:30:08.167 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:30:08.169 [info] [command][ce838c67-08da-4f13-8540-123172d58f3b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ce838c67-08da-4f13-8540-123172d58f3b""}\n2025-07-16 18:30:08.169 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][9c03a2ae-bcae-4801-9266-9fab1ab46ebd] received connection request\n2025-07-16 18:30:08.170 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:30:08.194 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][9c03a2ae-bcae-4801-9266-9fab1ab46ebd] socks forwarding established\n2025-07-16 18:30:08.338 [info] [command][ce838c67-08da-4f13-8540-123172d58f3b] Process exited with code 0\n2025-07-16 18:30:08.338 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][9c03a2ae-bcae-4801-9266-9fab1ab46ebd] socks connection closed\n2025-07-16 18:30:08.338 [info] [command][ce838c67-08da-4f13-8540-123172d58f3b] Socket close event received\n2025-07-16 18:30:08.361 [info] (ssh_tunnel) stderr: debug1: channel 4: 
free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57522 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:31:08.344 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:31:08.347 [info] [command][bf6011b9-bc6f-4ed8-b73b-c21caf74a26a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""bf6011b9-bc6f-4ed8-b73b-c21caf74a26a""}\n2025-07-16 18:31:08.348 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][1e586414-9143-4826-9366-435d5f017d74] received connection request\n2025-07-16 18:31:08.349 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:31:08.377 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1e586414-9143-4826-9366-435d5f017d74] socks forwarding established\n2025-07-16 18:31:08.419 [info] [command][bf6011b9-bc6f-4ed8-b73b-c21caf74a26a] Process exited with code 0\n2025-07-16 18:31:08.420 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1e586414-9143-4826-9366-435d5f017d74] socks connection closed\n2025-07-16 18:31:08.420 [info] [command][bf6011b9-bc6f-4ed8-b73b-c21caf74a26a] Socket close event received\n2025-07-16 18:31:08.446 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57578 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:32:08.421 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:32:08.423 [info] [command][045de876-173c-423a-a516-ccf574ddfc0f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""045de876-173c-423a-a516-ccf574ddfc0f""}\n2025-07-16 18:32:08.424 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][5538cdde-fea4-461d-bcbd-59652f6dfec4] received connection request\n2025-07-16 18:32:08.425 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:32:08.455 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5538cdde-fea4-461d-bcbd-59652f6dfec4] socks forwarding established\n2025-07-16 18:32:08.483 [info] [command][045de876-173c-423a-a516-ccf574ddfc0f] Process exited with code 0\n2025-07-16 18:32:08.483 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5538cdde-fea4-461d-bcbd-59652f6dfec4] socks connection closed\n2025-07-16 18:32:08.483 [info] [command][045de876-173c-423a-a516-ccf574ddfc0f] Socket close event received\n2025-07-16 18:32:08.507 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57618 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:33:08.486 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:33:08.488 [info] [command][eb20d259-d3f9-44f5-8d2b-63cc90c40144] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""eb20d259-d3f9-44f5-8d2b-63cc90c40144""}\n2025-07-16 18:33:08.488 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][6c2240b3-84bb-4974-acb9-fb6a5ba85cdf] 
received connection request\n2025-07-16 18:33:08.489 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:33:08.514 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6c2240b3-84bb-4974-acb9-fb6a5ba85cdf] socks forwarding established\n2025-07-16 18:33:08.542 [info] [command][eb20d259-d3f9-44f5-8d2b-63cc90c40144] Process exited with code 0\n2025-07-16 18:33:08.542 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][6c2240b3-84bb-4974-acb9-fb6a5ba85cdf] socks connection closed\n2025-07-16 18:33:08.542 [info] [command][eb20d259-d3f9-44f5-8d2b-63cc90c40144] Socket close event received\n2025-07-16 18:33:08.566 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57656 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:34:08.545 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:34:08.548 [info] [command][250e232c-e716-4a43-ac88-13dea2997513] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""250e232c-e716-4a43-ac88-13dea2997513""}\n2025-07-16 18:34:08.549 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][3e71765c-d49d-45c6-8ecb-b201dc32f9b4] received connection request\n2025-07-16 18:34:08.549 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:34:08.573 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3e71765c-d49d-45c6-8ecb-b201dc32f9b4] socks forwarding established\n2025-07-16 18:34:08.600 [info] [command][250e232c-e716-4a43-ac88-13dea2997513] Process exited with code 0\n2025-07-16 18:34:08.600 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3e71765c-d49d-45c6-8ecb-b201dc32f9b4] socks connection closed\n2025-07-16 18:34:08.600 [info] [command][250e232c-e716-4a43-ac88-13dea2997513] Socket close event received\n2025-07-16 18:34:08.624 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57681 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:35:08.601 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:35:08.603 [info] [command][e45a35c3-63a5-44fd-9f2b-4070afa8b1e3] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""e45a35c3-63a5-44fd-9f2b-4070afa8b1e3""}\n2025-07-16 18:35:08.604 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0b4fea10-dbff-44c2-990a-1ea094f718cb] received connection request\n2025-07-16 18:35:08.605 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:35:08.630 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0b4fea10-dbff-44c2-990a-1ea094f718cb] socks forwarding established\n2025-07-16 18:35:08.657 [info] [command][e45a35c3-63a5-44fd-9f2b-4070afa8b1e3] Process exited with code 0\n2025-07-16 18:35:08.657 [info] [command][e45a35c3-63a5-44fd-9f2b-4070afa8b1e3] Socket 
close event received\n2025-07-16 18:35:08.657 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0b4fea10-dbff-44c2-990a-1ea094f718cb] socks connection closed\n2025-07-16 18:35:08.680 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57730 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:36:08.658 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:36:08.661 [info] [command][883d9cc1-0daa-429d-a28e-257887dd705e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""883d9cc1-0daa-429d-a28e-257887dd705e""}\n2025-07-16 18:36:08.661 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][8a6719a1-6dca-478e-bb8b-7ce69478ca44] received connection request\n2025-07-16 18:36:08.662 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:36:08.688 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8a6719a1-6dca-478e-bb8b-7ce69478ca44] socks forwarding established\n2025-07-16 18:36:08.716 [info] [command][883d9cc1-0daa-429d-a28e-257887dd705e] Process exited with code 0\n2025-07-16 18:36:08.716 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8a6719a1-6dca-478e-bb8b-7ce69478ca44] socks connection closed\n2025-07-16 18:36:08.716 [info] [command][883d9cc1-0daa-429d-a28e-257887dd705e] Socket close event received\n2025-07-16 18:36:08.740 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57755 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:37:08.721 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:37:08.722 [info] [command][8aa87846-648c-4930-a8e4-1e69564c517f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""8aa87846-648c-4930-a8e4-1e69564c517f""}\n2025-07-16 18:37:08.723 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][8c58b7df-bc37-447a-a5ff-2dff3cba37ed] received connection request\n2025-07-16 18:37:08.724 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:37:08.748 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8c58b7df-bc37-447a-a5ff-2dff3cba37ed] socks forwarding established\n2025-07-16 18:37:08.775 [info] [command][8aa87846-648c-4930-a8e4-1e69564c517f] Process exited with code 0\n2025-07-16 18:37:08.775 [info] [command][8aa87846-648c-4930-a8e4-1e69564c517f] Socket close event received\n2025-07-16 18:37:08.776 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][8c58b7df-bc37-447a-a5ff-2dff3cba37ed] socks connection closed\n2025-07-16 18:37:08.801 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57805 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:38:08.777 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:38:08.780 [info] [command][215aee5b-d857-447e-a5f2-98aa0b8378da] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""215aee5b-d857-447e-a5f2-98aa0b8378da""}\n2025-07-16 18:38:08.780 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d3d8cc5e-138e-4066-8e51-6620702e6e4e] received connection request\n2025-07-16 18:38:08.781 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:38:08.805 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d3d8cc5e-138e-4066-8e51-6620702e6e4e] socks forwarding established\n2025-07-16 18:38:08.832 [info] [command][215aee5b-d857-447e-a5f2-98aa0b8378da] Process exited with code 0\n2025-07-16 18:38:08.832 [info] [command][215aee5b-d857-447e-a5f2-98aa0b8378da] Socket close event received\n2025-07-16 18:38:08.856 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57848 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:38:08.856 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d3d8cc5e-138e-4066-8e51-6620702e6e4e] socks connection closed\n2025-07-16 18:39:08.836 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:39:08.837 [info] [command][ae02faa5-7ac5-49f7-bfe6-c688ad3b25b2] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ae02faa5-7ac5-49f7-bfe6-c688ad3b25b2""}\n2025-07-16 18:39:08.838 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d5b04b05-d122-4c76-94cf-badf361bd431] received connection request\n2025-07-16 18:39:08.838 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:39:08.863 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d5b04b05-d122-4c76-94cf-badf361bd431] socks forwarding established\n2025-07-16 18:39:08.888 [info] [command][ae02faa5-7ac5-49f7-bfe6-c688ad3b25b2] Process exited with code 0\n2025-07-16 18:39:08.888 [info] [command][ae02faa5-7ac5-49f7-bfe6-c688ad3b25b2] Socket close event received\n2025-07-16 18:39:08.889 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d5b04b05-d122-4c76-94cf-badf361bd431] socks connection closed\n2025-07-16 18:39:08.914 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57894 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:40:08.894 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:40:08.896 [info] [command][9f6ca6a2-8e68-468e-ada3-d4a43bc56306] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""9f6ca6a2-8e68-468e-ada3-d4a43bc56306""}\n2025-07-16 18:40:08.897 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][04bd3a7c-cca7-4f76-9f2c-169681db447c] received connection request\n2025-07-16 18:40:08.897 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:40:08.922 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][04bd3a7c-cca7-4f76-9f2c-169681db447c] socks forwarding established\n2025-07-16 18:40:08.949 [info] [command][9f6ca6a2-8e68-468e-ada3-d4a43bc56306] Process exited with code 0\n2025-07-16 18:40:08.949 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][04bd3a7c-cca7-4f76-9f2c-169681db447c] socks connection closed\n2025-07-16 18:40:08.949 [info] [command][9f6ca6a2-8e68-468e-ada3-d4a43bc56306] Socket close event received\n2025-07-16 18:40:08.974 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57941 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:41:08.953 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:41:08.954 [info] [command][fcbd64b5-5c73-4d48-820a-eb4f80d47076] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""fcbd64b5-5c73-4d48-820a-eb4f80d47076""}\n2025-07-16 18:41:08.954 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][41823ca2-37b0-4e14-834b-73a5d78f5d6f] received connection request\n2025-07-16 18:41:08.955 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 18:41:08.955 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:41:08.979 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][41823ca2-37b0-4e14-834b-73a5d78f5d6f] socks forwarding established\n2025-07-16 18:41:09.004 [info] [command][fcbd64b5-5c73-4d48-820a-eb4f80d47076] Process exited with code 0\n2025-07-16 18:41:09.004 [info] [command][fcbd64b5-5c73-4d48-820a-eb4f80d47076] Socket close event received\n2025-07-16 18:41:09.005 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][41823ca2-37b0-4e14-834b-73a5d78f5d6f] socks connection closed\n2025-07-16 18:41:09.026 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 57973 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:42:09.010 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:42:09.011 [info] [command][064d2347-f40a-4bf8-a388-b5cd92932b87] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""064d2347-f40a-4bf8-a388-b5cd92932b87""}\n2025-07-16 18:42:09.011 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][1df123f7-448a-44dd-acde-cc585fb25bd4] received connection request\n2025-07-16 18:42:09.011 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:42:09.035 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1df123f7-448a-44dd-acde-cc585fb25bd4] socks forwarding established\n2025-07-16 18:42:09.060 [info] [command][064d2347-f40a-4bf8-a388-b5cd92932b87] Process exited with code 0\n2025-07-16 18:42:09.060 [info] [command][064d2347-f40a-4bf8-a388-b5cd92932b87] Socket close event received\n2025-07-16 18:42:09.060 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1df123f7-448a-44dd-acde-cc585fb25bd4] socks connection closed\n2025-07-16 18:42:09.083 [info] (ssh_tunnel) stderr: debug1: channel 4: free: 
direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58037 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:43:09.060 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:43:09.061 [info] [command][ad8d5bd8-64b5-49ed-b315-95d9fc99e047] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ad8d5bd8-64b5-49ed-b315-95d9fc99e047""}\n2025-07-16 18:43:09.062 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][80650c4d-ddbf-4ddd-a92a-6f0e9f02aa1f] received connection request\n2025-07-16 18:43:09.062 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:43:09.085 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][80650c4d-ddbf-4ddd-a92a-6f0e9f02aa1f] socks forwarding established\n2025-07-16 18:43:09.194 [info] [command][ad8d5bd8-64b5-49ed-b315-95d9fc99e047] Process exited with code 0\n2025-07-16 18:43:09.194 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][80650c4d-ddbf-4ddd-a92a-6f0e9f02aa1f] socks connection closed\n2025-07-16 18:43:09.194 [info] [command][ad8d5bd8-64b5-49ed-b315-95d9fc99e047] Socket close event received\n2025-07-16 18:43:09.218 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58087 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:44:09.200 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:44:09.201 [info] [command][331bb60a-b8a8-4e72-8ede-956578ac46c0] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""331bb60a-b8a8-4e72-8ede-956578ac46c0""}\n2025-07-16 18:44:09.202 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][d1d990f8-7e9c-4cb5-9bd0-f09c58fda6ea] received connection request\n2025-07-16 18:44:09.202 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:44:09.273 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d1d990f8-7e9c-4cb5-9bd0-f09c58fda6ea] socks forwarding established\n2025-07-16 18:44:09.431 [info] [command][331bb60a-b8a8-4e72-8ede-956578ac46c0] Process exited with code 0\n2025-07-16 18:44:09.432 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][d1d990f8-7e9c-4cb5-9bd0-f09c58fda6ea] socks connection closed\n2025-07-16 18:44:09.432 [info] [command][331bb60a-b8a8-4e72-8ede-956578ac46c0] Socket close event received\n2025-07-16 18:44:09.455 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58118 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:45:09.437 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:45:09.439 [info] [command][7fb44d7b-fbd6-4e88-9642-d620989f1f83] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""7fb44d7b-fbd6-4e88-9642-d620989f1f83""}\n2025-07-16 18:45:09.439 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][0fa885c7-7810-4753-9732-734d2719dc8e] 
received connection request\n2025-07-16 18:45:09.440 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:45:09.463 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0fa885c7-7810-4753-9732-734d2719dc8e] socks forwarding established\n2025-07-16 18:45:09.491 [info] [command][7fb44d7b-fbd6-4e88-9642-d620989f1f83] Process exited with code 0\n2025-07-16 18:45:09.491 [info] [command][7fb44d7b-fbd6-4e88-9642-d620989f1f83] Socket close event received\n2025-07-16 18:45:09.493 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][0fa885c7-7810-4753-9732-734d2719dc8e] socks connection closed\n2025-07-16 18:45:09.522 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58172 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:46:09.491 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:46:09.493 [info] [command][a7b91046-9202-49fa-916b-0755b5e2cd9c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""a7b91046-9202-49fa-916b-0755b5e2cd9c""}\n2025-07-16 18:46:09.494 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][31213f92-a998-4911-9eac-b3b7db74b0f7] received connection request\n2025-07-16 18:46:09.494 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:46:09.521 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][31213f92-a998-4911-9eac-b3b7db74b0f7] socks forwarding established\n2025-07-16 18:46:09.549 [info] [command][a7b91046-9202-49fa-916b-0755b5e2cd9c] Process exited with code 0\n2025-07-16 18:46:09.549 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][31213f92-a998-4911-9eac-b3b7db74b0f7] socks connection closed\n2025-07-16 18:46:09.549 [info] [command][a7b91046-9202-49fa-916b-0755b5e2cd9c] Socket close event received\n2025-07-16 18:46:09.573 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 58209 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 18:47:09.551 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 18:47:09.553 [info] [command][56eb7aaa-9a6d-409d-b14c-0c2c04f604ad] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""56eb7aaa-9a6d-409d-b14c-0c2c04f604ad""}\n2025-07-16 18:47:09.553 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][3134e6ea-500b-474d-8aed-0adf7a0dfe78] received connection request\n2025-07-16 18:47:09.553 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 18:47:09.579 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][3134e6ea-500b-474d-8aed-0adf7a0dfe78] socks forwarding established\n2025-07-16 18:47:09.607 [info] [command][56eb7aaa-9a6d-409d-b14c-0c2c04f604ad] Process exited with code 0\n2025-07-16 18:47:09.608 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 
[Remote-SSH output channel: keep-alive cycle repeated roughly once per minute from 2025-07-16 18:47 to 19:38. Each cycle logs ""Pinging remote server on port 57978"", an echo command request ({""command"":""echo"",""args"":[""1""],...}), a multiplex forwarding connection 127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309 (socks forwarding established, then closed), ""Process exited with code 0"", ""Socket close event received"", and an ssh_tunnel debug1 message freeing the direct-tcpip channel; only the timestamps, request/connection UUIDs, and the ephemeral client port differ between cycles.]
[command][e36a8880-1a15-40a0-94f9-e31e6c5da5cf] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""e36a8880-1a15-40a0-94f9-e31e6c5da5cf""}\n2025-07-16 19:38:14.451 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][268a1129-7b3f-4fbf-8a73-f989e6981f89] received connection request\n2025-07-16 19:38:14.452 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:38:14.476 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][268a1129-7b3f-4fbf-8a73-f989e6981f89] socks forwarding established\n2025-07-16 19:38:14.502 [info] [command][e36a8880-1a15-40a0-94f9-e31e6c5da5cf] Process exited with code 0\n2025-07-16 19:38:14.502 [info] [command][e36a8880-1a15-40a0-94f9-e31e6c5da5cf] Socket close event received\n2025-07-16 19:38:14.526 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][268a1129-7b3f-4fbf-8a73-f989e6981f89] socks connection closed\n2025-07-16 19:38:14.527 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60564 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:39:14.507 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:39:14.510 [info] [command][7991d014-54ed-43e2-b1e0-154261d1f54e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""7991d014-54ed-43e2-b1e0-154261d1f54e""}\n2025-07-16 19:39:14.511 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][301aa61d-72d9-4afd-956c-9dbfaa9d1f4a] received connection request\n2025-07-16 19:39:14.512 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:39:14.663 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][301aa61d-72d9-4afd-956c-9dbfaa9d1f4a] socks forwarding established\n2025-07-16 19:39:14.715 [info] [command][7991d014-54ed-43e2-b1e0-154261d1f54e] Process exited with code 0\n2025-07-16 19:39:14.715 [info] [command][7991d014-54ed-43e2-b1e0-154261d1f54e] Socket close event received\n2025-07-16 19:39:14.800 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][301aa61d-72d9-4afd-956c-9dbfaa9d1f4a] socks connection closed\n2025-07-16 19:39:14.821 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60592 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:40:14.720 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:40:14.723 [info] [command][22d25dd6-87d1-4412-98f0-b7b226fb930a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""22d25dd6-87d1-4412-98f0-b7b226fb930a""}\n2025-07-16 19:40:14.723 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][ca5b1e69-c868-4fb8-99c4-e45faa397906] received connection request\n2025-07-16 19:40:14.724 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 
19:40:14.791 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ca5b1e69-c868-4fb8-99c4-e45faa397906] socks forwarding established\n2025-07-16 19:40:14.954 [info] [command][22d25dd6-87d1-4412-98f0-b7b226fb930a] Process exited with code 0\n2025-07-16 19:40:14.954 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ca5b1e69-c868-4fb8-99c4-e45faa397906] socks connection closed\n2025-07-16 19:40:14.955 [info] [command][22d25dd6-87d1-4412-98f0-b7b226fb930a] Socket close event received\n2025-07-16 19:40:14.980 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60631 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:41:14.960 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:41:14.962 [info] [command][e9a837eb-303f-48f0-8b43-d77bd28fe483] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""e9a837eb-303f-48f0-8b43-d77bd28fe483""}\n2025-07-16 19:41:14.962 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][dada907d-10a7-4a32-be07-1d54983d01a0] received connection request\n2025-07-16 19:41:14.962 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:41:14.990 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][dada907d-10a7-4a32-be07-1d54983d01a0] socks forwarding established\n2025-07-16 19:41:15.015 [info] [command][e9a837eb-303f-48f0-8b43-d77bd28fe483] Process exited with code 0\n2025-07-16 19:41:15.015 [info] [command][e9a837eb-303f-48f0-8b43-d77bd28fe483] Socket close event received\n2025-07-16 19:41:15.015 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][dada907d-10a7-4a32-be07-1d54983d01a0] socks connection closed\n2025-07-16 19:41:15.038 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60660 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:42:15.018 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:42:15.020 [info] [command][c459cd24-328a-481d-b0ad-cb98bb95e43d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""c459cd24-328a-481d-b0ad-cb98bb95e43d""}\n2025-07-16 19:42:15.021 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][1df764e9-8b7a-4713-ad2a-d42060db4c59] received connection request\n2025-07-16 19:42:15.022 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:42:15.045 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1df764e9-8b7a-4713-ad2a-d42060db4c59] socks forwarding established\n2025-07-16 19:42:15.071 [info] [command][c459cd24-328a-481d-b0ad-cb98bb95e43d] Process exited with code 0\n2025-07-16 19:42:15.071 [info] [command][c459cd24-328a-481d-b0ad-cb98bb95e43d] Socket close event received\n2025-07-16 19:42:15.071 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][1df764e9-8b7a-4713-ad2a-d42060db4c59] socks connection closed\n2025-07-16 19:42:15.096 [info] (ssh_tunnel) stderr: 
debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60715 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:43:15.077 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:43:15.079 [info] [command][ec0a1817-9581-4330-a711-9e01921fd734] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""ec0a1817-9581-4330-a711-9e01921fd734""}\n2025-07-16 19:43:15.079 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][658685fa-3c79-4845-a806-dfa97d6c615c] received connection request\n2025-07-16 19:43:15.080 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 19:43:15.080 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:43:15.105 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][658685fa-3c79-4845-a806-dfa97d6c615c] socks forwarding established\n2025-07-16 19:43:15.135 [info] [command][ec0a1817-9581-4330-a711-9e01921fd734] Process exited with code 0\n2025-07-16 19:43:15.135 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][658685fa-3c79-4845-a806-dfa97d6c615c] socks connection closed\n2025-07-16 19:43:15.135 [info] [command][ec0a1817-9581-4330-a711-9e01921fd734] Socket close event received\n2025-07-16 19:43:15.160 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60743 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:44:15.140 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:44:15.143 [info] [command][4f2c543e-7261-4d96-ad0c-3d1849a3c385] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""4f2c543e-7261-4d96-ad0c-3d1849a3c385""}\n2025-07-16 19:44:15.144 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][13eef585-01ed-47dc-8e2a-0cd4e74603cf] received connection request\n2025-07-16 19:44:15.145 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:44:15.171 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][13eef585-01ed-47dc-8e2a-0cd4e74603cf] socks forwarding established\n2025-07-16 19:44:15.199 [info] [command][4f2c543e-7261-4d96-ad0c-3d1849a3c385] Process exited with code 0\n2025-07-16 19:44:15.199 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][13eef585-01ed-47dc-8e2a-0cd4e74603cf] socks connection closed\n2025-07-16 19:44:15.199 [info] [command][4f2c543e-7261-4d96-ad0c-3d1849a3c385] Socket close event received\n2025-07-16 19:44:15.223 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60774 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:45:15.205 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:45:15.207 [info] [command][d5b5f214-a142-4630-9335-8439f5c7bbcd] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""d5b5f214-a142-4630-9335-8439f5c7bbcd""}\n2025-07-16 19:45:15.207 [info] 
[forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][ebbc8f67-91e0-4795-b483-5493265db727] received connection request\n2025-07-16 19:45:15.208 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:45:15.239 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ebbc8f67-91e0-4795-b483-5493265db727] socks forwarding established\n2025-07-16 19:45:15.398 [info] [command][d5b5f214-a142-4630-9335-8439f5c7bbcd] Process exited with code 0\n2025-07-16 19:45:15.399 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][ebbc8f67-91e0-4795-b483-5493265db727] socks connection closed\n2025-07-16 19:45:15.399 [info] [command][d5b5f214-a142-4630-9335-8439f5c7bbcd] Socket close event received\n2025-07-16 19:45:15.425 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60817 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:46:15.403 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:46:15.404 [info] [command][567940e4-fde4-4d0f-b55e-99c8ee4e2feb] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""567940e4-fde4-4d0f-b55e-99c8ee4e2feb""}\n2025-07-16 19:46:15.404 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][31dc2856-0af6-4634-b5f1-c832cb520e7a] received connection request\n2025-07-16 19:46:15.404 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 19:46:15.404 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:46:15.515 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][31dc2856-0af6-4634-b5f1-c832cb520e7a] socks forwarding established\n2025-07-16 19:46:15.542 [info] [command][567940e4-fde4-4d0f-b55e-99c8ee4e2feb] Process exited with code 0\n2025-07-16 19:46:15.542 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][31dc2856-0af6-4634-b5f1-c832cb520e7a] socks connection closed\n2025-07-16 19:46:15.542 [info] [command][567940e4-fde4-4d0f-b55e-99c8ee4e2feb] Socket close event received\n2025-07-16 19:46:15.639 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60840 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:47:15.548 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:47:15.549 [info] [command][a55f01a8-7c3c-44cc-b59c-5f2ffeff7346] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""a55f01a8-7c3c-44cc-b59c-5f2ffeff7346""}\n2025-07-16 19:47:15.550 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][912cd98f-eaee-4494-b76e-228431da4c8c] received connection request\n2025-07-16 19:47:15.550 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 19:47:15.550 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:47:15.574 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][912cd98f-eaee-4494-b76e-228431da4c8c] socks forwarding 
established\n2025-07-16 19:47:15.602 [info] [command][a55f01a8-7c3c-44cc-b59c-5f2ffeff7346] Process exited with code 0\n2025-07-16 19:47:15.602 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][912cd98f-eaee-4494-b76e-228431da4c8c] socks connection closed\n2025-07-16 19:47:15.602 [info] [command][a55f01a8-7c3c-44cc-b59c-5f2ffeff7346] Socket close event received\n2025-07-16 19:47:15.626 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60898 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:48:15.603 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:48:15.606 [info] [command][c72c8736-b1b0-4617-87dd-a2c2df8aae8d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""c72c8736-b1b0-4617-87dd-a2c2df8aae8d""}\n2025-07-16 19:48:15.606 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][2c610c44-1af4-4f85-8fb1-70cdd8a09f61] received connection request\n2025-07-16 19:48:15.607 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:48:15.733 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][2c610c44-1af4-4f85-8fb1-70cdd8a09f61] socks forwarding established\n2025-07-16 19:48:15.847 [info] [command][c72c8736-b1b0-4617-87dd-a2c2df8aae8d] Process exited with code 0\n2025-07-16 19:48:15.847 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][2c610c44-1af4-4f85-8fb1-70cdd8a09f61] socks connection closed\n2025-07-16 19:48:15.848 [info] [command][c72c8736-b1b0-4617-87dd-a2c2df8aae8d] Socket close event received\n2025-07-16 19:48:15.873 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60922 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:49:15.853 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:49:15.854 [info] [command][7c0c3acd-e1fa-456e-a260-bfe75eef4368] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""7c0c3acd-e1fa-456e-a260-bfe75eef4368""}\n2025-07-16 19:49:15.855 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][410dbeb6-e490-4d74-a163-880c00404d3a] received connection request\n2025-07-16 19:49:15.856 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:49:15.884 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][410dbeb6-e490-4d74-a163-880c00404d3a] socks forwarding established\n2025-07-16 19:49:15.914 [info] [command][7c0c3acd-e1fa-456e-a260-bfe75eef4368] Process exited with code 0\n2025-07-16 19:49:15.914 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][410dbeb6-e490-4d74-a163-880c00404d3a] socks connection closed\n2025-07-16 19:49:15.915 [info] [command][7c0c3acd-e1fa-456e-a260-bfe75eef4368] Socket close event received\n2025-07-16 19:49:15.938 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60948 to 127.0.0.1 port 57974, 
nchannels 6\n\n2025-07-16 19:50:15.918 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:50:15.919 [info] [command][49d68fe5-e128-408a-920c-0fae3c3aaf46] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""49d68fe5-e128-408a-920c-0fae3c3aaf46""}\n2025-07-16 19:50:15.920 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][4145100c-3df8-41d5-8007-2b3f0e311cd7] received connection request\n2025-07-16 19:50:15.921 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:50:15.947 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4145100c-3df8-41d5-8007-2b3f0e311cd7] socks forwarding established\n2025-07-16 19:50:15.973 [info] [command][49d68fe5-e128-408a-920c-0fae3c3aaf46] Process exited with code 0\n2025-07-16 19:50:15.973 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][4145100c-3df8-41d5-8007-2b3f0e311cd7] socks connection closed\n2025-07-16 19:50:15.974 [info] [command][49d68fe5-e128-408a-920c-0fae3c3aaf46] Socket close event received\n2025-07-16 19:50:15.997 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 60990 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:51:15.979 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:51:15.981 [info] [command][02e2b5fe-ede7-4594-82c9-ccc94cfadb13] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""02e2b5fe-ede7-4594-82c9-ccc94cfadb13""}\n2025-07-16 19:51:15.981 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][9b5f1189-c651-40f0-846e-e2c0dbe23088] received connection request\n2025-07-16 19:51:15.982 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 19:51:15.982 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:51:16.005 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][9b5f1189-c651-40f0-846e-e2c0dbe23088] socks forwarding established\n2025-07-16 19:51:16.032 [info] [command][02e2b5fe-ede7-4594-82c9-ccc94cfadb13] Process exited with code 0\n2025-07-16 19:51:16.032 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][9b5f1189-c651-40f0-846e-e2c0dbe23088] socks connection closed\n2025-07-16 19:51:16.032 [info] [command][02e2b5fe-ede7-4594-82c9-ccc94cfadb13] Socket close event received\n2025-07-16 19:51:16.056 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61015 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:52:16.039 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:52:16.041 [info] [command][08b00750-6b7c-4f3d-ac78-32521e43e8d9] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""08b00750-6b7c-4f3d-ac78-32521e43e8d9""}\n2025-07-16 19:52:16.041 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][731c4878-8ecd-4cbc-8758-dfd111398227] received connection request\n2025-07-16 19:52:16.042 [info] (ssh_tunnel) 
stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:52:16.069 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][731c4878-8ecd-4cbc-8758-dfd111398227] socks forwarding established\n2025-07-16 19:52:16.143 [info] [command][08b00750-6b7c-4f3d-ac78-32521e43e8d9] Process exited with code 0\n2025-07-16 19:52:16.144 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][731c4878-8ecd-4cbc-8758-dfd111398227] socks connection closed\n2025-07-16 19:52:16.144 [info] [command][08b00750-6b7c-4f3d-ac78-32521e43e8d9] Socket close event received\n2025-07-16 19:52:16.229 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61068 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:53:16.146 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:53:16.148 [info] [command][c3ceb4e5-1549-461b-a876-6983de6ce902] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""c3ceb4e5-1549-461b-a876-6983de6ce902""}\n2025-07-16 19:53:16.148 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][49f46f5c-f9e8-4677-a8cf-4091a2256f31] received connection request\n2025-07-16 19:53:16.148 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:53:16.302 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][49f46f5c-f9e8-4677-a8cf-4091a2256f31] socks forwarding established\n2025-07-16 19:53:16.352 [info] [command][c3ceb4e5-1549-461b-a876-6983de6ce902] Process exited with code 0\n2025-07-16 19:53:16.352 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][49f46f5c-f9e8-4677-a8cf-4091a2256f31] socks connection closed\n2025-07-16 19:53:16.353 [info] [command][c3ceb4e5-1549-461b-a876-6983de6ce902] Socket close event received\n2025-07-16 19:53:16.455 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61091 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:54:16.357 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:54:16.359 [info] [command][58129aef-84d5-4404-ad2e-be579bb59505] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""58129aef-84d5-4404-ad2e-be579bb59505""}\n2025-07-16 19:54:16.359 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][73e7bf04-d4dc-4596-9522-2356135b3493] received connection request\n2025-07-16 19:54:16.360 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:54:16.450 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][73e7bf04-d4dc-4596-9522-2356135b3493] socks forwarding established\n2025-07-16 19:54:16.508 [info] [command][58129aef-84d5-4404-ad2e-be579bb59505] Process exited with code 0\n2025-07-16 19:54:16.509 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][73e7bf04-d4dc-4596-9522-2356135b3493] socks 
connection closed\n2025-07-16 19:54:16.509 [info] [command][58129aef-84d5-4404-ad2e-be579bb59505] Socket close event received\n2025-07-16 19:54:16.584 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61111 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:55:16.514 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:55:16.517 [info] [command][3d7d1704-c577-41d3-9e1b-9845a9956838] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""3d7d1704-c577-41d3-9e1b-9845a9956838""}\n2025-07-16 19:55:16.518 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][e16d6949-8016-4b86-a7b0-6893a4e6627e] received connection request\n2025-07-16 19:55:16.519 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:55:16.545 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e16d6949-8016-4b86-a7b0-6893a4e6627e] socks forwarding established\n2025-07-16 19:55:16.574 [info] [command][3d7d1704-c577-41d3-9e1b-9845a9956838] Process exited with code 0\n2025-07-16 19:55:16.575 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e16d6949-8016-4b86-a7b0-6893a4e6627e] socks connection closed\n2025-07-16 19:55:16.575 [info] [command][3d7d1704-c577-41d3-9e1b-9845a9956838] Socket close event received\n2025-07-16 19:55:16.599 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61180 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:56:16.580 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:56:16.582 [info] [command][40a8876e-4fc2-40e4-b830-6681ec7d39f1] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""40a8876e-4fc2-40e4-b830-6681ec7d39f1""}\n2025-07-16 19:56:16.583 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][e29c3501-23de-44f6-87be-bfe97140e885] received connection request\n2025-07-16 19:56:16.584 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:56:16.608 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e29c3501-23de-44f6-87be-bfe97140e885] socks forwarding established\n2025-07-16 19:56:16.635 [info] [command][40a8876e-4fc2-40e4-b830-6681ec7d39f1] Process exited with code 0\n2025-07-16 19:56:16.636 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][e29c3501-23de-44f6-87be-bfe97140e885] socks connection closed\n2025-07-16 19:56:16.636 [info] [command][40a8876e-4fc2-40e4-b830-6681ec7d39f1] Socket close event received\n2025-07-16 19:56:16.660 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61208 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:57:16.639 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:57:16.641 [info] [command][88b16899-1a83-4b72-93c3-85bf956a9db8] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""88b16899-1a83-4b72-93c3-85bf956a9db8""}\n2025-07-16 19:57:16.642 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][891afe4f-6a12-484e-9ca5-66c9dced1e31] received connection request\n2025-07-16 19:57:16.642 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:57:16.668 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][891afe4f-6a12-484e-9ca5-66c9dced1e31] socks forwarding established\n2025-07-16 19:57:16.695 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][891afe4f-6a12-484e-9ca5-66c9dced1e31] socks connection closed\n2025-07-16 19:57:16.695 [info] [command][88b16899-1a83-4b72-93c3-85bf956a9db8] Process exited with code 0\n2025-07-16 19:57:16.696 [info] [command][88b16899-1a83-4b72-93c3-85bf956a9db8] Socket close event received\n2025-07-16 19:57:16.720 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61267 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:58:16.702 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:58:16.704 [info] [command][be5f7923-dec9-4689-ba95-15aba9d60ef8] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""be5f7923-dec9-4689-ba95-15aba9d60ef8""}\n2025-07-16 19:58:16.705 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][13dfb843-5781-4053-a3e0-e3275e48173a] received connection request\n2025-07-16 19:58:16.706 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:58:16.730 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][13dfb843-5781-4053-a3e0-e3275e48173a] socks forwarding established\n2025-07-16 19:58:16.757 [info] [command][be5f7923-dec9-4689-ba95-15aba9d60ef8] Process exited with code 0\n2025-07-16 19:58:16.758 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][13dfb843-5781-4053-a3e0-e3275e48173a] socks connection closed\n2025-07-16 19:58:16.758 [info] [command][be5f7923-dec9-4689-ba95-15aba9d60ef8] Socket close event received\n2025-07-16 19:58:16.780 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61294 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 19:59:16.762 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 19:59:16.765 [info] [command][3fb60b60-1175-484e-8233-bbce50802f81] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""3fb60b60-1175-484e-8233-bbce50802f81""}\n2025-07-16 19:59:16.766 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:37309][5a343097-e7fd-43d6-86ae-47f9850d1a6b] received connection request\n2025-07-16 19:59:16.767 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 19:59:16.799 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 
127.0.0.1:37309][5a343097-e7fd-43d6-86ae-47f9850d1a6b] socks forwarding established\n2025-07-16 19:59:16.827 [info] [command][3fb60b60-1175-484e-8233-bbce50802f81] Process exited with code 0\n2025-07-16 19:59:16.828 [info] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:57974 -> 127.0.0.1:37309][5a343097-e7fd-43d6-86ae-47f9850d1a6b] socks connection closed\n2025-07-16 19:59:16.828 [info] [command][3fb60b60-1175-484e-8233-bbce50802f81] Socket close event received\n2025-07-16 19:59:16.853 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 57974 for 127.0.0.1 port 37309, connect from 127.0.0.1 port 61327 to 127.0.0.1 port 57974, nchannels 6\n\n2025-07-16 20:00:13.754 [info] [forwarding][code][127.0.0.1:57977 -> 127.0.0.1:46727][787c5bb5-3ec2-4f82-823d-c95b982b88d7] received connection request\n2025-07-16 20:00:13.777 [info] Resolving ssh remote authority 'login.haicore.berlin' (Unparsed 'ssh-remote+7b22686f73744e616d65223a226c6f67696e2e686169636f72652e6265726c696e227d') (attempt #2)\n2025-07-16 20:00:13.778 [info] Received re-connection request; checking to see if existing connection is still valid\n2025-07-16 20:00:13.820 [info] [forwarding][code][127.0.0.1:57977 -> 127.0.0.1:46727][9aa459fb-8800-482f-a7a8-424fe28a6017] received connection request\n2025-07-16 20:00:13.829 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\n\n2025-07-16 20:00:13.834 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\ndebug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 6: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 20:00:16.793 [error] Unexpected error while checking if existing connection is still valid Timeout while checking if existing connection is still valid\n2025-07-16 20:00:16.793 [error] Failed to connect to Cursor server at http://127.0.0.1:57977, attempt 1 of 3 This operation was aborted\n2025-07-16 20:00:16.797 [info] [forwarding][code][127.0.0.1:57977 -> 127.0.0.1:46727][7efa7db7-d562-4801-9c94-a1cc25823840] received connection request\n2025-07-16 20:00:16.797 [info] (ssh_tunnel) stderr: debug1: Connection to port 57974 forwarding to socks port 0 requested.\ndebug1: channel 7: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-16 20:00:16.847 [info] Terminating existing SSH process with pid: 61926\n2025-07-16 20:00:16.848 [info] Using configured platform linux for remote host login.haicore.berlin\n2025-07-16 20:00:16.848 [info] [remote-ssh] Pinging remote server on port 57978\n2025-07-16 20:00:16.848 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. 
Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-2ySBKB/socket.sock\n2025-07-16 20:00:16.848 [info] (ssh_tunnel): exit: code=null signal=SIGKILL\n2025-07-16 20:00:16.849 [info] [command][609060a4-f66a-45da-bd5b-104bc72d4a49] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""de7d54cc-97ec-4acb-a7dc-909a8230ad14"",""id"":""609060a4-f66a-45da-bd5b-104bc72d4a49""}\n2025-07-16 20:00:16.849 [error] [forwarding][multiplex][127.0.0.1:57978 -> 127.0.0.1:undefined][7a3c1df4-4709-439c-869d-bd95fb707655] remote server not configured\n2025-07-16 20:00:16.849 [error] [command][609060a4-f66a-45da-bd5b-104bc72d4a49] Socket error: Error: read ECONNRESET\n2025-07-16 20:00:16.850 [error] [forwarding][code][127.0.0.1:57977 -> 127.0.0.1:57974 -> 127.0.0.1:46727][787c5bb5-3ec2-4f82-823d-c95b982b88d7] error while creating socks forwarding Socket closed\n2025-07-16 20:00:16.850 [error] [forwarding][code][127.0.0.1:57977 -> 127.0.0.1:57974 -> 127.0.0.1:46727][9aa459fb-8800-482f-a7a8-424fe28a6017] error while creating socks forwarding Socket closed\n2025-07-16 20:00:16.850 [error] [forwarding][code][127.0.0.1:57977 -> 127.0.0.1:57974 -> 127.0.0.1:46727][7efa7db7-d562-4801-9c94-a1cc25823840] error while creating socks forwarding Socket closed\n2025-07-16 20:00:16.850 [info] [command][609060a4-f66a-45da-bd5b-104bc72d4a49] Socket close event received\n2025-07-16 20:00:16.850 [info] [forwarding][code][127.0.0.1:57977 -> 127.0.0.1:57974 -> 127.0.0.1:46727][9243c5a5-29cb-40d6-a147-75b8db807ee6] socks connection closed\n2025-07-16 20:00:16.850 [info] [forwarding][code][127.0.0.1:57977 -> 127.0.0.1:57974 -> 127.0.0.1:46727][a28e2b9d-8aa2-4937-9fa5-64a130a89a90] socks connection closed\n2025-07-16 20:00:16.853 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_77644.sh"" | ssh -v -T -D 61381 login.haicore.berlin bash --login -c bash\n2025-07-16 20:00:16.853 [info] Started installation script. Waiting for it to finish...\n2025-07-16 20:00:16.853 [info] Waiting for server to install via process(75807)...\n2025-07-16 20:00:16.859 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\n\n2025-07-16 20:00:16.859 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-16 20:00:16.859 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-16 20:00:16.860 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-16 20:00:16.860 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-16 20:00:16.861 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-16 20:00:16.862 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-16 20:00:16.862 [error] Error installing server: Failed to install the Cursor Server. 
Please check the logs for more details.\n2025-07-16 20:00:16.862 [info] Retrying connection in 5 seconds...\n2025-07-16 20:00:17.800 [error] Failed to connect to Cursor server at http://127.0.0.1:57977, attempt 2 of 3 This operation was aborted\n2025-07-16 20:00:18.812 [error] Failed to connect to Cursor server at http://127.0.0.1:57977, attempt 3 of 3 This operation was aborted\n2025-07-16 20:00:18.813 [error] Could not re-use existing SOCKS connection; attempting to re-establish SOCKS forwarding Failed to connect to Cursor code server. Ensure that your remote host ssh config has 'AllowTcpForwarding yes' in '/etc/ssh/sshd_config'. Please check the logs and try reinstalling the server.\n2025-07-16 20:00:18.813 [error] Could not re-establish SOCKS forwarding; re-establishing entire SSH connection Remote server is not set\n2025-07-16 20:03:24.415 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_77644.sh\n2025-07-16 20:03:24.416 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-2ySBKB/socket.sock\n2025-07-16 20:03:24.417 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_6723.sh"" | ssh -v -T -D 61386 login.haicore.berlin bash --login -c bash\n2025-07-16 20:03:24.417 [info] Started installation script. Waiting for it to finish...\n2025-07-16 20:03:24.417 [info] Waiting for server to install via process(75830)...\n2025-07-16 20:03:24.423 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\n\n2025-07-16 20:03:24.423 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-16 20:03:24.423 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-16 20:03:24.423 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-16 20:03:24.423 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-16 20:03:24.424 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-16 20:03:24.424 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-16 20:03:24.425 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-16 20:03:24.425 [info] Retrying connection in 5 seconds...\n2025-07-16 20:03:29.430 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_6723.sh\n2025-07-16 20:03:29.431 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. 
Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-2ySBKB/socket.sock\n[near-identical install attempts at 20:03:29, 20:03:34, 20:03:40, 20:04:21, 20:04:27, 20:04:32 and 20:04:37 omitted: each launches the install script over ssh, fails with ""ssh: connect to host login.haicore.berlin port 22: Undefined error: 0"", exits with code=255, logs ""Error installing server: Failed to install the Cursor Server. Please check the logs for more details."" and retries 5 seconds later]\n
2025-07-16 20:04:42.110 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_70117.sh\n2025-07-16 20:04:42.110 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-2ySBKB/socket.sock\n2025-07-16 20:04:42.113 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_78179.sh"" | ssh -v -T -D 61407 login.haicore.berlin bash --login -c bash\n2025-07-16 20:04:42.113 [info] Started installation script. 
Waiting for it to finish...\n2025-07-16 20:04:42.113 [info] Waiting for server to install via process(75903)...\n2025-07-16 20:04:42.122 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-16 20:04:42.122 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-16 20:04:42.122 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-16 20:04:42.122 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-16 20:04:42.122 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-16 20:04:42.124 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-16 20:04:42.124 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-16 20:04:42.125 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-16 20:04:42.125 [info] Retrying connection in 5 seconds...\n2025-07-16 20:04:47.126 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_78179.sh\n2025-07-16 20:04:47.126 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-2ySBKB/socket.sock\n2025-07-16 20:04:47.128 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_69707.sh"" | ssh -v -T -D 61409 login.haicore.berlin bash --login -c bash\n2025-07-16 20:04:47.128 [info] Started installation script. Waiting for it to finish...\n2025-07-16 20:04:47.128 [info] Waiting for server to install via process(75910)...\n2025-07-16 20:04:47.136 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-16 20:04:47.136 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-16 20:04:47.136 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-16 20:04:47.136 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-16 20:04:47.137 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-16 20:04:47.138 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-16 20:04:47.138 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-16 20:04:47.138 [error] Error installing server: Failed to install the Cursor Server. 
Please check the logs for more details.\n2025-07-16 20:04:47.138 [info] Retrying connection in 5 seconds...\n2025-07-16 20:04:52.140 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_69707.sh\n2025-07-16 20:04:52.140 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-2ySBKB/socket.sock\n2025-07-16 20:04:52.142 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_67641.sh"" | ssh -v -T -D 61411 login.haicore.berlin bash --login -c bash\n2025-07-16 20:04:52.142 [info] Started installation script. Waiting for it to finish...\n2025-07-16 20:04:52.142 [info] Waiting for server to install via process(75919)...\n2025-07-16 20:04:52.149 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-16 20:04:52.150 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-16 20:04:52.150 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-16 20:04:52.150 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-16 20:04:52.150 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-16 20:04:52.151 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-16 20:04:52.152 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-16 20:04:52.152 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-16 20:04:52.152 [info] Retrying connection in 5 seconds...\n2025-07-16 20:11:57.479 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_67641.sh\n2025-07-16 20:11:57.480 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.21/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-2ySBKB/socket.sock\n2025-07-16 20:11:57.485 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_63169.sh"" | ssh -v -T -D 61413 login.haicore.berlin bash --login -c bash\n2025-07-16 20:11:57.485 [info] Started installation script. 
Waiting for it to finish...\n2025-07-16 20:11:57.485 [info] Waiting for server to install via process(75926)...\n2025-07-16 20:11:57.526 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\n\n2025-07-16 20:11:57.526 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-16 20:11:57.529 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\ndebug1: Reading configuration data /etc/ssh/crypto.conf\ndebug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-16 20:11:57.576 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-16 20:11:57.579 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-16 20:11:57.579 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-16 20:11:57.579 [error] Failed to connect after 13 attempts: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-16 20:11:57.579 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_63169.sh\n2025-07-16 20:11:57.580 [error] Error resolving SSH authority Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-20 14:40:55.819 [info] Resolving ssh remote authority 'login.haicore.berlin' (Unparsed 'ssh-remote+7b22686f73744e616d65223a226c6f67696e2e686169636f72652e6265726c696e227d') (attempt #1)\n2025-07-20 14:40:55.831 [info] SSH askpass server listening on /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-U49qJn/socket.sock\n2025-07-20 14:40:55.831 [info] Using configured platform linux for remote host login.haicore.berlin\n2025-07-20 14:40:55.833 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-U49qJn/socket.sock\n2025-07-20 14:40:55.836 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_63299.sh"" | ssh -v -T -D 65277 login.haicore.berlin bash --login -c bash\n2025-07-20 14:40:55.836 [info] Started installation script. 
Waiting for it to finish...\n2025-07-20 14:40:55.836 [info] Waiting for server to install via process(63787)...\n2025-07-20 14:40:55.841 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\n\n2025-07-20 14:40:55.841 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-20 14:40:55.841 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-20 14:40:55.842 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-20 14:40:55.842 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-20 14:40:55.872 [info] (ssh_tunnel) stderr: debug1: Connection established.\n\n2025-07-20 14:40:55.873 [info] (ssh_tunnel) stderr: debug1: identity file /Users/franzsrambical/.ssh/id_ed25519 type 3\ndebug1: identity file /Users/franzsrambical/.ssh/id_ed25519-cert type -1\n\n2025-07-20 14:40:55.873 [info] (ssh_tunnel) stderr: debug1: Local version string SSH-2.0-OpenSSH_9.9\n\n2025-07-20 14:40:55.903 [info] (ssh_tunnel) stderr: debug1: Remote protocol version 2.0, remote software version OpenSSH_8.7\ndebug1: compat_banner: match: OpenSSH_8.7 pat OpenSSH* compat 0x04000000\ndebug1: Authenticating to login.haicore.berlin:22 as 'franz.srambical'\n\n2025-07-20 14:40:55.904 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /Users/franzsrambical/.ssh/known_hosts2: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory\n\n2025-07-20 14:40:55.904 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory\ndebug1: SSH2_MSG_KEXINIT sent\n\n2025-07-20 14:40:55.928 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEXINIT received\ndebug1: kex: algorithm: ecdh-sha2-nistp256\ndebug1: kex: host key algorithm: ssh-ed25519\n\n2025-07-20 14:40:55.928 [info] (ssh_tunnel) stderr: debug1: kex: server->client cipher: aes128-gcm@openssh.com MAC: compression: none\ndebug1: kex: client->server cipher: aes128-gcm@openssh.com MAC: compression: none\n\n2025-07-20 14:40:55.929 [info] (ssh_tunnel) stderr: debug1: expecting SSH2_MSG_KEX_ECDH_REPLY\n\n2025-07-20 14:40:55.956 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEX_ECDH_REPLY received\ndebug1: Server host key: ssh-ed25519 SHA256:3/BGZ1UNXR9SufKdsZVtx4Yd+kZTnZzSvRH0l6rtbvo\n\n2025-07-20 14:40:55.957 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /Users/franzsrambical/.ssh/known_hosts2: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory\n\n2025-07-20 14:40:55.957 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory\ndebug1: Host 'login.haicore.berlin' is known and matches the ED25519 host key.\ndebug1: Found key in /Users/franzsrambical/.ssh/known_hosts:17\n\n2025-07-20 14:40:55.960 [info] (ssh_tunnel) stderr: debug1: ssh_packet_send2_wrapped: resetting send seqnr 3\ndebug1: rekey out after 4294967296 blocks\ndebug1: SSH2_MSG_NEWKEYS sent\ndebug1: expecting SSH2_MSG_NEWKEYS\n\n2025-07-20 14:40:55.960 [info] (ssh_tunnel) stderr: debug1: ssh_packet_read_poll2: resetting read seqnr 
3\ndebug1: SSH2_MSG_NEWKEYS received\ndebug1: rekey in after 4294967296 blocks\ndebug1: SSH2_MSG_EXT_INFO received\ndebug1: kex_ext_info_client_parse: server-sig-algs=\n\n2025-07-20 14:40:56.054 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_SERVICE_ACCEPT received\n\n2025-07-20 14:40:56.171 [info] (ssh_tunnel) stderr: debug1: Authentications that can continue: publickey,gssapi-keyex,gssapi-with-mic,password,keyboard-interactive\ndebug1: Next authentication method: publickey\n\n2025-07-20 14:40:56.173 [info] (ssh_tunnel) stderr: debug1: get_agent_identities: bound agent to hostkey\n\n2025-07-20 14:40:56.174 [info] (ssh_tunnel) stderr: debug1: get_agent_identities: ssh_fetch_identitylist: agent contains no identities\ndebug1: Will attempt key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\ndebug1: Offering public key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\n\n2025-07-20 14:40:56.220 [info] (ssh_tunnel) stderr: debug1: Server accepts key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\n\n2025-07-20 14:40:56.364 [info] (ssh_tunnel) stderr: Authenticated to login.haicore.berlin ([141.80.150.4]:22) using ""publickey"".\ndebug1: Local connections to LOCALHOST:65277 forwarded to remote address socks:0\ndebug1: Local forwarding listening on ::1 port 65277.\n\n2025-07-20 14:40:56.364 [info] (ssh_tunnel) stderr: debug1: channel 0: new port-listener [port listener] (inactive timeout: 0)\ndebug1: Local forwarding listening on 127.0.0.1 port 65277.\ndebug1: channel 1: new port-listener [port listener] (inactive timeout: 0)\ndebug1: channel 2: new session [client-session] (inactive timeout: 0)\ndebug1: Requesting no-more-sessions@openssh.com\n\n2025-07-20 14:40:56.365 [info] (ssh_tunnel) stderr: debug1: Entering interactive session.\ndebug1: pledge: filesystem\n\n2025-07-20 14:40:56.480 [info] (ssh_tunnel) stderr: debug1: client_input_global_request: rtype hostkeys-00@openssh.com want_reply 0\n\n2025-07-20 14:40:56.481 [info] (ssh_tunnel) stderr: debug1: client_input_hostkeys: searching /Users/franzsrambical/.ssh/known_hosts for login.haicore.berlin / (none)\n\n2025-07-20 14:40:56.493 [info] (ssh_tunnel) stderr: debug1: client_input_hostkeys: searching /Users/franzsrambical/.ssh/known_hosts2 for login.haicore.berlin / (none)\ndebug1: client_input_hostkeys: hostkeys file /Users/franzsrambical/.ssh/known_hosts2 does not exist\ndebug1: client_input_hostkeys: no new or deprecated keys from server\n\n2025-07-20 14:40:56.493 [info] (ssh_tunnel) stderr: debug1: Remote: /home/franz.srambical/.ssh/authorized_keys:1: key options: agent-forwarding port-forwarding pty user-rc x11-forwarding\n\n2025-07-20 14:40:56.507 [info] (ssh_tunnel) stderr: debug1: Remote: /home/franz.srambical/.ssh/authorized_keys:1: key options: agent-forwarding port-forwarding pty user-rc x11-forwarding\ndebug1: Sending environment.\ndebug1: Sending command: bash --login -c bash\ndebug1: pledge: network\n\n2025-07-20 14:40:56.933 [info] (ssh_tunnel) stdout: Using TMP_DIR: /run/user/961800067\n\n2025-07-20 14:40:56.972 [info] (ssh_tunnel) stdout: Locking /run/user/961800067/cursor-remote-lock.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-20 14:40:56.974 [info] (ssh_tunnel) stdout: Server script already installed in /home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/bin/cursor-server\n\n2025-07-20 14:40:56.975 [info] (ssh_tunnel) 
stdout: Checking node executable\n\n2025-07-20 14:40:58.006 [info] (ssh_tunnel) stdout: v20.18.2\n\n2025-07-20 14:40:58.014 [info] (ssh_tunnel) stdout: Checking for running multiplex server: /home/franz.srambical/.cursor-server/bin/multiplex-server/45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15.js\n\n2025-07-20 14:40:58.030 [info] (ssh_tunnel) stdout: Running multiplex server: \n\n2025-07-20 14:40:58.031 [info] (ssh_tunnel) stdout: Creating multiplex server token file /run/user/961800067/cursor-remote-multiplex.token.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\n\n2025-07-20 14:40:58.050 [info] (ssh_tunnel) stdout: Creating directory for multiplex server: /home/franz.srambical/.cursor-server/bin/multiplex-server\n\n2025-07-20 14:40:58.070 [info] (ssh_tunnel) stdout: Writing multiplex server script to /home/franz.srambical/.cursor-server/bin/multiplex-server/45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15.js\n\n2025-07-20 14:40:58.070 [info] (ssh_tunnel) stdout: Starting multiplex server: /home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/node /home/franz.srambical/.cursor-server/bin/multiplex-server/45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15.js b95d4e43-7bf1-49d3-8caa-4997bd952c30\nMultiplex server started with PID 3045651 and wrote pid to file /run/user/961800067/cursor-remote-multiplex.pid.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\nReading multiplex server token file /run/user/961800067/cursor-remote-multiplex.token.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\n\n2025-07-20 14:40:58.070 [info] (ssh_tunnel) stdout: Multiplex server token file found\n\n2025-07-20 14:40:58.087 [info] (ssh_tunnel) stdout: Reading multiplex server log file /run/user/961800067/cursor-remote-multiplex.log.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\n\n2025-07-20 14:40:58.555 [info] (ssh_tunnel) stdout: Checking for code servers\n\n2025-07-20 14:40:58.573 [info] (ssh_tunnel) stdout: Code server script is not running\nCreating code server token file /run/user/961800067/cursor-remote-code.token.c403edc4db82e26fa41a0903d75ac6d0\nStarting code server script /home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/bin/cursor-server --start-server --host=127.0.0.1 --port 0 --connection-token-file /run/user/961800067/cursor-remote-code.token.c403edc4db82e26fa41a0903d75ac6d0 --telemetry-level off --enable-remote-auto-shutdown --accept-server-license-terms &> /run/user/961800067/cursor-remote-code.log.c403edc4db82e26fa41a0903d75ac6d0 &\n\n2025-07-20 14:40:58.575 [info] (ssh_tunnel) stdout: Code server started with PID 3045675 and wrote pid to file /run/user/961800067/cursor-remote-code.pid.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-20 14:40:58.575 [info] (ssh_tunnel) stdout: Code server log file is /run/user/961800067/cursor-remote-code.log.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-20 14:40:59.089 [info] (ssh_tunnel) stdout: 1d494f06d5774a057e35d73a: 
start\nexitCode==0==\nnodeExecutable==/home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/node==\nerrorMessage====\nisFatalError==false==\nmultiplexListeningOn==40717==\nmultiplexConnectionToken==b95d4e43-7bf1-49d3-8caa-4997bd952c30==\ncodeListeningOn==40083==\ncodeConnectionToken==bfffc766-6ca3-4cf3-b9c4-891f3dd218bd==\ndetectedPlatform==linux==\narch==x64==\nSSH_AUTH_SOCK====\n1d494f06d5774a057e35d73a: end\n\n2025-07-20 14:40:59.091 [info] Server install command exit code: 0\n2025-07-20 14:40:59.091 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_63299.sh\n2025-07-20 14:40:59.094 [info] [forwarding][code] creating new forwarding server\n2025-07-20 14:40:59.095 [info] [forwarding][code] server listening on 127.0.0.1:65285\n2025-07-20 14:40:59.095 [info] [forwarding][code] Set up server\n2025-07-20 14:40:59.096 [info] [remote-ssh] codeListeningOn (remote=[object Object]; local=[object Object]) codeConnectionToken: bfffc766-6ca3-4cf3-b9c4-891f3dd218bd\n2025-07-20 14:40:59.096 [info] [forwarding][multiplex] creating new forwarding server\n2025-07-20 14:40:59.096 [info] [forwarding][multiplex] server listening on 127.0.0.1:65286\n2025-07-20 14:40:59.097 [info] [forwarding][multiplex] Set up server\n2025-07-20 14:40:59.099 [info] [remote-ssh] multiplexListeningOn (remote=[object Object]; local=[object Object]) multiplexConnectionToken: b95d4e43-7bf1-49d3-8caa-4997bd952c30\n2025-07-20 14:40:59.099 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 14:40:59.106 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][7f786514-14e6-4ec4-8069-6b470cdd90bc] received connection request\n2025-07-20 14:40:59.107 [info] [command][a683bd7a-4edd-4533-a886-db52d2572438] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""a683bd7a-4edd-4533-a886-db52d2572438""}\n2025-07-20 14:40:59.108 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 3: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:40:59.108 [info] (ssh_tunnel) stdout: Unlocking /run/user/961800067/cursor-remote-lock.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-20 14:40:59.122 [info] [forwarding][code][127.0.0.1:65285 -> 127.0.0.1:40083][b6049a09-0a08-45f6-8600-c9ee2d6541f4] received connection request\n2025-07-20 14:40:59.122 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:40:59.144 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][7f786514-14e6-4ec4-8069-6b470cdd90bc] socks forwarding established\n2025-07-20 14:40:59.149 [info] [forwarding][code][127.0.0.1:65285 -> 127.0.0.1:65277 -> 127.0.0.1:40083][b6049a09-0a08-45f6-8600-c9ee2d6541f4] socks forwarding established\n2025-07-20 14:40:59.201 [info] [command][a683bd7a-4edd-4533-a886-db52d2572438] Process exited with code 0\n2025-07-20 14:40:59.201 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][7f786514-14e6-4ec4-8069-6b470cdd90bc] socks connection closed\n2025-07-20 14:40:59.201 [info] [command][a683bd7a-4edd-4533-a886-db52d2572438] Socket close event received\n2025-07-20 14:40:59.333 [info] (ssh_tunnel) stderr: debug1: channel 3: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, 
connect from 127.0.0.1 port 65288 to 127.0.0.1 port 65277, nchannels 5\n\n2025-07-20 14:40:59.442 [info] Successfully connected to Cursor server at http://127.0.0.1:65285/version\n2025-07-20 14:40:59.442 [info] [execServer][spawn] command: echo, args: 1, options: {}\n2025-07-20 14:40:59.442 [info] [command][c0630173-f98c-4f45-950f-e08c7206da58] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""c0630173-f98c-4f45-950f-e08c7206da58""}\n2025-07-20 14:40:59.443 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][35160f72-fbcb-4bc8-8911-71b2c7e19722] received connection request\n2025-07-20 14:40:59.443 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 3: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:40:59.470 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][35160f72-fbcb-4bc8-8911-71b2c7e19722] socks forwarding established\n2025-07-20 14:40:59.638 [info] [command][c0630173-f98c-4f45-950f-e08c7206da58] Process exited with code 0\n2025-07-20 14:40:59.638 [info] Successfully ran 'echo 1' against the multiplex server\n2025-07-20 14:40:59.639 [info] [remote-ssh] Resolved exec server. Socks port: 65277\n2025-07-20 14:40:59.639 [info] [remote-ssh] Resolved authority: {""host"":""127.0.0.1"",""port"":65285,""connectionToken"":""bfffc766-6ca3-4cf3-b9c4-891f3dd218bd"",""extensionHostEnv"":{}}. Socks port: 65277\n2025-07-20 14:40:59.639 [info] [command][c0630173-f98c-4f45-950f-e08c7206da58] Socket close event received\n2025-07-20 14:40:59.640 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][35160f72-fbcb-4bc8-8911-71b2c7e19722] socks connection closed\n2025-07-20 14:40:59.672 [info] (ssh_tunnel) stderr: debug1: channel 3: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 65293 to 127.0.0.1 port 65277, nchannels 5\n\n2025-07-20 14:40:59.673 [info] [forwarding][code][127.0.0.1:65285 -> 127.0.0.1:40083][bc77bcc5-6f69-47e3-be85-b11666e2e9a2] received connection request\n2025-07-20 14:40:59.673 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 3: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:40:59.700 [info] [forwarding][code][127.0.0.1:65285 -> 127.0.0.1:65277 -> 127.0.0.1:40083][bc77bcc5-6f69-47e3-be85-b11666e2e9a2] socks forwarding established\n2025-07-20 14:40:59.738 [info] [forwarding][code][127.0.0.1:65285 -> 127.0.0.1:40083][5523ab7e-449d-41e2-bec3-6c71c0076cdf] received connection request\n2025-07-20 14:40:59.739 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 5: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:40:59.846 [info] [forwarding][code][127.0.0.1:65285 -> 127.0.0.1:65277 -> 127.0.0.1:40083][5523ab7e-449d-41e2-bec3-6c71c0076cdf] socks forwarding established\n2025-07-20 14:41:00.005 [info] Saved platform linux for remote host login.haicore.berlin\n2025-07-20 14:41:03.636 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40083, connect from 127.0.0.1 port 65290 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 14:41:03.636 [info] [forwarding][code][127.0.0.1:65285 -> 127.0.0.1:65277 -> 127.0.0.1:40083][b6049a09-0a08-45f6-8600-c9ee2d6541f4] socks 
connection closed\n2025-07-20 14:41:59.206 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 14:41:59.209 [info] [command][b1dae6af-828c-41db-9e4e-eb20f84ba30c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""b1dae6af-828c-41db-9e4e-eb20f84ba30c""}\n2025-07-20 14:41:59.210 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][7dbb6d5d-0e39-40e1-b1dd-24f334fb8d03] received connection request\n2025-07-20 14:41:59.210 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:41:59.238 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][7dbb6d5d-0e39-40e1-b1dd-24f334fb8d03] socks forwarding established\n2025-07-20 14:41:59.269 [info] [command][b1dae6af-828c-41db-9e4e-eb20f84ba30c] Process exited with code 0\n2025-07-20 14:41:59.269 [info] [command][b1dae6af-828c-41db-9e4e-eb20f84ba30c] Socket close event received\n2025-07-20 14:41:59.272 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][7dbb6d5d-0e39-40e1-b1dd-24f334fb8d03] socks connection closed\n2025-07-20 14:41:59.295 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 65471 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 14:42:59.274 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 14:42:59.276 [info] [command][1836d787-8779-4f0b-945e-467befdd4a2f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""1836d787-8779-4f0b-945e-467befdd4a2f""}\n2025-07-20 14:42:59.277 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][5f7c1308-3f0b-4087-a7d7-b634387837e9] received connection request\n2025-07-20 14:42:59.279 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:42:59.337 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][5f7c1308-3f0b-4087-a7d7-b634387837e9] socks forwarding established\n2025-07-20 14:42:59.447 [info] [command][1836d787-8779-4f0b-945e-467befdd4a2f] Process exited with code 0\n2025-07-20 14:42:59.448 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][5f7c1308-3f0b-4087-a7d7-b634387837e9] socks connection closed\n2025-07-20 14:42:59.448 [info] [command][1836d787-8779-4f0b-945e-467befdd4a2f] Socket close event received\n2025-07-20 14:42:59.471 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 49167 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 14:43:59.452 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 14:43:59.453 [info] [command][e62d324d-1b66-4f5e-8669-f32abc2e9e2c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""e62d324d-1b66-4f5e-8669-f32abc2e9e2c""}\n2025-07-20 14:43:59.453 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][4e69e797-fa08-49cf-a4c8-c8d2d85f1f2c] received connection request\n2025-07-20 14:43:59.453 [info] (ssh_tunnel) stderr: debug1: 
Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:43:59.480 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][4e69e797-fa08-49cf-a4c8-c8d2d85f1f2c] socks forwarding established\n2025-07-20 14:43:59.506 [info] [command][e62d324d-1b66-4f5e-8669-f32abc2e9e2c] Process exited with code 0\n2025-07-20 14:43:59.506 [info] [command][e62d324d-1b66-4f5e-8669-f32abc2e9e2c] Socket close event received\n2025-07-20 14:43:59.507 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][4e69e797-fa08-49cf-a4c8-c8d2d85f1f2c] socks connection closed\n2025-07-20 14:43:59.531 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 49229 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 14:44:59.509 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 14:44:59.510 [info] [command][abf9e8da-488f-4b74-b81d-c46e56a16e7b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""abf9e8da-488f-4b74-b81d-c46e56a16e7b""}\n2025-07-20 14:44:59.510 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][85ccde5f-fba9-41e2-bcba-82cd57774000] received connection request\n2025-07-20 14:44:59.510 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:44:59.535 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][85ccde5f-fba9-41e2-bcba-82cd57774000] socks forwarding established\n2025-07-20 14:44:59.564 [info] [command][abf9e8da-488f-4b74-b81d-c46e56a16e7b] Process exited with code 0\n2025-07-20 14:44:59.565 [info] [command][abf9e8da-488f-4b74-b81d-c46e56a16e7b] Socket close event received\n2025-07-20 14:44:59.567 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][85ccde5f-fba9-41e2-bcba-82cd57774000] socks connection closed\n2025-07-20 14:44:59.594 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 49278 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 14:45:59.570 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 14:45:59.571 [info] [command][844c587a-9713-4d4a-a98b-e06bca6c02dc] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""844c587a-9713-4d4a-a98b-e06bca6c02dc""}\n2025-07-20 14:45:59.572 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][894e74cc-a540-4de6-9a79-85925aeb8b1c] received connection request\n2025-07-20 14:45:59.572 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:45:59.597 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][894e74cc-a540-4de6-9a79-85925aeb8b1c] socks forwarding established\n2025-07-20 14:45:59.628 [info] [command][844c587a-9713-4d4a-a98b-e06bca6c02dc] Process exited with code 0\n2025-07-20 14:45:59.628 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][894e74cc-a540-4de6-9a79-85925aeb8b1c] socks 
connection closed\n2025-07-20 14:45:59.628 [info] [command][844c587a-9713-4d4a-a98b-e06bca6c02dc] Socket close event received\n2025-07-20 14:45:59.652 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 49346 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 14:46:59.629 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 14:46:59.630 [info] [command][4d67fb63-e58b-401f-9372-fa818671e1fb] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""4d67fb63-e58b-401f-9372-fa818671e1fb""}\n2025-07-20 14:46:59.631 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][7b3e899d-775b-43b0-9713-4f419e57432a] received connection request\n2025-07-20 14:46:59.631 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\n\n2025-07-20 14:46:59.631 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:46:59.678 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][7b3e899d-775b-43b0-9713-4f419e57432a] socks forwarding established\n2025-07-20 14:46:59.706 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][7b3e899d-775b-43b0-9713-4f419e57432a] socks connection closed\n2025-07-20 14:46:59.706 [info] [command][4d67fb63-e58b-401f-9372-fa818671e1fb] Process exited with code 0\n2025-07-20 14:46:59.706 [info] [command][4d67fb63-e58b-401f-9372-fa818671e1fb] Socket close event received\n2025-07-20 14:46:59.730 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 49447 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 14:47:59.711 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 14:47:59.714 [info] [command][0739efa5-ea6e-4da1-a1da-6c7ebcb91c1a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""0739efa5-ea6e-4da1-a1da-6c7ebcb91c1a""}\n2025-07-20 14:47:59.714 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][99c20234-e18f-4a1b-bf5c-8d32be4a8c3e] received connection request\n2025-07-20 14:47:59.715 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\n\n2025-07-20 14:47:59.715 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:47:59.800 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][99c20234-e18f-4a1b-bf5c-8d32be4a8c3e] socks forwarding established\n2025-07-20 14:47:59.835 [info] [command][0739efa5-ea6e-4da1-a1da-6c7ebcb91c1a] Process exited with code 0\n2025-07-20 14:47:59.835 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][99c20234-e18f-4a1b-bf5c-8d32be4a8c3e] socks connection closed\n2025-07-20 14:47:59.835 [info] [command][0739efa5-ea6e-4da1-a1da-6c7ebcb91c1a] Socket close event received\n2025-07-20 14:47:59.859 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 49541 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 14:48:59.840 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 14:48:59.841 [info] 
[command][faf6da9a-311c-4339-84e8-a9e800904aab] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""faf6da9a-311c-4339-84e8-a9e800904aab""}\n2025-07-20 14:48:59.841 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][84cdb25a-bf04-45bb-9466-7de258635944] received connection request\n2025-07-20 14:48:59.841 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:48:59.917 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][84cdb25a-bf04-45bb-9466-7de258635944] socks forwarding established\n2025-07-20 14:48:59.949 [info] [command][faf6da9a-311c-4339-84e8-a9e800904aab] Process exited with code 0\n2025-07-20 14:48:59.950 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][84cdb25a-bf04-45bb-9466-7de258635944] socks connection closed\n2025-07-20 14:48:59.950 [info] [command][faf6da9a-311c-4339-84e8-a9e800904aab] Socket close event received\n2025-07-20 14:48:59.973 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 49597 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 14:49:59.951 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 14:49:59.953 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][0edcaafd-561b-41a4-95ae-fb1a54b0effc] received connection request\n2025-07-20 14:49:59.954 [info] [command][b763b703-f3ad-4051-8d1d-e3af8501bfce] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""b763b703-f3ad-4051-8d1d-e3af8501bfce""}\n2025-07-20 14:49:59.954 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:49:59.982 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][0edcaafd-561b-41a4-95ae-fb1a54b0effc] socks forwarding established\n2025-07-20 14:50:00.016 [info] [command][b763b703-f3ad-4051-8d1d-e3af8501bfce] Process exited with code 0\n2025-07-20 14:50:00.016 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][0edcaafd-561b-41a4-95ae-fb1a54b0effc] socks connection closed\n2025-07-20 14:50:00.016 [info] [command][b763b703-f3ad-4051-8d1d-e3af8501bfce] Socket close event received\n2025-07-20 14:50:00.041 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 49640 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 14:51:00.018 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 14:51:00.020 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][ff6dd49c-dfc2-43f3-8a42-0e2aa84a18e1] received connection request\n2025-07-20 14:51:00.020 [info] [command][7399b20a-8493-4362-918d-58e1a3a4faf7] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""7399b20a-8493-4362-918d-58e1a3a4faf7""}\n2025-07-20 14:51:00.021 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 
0)\n\n2025-07-20 14:51:00.073 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][ff6dd49c-dfc2-43f3-8a42-0e2aa84a18e1] socks forwarding established\n2025-07-20 14:51:00.118 [info] [command][7399b20a-8493-4362-918d-58e1a3a4faf7] Process exited with code 0\n2025-07-20 14:51:00.118 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][ff6dd49c-dfc2-43f3-8a42-0e2aa84a18e1] socks connection closed\n2025-07-20 14:51:00.118 [info] [command][7399b20a-8493-4362-918d-58e1a3a4faf7] Socket close event received\n2025-07-20 14:51:00.141 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 49740 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 14:52:00.120 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 14:52:00.123 [info] [command][4dcbf440-857e-42ee-8cb3-76bdef227789] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""4dcbf440-857e-42ee-8cb3-76bdef227789""}\n2025-07-20 14:52:00.124 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][c3aba37e-cd7d-40df-9457-5ddeeaa31bc3] received connection request\n2025-07-20 14:52:00.124 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:52:00.150 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][c3aba37e-cd7d-40df-9457-5ddeeaa31bc3] socks forwarding established\n2025-07-20 14:52:00.180 [info] [command][4dcbf440-857e-42ee-8cb3-76bdef227789] Process exited with code 0\n2025-07-20 14:52:00.181 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][c3aba37e-cd7d-40df-9457-5ddeeaa31bc3] socks connection closed\n2025-07-20 14:52:00.181 [info] [command][4dcbf440-857e-42ee-8cb3-76bdef227789] Socket close event received\n2025-07-20 14:52:00.281 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 49805 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 14:53:00.182 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 14:53:00.185 [info] [command][160dd685-9230-4e60-9b1b-d9402d0a4250] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""160dd685-9230-4e60-9b1b-d9402d0a4250""}\n2025-07-20 14:53:00.186 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][aa8369f3-63b3-4106-8327-4142c8d095fe] received connection request\n2025-07-20 14:53:00.186 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:53:00.409 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][aa8369f3-63b3-4106-8327-4142c8d095fe] socks forwarding established\n2025-07-20 14:53:00.467 [info] [command][160dd685-9230-4e60-9b1b-d9402d0a4250] Process exited with code 0\n2025-07-20 14:53:00.467 [info] [command][160dd685-9230-4e60-9b1b-d9402d0a4250] Socket close event received\n2025-07-20 14:53:00.582 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][aa8369f3-63b3-4106-8327-4142c8d095fe] socks connection closed\n2025-07-20 
14:53:00.589 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 49880 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 14:54:00.467 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 14:54:00.468 [info] [command][c0890aa4-fd22-45b8-aa60-b82ffcb24837] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""c0890aa4-fd22-45b8-aa60-b82ffcb24837""}\n2025-07-20 14:54:00.469 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][03ff43bc-9fd7-46e7-bff4-0839f6a32e9e] received connection request\n2025-07-20 14:54:00.469 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:54:00.494 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][03ff43bc-9fd7-46e7-bff4-0839f6a32e9e] socks forwarding established\n2025-07-20 14:54:00.524 [info] [command][c0890aa4-fd22-45b8-aa60-b82ffcb24837] Process exited with code 0\n2025-07-20 14:54:00.525 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][03ff43bc-9fd7-46e7-bff4-0839f6a32e9e] socks connection closed\n2025-07-20 14:54:00.525 [info] [command][c0890aa4-fd22-45b8-aa60-b82ffcb24837] Socket close event received\n2025-07-20 14:54:00.553 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 49938 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 14:55:00.525 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 14:55:00.526 [info] [command][1f339480-460a-4d3a-b347-c0de75895fdb] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""1f339480-460a-4d3a-b347-c0de75895fdb""}\n2025-07-20 14:55:00.527 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][ffa832d8-10d9-423c-87c7-84647b9d2f44] received connection request\n2025-07-20 14:55:00.527 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\n\n2025-07-20 14:55:00.527 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:55:00.663 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][ffa832d8-10d9-423c-87c7-84647b9d2f44] socks forwarding established\n2025-07-20 14:55:00.763 [info] [command][1f339480-460a-4d3a-b347-c0de75895fdb] Process exited with code 0\n2025-07-20 14:55:00.763 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][ffa832d8-10d9-423c-87c7-84647b9d2f44] socks connection closed\n2025-07-20 14:55:00.764 [info] [command][1f339480-460a-4d3a-b347-c0de75895fdb] Socket close event received\n2025-07-20 14:55:00.788 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 49981 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 14:56:00.768 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 14:56:00.770 [info] [command][9f2198a8-acba-4646-85fa-45d3dd31cfec] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""9f2198a8-acba-4646-85fa-45d3dd31cfec""}\n2025-07-20 14:56:00.770 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][fb65b0fd-4bb4-4dcd-ab25-558cb8264809] received connection request\n2025-07-20 14:56:00.771 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:56:00.797 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][fb65b0fd-4bb4-4dcd-ab25-558cb8264809] socks forwarding established\n2025-07-20 14:56:00.825 [info] [command][9f2198a8-acba-4646-85fa-45d3dd31cfec] Process exited with code 0\n2025-07-20 14:56:00.825 [info] [command][9f2198a8-acba-4646-85fa-45d3dd31cfec] Socket close event received\n2025-07-20 14:56:00.825 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][fb65b0fd-4bb4-4dcd-ab25-558cb8264809] socks connection closed\n2025-07-20 14:56:00.849 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 50064 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 14:57:00.827 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 14:57:00.828 [info] [command][d69e35eb-0b39-42d8-a8c7-3c0de49ee572] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""d69e35eb-0b39-42d8-a8c7-3c0de49ee572""}\n2025-07-20 14:57:00.828 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][95992f2c-fe5b-4c00-be92-26fc7bfcf150] received connection request\n2025-07-20 14:57:00.828 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:57:01.001 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][95992f2c-fe5b-4c00-be92-26fc7bfcf150] socks forwarding established\n2025-07-20 14:57:01.076 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][95992f2c-fe5b-4c00-be92-26fc7bfcf150] socks connection closed\n2025-07-20 14:57:01.076 [info] [command][d69e35eb-0b39-42d8-a8c7-3c0de49ee572] Process exited with code 0\n2025-07-20 14:57:01.076 [info] [command][d69e35eb-0b39-42d8-a8c7-3c0de49ee572] Socket close event received\n2025-07-20 14:57:01.312 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 50130 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 14:58:01.080 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 14:58:01.083 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][bf1f304f-09a5-4744-be2e-e80b0ad05cea] received connection request\n2025-07-20 14:58:01.084 [info] [command][94a90411-47b2-4429-9b2b-8dfd2badf521] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""94a90411-47b2-4429-9b2b-8dfd2badf521""}\n2025-07-20 14:58:01.084 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:58:01.217 [info] [forwarding][multiplex][127.0.0.1:65286 -> 
127.0.0.1:65277 -> 127.0.0.1:40717][bf1f304f-09a5-4744-be2e-e80b0ad05cea] socks forwarding established\n2025-07-20 14:58:01.246 [info] [command][94a90411-47b2-4429-9b2b-8dfd2badf521] Process exited with code 0\n2025-07-20 14:58:01.246 [info] [command][94a90411-47b2-4429-9b2b-8dfd2badf521] Socket close event received\n2025-07-20 14:58:01.247 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][bf1f304f-09a5-4744-be2e-e80b0ad05cea] socks connection closed\n2025-07-20 14:58:01.337 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 50206 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 14:58:50.120 [info] [forwarding][code][127.0.0.1:65285 -> 127.0.0.1:40083][668cbc7c-ebb5-419b-8bf6-924df9570baf] received connection request\n2025-07-20 14:58:50.121 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\n\n2025-07-20 14:58:50.124 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:58:50.269 [info] [forwarding][code][127.0.0.1:65285 -> 127.0.0.1:65277 -> 127.0.0.1:40083][668cbc7c-ebb5-419b-8bf6-924df9570baf] socks forwarding established\n2025-07-20 14:58:56.304 [info] [forwarding][code][127.0.0.1:65285 -> 127.0.0.1:65277 -> 127.0.0.1:40083][668cbc7c-ebb5-419b-8bf6-924df9570baf] socks connection closed\n2025-07-20 14:58:56.338 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40083, connect from 127.0.0.1 port 50252 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 14:59:01.249 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 14:59:01.262 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][9548a4eb-a4de-4bf1-8729-7f251baa28bc] received connection request\n2025-07-20 14:59:01.263 [info] [command][c5b54ce3-0439-4899-9da4-c5b75d62608e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""c5b54ce3-0439-4899-9da4-c5b75d62608e""}\n2025-07-20 14:59:01.263 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 14:59:01.295 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][9548a4eb-a4de-4bf1-8729-7f251baa28bc] socks forwarding established\n2025-07-20 14:59:01.324 [info] [command][c5b54ce3-0439-4899-9da4-c5b75d62608e] Process exited with code 0\n2025-07-20 14:59:01.324 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][9548a4eb-a4de-4bf1-8729-7f251baa28bc] socks connection closed\n2025-07-20 14:59:01.324 [info] [command][c5b54ce3-0439-4899-9da4-c5b75d62608e] Socket close event received\n2025-07-20 14:59:01.348 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 50262 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:00:01.329 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:00:01.330 [info] [command][a2b8c6ed-fd0d-4a3c-8974-c311a9ea4585] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""a2b8c6ed-fd0d-4a3c-8974-c311a9ea4585""}\n2025-07-20 15:00:01.331 [info] 
[forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][4c89451d-ef70-4202-9a2f-59051b5daea7] received connection request\n2025-07-20 15:00:01.331 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:00:01.355 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][4c89451d-ef70-4202-9a2f-59051b5daea7] socks forwarding established\n2025-07-20 15:00:01.384 [info] [command][a2b8c6ed-fd0d-4a3c-8974-c311a9ea4585] Process exited with code 0\n2025-07-20 15:00:01.384 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][4c89451d-ef70-4202-9a2f-59051b5daea7] socks connection closed\n2025-07-20 15:00:01.384 [info] [command][a2b8c6ed-fd0d-4a3c-8974-c311a9ea4585] Socket close event received\n2025-07-20 15:00:01.408 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 50313 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:01:01.386 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:01:01.387 [info] [command][aebdb29d-f814-46ec-af04-d175c2a06255] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""aebdb29d-f814-46ec-af04-d175c2a06255""}\n2025-07-20 15:01:01.387 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][792b554d-e2db-44cc-aa70-7b7587175193] received connection request\n2025-07-20 15:01:01.388 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:01:01.476 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][792b554d-e2db-44cc-aa70-7b7587175193] socks forwarding established\n2025-07-20 15:01:01.507 [info] [command][aebdb29d-f814-46ec-af04-d175c2a06255] Process exited with code 0\n2025-07-20 15:01:01.507 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][792b554d-e2db-44cc-aa70-7b7587175193] socks connection closed\n2025-07-20 15:01:01.507 [info] [command][aebdb29d-f814-46ec-af04-d175c2a06255] Socket close event received\n2025-07-20 15:01:01.531 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 50384 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:02:01.508 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:02:01.509 [info] [command][4939efb3-0856-4630-a745-d8b8883fcf4f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""4939efb3-0856-4630-a745-d8b8883fcf4f""}\n2025-07-20 15:02:01.510 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][a983831b-473b-4781-a79d-fa0f64521e4b] received connection request\n2025-07-20 15:02:01.510 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:02:01.638 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][a983831b-473b-4781-a79d-fa0f64521e4b] socks forwarding established\n2025-07-20 15:02:01.668 [info] [command][4939efb3-0856-4630-a745-d8b8883fcf4f] 
Process exited with code 0\n2025-07-20 15:02:01.669 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][a983831b-473b-4781-a79d-fa0f64521e4b] socks connection closed\n2025-07-20 15:02:01.669 [info] [command][4939efb3-0856-4630-a745-d8b8883fcf4f] Socket close event received\n2025-07-20 15:02:01.783 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 50450 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:03:01.674 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:03:01.677 [info] [command][6382aa79-4a9e-4411-987d-815bc366c127] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""6382aa79-4a9e-4411-987d-815bc366c127""}\n2025-07-20 15:03:01.678 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][07541fb0-12ab-4801-9785-d270952920ce] received connection request\n2025-07-20 15:03:01.678 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:03:01.744 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][07541fb0-12ab-4801-9785-d270952920ce] socks forwarding established\n2025-07-20 15:03:01.783 [info] [command][6382aa79-4a9e-4411-987d-815bc366c127] Process exited with code 0\n2025-07-20 15:03:01.783 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][07541fb0-12ab-4801-9785-d270952920ce] socks connection closed\n2025-07-20 15:03:01.784 [info] [command][6382aa79-4a9e-4411-987d-815bc366c127] Socket close event received\n2025-07-20 15:03:01.816 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 50540 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:04:01.785 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:04:01.788 [info] [command][2d430e8b-8422-46c7-a0b9-03e7b3429adf] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""2d430e8b-8422-46c7-a0b9-03e7b3429adf""}\n2025-07-20 15:04:01.789 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][a0cfd265-010c-4613-9f6e-b5821df17f4e] received connection request\n2025-07-20 15:04:01.789 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:04:01.878 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][a0cfd265-010c-4613-9f6e-b5821df17f4e] socks forwarding established\n2025-07-20 15:04:01.910 [info] [command][2d430e8b-8422-46c7-a0b9-03e7b3429adf] Process exited with code 0\n2025-07-20 15:04:01.911 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][a0cfd265-010c-4613-9f6e-b5821df17f4e] socks connection closed\n2025-07-20 15:04:01.911 [info] [command][2d430e8b-8422-46c7-a0b9-03e7b3429adf] Socket close event received\n2025-07-20 15:04:01.981 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 50586 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:05:01.912 [info] [remote-ssh] Pinging remote server 
on port 127.0.0.1:65286\n2025-07-20 15:05:01.915 [info] [command][e3d7f4bc-04e0-4828-b8d4-e76be4428e74] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""e3d7f4bc-04e0-4828-b8d4-e76be4428e74""}\n2025-07-20 15:05:01.915 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][aa176f9c-f5ae-4716-8e2d-341396c98a59] received connection request\n2025-07-20 15:05:01.916 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:05:01.966 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][aa176f9c-f5ae-4716-8e2d-341396c98a59] socks forwarding established\n2025-07-20 15:05:02.141 [info] [command][e3d7f4bc-04e0-4828-b8d4-e76be4428e74] Process exited with code 0\n2025-07-20 15:05:02.141 [info] [command][e3d7f4bc-04e0-4828-b8d4-e76be4428e74] Socket close event received\n2025-07-20 15:05:02.166 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][aa176f9c-f5ae-4716-8e2d-341396c98a59] socks connection closed\n2025-07-20 15:05:02.205 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 50640 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:06:02.147 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:06:02.150 [info] [command][2a9be4f5-4b41-42df-96fa-83d2a2ac15c6] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""2a9be4f5-4b41-42df-96fa-83d2a2ac15c6""}\n2025-07-20 15:06:02.152 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][746f3310-cd5f-4fce-bd34-162016ac7ba4] received connection request\n2025-07-20 15:06:02.153 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:06:02.198 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][746f3310-cd5f-4fce-bd34-162016ac7ba4] socks forwarding established\n2025-07-20 15:06:02.246 [info] [command][2a9be4f5-4b41-42df-96fa-83d2a2ac15c6] Process exited with code 0\n2025-07-20 15:06:02.246 [info] [command][2a9be4f5-4b41-42df-96fa-83d2a2ac15c6] Socket close event received\n2025-07-20 15:06:02.260 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][746f3310-cd5f-4fce-bd34-162016ac7ba4] socks connection closed\n2025-07-20 15:06:02.292 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 50710 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:07:02.247 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:07:02.249 [info] [command][4379faf2-73ad-4e22-af48-bdef9914faeb] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""4379faf2-73ad-4e22-af48-bdef9914faeb""}\n2025-07-20 15:07:02.249 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][f3bf3ef3-6efe-4be5-a1df-cebe070d0a85] received connection request\n2025-07-20 15:07:02.250 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new 
dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:07:02.301 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][f3bf3ef3-6efe-4be5-a1df-cebe070d0a85] socks forwarding established\n2025-07-20 15:07:02.357 [info] [command][4379faf2-73ad-4e22-af48-bdef9914faeb] Process exited with code 0\n2025-07-20 15:07:02.357 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][f3bf3ef3-6efe-4be5-a1df-cebe070d0a85] socks connection closed\n2025-07-20 15:07:02.357 [info] [command][4379faf2-73ad-4e22-af48-bdef9914faeb] Socket close event received\n2025-07-20 15:07:02.430 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 50803 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:08:02.360 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:08:02.363 [info] [command][4d45200b-4654-493a-a9b7-aa579130ae7c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""4d45200b-4654-493a-a9b7-aa579130ae7c""}\n2025-07-20 15:08:02.364 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][d23e38a5-4ace-41ca-965f-fa694aee241f] received connection request\n2025-07-20 15:08:02.366 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:08:02.412 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][d23e38a5-4ace-41ca-965f-fa694aee241f] socks forwarding established\n2025-07-20 15:08:02.455 [info] [command][4d45200b-4654-493a-a9b7-aa579130ae7c] Process exited with code 0\n2025-07-20 15:08:02.456 [info] [command][4d45200b-4654-493a-a9b7-aa579130ae7c] Socket close event received\n2025-07-20 15:08:02.490 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][d23e38a5-4ace-41ca-965f-fa694aee241f] socks connection closed\n2025-07-20 15:08:02.496 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 50911 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:09:02.460 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:09:02.461 [info] [command][7f402860-6e37-4b91-b294-ff1348afb639] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""7f402860-6e37-4b91-b294-ff1348afb639""}\n2025-07-20 15:09:02.462 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][51aff89a-7f8a-4dd8-9bbc-ed15713e7f1b] received connection request\n2025-07-20 15:09:02.462 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:09:02.516 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][51aff89a-7f8a-4dd8-9bbc-ed15713e7f1b] socks forwarding established\n2025-07-20 15:09:02.552 [info] [command][7f402860-6e37-4b91-b294-ff1348afb639] Process exited with code 0\n2025-07-20 15:09:02.552 [info] [command][7f402860-6e37-4b91-b294-ff1348afb639] Socket close event received\n2025-07-20 15:09:02.586 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 
127.0.0.1:40717][51aff89a-7f8a-4dd8-9bbc-ed15713e7f1b] socks connection closed\n2025-07-20 15:09:02.606 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 50946 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:10:02.556 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:10:02.559 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][e08a8cfd-b89f-41a8-b431-023eb452c256] received connection request\n2025-07-20 15:10:02.560 [info] [command][5490a807-f85c-4a48-a383-3b33df532876] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""5490a807-f85c-4a48-a383-3b33df532876""}\n2025-07-20 15:10:02.560 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:10:02.624 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][e08a8cfd-b89f-41a8-b431-023eb452c256] socks forwarding established\n2025-07-20 15:10:02.659 [info] [command][5490a807-f85c-4a48-a383-3b33df532876] Process exited with code 0\n2025-07-20 15:10:02.659 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][e08a8cfd-b89f-41a8-b431-023eb452c256] socks connection closed\n2025-07-20 15:10:02.660 [info] [command][5490a807-f85c-4a48-a383-3b33df532876] Socket close event received\n2025-07-20 15:10:02.800 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 50993 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:11:02.663 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:11:02.665 [info] [command][a2882461-9b0a-4495-9c37-0550d7ba272d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""a2882461-9b0a-4495-9c37-0550d7ba272d""}\n2025-07-20 15:11:02.665 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][dff7d01d-11f2-411f-ab22-04bc532e5dac] received connection request\n2025-07-20 15:11:02.665 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:11:02.703 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][dff7d01d-11f2-411f-ab22-04bc532e5dac] socks forwarding established\n2025-07-20 15:11:02.737 [info] [command][a2882461-9b0a-4495-9c37-0550d7ba272d] Process exited with code 0\n2025-07-20 15:11:02.737 [info] [command][a2882461-9b0a-4495-9c37-0550d7ba272d] Socket close event received\n2025-07-20 15:11:02.737 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][dff7d01d-11f2-411f-ab22-04bc532e5dac] socks connection closed\n2025-07-20 15:11:02.819 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 51059 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:12:02.737 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:12:02.740 [info] [command][a43eaf6e-4914-4efe-90c2-739c4c2f3b09] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""a43eaf6e-4914-4efe-90c2-739c4c2f3b09""}\n2025-07-20 15:12:02.741 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][2953d5d4-09a0-4d48-8e13-cc2a5dff0cd5] received connection request\n2025-07-20 15:12:02.741 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:12:02.860 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][2953d5d4-09a0-4d48-8e13-cc2a5dff0cd5] socks forwarding established\n2025-07-20 15:12:02.890 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][2953d5d4-09a0-4d48-8e13-cc2a5dff0cd5] socks connection closed\n2025-07-20 15:12:02.891 [info] [command][a43eaf6e-4914-4efe-90c2-739c4c2f3b09] Process exited with code 0\n2025-07-20 15:12:02.891 [info] [command][a43eaf6e-4914-4efe-90c2-739c4c2f3b09] Socket close event received\n2025-07-20 15:12:02.915 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 51148 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:13:02.895 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:13:02.898 [info] [command][f8bd53f5-d50b-4b05-a045-265d218313e8] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""f8bd53f5-d50b-4b05-a045-265d218313e8""}\n2025-07-20 15:13:02.899 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][2f84ecea-0223-4052-be0c-b95dbb282ed7] received connection request\n2025-07-20 15:13:02.900 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:13:03.019 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][2f84ecea-0223-4052-be0c-b95dbb282ed7] socks forwarding established\n2025-07-20 15:13:03.057 [info] [command][f8bd53f5-d50b-4b05-a045-265d218313e8] Process exited with code 0\n2025-07-20 15:13:03.057 [info] [command][f8bd53f5-d50b-4b05-a045-265d218313e8] Socket close event received\n2025-07-20 15:13:03.062 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][2f84ecea-0223-4052-be0c-b95dbb282ed7] socks connection closed\n2025-07-20 15:13:03.086 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 51214 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:14:03.061 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:14:03.063 [info] [command][35a03ec9-df3b-48a3-a26a-6d709f9f8575] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""35a03ec9-df3b-48a3-a26a-6d709f9f8575""}\n2025-07-20 15:14:03.064 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][1b4c9f3c-83a6-4929-a082-83bd6d812574] received connection request\n2025-07-20 15:14:03.064 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:14:03.088 [info] [forwarding][multiplex][127.0.0.1:65286 -> 
127.0.0.1:65277 -> 127.0.0.1:40717][1b4c9f3c-83a6-4929-a082-83bd6d812574] socks forwarding established\n2025-07-20 15:14:03.120 [info] [command][35a03ec9-df3b-48a3-a26a-6d709f9f8575] Process exited with code 0\n2025-07-20 15:14:03.120 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][1b4c9f3c-83a6-4929-a082-83bd6d812574] socks connection closed\n2025-07-20 15:14:03.120 [info] [command][35a03ec9-df3b-48a3-a26a-6d709f9f8575] Socket close event received\n2025-07-20 15:14:03.148 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 51264 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:15:03.125 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:15:03.126 [info] [command][0f051c14-3d62-406c-8631-8be3af9f6239] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""0f051c14-3d62-406c-8631-8be3af9f6239""}\n2025-07-20 15:15:03.127 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][a4294ab3-2325-4630-9291-124f04409862] received connection request\n2025-07-20 15:15:03.127 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\n\n2025-07-20 15:15:03.127 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:15:04.103 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][a4294ab3-2325-4630-9291-124f04409862] socks forwarding established\n2025-07-20 15:15:04.137 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][a4294ab3-2325-4630-9291-124f04409862] socks connection closed\n2025-07-20 15:15:04.137 [info] [command][0f051c14-3d62-406c-8631-8be3af9f6239] Process exited with code 0\n2025-07-20 15:15:04.137 [info] [command][0f051c14-3d62-406c-8631-8be3af9f6239] Socket close event received\n2025-07-20 15:15:04.190 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 51325 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:16:04.143 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:16:04.146 [info] [command][ad52f80e-6cd3-4927-86d1-2627f32c263a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""ad52f80e-6cd3-4927-86d1-2627f32c263a""}\n2025-07-20 15:16:04.147 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][00b3a56e-b0fc-4927-ad2c-4c31c1a0b4c1] received connection request\n2025-07-20 15:16:04.147 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:16:04.185 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][00b3a56e-b0fc-4927-ad2c-4c31c1a0b4c1] socks forwarding established\n2025-07-20 15:16:04.217 [info] [command][ad52f80e-6cd3-4927-86d1-2627f32c263a] Process exited with code 0\n2025-07-20 15:16:04.217 [info] [command][ad52f80e-6cd3-4927-86d1-2627f32c263a] Socket close event received\n2025-07-20 15:16:04.218 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][00b3a56e-b0fc-4927-ad2c-4c31c1a0b4c1] socks connection closed\n2025-07-20 15:16:04.246 [info] 
(ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 51381 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:17:04.220 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:17:04.222 [info] [command][c9b877ef-e27e-4352-98ea-d568aae24b66] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""c9b877ef-e27e-4352-98ea-d568aae24b66""}\n2025-07-20 15:17:04.223 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][4df0646c-1a18-47e5-9700-29918118e987] received connection request\n2025-07-20 15:17:04.224 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:17:04.250 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][4df0646c-1a18-47e5-9700-29918118e987] socks forwarding established\n2025-07-20 15:17:04.281 [info] [command][c9b877ef-e27e-4352-98ea-d568aae24b66] Process exited with code 0\n2025-07-20 15:17:04.281 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][4df0646c-1a18-47e5-9700-29918118e987] socks connection closed\n2025-07-20 15:17:04.281 [info] [command][c9b877ef-e27e-4352-98ea-d568aae24b66] Socket close event received\n2025-07-20 15:17:04.309 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 51449 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:18:04.286 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:18:04.288 [info] [command][d14f1e80-c70c-44cf-9bd1-6d6a274726d1] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""d14f1e80-c70c-44cf-9bd1-6d6a274726d1""}\n2025-07-20 15:18:04.289 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][b84b86a9-441a-4f31-855e-a2e6ba32b406] received connection request\n2025-07-20 15:18:04.289 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:18:04.314 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][b84b86a9-441a-4f31-855e-a2e6ba32b406] socks forwarding established\n2025-07-20 15:18:04.344 [info] [command][d14f1e80-c70c-44cf-9bd1-6d6a274726d1] Process exited with code 0\n2025-07-20 15:18:04.344 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][b84b86a9-441a-4f31-855e-a2e6ba32b406] socks connection closed\n2025-07-20 15:18:04.345 [info] [command][d14f1e80-c70c-44cf-9bd1-6d6a274726d1] Socket close event received\n2025-07-20 15:18:04.371 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 51514 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:19:04.349 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:19:04.349 [info] [command][38e4333b-e51c-4341-a9ae-39f152dd9d2d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""38e4333b-e51c-4341-a9ae-39f152dd9d2d""}\n2025-07-20 15:19:04.350 [info] 
[forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][ba009122-3cba-4af6-b784-25c5d180f1b7] received connection request\n2025-07-20 15:19:04.350 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:19:04.455 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][ba009122-3cba-4af6-b784-25c5d180f1b7] socks forwarding established\n2025-07-20 15:19:04.504 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][ba009122-3cba-4af6-b784-25c5d180f1b7] socks connection closed\n2025-07-20 15:19:04.504 [info] [command][38e4333b-e51c-4341-a9ae-39f152dd9d2d] Process exited with code 0\n2025-07-20 15:19:04.504 [info] [command][38e4333b-e51c-4341-a9ae-39f152dd9d2d] Socket close event received\n2025-07-20 15:19:04.530 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 51555 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:20:04.506 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:20:04.511 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][fdfe8bfd-fb88-4b4d-82b9-1a978202aecb] received connection request\n2025-07-20 15:20:04.511 [info] [command][d0ebe693-c67a-4dec-a7ff-8a41fd1ff282] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""d0ebe693-c67a-4dec-a7ff-8a41fd1ff282""}\n2025-07-20 15:20:04.512 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:20:04.539 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][fdfe8bfd-fb88-4b4d-82b9-1a978202aecb] socks forwarding established\n2025-07-20 15:20:04.567 [info] [command][d0ebe693-c67a-4dec-a7ff-8a41fd1ff282] Process exited with code 0\n2025-07-20 15:20:04.567 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][fdfe8bfd-fb88-4b4d-82b9-1a978202aecb] socks connection closed\n2025-07-20 15:20:04.567 [info] [command][d0ebe693-c67a-4dec-a7ff-8a41fd1ff282] Socket close event received\n2025-07-20 15:20:04.601 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 51625 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:21:04.572 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:21:04.574 [info] [command][9035b739-47f2-42b7-934c-23b310fb086f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""9035b739-47f2-42b7-934c-23b310fb086f""}\n2025-07-20 15:21:04.575 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][8d17b38b-805c-40b4-9c6a-24103d921a44] received connection request\n2025-07-20 15:21:04.575 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:21:04.615 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][8d17b38b-805c-40b4-9c6a-24103d921a44] socks forwarding established\n2025-07-20 15:21:04.648 [info] [command][9035b739-47f2-42b7-934c-23b310fb086f] 
Process exited with code 0\n2025-07-20 15:21:04.649 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][8d17b38b-805c-40b4-9c6a-24103d921a44] socks connection closed\n2025-07-20 15:21:04.649 [info] [command][9035b739-47f2-42b7-934c-23b310fb086f] Socket close event received\n2025-07-20 15:21:04.677 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 51681 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:22:04.654 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:22:04.657 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][ce8361c7-41da-4748-872c-4549cebfe348] received connection request\n2025-07-20 15:22:04.658 [info] [command][d2855afd-7a4f-47b7-ba9f-ee192ffd415b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""d2855afd-7a4f-47b7-ba9f-ee192ffd415b""}\n2025-07-20 15:22:04.658 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:22:04.759 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][ce8361c7-41da-4748-872c-4549cebfe348] socks forwarding established\n2025-07-20 15:22:04.790 [info] [command][d2855afd-7a4f-47b7-ba9f-ee192ffd415b] Process exited with code 0\n2025-07-20 15:22:04.791 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][ce8361c7-41da-4748-872c-4549cebfe348] socks connection closed\n2025-07-20 15:22:04.791 [info] [command][d2855afd-7a4f-47b7-ba9f-ee192ffd415b] Socket close event received\n2025-07-20 15:22:04.817 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 51747 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:23:04.796 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:23:04.800 [info] [command][1ea9b73b-c9e0-4e2e-8328-86b0724f0360] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""1ea9b73b-c9e0-4e2e-8328-86b0724f0360""}\n2025-07-20 15:23:04.800 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][6d316e41-cd39-4f6e-b6bd-601a796fc1c7] received connection request\n2025-07-20 15:23:04.801 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:23:06.112 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][6d316e41-cd39-4f6e-b6bd-601a796fc1c7] socks forwarding established\n2025-07-20 15:23:06.214 [info] [command][1ea9b73b-c9e0-4e2e-8328-86b0724f0360] Process exited with code 0\n2025-07-20 15:23:06.214 [info] [command][1ea9b73b-c9e0-4e2e-8328-86b0724f0360] Socket close event received\n2025-07-20 15:23:06.297 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][6d316e41-cd39-4f6e-b6bd-601a796fc1c7] socks connection closed\n2025-07-20 15:23:06.297 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 51828 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:24:06.219 [info] [remote-ssh] Pinging remote server 
on port 127.0.0.1:65286\n2025-07-20 15:24:06.223 [info] [command][ca99bc50-9aaa-4a81-8f0a-b15049c05d7e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""ca99bc50-9aaa-4a81-8f0a-b15049c05d7e""}\n2025-07-20 15:24:06.224 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][79278c55-3578-4049-8b3e-616bbd763c16] received connection request\n2025-07-20 15:24:06.224 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:24:06.274 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][79278c55-3578-4049-8b3e-616bbd763c16] socks forwarding established\n2025-07-20 15:24:06.341 [info] [command][ca99bc50-9aaa-4a81-8f0a-b15049c05d7e] Process exited with code 0\n2025-07-20 15:24:06.341 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][79278c55-3578-4049-8b3e-616bbd763c16] socks connection closed\n2025-07-20 15:24:06.341 [info] [command][ca99bc50-9aaa-4a81-8f0a-b15049c05d7e] Socket close event received\n2025-07-20 15:24:06.380 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 51886 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:25:06.344 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:25:06.346 [info] [command][9119a276-4cc2-4e44-8821-dec76ece0f0d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""9119a276-4cc2-4e44-8821-dec76ece0f0d""}\n2025-07-20 15:25:06.347 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][7191df3b-1c71-4782-ba36-db93ededd9b9] received connection request\n2025-07-20 15:25:06.347 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:25:06.482 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][7191df3b-1c71-4782-ba36-db93ededd9b9] socks forwarding established\n2025-07-20 15:25:06.522 [info] [command][9119a276-4cc2-4e44-8821-dec76ece0f0d] Process exited with code 0\n2025-07-20 15:25:06.522 [info] [command][9119a276-4cc2-4e44-8821-dec76ece0f0d] Socket close event received\n2025-07-20 15:25:06.542 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][7191df3b-1c71-4782-ba36-db93ededd9b9] socks connection closed\n2025-07-20 15:25:06.549 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 51934 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:26:06.527 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:26:06.529 [info] [command][b8dc652d-9dee-4c39-9bf2-c65c78d09dbf] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""b8dc652d-9dee-4c39-9bf2-c65c78d09dbf""}\n2025-07-20 15:26:06.530 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][b385366c-10ae-4ff3-a7f3-eda57ab49301] received connection request\n2025-07-20 15:26:06.530 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new 
dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:26:06.602 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][b385366c-10ae-4ff3-a7f3-eda57ab49301] socks forwarding established\n2025-07-20 15:26:06.635 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][b385366c-10ae-4ff3-a7f3-eda57ab49301] socks connection closed\n2025-07-20 15:26:06.635 [info] [command][b8dc652d-9dee-4c39-9bf2-c65c78d09dbf] Process exited with code 0\n2025-07-20 15:26:06.635 [info] [command][b8dc652d-9dee-4c39-9bf2-c65c78d09dbf] Socket close event received\n2025-07-20 15:26:06.660 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 51993 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:27:06.640 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:27:06.642 [info] [command][f3f2d34f-a1d5-48ab-b174-99391475f59f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""f3f2d34f-a1d5-48ab-b174-99391475f59f""}\n2025-07-20 15:27:06.643 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][d0040447-31e4-4a33-bcaa-ce239d61cb35] received connection request\n2025-07-20 15:27:06.644 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\n\n2025-07-20 15:27:06.644 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:27:06.670 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][d0040447-31e4-4a33-bcaa-ce239d61cb35] socks forwarding established\n2025-07-20 15:27:06.702 [info] [command][f3f2d34f-a1d5-48ab-b174-99391475f59f] Process exited with code 0\n2025-07-20 15:27:06.703 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][d0040447-31e4-4a33-bcaa-ce239d61cb35] socks connection closed\n2025-07-20 15:27:06.703 [info] [command][f3f2d34f-a1d5-48ab-b174-99391475f59f] Socket close event received\n2025-07-20 15:27:06.780 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 52054 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:28:06.704 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:28:06.705 [info] [command][8b726fb2-34e0-4e74-90eb-c6dde0ac5803] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""8b726fb2-34e0-4e74-90eb-c6dde0ac5803""}\n2025-07-20 15:28:06.705 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][25135530-820f-493c-8913-9a718ece3b12] received connection request\n2025-07-20 15:28:06.706 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\n\n2025-07-20 15:28:06.706 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:28:06.731 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][25135530-820f-493c-8913-9a718ece3b12] socks forwarding established\n2025-07-20 15:28:06.857 [info] [command][8b726fb2-34e0-4e74-90eb-c6dde0ac5803] Process exited with code 0\n2025-07-20 15:28:06.858 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 
127.0.0.1:40717][25135530-820f-493c-8913-9a718ece3b12] socks connection closed\n2025-07-20 15:28:06.858 [info] [command][8b726fb2-34e0-4e74-90eb-c6dde0ac5803] Socket close event received\n2025-07-20 15:28:06.891 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 52116 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:29:06.862 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:29:06.863 [info] [command][0470a458-8524-4049-af98-008729add335] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""0470a458-8524-4049-af98-008729add335""}\n2025-07-20 15:29:06.864 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][f464edfd-2bb5-4f61-a192-93408d89e883] received connection request\n2025-07-20 15:29:06.865 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:29:06.892 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][f464edfd-2bb5-4f61-a192-93408d89e883] socks forwarding established\n2025-07-20 15:29:06.924 [info] [command][0470a458-8524-4049-af98-008729add335] Process exited with code 0\n2025-07-20 15:29:06.924 [info] [command][0470a458-8524-4049-af98-008729add335] Socket close event received\n2025-07-20 15:29:06.948 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][f464edfd-2bb5-4f61-a192-93408d89e883] socks connection closed\n2025-07-20 15:29:06.948 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 52167 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:30:06.929 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:30:06.931 [info] [command][918dd0de-ae2d-49eb-b62a-fe108b07f08f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""918dd0de-ae2d-49eb-b62a-fe108b07f08f""}\n2025-07-20 15:30:06.932 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][cbb15364-00df-4b16-9024-ab873fe12ea5] received connection request\n2025-07-20 15:30:06.933 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:30:07.047 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][cbb15364-00df-4b16-9024-ab873fe12ea5] socks forwarding established\n2025-07-20 15:30:07.083 [info] [command][918dd0de-ae2d-49eb-b62a-fe108b07f08f] Process exited with code 0\n2025-07-20 15:30:07.083 [info] [command][918dd0de-ae2d-49eb-b62a-fe108b07f08f] Socket close event received\n2025-07-20 15:30:07.108 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][cbb15364-00df-4b16-9024-ab873fe12ea5] socks connection closed\n2025-07-20 15:30:07.126 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 52234 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:31:07.088 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:31:07.090 [info] [command][1b24143e-b93b-403a-b2f3-d840df4174fc] Sending command 
request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""1b24143e-b93b-403a-b2f3-d840df4174fc""}\n2025-07-20 15:31:07.090 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][52fd656c-3c62-4f91-94b6-1e54fa6e87da] received connection request\n2025-07-20 15:31:07.091 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:31:07.206 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][52fd656c-3c62-4f91-94b6-1e54fa6e87da] socks forwarding established\n2025-07-20 15:31:07.270 [info] [command][1b24143e-b93b-403a-b2f3-d840df4174fc] Process exited with code 0\n2025-07-20 15:31:07.271 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][52fd656c-3c62-4f91-94b6-1e54fa6e87da] socks connection closed\n2025-07-20 15:31:07.271 [info] [command][1b24143e-b93b-403a-b2f3-d840df4174fc] Socket close event received\n2025-07-20 15:31:07.297 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 52300 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:32:07.276 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:32:07.278 [info] [command][6eaf5ed4-151a-4845-8e42-6a18aba60987] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""6eaf5ed4-151a-4845-8e42-6a18aba60987""}\n2025-07-20 15:32:07.279 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][088d6a8f-7b2a-4ebd-8793-7e24c2099717] received connection request\n2025-07-20 15:32:07.279 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:32:07.305 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][088d6a8f-7b2a-4ebd-8793-7e24c2099717] socks forwarding established\n2025-07-20 15:32:07.350 [info] [command][6eaf5ed4-151a-4845-8e42-6a18aba60987] Process exited with code 0\n2025-07-20 15:32:07.350 [info] [command][6eaf5ed4-151a-4845-8e42-6a18aba60987] Socket close event received\n2025-07-20 15:32:07.372 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][088d6a8f-7b2a-4ebd-8793-7e24c2099717] socks connection closed\n2025-07-20 15:32:07.375 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 52360 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:33:07.351 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:33:07.354 [info] [command][9a59b826-0ae2-4d82-97f8-4036e7c57a1e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""9a59b826-0ae2-4d82-97f8-4036e7c57a1e""}\n2025-07-20 15:33:07.354 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][3c01fb12-45dd-4b9c-8346-be8efb19a8e6] received connection request\n2025-07-20 15:33:07.355 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:33:07.384 [info] 
[forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][3c01fb12-45dd-4b9c-8346-be8efb19a8e6] socks forwarding established\n2025-07-20 15:33:07.414 [info] [command][9a59b826-0ae2-4d82-97f8-4036e7c57a1e] Process exited with code 0\n2025-07-20 15:33:07.414 [info] [command][9a59b826-0ae2-4d82-97f8-4036e7c57a1e] Socket close event received\n2025-07-20 15:33:07.439 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 52426 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:33:07.439 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][3c01fb12-45dd-4b9c-8346-be8efb19a8e6] socks connection closed\n2025-07-20 15:34:07.415 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:34:07.417 [info] [command][5144b8c7-f1fa-4816-8dcd-e7b248686d0c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""5144b8c7-f1fa-4816-8dcd-e7b248686d0c""}\n2025-07-20 15:34:07.417 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][db794674-a09e-44fd-be0f-40a96bf56f1a] received connection request\n2025-07-20 15:34:07.418 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:34:07.445 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][db794674-a09e-44fd-be0f-40a96bf56f1a] socks forwarding established\n2025-07-20 15:34:07.564 [info] [command][5144b8c7-f1fa-4816-8dcd-e7b248686d0c] Process exited with code 0\n2025-07-20 15:34:07.565 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][db794674-a09e-44fd-be0f-40a96bf56f1a] socks connection closed\n2025-07-20 15:34:07.566 [info] [command][5144b8c7-f1fa-4816-8dcd-e7b248686d0c] Socket close event received\n2025-07-20 15:34:07.593 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 52485 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:35:07.569 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:35:07.572 [info] [command][6837432a-7413-4ffe-bb2c-88bb2505408c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""6837432a-7413-4ffe-bb2c-88bb2505408c""}\n2025-07-20 15:35:07.572 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][e0d7a1f3-1e10-41a2-a5f0-16b5aff51647] received connection request\n2025-07-20 15:35:07.573 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:35:07.603 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][e0d7a1f3-1e10-41a2-a5f0-16b5aff51647] socks forwarding established\n2025-07-20 15:35:07.636 [info] [command][6837432a-7413-4ffe-bb2c-88bb2505408c] Process exited with code 0\n2025-07-20 15:35:07.636 [info] [command][6837432a-7413-4ffe-bb2c-88bb2505408c] Socket close event received\n2025-07-20 15:35:07.660 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][e0d7a1f3-1e10-41a2-a5f0-16b5aff51647] socks connection closed\n2025-07-20 15:35:07.672 [info] (ssh_tunnel) stderr: 
debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 52550 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:36:07.636 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:36:07.638 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][da88a8c7-4b03-4507-a3a6-c5fb20d860be] received connection request\n2025-07-20 15:36:07.638 [info] [command][9ba3e0aa-18d6-42eb-9de5-58697d5e4632] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""9ba3e0aa-18d6-42eb-9de5-58697d5e4632""}\n2025-07-20 15:36:07.638 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\n\n2025-07-20 15:36:07.639 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:36:07.666 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][da88a8c7-4b03-4507-a3a6-c5fb20d860be] socks forwarding established\n2025-07-20 15:36:07.709 [info] [command][9ba3e0aa-18d6-42eb-9de5-58697d5e4632] Process exited with code 0\n2025-07-20 15:36:07.709 [info] [command][9ba3e0aa-18d6-42eb-9de5-58697d5e4632] Socket close event received\n2025-07-20 15:36:07.725 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][da88a8c7-4b03-4507-a3a6-c5fb20d860be] socks connection closed\n2025-07-20 15:36:07.736 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 52612 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:37:07.711 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:37:07.712 [info] [command][2e0d4bcf-e412-4d85-9cfc-a583e767b9ff] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""2e0d4bcf-e412-4d85-9cfc-a583e767b9ff""}\n2025-07-20 15:37:07.712 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][a2d3f7b3-3e16-4405-ba62-2a165a9a7d8e] received connection request\n2025-07-20 15:37:07.713 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:37:07.741 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][a2d3f7b3-3e16-4405-ba62-2a165a9a7d8e] socks forwarding established\n2025-07-20 15:37:07.774 [info] [command][2e0d4bcf-e412-4d85-9cfc-a583e767b9ff] Process exited with code 0\n2025-07-20 15:37:07.775 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][a2d3f7b3-3e16-4405-ba62-2a165a9a7d8e] socks connection closed\n2025-07-20 15:37:07.775 [info] [command][2e0d4bcf-e412-4d85-9cfc-a583e767b9ff] Socket close event received\n2025-07-20 15:37:07.815 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 52682 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:38:07.780 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:38:07.781 [info] [command][d6965fe9-08f2-4fd1-b0e3-4b5e3358087d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""d6965fe9-08f2-4fd1-b0e3-4b5e3358087d""}\n2025-07-20 15:38:07.782 
[info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][44afc3a3-90ee-48c8-aea7-ca31269c6361] received connection request\n2025-07-20 15:38:07.782 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:38:07.849 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][44afc3a3-90ee-48c8-aea7-ca31269c6361] socks forwarding established\n2025-07-20 15:38:07.881 [info] [command][d6965fe9-08f2-4fd1-b0e3-4b5e3358087d] Process exited with code 0\n2025-07-20 15:38:07.881 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][44afc3a3-90ee-48c8-aea7-ca31269c6361] socks connection closed\n2025-07-20 15:38:07.881 [info] [command][d6965fe9-08f2-4fd1-b0e3-4b5e3358087d] Socket close event received\n2025-07-20 15:38:07.917 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 52745 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:39:07.886 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:39:07.888 [info] [command][7dfbcefd-ce56-4a1d-a30a-59bfce9681ca] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""7dfbcefd-ce56-4a1d-a30a-59bfce9681ca""}\n2025-07-20 15:39:07.888 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][3f564859-3586-4a21-afc7-7fc6edac6525] received connection request\n2025-07-20 15:39:07.888 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\n\n2025-07-20 15:39:07.888 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:39:08.014 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][3f564859-3586-4a21-afc7-7fc6edac6525] socks forwarding established\n2025-07-20 15:39:08.051 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][3f564859-3586-4a21-afc7-7fc6edac6525] socks connection closed\n2025-07-20 15:39:08.051 [info] [command][7dfbcefd-ce56-4a1d-a30a-59bfce9681ca] Process exited with code 0\n2025-07-20 15:39:08.052 [info] [command][7dfbcefd-ce56-4a1d-a30a-59bfce9681ca] Socket close event received\n2025-07-20 15:39:08.079 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 52790 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:40:08.054 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:40:08.056 [info] [command][841014ab-b07e-4a84-8951-344d07a7fd7d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""841014ab-b07e-4a84-8951-344d07a7fd7d""}\n2025-07-20 15:40:08.057 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][63b885c7-0513-463d-97a8-5aa4c833dd3c] received connection request\n2025-07-20 15:40:08.058 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:40:08.083 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][63b885c7-0513-463d-97a8-5aa4c833dd3c] socks forwarding established\n2025-07-20 
15:40:08.115 [info] [command][841014ab-b07e-4a84-8951-344d07a7fd7d] Process exited with code 0\n2025-07-20 15:40:08.115 [info] [command][841014ab-b07e-4a84-8951-344d07a7fd7d] Socket close event received\n2025-07-20 15:40:08.117 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][63b885c7-0513-463d-97a8-5aa4c833dd3c] socks connection closed\n2025-07-20 15:40:08.139 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 52849 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:41:08.116 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:41:08.119 [info] [command][cce7082c-6f69-4444-95f0-94e5634a1686] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""cce7082c-6f69-4444-95f0-94e5634a1686""}\n2025-07-20 15:41:08.120 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][30450303-e559-4df8-96a6-b0c0e1db9d82] received connection request\n2025-07-20 15:41:08.121 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:41:08.146 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][30450303-e559-4df8-96a6-b0c0e1db9d82] socks forwarding established\n2025-07-20 15:41:08.177 [info] [command][cce7082c-6f69-4444-95f0-94e5634a1686] Process exited with code 0\n2025-07-20 15:41:08.177 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][30450303-e559-4df8-96a6-b0c0e1db9d82] socks connection closed\n2025-07-20 15:41:08.177 [info] [command][cce7082c-6f69-4444-95f0-94e5634a1686] Socket close event received\n2025-07-20 15:41:08.202 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 52907 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:42:08.182 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:42:08.186 [info] [command][a162c783-922d-4170-ba09-b45f34100c01] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""a162c783-922d-4170-ba09-b45f34100c01""}\n2025-07-20 15:42:08.186 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][835752ce-1385-447e-b15f-bda7423ec07b] received connection request\n2025-07-20 15:42:08.187 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:42:08.214 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][835752ce-1385-447e-b15f-bda7423ec07b] socks forwarding established\n2025-07-20 15:42:08.247 [info] [command][a162c783-922d-4170-ba09-b45f34100c01] Process exited with code 0\n2025-07-20 15:42:08.247 [info] [command][a162c783-922d-4170-ba09-b45f34100c01] Socket close event received\n2025-07-20 15:42:08.267 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][835752ce-1385-447e-b15f-bda7423ec07b] socks connection closed\n2025-07-20 15:42:08.272 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 52971 to 127.0.0.1 port 65277, nchannels 
6\n\n2025-07-20 15:43:08.248 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:43:08.251 [info] [command][b8be6a08-80d0-431b-9c7a-b624157a8f90] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""b8be6a08-80d0-431b-9c7a-b624157a8f90""}\n2025-07-20 15:43:08.252 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][3fca4621-2c77-466c-a020-0ebc62472e33] received connection request\n2025-07-20 15:43:08.254 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:43:08.286 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][3fca4621-2c77-466c-a020-0ebc62472e33] socks forwarding established\n2025-07-20 15:43:08.316 [info] [command][b8be6a08-80d0-431b-9c7a-b624157a8f90] Process exited with code 0\n2025-07-20 15:43:08.317 [info] [command][b8be6a08-80d0-431b-9c7a-b624157a8f90] Socket close event received\n2025-07-20 15:43:08.338 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][3fca4621-2c77-466c-a020-0ebc62472e33] socks connection closed\n2025-07-20 15:43:08.343 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 53021 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:44:08.321 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:44:08.322 [info] [command][9f7b29aa-95aa-42d4-ab3f-e2cd4041b2b3] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""9f7b29aa-95aa-42d4-ab3f-e2cd4041b2b3""}\n2025-07-20 15:44:08.322 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][e9f8336a-2505-4d02-9a51-515f8160e6c2] received connection request\n2025-07-20 15:44:08.324 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\n\n2025-07-20 15:44:08.324 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:44:08.352 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][e9f8336a-2505-4d02-9a51-515f8160e6c2] socks forwarding established\n2025-07-20 15:44:08.382 [info] [command][9f7b29aa-95aa-42d4-ab3f-e2cd4041b2b3] Process exited with code 0\n2025-07-20 15:44:08.382 [info] [command][9f7b29aa-95aa-42d4-ab3f-e2cd4041b2b3] Socket close event received\n2025-07-20 15:44:08.401 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][e9f8336a-2505-4d02-9a51-515f8160e6c2] socks connection closed\n2025-07-20 15:44:08.408 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 53072 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:45:08.385 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:45:08.387 [info] [command][7823c057-4a56-4aa6-8c85-28b517f35dda] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""7823c057-4a56-4aa6-8c85-28b517f35dda""}\n2025-07-20 15:45:08.387 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][8cfa8a62-0c3e-4e16-9509-ff2ab1af852e] received connection request\n2025-07-20 15:45:08.388 
[info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:45:08.416 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][8cfa8a62-0c3e-4e16-9509-ff2ab1af852e] socks forwarding established\n2025-07-20 15:45:08.458 [info] [command][7823c057-4a56-4aa6-8c85-28b517f35dda] Process exited with code 0\n2025-07-20 15:45:08.458 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][8cfa8a62-0c3e-4e16-9509-ff2ab1af852e] socks connection closed\n2025-07-20 15:45:08.458 [info] [command][7823c057-4a56-4aa6-8c85-28b517f35dda] Socket close event received\n2025-07-20 15:45:08.483 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 53122 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:46:08.463 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:46:08.464 [info] [command][de2992b4-5ce2-4edd-9c27-13be5953d8c7] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""de2992b4-5ce2-4edd-9c27-13be5953d8c7""}\n2025-07-20 15:46:08.465 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][ce55bc84-1bff-41aa-840d-76c11d05eeb7] received connection request\n2025-07-20 15:46:08.465 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:46:08.491 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][ce55bc84-1bff-41aa-840d-76c11d05eeb7] socks forwarding established\n2025-07-20 15:46:08.524 [info] [command][de2992b4-5ce2-4edd-9c27-13be5953d8c7] Process exited with code 0\n2025-07-20 15:46:08.524 [info] [command][de2992b4-5ce2-4edd-9c27-13be5953d8c7] Socket close event received\n2025-07-20 15:46:08.524 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][ce55bc84-1bff-41aa-840d-76c11d05eeb7] socks connection closed\n2025-07-20 15:46:08.550 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 53193 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:47:08.529 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:47:08.532 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][d00e156c-7b80-422c-a6e4-0546bac0259d] received connection request\n2025-07-20 15:47:08.532 [info] [command][ed8f7598-6e10-4760-9642-b9adf29e1c4b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""ed8f7598-6e10-4760-9642-b9adf29e1c4b""}\n2025-07-20 15:47:08.534 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:47:08.570 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][d00e156c-7b80-422c-a6e4-0546bac0259d] socks forwarding established\n2025-07-20 15:47:08.603 [info] [command][ed8f7598-6e10-4760-9642-b9adf29e1c4b] Process exited with code 0\n2025-07-20 15:47:08.603 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 
127.0.0.1:40717][d00e156c-7b80-422c-a6e4-0546bac0259d] socks connection closed\n2025-07-20 15:47:08.603 [info] [command][ed8f7598-6e10-4760-9642-b9adf29e1c4b] Socket close event received\n2025-07-20 15:47:08.627 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 53265 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:48:08.608 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:48:08.610 [info] [command][400e9aef-e3a3-4910-acf1-2d4fb4d0e2f1] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""400e9aef-e3a3-4910-acf1-2d4fb4d0e2f1""}\n2025-07-20 15:48:08.610 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][50dda335-1700-4472-8092-23af93666eeb] received connection request\n2025-07-20 15:48:08.611 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:48:08.636 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][50dda335-1700-4472-8092-23af93666eeb] socks forwarding established\n2025-07-20 15:48:08.664 [info] [command][400e9aef-e3a3-4910-acf1-2d4fb4d0e2f1] Process exited with code 0\n2025-07-20 15:48:08.664 [info] [command][400e9aef-e3a3-4910-acf1-2d4fb4d0e2f1] Socket close event received\n2025-07-20 15:48:08.665 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][50dda335-1700-4472-8092-23af93666eeb] socks connection closed\n2025-07-20 15:48:08.694 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 53315 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:49:08.668 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:49:08.671 [info] [command][170486f8-6567-41c2-9d44-a2747a095fc1] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""170486f8-6567-41c2-9d44-a2747a095fc1""}\n2025-07-20 15:49:08.671 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][1e33c6ed-2e01-44dc-a8c6-91dcda03c554] received connection request\n2025-07-20 15:49:08.672 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:49:08.699 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][1e33c6ed-2e01-44dc-a8c6-91dcda03c554] socks forwarding established\n2025-07-20 15:49:08.731 [info] [command][170486f8-6567-41c2-9d44-a2747a095fc1] Process exited with code 0\n2025-07-20 15:49:08.731 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][1e33c6ed-2e01-44dc-a8c6-91dcda03c554] socks connection closed\n2025-07-20 15:49:08.731 [info] [command][170486f8-6567-41c2-9d44-a2747a095fc1] Socket close event received\n2025-07-20 15:49:08.755 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 53362 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:50:08.738 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:50:08.740 [info] [command][7ae519ab-d02c-42b1-8ef5-63a759f01319] Sending command 
request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""7ae519ab-d02c-42b1-8ef5-63a759f01319""}\n2025-07-20 15:50:08.741 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][0fbfd3a7-3d83-4ac7-afcb-4b7694eaf542] received connection request\n2025-07-20 15:50:08.742 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:50:08.768 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][0fbfd3a7-3d83-4ac7-afcb-4b7694eaf542] socks forwarding established\n2025-07-20 15:50:08.802 [info] [command][7ae519ab-d02c-42b1-8ef5-63a759f01319] Process exited with code 0\n2025-07-20 15:50:08.802 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][0fbfd3a7-3d83-4ac7-afcb-4b7694eaf542] socks connection closed\n2025-07-20 15:50:08.803 [info] [command][7ae519ab-d02c-42b1-8ef5-63a759f01319] Socket close event received\n2025-07-20 15:50:08.827 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 53399 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:51:08.807 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:51:08.807 [info] [command][5cb3d970-449e-4896-a61f-269c53f4623a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""5cb3d970-449e-4896-a61f-269c53f4623a""}\n2025-07-20 15:51:08.807 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][b922a319-d6e4-459e-ad0e-a1f7ac26d82a] received connection request\n2025-07-20 15:51:08.808 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:51:08.856 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][b922a319-d6e4-459e-ad0e-a1f7ac26d82a] socks forwarding established\n2025-07-20 15:51:08.890 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][b922a319-d6e4-459e-ad0e-a1f7ac26d82a] socks connection closed\n2025-07-20 15:51:08.891 [info] [command][5cb3d970-449e-4896-a61f-269c53f4623a] Process exited with code 0\n2025-07-20 15:51:08.891 [info] [command][5cb3d970-449e-4896-a61f-269c53f4623a] Socket close event received\n2025-07-20 15:51:08.926 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 53451 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:52:08.893 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:52:08.895 [info] [command][d73e82eb-4c96-49af-9bf6-950b87dd5d9e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""d73e82eb-4c96-49af-9bf6-950b87dd5d9e""}\n2025-07-20 15:52:08.895 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][a8f3b020-4a3a-476e-8cb9-acb670e07227] received connection request\n2025-07-20 15:52:08.895 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:52:08.927 [info] 
[forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][a8f3b020-4a3a-476e-8cb9-acb670e07227] socks forwarding established\n2025-07-20 15:52:08.954 [info] [command][d73e82eb-4c96-49af-9bf6-950b87dd5d9e] Process exited with code 0\n2025-07-20 15:52:08.954 [info] [command][d73e82eb-4c96-49af-9bf6-950b87dd5d9e] Socket close event received\n2025-07-20 15:52:08.954 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][a8f3b020-4a3a-476e-8cb9-acb670e07227] socks connection closed\n2025-07-20 15:52:08.977 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 53548 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:53:08.953 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:53:08.953 [info] [command][f63716b1-3a0c-4f77-aafd-11eeeb43caad] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""f63716b1-3a0c-4f77-aafd-11eeeb43caad""}\n2025-07-20 15:53:08.954 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][b5ba53d9-e2a6-40f7-9918-be908a96ce61] received connection request\n2025-07-20 15:53:08.954 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:53:09.000 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][b5ba53d9-e2a6-40f7-9918-be908a96ce61] socks forwarding established\n2025-07-20 15:53:09.085 [info] [command][f63716b1-3a0c-4f77-aafd-11eeeb43caad] Process exited with code 0\n2025-07-20 15:53:09.086 [info] [command][f63716b1-3a0c-4f77-aafd-11eeeb43caad] Socket close event received\n2025-07-20 15:53:09.102 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][b5ba53d9-e2a6-40f7-9918-be908a96ce61] socks connection closed\n2025-07-20 15:53:09.149 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 53615 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:54:09.090 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:54:09.093 [info] [command][5e17de68-4f6b-4d6b-af4e-ceca3b1126fe] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""5e17de68-4f6b-4d6b-af4e-ceca3b1126fe""}\n2025-07-20 15:54:09.093 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][059c5d33-a2d3-4f13-962e-73d5a7c2efa6] received connection request\n2025-07-20 15:54:09.094 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:54:09.128 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][059c5d33-a2d3-4f13-962e-73d5a7c2efa6] socks forwarding established\n2025-07-20 15:54:09.217 [info] [command][5e17de68-4f6b-4d6b-af4e-ceca3b1126fe] Process exited with code 0\n2025-07-20 15:54:09.217 [info] [command][5e17de68-4f6b-4d6b-af4e-ceca3b1126fe] Socket close event received\n2025-07-20 15:54:09.272 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][059c5d33-a2d3-4f13-962e-73d5a7c2efa6] socks connection closed\n2025-07-20 15:54:09.284 [info] (ssh_tunnel) stderr: 
debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 53677 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:55:09.218 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:55:09.220 [info] [command][797083c0-0fe3-4c78-9282-4f46b7047711] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""797083c0-0fe3-4c78-9282-4f46b7047711""}\n2025-07-20 15:55:09.221 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][a7132726-e842-4eca-9346-8732eed06351] received connection request\n2025-07-20 15:55:09.221 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:55:09.245 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][a7132726-e842-4eca-9346-8732eed06351] socks forwarding established\n2025-07-20 15:55:09.354 [info] [command][797083c0-0fe3-4c78-9282-4f46b7047711] Process exited with code 0\n2025-07-20 15:55:09.354 [info] [command][797083c0-0fe3-4c78-9282-4f46b7047711] Socket close event received\n2025-07-20 15:55:09.356 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][a7132726-e842-4eca-9346-8732eed06351] socks connection closed\n2025-07-20 15:55:09.381 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 53725 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:56:09.355 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:56:09.356 [info] [command][e8cf9692-85ae-4ad1-b0ff-cd9e1a497294] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""e8cf9692-85ae-4ad1-b0ff-cd9e1a497294""}\n2025-07-20 15:56:09.357 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][b0c20ccc-24c5-4fe1-a6dc-57a97c48f214] received connection request\n2025-07-20 15:56:09.357 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:56:09.382 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][b0c20ccc-24c5-4fe1-a6dc-57a97c48f214] socks forwarding established\n2025-07-20 15:56:09.431 [info] [command][e8cf9692-85ae-4ad1-b0ff-cd9e1a497294] Process exited with code 0\n2025-07-20 15:56:09.431 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][b0c20ccc-24c5-4fe1-a6dc-57a97c48f214] socks connection closed\n2025-07-20 15:56:09.431 [info] [command][e8cf9692-85ae-4ad1-b0ff-cd9e1a497294] Socket close event received\n2025-07-20 15:56:09.459 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 53788 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:57:09.436 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:57:09.438 [info] [command][9cbd48b8-dedb-465d-b640-77100c6c66db] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""9cbd48b8-dedb-465d-b640-77100c6c66db""}\n2025-07-20 15:57:09.439 [info] [forwarding][multiplex][127.0.0.1:65286 -> 
127.0.0.1:40717][22a164e4-b416-4b82-8ed5-a2dce57b6b5a] received connection request\n2025-07-20 15:57:09.439 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:57:09.476 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][22a164e4-b416-4b82-8ed5-a2dce57b6b5a] socks forwarding established\n2025-07-20 15:57:09.505 [info] [command][9cbd48b8-dedb-465d-b640-77100c6c66db] Process exited with code 0\n2025-07-20 15:57:09.506 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][22a164e4-b416-4b82-8ed5-a2dce57b6b5a] socks connection closed\n2025-07-20 15:57:09.506 [info] [command][9cbd48b8-dedb-465d-b640-77100c6c66db] Socket close event received\n2025-07-20 15:57:09.608 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 53855 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:58:09.509 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:58:09.511 [info] [command][302b88ef-75de-46c9-9a2e-5adf1cd17633] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""302b88ef-75de-46c9-9a2e-5adf1cd17633""}\n2025-07-20 15:58:09.512 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][14abfc5c-b953-496d-a492-713012272f92] received connection request\n2025-07-20 15:58:09.512 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:58:09.543 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][14abfc5c-b953-496d-a492-713012272f92] socks forwarding established\n2025-07-20 15:58:09.576 [info] [command][302b88ef-75de-46c9-9a2e-5adf1cd17633] Process exited with code 0\n2025-07-20 15:58:09.576 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][14abfc5c-b953-496d-a492-713012272f92] socks connection closed\n2025-07-20 15:58:09.577 [info] [command][302b88ef-75de-46c9-9a2e-5adf1cd17633] Socket close event received\n2025-07-20 15:58:09.602 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 53934 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 15:59:09.580 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 15:59:09.582 [info] [command][8acb2744-eb80-49f6-8d7c-f9087133bf87] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""8acb2744-eb80-49f6-8d7c-f9087133bf87""}\n2025-07-20 15:59:09.583 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][4d8dfbe8-3549-48cf-922f-75f625344bfd] received connection request\n2025-07-20 15:59:09.583 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 15:59:09.610 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][4d8dfbe8-3549-48cf-922f-75f625344bfd] socks forwarding established\n2025-07-20 15:59:09.640 [info] [command][8acb2744-eb80-49f6-8d7c-f9087133bf87] Process exited with code 0\n2025-07-20 
15:59:09.640 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][4d8dfbe8-3549-48cf-922f-75f625344bfd] socks connection closed\n2025-07-20 15:59:09.640 [info] [command][8acb2744-eb80-49f6-8d7c-f9087133bf87] Socket close event received\n2025-07-20 15:59:09.666 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 53978 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:00:09.642 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:00:09.643 [info] [command][fbc264ce-cae0-4525-ad2b-55172487ed2b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""fbc264ce-cae0-4525-ad2b-55172487ed2b""}\n2025-07-20 16:00:09.644 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][327a1ab4-0b3b-4617-8041-733bf4798adc] received connection request\n2025-07-20 16:00:09.645 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:00:09.676 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][327a1ab4-0b3b-4617-8041-733bf4798adc] socks forwarding established\n2025-07-20 16:00:09.710 [info] [command][fbc264ce-cae0-4525-ad2b-55172487ed2b] Process exited with code 0\n2025-07-20 16:00:09.710 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][327a1ab4-0b3b-4617-8041-733bf4798adc] socks connection closed\n2025-07-20 16:00:09.711 [info] [command][fbc264ce-cae0-4525-ad2b-55172487ed2b] Socket close event received\n2025-07-20 16:00:09.735 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 54028 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:01:09.709 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:01:09.711 [info] [command][041d8b6e-67ad-4b73-a678-ee9e8353079f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""041d8b6e-67ad-4b73-a678-ee9e8353079f""}\n2025-07-20 16:01:09.712 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][1748d2f0-e125-4c96-9123-13a8b3a743ce] received connection request\n2025-07-20 16:01:09.712 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:01:09.752 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][1748d2f0-e125-4c96-9123-13a8b3a743ce] socks forwarding established\n2025-07-20 16:01:09.788 [info] [command][041d8b6e-67ad-4b73-a678-ee9e8353079f] Process exited with code 0\n2025-07-20 16:01:09.788 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][1748d2f0-e125-4c96-9123-13a8b3a743ce] socks connection closed\n2025-07-20 16:01:09.788 [info] [command][041d8b6e-67ad-4b73-a678-ee9e8353079f] Socket close event received\n2025-07-20 16:01:09.815 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 54108 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:02:09.790 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 
16:02:09.793 [info] [command][88372daa-a060-453a-b845-6f72c80b466d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""88372daa-a060-453a-b845-6f72c80b466d""}\n2025-07-20 16:02:09.793 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][649a4dc4-997a-4a8d-a65e-f4665088e897] received connection request\n2025-07-20 16:02:09.794 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:02:09.818 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][649a4dc4-997a-4a8d-a65e-f4665088e897] socks forwarding established\n2025-07-20 16:02:09.846 [info] [command][88372daa-a060-453a-b845-6f72c80b466d] Process exited with code 0\n2025-07-20 16:02:09.847 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][649a4dc4-997a-4a8d-a65e-f4665088e897] socks connection closed\n2025-07-20 16:02:09.847 [info] [command][88372daa-a060-453a-b845-6f72c80b466d] Socket close event received\n2025-07-20 16:02:09.870 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 54182 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:03:09.849 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:03:09.851 [info] [command][140bcdd8-9995-4473-86c0-afbc9e842b6a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""140bcdd8-9995-4473-86c0-afbc9e842b6a""}\n2025-07-20 16:03:09.852 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][20ae62e3-785c-4e79-8033-2edd59b42b40] received connection request\n2025-07-20 16:03:09.852 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:03:09.878 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][20ae62e3-785c-4e79-8033-2edd59b42b40] socks forwarding established\n2025-07-20 16:03:09.908 [info] [command][140bcdd8-9995-4473-86c0-afbc9e842b6a] Process exited with code 0\n2025-07-20 16:03:09.908 [info] [command][140bcdd8-9995-4473-86c0-afbc9e842b6a] Socket close event received\n2025-07-20 16:03:09.932 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][20ae62e3-785c-4e79-8033-2edd59b42b40] socks connection closed\n2025-07-20 16:03:09.933 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 54260 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:04:09.914 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:04:09.915 [info] [command][12dc4470-b1e4-4803-8751-aa9337975e26] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""12dc4470-b1e4-4803-8751-aa9337975e26""}\n2025-07-20 16:04:09.915 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][1fe465aa-fb85-4631-8d58-9283eed5ea1d] received connection request\n2025-07-20 16:04:09.916 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] 
(inactive timeout: 0)\n\n2025-07-20 16:04:09.941 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][1fe465aa-fb85-4631-8d58-9283eed5ea1d] socks forwarding established\n2025-07-20 16:04:09.969 [info] [command][12dc4470-b1e4-4803-8751-aa9337975e26] Process exited with code 0\n2025-07-20 16:04:09.969 [info] [command][12dc4470-b1e4-4803-8751-aa9337975e26] Socket close event received\n2025-07-20 16:04:10.100 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][1fe465aa-fb85-4631-8d58-9283eed5ea1d] socks connection closed\n2025-07-20 16:04:10.100 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 54297 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:05:09.969 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:05:09.973 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][6d7ac720-2056-4ada-b8d1-0f97304a45a5] received connection request\n2025-07-20 16:05:09.973 [info] [command][bf6379ea-af92-45ba-9d7a-64b934cf3597] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""bf6379ea-af92-45ba-9d7a-64b934cf3597""}\n2025-07-20 16:05:09.973 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:05:09.998 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][6d7ac720-2056-4ada-b8d1-0f97304a45a5] socks forwarding established\n2025-07-20 16:05:10.029 [info] [command][bf6379ea-af92-45ba-9d7a-64b934cf3597] Process exited with code 0\n2025-07-20 16:05:10.029 [info] [command][bf6379ea-af92-45ba-9d7a-64b934cf3597] Socket close event received\n2025-07-20 16:05:10.051 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][6d7ac720-2056-4ada-b8d1-0f97304a45a5] socks connection closed\n2025-07-20 16:05:10.054 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 54340 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:06:10.034 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:06:10.036 [info] [command][9cf1d9a9-d478-422f-b4ac-e6af4d9044f5] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""9cf1d9a9-d478-422f-b4ac-e6af4d9044f5""}\n2025-07-20 16:06:10.036 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][ee594d19-12c4-47f3-80d3-7a5916def563] received connection request\n2025-07-20 16:06:10.037 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:06:10.062 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][ee594d19-12c4-47f3-80d3-7a5916def563] socks forwarding established\n2025-07-20 16:06:10.117 [info] [command][9cf1d9a9-d478-422f-b4ac-e6af4d9044f5] Process exited with code 0\n2025-07-20 16:06:10.117 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][ee594d19-12c4-47f3-80d3-7a5916def563] socks connection closed\n2025-07-20 16:06:10.117 [info] [command][9cf1d9a9-d478-422f-b4ac-e6af4d9044f5] Socket close event 
received\n2025-07-20 16:06:10.142 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 54426 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:07:10.120 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:07:10.122 [info] [command][42b12a41-8324-4c21-9872-39580301f670] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""42b12a41-8324-4c21-9872-39580301f670""}\n2025-07-20 16:07:10.123 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][139c540c-5f06-4be8-8fca-100bde350a44] received connection request\n2025-07-20 16:07:10.124 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:07:10.240 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][139c540c-5f06-4be8-8fca-100bde350a44] socks forwarding established\n2025-07-20 16:07:10.270 [info] [command][42b12a41-8324-4c21-9872-39580301f670] Process exited with code 0\n2025-07-20 16:07:10.270 [info] [command][42b12a41-8324-4c21-9872-39580301f670] Socket close event received\n2025-07-20 16:07:10.294 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 54489 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:07:10.294 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][139c540c-5f06-4be8-8fca-100bde350a44] socks connection closed\n2025-07-20 16:08:10.275 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:08:10.277 [info] [command][32344b4f-fdd5-4543-96d4-d6f546ff90b5] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""32344b4f-fdd5-4543-96d4-d6f546ff90b5""}\n2025-07-20 16:08:10.278 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][84cca126-b7fa-4216-8595-692f87f8d2fa] received connection request\n2025-07-20 16:08:10.278 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:08:10.304 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][84cca126-b7fa-4216-8595-692f87f8d2fa] socks forwarding established\n2025-07-20 16:08:10.372 [info] [command][32344b4f-fdd5-4543-96d4-d6f546ff90b5] Process exited with code 0\n2025-07-20 16:08:10.372 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][84cca126-b7fa-4216-8595-692f87f8d2fa] socks connection closed\n2025-07-20 16:08:10.372 [info] [command][32344b4f-fdd5-4543-96d4-d6f546ff90b5] Socket close event received\n2025-07-20 16:08:10.397 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 54551 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:09:10.377 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:09:10.380 [info] [command][004828b5-92a3-41c0-ade7-735e7c79f372] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""004828b5-92a3-41c0-ade7-735e7c79f372""}\n2025-07-20 
16:09:10.381 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][f8546528-6f92-41b8-a34e-6d67d85c7f76] received connection request\n2025-07-20 16:09:10.382 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:09:10.497 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][f8546528-6f92-41b8-a34e-6d67d85c7f76] socks forwarding established\n2025-07-20 16:09:10.527 [info] [command][004828b5-92a3-41c0-ade7-735e7c79f372] Process exited with code 0\n2025-07-20 16:09:10.527 [info] [command][004828b5-92a3-41c0-ade7-735e7c79f372] Socket close event received\n2025-07-20 16:09:10.573 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][f8546528-6f92-41b8-a34e-6d67d85c7f76] socks connection closed\n2025-07-20 16:09:10.585 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 54590 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:10:10.527 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:10:10.529 [info] [command][399a4474-2761-41fb-a3de-8fd307e88f76] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""399a4474-2761-41fb-a3de-8fd307e88f76""}\n2025-07-20 16:10:10.529 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][8359bc68-2e73-43f0-8b0e-f5b7aff39ddb] received connection request\n2025-07-20 16:10:10.529 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:10:10.609 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][8359bc68-2e73-43f0-8b0e-f5b7aff39ddb] socks forwarding established\n2025-07-20 16:10:10.637 [info] [command][399a4474-2761-41fb-a3de-8fd307e88f76] Process exited with code 0\n2025-07-20 16:10:10.637 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][8359bc68-2e73-43f0-8b0e-f5b7aff39ddb] socks connection closed\n2025-07-20 16:10:10.637 [info] [command][399a4474-2761-41fb-a3de-8fd307e88f76] Socket close event received\n2025-07-20 16:10:10.669 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 54644 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:11:10.641 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:11:10.643 [info] [command][b89570a8-28b2-4155-ae2b-2ddc08f43a21] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""b89570a8-28b2-4155-ae2b-2ddc08f43a21""}\n2025-07-20 16:11:10.643 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][1191ebeb-021e-41fa-83e1-ab56ea4eb24e] received connection request\n2025-07-20 16:11:10.643 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\n\n2025-07-20 16:11:10.644 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:11:10.677 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][1191ebeb-021e-41fa-83e1-ab56ea4eb24e] socks forwarding 
established\n2025-07-20 16:11:10.707 [info] [command][b89570a8-28b2-4155-ae2b-2ddc08f43a21] Process exited with code 0\n2025-07-20 16:11:10.707 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][1191ebeb-021e-41fa-83e1-ab56ea4eb24e] socks connection closed\n2025-07-20 16:11:10.707 [info] [command][b89570a8-28b2-4155-ae2b-2ddc08f43a21] Socket close event received\n2025-07-20 16:11:10.733 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 54708 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:12:10.712 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:12:10.713 [info] [command][372e4b9a-1493-43df-998b-85c55df2ba61] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""372e4b9a-1493-43df-998b-85c55df2ba61""}\n2025-07-20 16:12:10.714 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][e53f26ff-dea9-4f31-b7e3-1e9c0a6c99e5] received connection request\n2025-07-20 16:12:10.715 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:12:10.740 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][e53f26ff-dea9-4f31-b7e3-1e9c0a6c99e5] socks forwarding established\n2025-07-20 16:12:10.846 [info] [command][372e4b9a-1493-43df-998b-85c55df2ba61] Process exited with code 0\n2025-07-20 16:12:10.846 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][e53f26ff-dea9-4f31-b7e3-1e9c0a6c99e5] socks connection closed\n2025-07-20 16:12:10.846 [info] [command][372e4b9a-1493-43df-998b-85c55df2ba61] Socket close event received\n2025-07-20 16:12:10.870 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 54769 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:13:10.851 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:13:10.852 [info] [command][15cbaaf5-1f54-46ce-92f8-8cd862431827] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""15cbaaf5-1f54-46ce-92f8-8cd862431827""}\n2025-07-20 16:13:10.853 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][1cfe59c4-285e-4a71-b5c0-3cb9a6160d48] received connection request\n2025-07-20 16:13:10.854 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:13:10.968 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][1cfe59c4-285e-4a71-b5c0-3cb9a6160d48] socks forwarding established\n2025-07-20 16:13:10.997 [info] [command][15cbaaf5-1f54-46ce-92f8-8cd862431827] Process exited with code 0\n2025-07-20 16:13:10.998 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][1cfe59c4-285e-4a71-b5c0-3cb9a6160d48] socks connection closed\n2025-07-20 16:13:10.998 [info] [command][15cbaaf5-1f54-46ce-92f8-8cd862431827] Socket close event received\n2025-07-20 16:13:11.045 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 54826 to 127.0.0.1 
port 65277, nchannels 6\n\n2025-07-20 16:14:11.001 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:14:11.003 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][dccb2f9d-09a7-44b5-b779-abae6307b36a] received connection request\n2025-07-20 16:14:11.004 [info] [command][cd03cc7d-e4a7-4d2e-b9a0-2808358fcf3e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""cd03cc7d-e4a7-4d2e-b9a0-2808358fcf3e""}\n2025-07-20 16:14:11.007 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:14:11.135 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][dccb2f9d-09a7-44b5-b779-abae6307b36a] socks forwarding established\n2025-07-20 16:14:11.241 [info] [command][cd03cc7d-e4a7-4d2e-b9a0-2808358fcf3e] Process exited with code 0\n2025-07-20 16:14:11.241 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][dccb2f9d-09a7-44b5-b779-abae6307b36a] socks connection closed\n2025-07-20 16:14:11.241 [info] [command][cd03cc7d-e4a7-4d2e-b9a0-2808358fcf3e] Socket close event received\n2025-07-20 16:14:11.357 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 54868 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:15:11.246 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:15:11.247 [info] [command][e5160ab9-7df1-4b42-80db-f66aa0e09afe] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""e5160ab9-7df1-4b42-80db-f66aa0e09afe""}\n2025-07-20 16:15:11.248 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][b6d5c3d2-9bcd-4c4f-a541-21e1199eb482] received connection request\n2025-07-20 16:15:11.248 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:15:11.273 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][b6d5c3d2-9bcd-4c4f-a541-21e1199eb482] socks forwarding established\n2025-07-20 16:15:11.304 [info] [command][e5160ab9-7df1-4b42-80db-f66aa0e09afe] Process exited with code 0\n2025-07-20 16:15:11.304 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][b6d5c3d2-9bcd-4c4f-a541-21e1199eb482] socks connection closed\n2025-07-20 16:15:11.304 [info] [command][e5160ab9-7df1-4b42-80db-f66aa0e09afe] Socket close event received\n2025-07-20 16:15:11.331 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 54916 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:16:11.303 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:16:11.306 [info] [command][496d3fc0-5074-4eae-91a5-9a126d6d438c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""496d3fc0-5074-4eae-91a5-9a126d6d438c""}\n2025-07-20 16:16:11.307 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][9794056b-8268-4d6c-a539-44848ab0312f] received connection request\n2025-07-20 16:16:11.307 [info] (ssh_tunnel) stderr: 
debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:16:11.343 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][9794056b-8268-4d6c-a539-44848ab0312f] socks forwarding established\n2025-07-20 16:16:11.373 [info] [command][496d3fc0-5074-4eae-91a5-9a126d6d438c] Process exited with code 0\n2025-07-20 16:16:11.374 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][9794056b-8268-4d6c-a539-44848ab0312f] socks connection closed\n2025-07-20 16:16:11.374 [info] [command][496d3fc0-5074-4eae-91a5-9a126d6d438c] Socket close event received\n2025-07-20 16:16:11.397 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 54964 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:17:11.378 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:17:11.380 [info] [command][230a17f1-2bf9-4a00-a780-b9dd7ce33259] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""230a17f1-2bf9-4a00-a780-b9dd7ce33259""}\n2025-07-20 16:17:11.381 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][dd87102b-90c8-4325-ac6e-f87092ea48e2] received connection request\n2025-07-20 16:17:11.382 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:17:11.605 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][dd87102b-90c8-4325-ac6e-f87092ea48e2] socks forwarding established\n2025-07-20 16:17:11.637 [info] [command][230a17f1-2bf9-4a00-a780-b9dd7ce33259] Process exited with code 0\n2025-07-20 16:17:11.637 [info] [command][230a17f1-2bf9-4a00-a780-b9dd7ce33259] Socket close event received\n2025-07-20 16:17:11.642 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][dd87102b-90c8-4325-ac6e-f87092ea48e2] socks connection closed\n2025-07-20 16:17:11.792 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55019 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:18:11.641 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:18:11.643 [info] [command][4a2129c8-e00e-4f88-9f6b-bc4d8c88b018] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""4a2129c8-e00e-4f88-9f6b-bc4d8c88b018""}\n2025-07-20 16:18:11.643 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][e028e66c-43f3-435f-87e0-3ef44a7ada84] received connection request\n2025-07-20 16:18:11.644 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:18:11.670 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][e028e66c-43f3-435f-87e0-3ef44a7ada84] socks forwarding established\n2025-07-20 16:18:11.699 [info] [command][4a2129c8-e00e-4f88-9f6b-bc4d8c88b018] Process exited with code 0\n2025-07-20 16:18:11.699 [info] [command][4a2129c8-e00e-4f88-9f6b-bc4d8c88b018] Socket close event received\n2025-07-20 16:18:11.700 [info] 
[forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][e028e66c-43f3-435f-87e0-3ef44a7ada84] socks connection closed\n2025-07-20 16:18:11.725 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55074 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:19:11.701 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:19:11.702 [info] [command][3d5f009d-0f6f-4e1a-b669-f8930dce1757] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""3d5f009d-0f6f-4e1a-b669-f8930dce1757""}\n2025-07-20 16:19:11.703 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][222644e7-7213-4b1f-9b3c-9b150f4c8e1f] received connection request\n2025-07-20 16:19:11.703 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:19:11.728 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][222644e7-7213-4b1f-9b3c-9b150f4c8e1f] socks forwarding established\n2025-07-20 16:19:11.759 [info] [command][3d5f009d-0f6f-4e1a-b669-f8930dce1757] Process exited with code 0\n2025-07-20 16:19:11.759 [info] [command][3d5f009d-0f6f-4e1a-b669-f8930dce1757] Socket close event received\n2025-07-20 16:19:11.759 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][222644e7-7213-4b1f-9b3c-9b150f4c8e1f] socks connection closed\n2025-07-20 16:19:11.784 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55123 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:20:11.762 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:20:11.764 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][8d55acef-245f-46a0-b3a4-9429ba533423] received connection request\n2025-07-20 16:20:11.765 [info] [command][c4e22154-e41f-4d59-b15d-375340ebc46a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""c4e22154-e41f-4d59-b15d-375340ebc46a""}\n2025-07-20 16:20:11.765 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:20:11.794 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][8d55acef-245f-46a0-b3a4-9429ba533423] socks forwarding established\n2025-07-20 16:20:11.824 [info] [command][c4e22154-e41f-4d59-b15d-375340ebc46a] Process exited with code 0\n2025-07-20 16:20:11.825 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][8d55acef-245f-46a0-b3a4-9429ba533423] socks connection closed\n2025-07-20 16:20:11.825 [info] [command][c4e22154-e41f-4d59-b15d-375340ebc46a] Socket close event received\n2025-07-20 16:20:11.950 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55173 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:21:11.829 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:21:11.831 [info] [command][15304322-61e7-4ccc-8e6a-16dcb600f38d] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""15304322-61e7-4ccc-8e6a-16dcb600f38d""}\n2025-07-20 16:21:11.832 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][23de4515-70ea-45d1-ab0e-41ee97d30c63] received connection request\n2025-07-20 16:21:11.832 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:21:11.940 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][23de4515-70ea-45d1-ab0e-41ee97d30c63] socks forwarding established\n2025-07-20 16:21:11.976 [info] [command][15304322-61e7-4ccc-8e6a-16dcb600f38d] Process exited with code 0\n2025-07-20 16:21:11.976 [info] [command][15304322-61e7-4ccc-8e6a-16dcb600f38d] Socket close event received\n2025-07-20 16:21:11.979 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][23de4515-70ea-45d1-ab0e-41ee97d30c63] socks connection closed\n2025-07-20 16:21:12.042 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55227 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:22:11.979 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:22:11.981 [info] [command][2fa9c209-d007-413f-ab12-1c64f9457646] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""2fa9c209-d007-413f-ab12-1c64f9457646""}\n2025-07-20 16:22:11.981 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][81135d87-c609-4803-8c11-99cb4d5ce60d] received connection request\n2025-07-20 16:22:11.981 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:22:12.041 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][81135d87-c609-4803-8c11-99cb4d5ce60d] socks forwarding established\n2025-07-20 16:22:12.086 [info] [command][2fa9c209-d007-413f-ab12-1c64f9457646] Process exited with code 0\n2025-07-20 16:22:12.086 [info] [command][2fa9c209-d007-413f-ab12-1c64f9457646] Socket close event received\n2025-07-20 16:22:12.100 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][81135d87-c609-4803-8c11-99cb4d5ce60d] socks connection closed\n2025-07-20 16:22:12.113 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55284 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:23:12.087 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:23:12.090 [info] [command][b8a81bf8-2e85-4def-8530-e54054133bb7] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""b8a81bf8-2e85-4def-8530-e54054133bb7""}\n2025-07-20 16:23:12.091 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][829de282-6cc2-4c4f-8968-70046b6a418e] received connection request\n2025-07-20 16:23:12.091 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:23:12.116 [info] [forwarding][multiplex][127.0.0.1:65286 -> 
127.0.0.1:65277 -> 127.0.0.1:40717][829de282-6cc2-4c4f-8968-70046b6a418e] socks forwarding established\n2025-07-20 16:23:12.147 [info] [command][b8a81bf8-2e85-4def-8530-e54054133bb7] Process exited with code 0\n2025-07-20 16:23:12.147 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][829de282-6cc2-4c4f-8968-70046b6a418e] socks connection closed\n2025-07-20 16:23:12.147 [info] [command][b8a81bf8-2e85-4def-8530-e54054133bb7] Socket close event received\n2025-07-20 16:23:12.173 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55339 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:24:12.152 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:24:12.154 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][14f97731-73be-4d35-8b82-528530639aa9] received connection request\n2025-07-20 16:24:12.154 [info] [command][384adbc8-091b-4b44-a88b-3deddf37cfe0] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""384adbc8-091b-4b44-a88b-3deddf37cfe0""}\n2025-07-20 16:24:12.154 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:24:12.178 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][14f97731-73be-4d35-8b82-528530639aa9] socks forwarding established\n2025-07-20 16:24:12.275 [info] [command][384adbc8-091b-4b44-a88b-3deddf37cfe0] Process exited with code 0\n2025-07-20 16:24:12.276 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][14f97731-73be-4d35-8b82-528530639aa9] socks connection closed\n2025-07-20 16:24:12.276 [info] [command][384adbc8-091b-4b44-a88b-3deddf37cfe0] Socket close event received\n2025-07-20 16:24:12.298 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55381 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:25:12.278 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:25:12.281 [info] [command][65afbc5a-d326-4ee8-8841-8bc35117431b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""65afbc5a-d326-4ee8-8841-8bc35117431b""}\n2025-07-20 16:25:12.281 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][dc74491a-9b45-451b-bef0-218de2f24845] received connection request\n2025-07-20 16:25:12.282 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:25:12.306 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][dc74491a-9b45-451b-bef0-218de2f24845] socks forwarding established\n2025-07-20 16:25:12.333 [info] [command][65afbc5a-d326-4ee8-8841-8bc35117431b] Process exited with code 0\n2025-07-20 16:25:12.333 [info] [command][65afbc5a-d326-4ee8-8841-8bc35117431b] Socket close event received\n2025-07-20 16:25:12.333 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][dc74491a-9b45-451b-bef0-218de2f24845] socks connection closed\n2025-07-20 16:25:12.356 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: 
listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55428 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:26:12.334 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:26:12.336 [info] [command][8826de55-877d-44bb-9dac-ae053cf1bce1] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""8826de55-877d-44bb-9dac-ae053cf1bce1""}\n2025-07-20 16:26:12.336 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][619d6efe-54aa-4ac7-b4dc-a8b74f825d2b] received connection request\n2025-07-20 16:26:12.337 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:26:12.362 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][619d6efe-54aa-4ac7-b4dc-a8b74f825d2b] socks forwarding established\n2025-07-20 16:26:12.391 [info] [command][8826de55-877d-44bb-9dac-ae053cf1bce1] Process exited with code 0\n2025-07-20 16:26:12.391 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][619d6efe-54aa-4ac7-b4dc-a8b74f825d2b] socks connection closed\n2025-07-20 16:26:12.391 [info] [command][8826de55-877d-44bb-9dac-ae053cf1bce1] Socket close event received\n2025-07-20 16:26:12.415 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55474 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:27:12.396 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:27:12.398 [info] [command][8cd852aa-9c81-4945-a962-85c2810e04c4] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""8cd852aa-9c81-4945-a962-85c2810e04c4""}\n2025-07-20 16:27:12.398 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][f075da91-0441-4dc0-8849-a8ef94e9de58] received connection request\n2025-07-20 16:27:12.399 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:27:12.424 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][f075da91-0441-4dc0-8849-a8ef94e9de58] socks forwarding established\n2025-07-20 16:27:12.455 [info] [command][8cd852aa-9c81-4945-a962-85c2810e04c4] Process exited with code 0\n2025-07-20 16:27:12.455 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][f075da91-0441-4dc0-8849-a8ef94e9de58] socks connection closed\n2025-07-20 16:27:12.455 [info] [command][8cd852aa-9c81-4945-a962-85c2810e04c4] Socket close event received\n2025-07-20 16:27:12.481 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55534 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:28:12.462 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:28:12.468 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][923fd251-e3a8-4a3e-a777-03b243f13e2b] received connection request\n2025-07-20 16:28:12.468 [info] [command][b1893cd5-d96e-4dac-8b09-c0f54d7e9d22] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""b1893cd5-d96e-4dac-8b09-c0f54d7e9d22""}\n2025-07-20 16:28:12.470 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\n\n2025-07-20 16:28:12.471 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:28:12.542 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][923fd251-e3a8-4a3e-a777-03b243f13e2b] socks forwarding established\n2025-07-20 16:28:12.740 [info] [command][b1893cd5-d96e-4dac-8b09-c0f54d7e9d22] Process exited with code 0\n2025-07-20 16:28:12.740 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][923fd251-e3a8-4a3e-a777-03b243f13e2b] socks connection closed\n2025-07-20 16:28:12.740 [info] [command][b1893cd5-d96e-4dac-8b09-c0f54d7e9d22] Socket close event received\n2025-07-20 16:28:12.838 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55582 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:29:12.744 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:29:12.746 [info] [command][9aea0aa8-1b2b-402d-8d79-c0d1336509fd] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""9aea0aa8-1b2b-402d-8d79-c0d1336509fd""}\n2025-07-20 16:29:12.746 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][6c3b456d-4f63-4926-afde-4e80803aede5] received connection request\n2025-07-20 16:29:12.746 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:29:12.770 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][6c3b456d-4f63-4926-afde-4e80803aede5] socks forwarding established\n2025-07-20 16:29:12.798 [info] [command][9aea0aa8-1b2b-402d-8d79-c0d1336509fd] Process exited with code 0\n2025-07-20 16:29:12.798 [info] [command][9aea0aa8-1b2b-402d-8d79-c0d1336509fd] Socket close event received\n2025-07-20 16:29:12.800 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][6c3b456d-4f63-4926-afde-4e80803aede5] socks connection closed\n2025-07-20 16:29:12.824 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55610 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:30:12.800 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:30:12.802 [info] [command][c9fedde2-e237-4f07-a56f-8d77d8da8313] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""c9fedde2-e237-4f07-a56f-8d77d8da8313""}\n2025-07-20 16:30:12.802 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][8f6c6b6a-a1b3-4ddb-b584-7872d0677fa9] received connection request\n2025-07-20 16:30:12.802 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:30:12.826 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][8f6c6b6a-a1b3-4ddb-b584-7872d0677fa9] socks forwarding 
established\n2025-07-20 16:30:12.853 [info] [command][c9fedde2-e237-4f07-a56f-8d77d8da8313] Process exited with code 0\n2025-07-20 16:30:12.853 [info] [command][c9fedde2-e237-4f07-a56f-8d77d8da8313] Socket close event received\n2025-07-20 16:30:12.853 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][8f6c6b6a-a1b3-4ddb-b584-7872d0677fa9] socks connection closed\n2025-07-20 16:30:12.877 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55645 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:31:12.854 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:31:12.856 [info] [command][0b74a5ee-0fba-49ac-a95e-67740f5d4a3c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""0b74a5ee-0fba-49ac-a95e-67740f5d4a3c""}\n2025-07-20 16:31:12.857 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][e734eca2-07e1-42bf-9fa5-9ec467977cd2] received connection request\n2025-07-20 16:31:12.857 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:31:12.957 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][e734eca2-07e1-42bf-9fa5-9ec467977cd2] socks forwarding established\n2025-07-20 16:31:12.988 [info] [command][0b74a5ee-0fba-49ac-a95e-67740f5d4a3c] Process exited with code 0\n2025-07-20 16:31:12.988 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][e734eca2-07e1-42bf-9fa5-9ec467977cd2] socks connection closed\n2025-07-20 16:31:12.988 [info] [command][0b74a5ee-0fba-49ac-a95e-67740f5d4a3c] Socket close event received\n2025-07-20 16:31:13.018 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55706 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:32:12.989 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:32:12.992 [info] [command][77865a04-4a4f-4078-8966-f80c11f79c98] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""77865a04-4a4f-4078-8966-f80c11f79c98""}\n2025-07-20 16:32:12.992 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][68c6b744-fa0a-48cd-9a68-0ebbf761c5aa] received connection request\n2025-07-20 16:32:12.992 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:32:13.040 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][68c6b744-fa0a-48cd-9a68-0ebbf761c5aa] socks forwarding established\n2025-07-20 16:32:13.082 [info] [command][77865a04-4a4f-4078-8966-f80c11f79c98] Process exited with code 0\n2025-07-20 16:32:13.083 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][68c6b744-fa0a-48cd-9a68-0ebbf761c5aa] socks connection closed\n2025-07-20 16:32:13.083 [info] [command][77865a04-4a4f-4078-8966-f80c11f79c98] Socket close event received\n2025-07-20 16:32:13.107 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55789 to 127.0.0.1 
port 65277, nchannels 6\n\n2025-07-20 16:33:13.087 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:33:13.089 [info] [command][46e0cbbe-faae-4497-80e2-266f8cbbb1ba] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""46e0cbbe-faae-4497-80e2-266f8cbbb1ba""}\n2025-07-20 16:33:13.090 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][9ef76ca6-d643-433a-8e10-3f8356540f1e] received connection request\n2025-07-20 16:33:13.090 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:33:13.115 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][9ef76ca6-d643-433a-8e10-3f8356540f1e] socks forwarding established\n2025-07-20 16:33:13.143 [info] [command][46e0cbbe-faae-4497-80e2-266f8cbbb1ba] Process exited with code 0\n2025-07-20 16:33:13.143 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][9ef76ca6-d643-433a-8e10-3f8356540f1e] socks connection closed\n2025-07-20 16:33:13.143 [info] [command][46e0cbbe-faae-4497-80e2-266f8cbbb1ba] Socket close event received\n2025-07-20 16:33:13.169 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55842 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:34:13.146 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:34:13.150 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][deed84c7-88e1-4369-9a27-810701d71881] received connection request\n2025-07-20 16:34:13.151 [info] [command][72942e89-fafc-41ba-98b1-c1b5816a9ddd] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""72942e89-fafc-41ba-98b1-c1b5816a9ddd""}\n2025-07-20 16:34:13.151 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:34:13.191 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][deed84c7-88e1-4369-9a27-810701d71881] socks forwarding established\n2025-07-20 16:34:13.218 [info] [command][72942e89-fafc-41ba-98b1-c1b5816a9ddd] Process exited with code 0\n2025-07-20 16:34:13.218 [info] [command][72942e89-fafc-41ba-98b1-c1b5816a9ddd] Socket close event received\n2025-07-20 16:34:13.240 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][deed84c7-88e1-4369-9a27-810701d71881] socks connection closed\n2025-07-20 16:34:13.241 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55899 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:35:13.223 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:35:13.224 [info] [command][64a85f97-4ac2-4a0a-9aa7-87ba14498a0c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""64a85f97-4ac2-4a0a-9aa7-87ba14498a0c""}\n2025-07-20 16:35:13.225 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][0a1462df-969e-4dd5-ae43-a159bb6b2035] received connection request\n2025-07-20 16:35:13.225 [info] (ssh_tunnel) stderr: 
debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:35:13.270 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][0a1462df-969e-4dd5-ae43-a159bb6b2035] socks forwarding established\n2025-07-20 16:35:13.384 [info] [command][64a85f97-4ac2-4a0a-9aa7-87ba14498a0c] Process exited with code 0\n2025-07-20 16:35:13.385 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][0a1462df-969e-4dd5-ae43-a159bb6b2035] socks connection closed\n2025-07-20 16:35:13.385 [info] [command][64a85f97-4ac2-4a0a-9aa7-87ba14498a0c] Socket close event received\n2025-07-20 16:35:13.411 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55931 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:36:13.388 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:36:13.389 [info] [command][d72c8380-8779-43bb-bed4-d0a818128af5] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""d72c8380-8779-43bb-bed4-d0a818128af5""}\n2025-07-20 16:36:13.390 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][c35d2b55-2167-48a4-a6e8-229f2fcb1599] received connection request\n2025-07-20 16:36:13.390 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:36:13.416 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][c35d2b55-2167-48a4-a6e8-229f2fcb1599] socks forwarding established\n2025-07-20 16:36:13.450 [info] [command][d72c8380-8779-43bb-bed4-d0a818128af5] Process exited with code 0\n2025-07-20 16:36:13.450 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][c35d2b55-2167-48a4-a6e8-229f2fcb1599] socks connection closed\n2025-07-20 16:36:13.450 [info] [command][d72c8380-8779-43bb-bed4-d0a818128af5] Socket close event received\n2025-07-20 16:36:13.481 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 55978 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:37:13.455 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:37:13.457 [info] [command][1843ed7a-7e36-49ac-9ff7-08e005b7ba26] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""1843ed7a-7e36-49ac-9ff7-08e005b7ba26""}\n2025-07-20 16:37:13.457 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][824b3e98-88b5-4b1a-b7a5-92fbed86c18c] received connection request\n2025-07-20 16:37:13.458 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:37:13.483 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][824b3e98-88b5-4b1a-b7a5-92fbed86c18c] socks forwarding established\n2025-07-20 16:37:13.512 [info] [command][1843ed7a-7e36-49ac-9ff7-08e005b7ba26] Process exited with code 0\n2025-07-20 16:37:13.513 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][824b3e98-88b5-4b1a-b7a5-92fbed86c18c] 
socks connection closed\n2025-07-20 16:37:13.513 [info] [command][1843ed7a-7e36-49ac-9ff7-08e005b7ba26] Socket close event received\n2025-07-20 16:37:13.538 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56038 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:38:13.516 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:38:13.517 [info] [command][243ed5e1-0926-46d9-a3ab-70da9509df3c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""243ed5e1-0926-46d9-a3ab-70da9509df3c""}\n2025-07-20 16:38:13.517 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][ded6dcac-1eca-4f66-bf2b-bb908a78477b] received connection request\n2025-07-20 16:38:13.517 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:38:13.541 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][ded6dcac-1eca-4f66-bf2b-bb908a78477b] socks forwarding established\n2025-07-20 16:38:13.569 [info] [command][243ed5e1-0926-46d9-a3ab-70da9509df3c] Process exited with code 0\n2025-07-20 16:38:13.569 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][ded6dcac-1eca-4f66-bf2b-bb908a78477b] socks connection closed\n2025-07-20 16:38:13.569 [info] [command][243ed5e1-0926-46d9-a3ab-70da9509df3c] Socket close event received\n2025-07-20 16:38:13.597 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56088 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:39:13.574 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:39:13.576 [info] [command][d7b541c2-092d-473c-8384-a62319eb7f0a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""d7b541c2-092d-473c-8384-a62319eb7f0a""}\n2025-07-20 16:39:13.576 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][2f446277-2caa-4f7b-94b3-9f72067f9677] received connection request\n2025-07-20 16:39:13.576 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\n\n2025-07-20 16:39:13.576 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:39:13.599 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][2f446277-2caa-4f7b-94b3-9f72067f9677] socks forwarding established\n2025-07-20 16:39:13.708 [info] [command][d7b541c2-092d-473c-8384-a62319eb7f0a] Process exited with code 0\n2025-07-20 16:39:13.708 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][2f446277-2caa-4f7b-94b3-9f72067f9677] socks connection closed\n2025-07-20 16:39:13.710 [info] [command][d7b541c2-092d-473c-8384-a62319eb7f0a] Socket close event received\n2025-07-20 16:39:13.742 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56129 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:40:13.713 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:40:13.716 [info] [command][149212aa-b87b-4077-ae9a-45054aee039f] Sending command 
request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""149212aa-b87b-4077-ae9a-45054aee039f""}\n2025-07-20 16:40:13.717 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][b351eff5-4811-4ffe-97eb-b5932902836e] received connection request\n2025-07-20 16:40:13.717 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:40:13.756 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][b351eff5-4811-4ffe-97eb-b5932902836e] socks forwarding established\n2025-07-20 16:40:13.785 [info] [command][149212aa-b87b-4077-ae9a-45054aee039f] Process exited with code 0\n2025-07-20 16:40:13.785 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][b351eff5-4811-4ffe-97eb-b5932902836e] socks connection closed\n2025-07-20 16:40:13.785 [info] [command][149212aa-b87b-4077-ae9a-45054aee039f] Socket close event received\n2025-07-20 16:40:13.809 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56177 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:41:13.790 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:41:13.793 [info] [command][a37da50f-9724-40ab-94b5-3de9ed98d0c5] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""a37da50f-9724-40ab-94b5-3de9ed98d0c5""}\n2025-07-20 16:41:13.793 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][1c16914c-f53b-4748-b862-54e82a6e7dc2] received connection request\n2025-07-20 16:41:13.794 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:41:13.819 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][1c16914c-f53b-4748-b862-54e82a6e7dc2] socks forwarding established\n2025-07-20 16:41:13.846 [info] [command][a37da50f-9724-40ab-94b5-3de9ed98d0c5] Process exited with code 0\n2025-07-20 16:41:13.846 [info] [command][a37da50f-9724-40ab-94b5-3de9ed98d0c5] Socket close event received\n2025-07-20 16:41:13.847 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][1c16914c-f53b-4748-b862-54e82a6e7dc2] socks connection closed\n2025-07-20 16:41:13.871 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56217 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:42:13.847 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:42:13.849 [info] [command][8c19b34e-5ec4-4d87-9aa8-4445396fc37b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""8c19b34e-5ec4-4d87-9aa8-4445396fc37b""}\n2025-07-20 16:42:13.850 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][f273dea4-acdd-4274-834d-bb3d33a22a93] received connection request\n2025-07-20 16:42:13.850 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:42:13.874 [info] 
[forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][f273dea4-acdd-4274-834d-bb3d33a22a93] socks forwarding established\n2025-07-20 16:42:13.900 [info] [command][8c19b34e-5ec4-4d87-9aa8-4445396fc37b] Process exited with code 0\n2025-07-20 16:42:13.901 [info] [command][8c19b34e-5ec4-4d87-9aa8-4445396fc37b] Socket close event received\n2025-07-20 16:42:13.901 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][f273dea4-acdd-4274-834d-bb3d33a22a93] socks connection closed\n2025-07-20 16:42:13.924 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56274 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:43:13.901 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:43:13.903 [info] [command][f03e7d64-260b-4e0e-9eb0-5472d35ab895] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""f03e7d64-260b-4e0e-9eb0-5472d35ab895""}\n2025-07-20 16:43:13.904 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][f68596a5-44ea-47e4-8554-140b2a85feb7] received connection request\n2025-07-20 16:43:13.904 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:43:13.929 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][f68596a5-44ea-47e4-8554-140b2a85feb7] socks forwarding established\n2025-07-20 16:43:13.959 [info] [command][f03e7d64-260b-4e0e-9eb0-5472d35ab895] Process exited with code 0\n2025-07-20 16:43:13.959 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][f68596a5-44ea-47e4-8554-140b2a85feb7] socks connection closed\n2025-07-20 16:43:13.959 [info] [command][f03e7d64-260b-4e0e-9eb0-5472d35ab895] Socket close event received\n2025-07-20 16:43:13.988 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56341 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:44:13.963 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:44:13.965 [info] [command][74804a89-2faf-4c7e-9f6b-e27345d8fa51] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""74804a89-2faf-4c7e-9f6b-e27345d8fa51""}\n2025-07-20 16:44:13.966 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][1f430d42-7755-4d6e-b80d-f4813192ac26] received connection request\n2025-07-20 16:44:13.967 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:44:13.993 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][1f430d42-7755-4d6e-b80d-f4813192ac26] socks forwarding established\n2025-07-20 16:44:14.020 [info] [command][74804a89-2faf-4c7e-9f6b-e27345d8fa51] Process exited with code 0\n2025-07-20 16:44:14.020 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][1f430d42-7755-4d6e-b80d-f4813192ac26] socks connection closed\n2025-07-20 16:44:14.021 [info] [command][74804a89-2faf-4c7e-9f6b-e27345d8fa51] Socket close event received\n2025-07-20 16:44:14.046 [info] (ssh_tunnel) stderr: 
debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56385 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:45:14.021 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:45:14.022 [info] [command][27b6c7ff-cf5b-43f4-8a0a-7c0265e0c3f0] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""27b6c7ff-cf5b-43f4-8a0a-7c0265e0c3f0""}\n2025-07-20 16:45:14.022 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][72c50ef5-f5d9-4b3d-8679-888ac73bdfca] received connection request\n2025-07-20 16:45:14.022 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:45:14.072 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][72c50ef5-f5d9-4b3d-8679-888ac73bdfca] socks forwarding established\n2025-07-20 16:45:14.128 [info] [command][27b6c7ff-cf5b-43f4-8a0a-7c0265e0c3f0] Process exited with code 0\n2025-07-20 16:45:14.128 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][72c50ef5-f5d9-4b3d-8679-888ac73bdfca] socks connection closed\n2025-07-20 16:45:14.129 [info] [command][27b6c7ff-cf5b-43f4-8a0a-7c0265e0c3f0] Socket close event received\n2025-07-20 16:45:14.152 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56418 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:46:14.130 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:46:14.130 [info] [command][c162b5d9-3dfa-4b51-a429-52d9cc1e73b9] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""c162b5d9-3dfa-4b51-a429-52d9cc1e73b9""}\n2025-07-20 16:46:14.130 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][3223758e-cc36-45c3-9f95-f1d617d18e59] received connection request\n2025-07-20 16:46:14.130 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\n\n2025-07-20 16:46:14.130 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:46:14.161 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][3223758e-cc36-45c3-9f95-f1d617d18e59] socks forwarding established\n2025-07-20 16:46:14.193 [info] [command][c162b5d9-3dfa-4b51-a429-52d9cc1e73b9] Process exited with code 0\n2025-07-20 16:46:14.193 [info] [command][c162b5d9-3dfa-4b51-a429-52d9cc1e73b9] Socket close event received\n2025-07-20 16:46:14.218 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][3223758e-cc36-45c3-9f95-f1d617d18e59] socks connection closed\n2025-07-20 16:46:14.224 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56469 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:47:14.195 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:47:14.196 [info] [command][2d04055a-7581-455b-b6d9-d8e12643ef56] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""2d04055a-7581-455b-b6d9-d8e12643ef56""}\n2025-07-20 16:47:14.196 
[info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][4149a505-9f95-4e24-b650-c4cc0ec5d434] received connection request\n2025-07-20 16:47:14.196 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\n\n2025-07-20 16:47:14.196 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:47:14.306 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][4149a505-9f95-4e24-b650-c4cc0ec5d434] socks forwarding established\n2025-07-20 16:47:14.335 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][4149a505-9f95-4e24-b650-c4cc0ec5d434] socks connection closed\n2025-07-20 16:47:14.335 [info] [command][2d04055a-7581-455b-b6d9-d8e12643ef56] Process exited with code 0\n2025-07-20 16:47:14.335 [info] [command][2d04055a-7581-455b-b6d9-d8e12643ef56] Socket close event received\n2025-07-20 16:47:14.360 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56528 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:48:14.336 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:48:14.337 [info] [command][19f8a753-c099-4c19-8e97-c0091c4ef731] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""19f8a753-c099-4c19-8e97-c0091c4ef731""}\n2025-07-20 16:48:14.337 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][75212b0a-79c9-4458-b87c-e6640993bb12] received connection request\n2025-07-20 16:48:14.337 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\n\n2025-07-20 16:48:14.337 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:48:14.364 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][75212b0a-79c9-4458-b87c-e6640993bb12] socks forwarding established\n2025-07-20 16:48:14.394 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][75212b0a-79c9-4458-b87c-e6640993bb12] socks connection closed\n2025-07-20 16:48:14.394 [info] [command][19f8a753-c099-4c19-8e97-c0091c4ef731] Process exited with code 0\n2025-07-20 16:48:14.394 [info] [command][19f8a753-c099-4c19-8e97-c0091c4ef731] Socket close event received\n2025-07-20 16:48:14.418 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56572 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:49:14.397 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:49:14.398 [info] [command][46c2e328-d70f-4280-990e-44a3bfdcc597] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""46c2e328-d70f-4280-990e-44a3bfdcc597""}\n2025-07-20 16:49:14.398 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][01c5d322-e69a-42c9-8583-75e6ede92e8f] received connection request\n2025-07-20 16:49:14.399 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:49:14.425 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 
127.0.0.1:40717][01c5d322-e69a-42c9-8583-75e6ede92e8f] socks forwarding established\n2025-07-20 16:49:14.455 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][01c5d322-e69a-42c9-8583-75e6ede92e8f] socks connection closed\n2025-07-20 16:49:14.456 [info] [command][46c2e328-d70f-4280-990e-44a3bfdcc597] Process exited with code 0\n2025-07-20 16:49:14.456 [info] [command][46c2e328-d70f-4280-990e-44a3bfdcc597] Socket close event received\n2025-07-20 16:49:14.556 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56613 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:50:14.456 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:50:14.457 [info] [command][4b210389-9d01-4628-a001-3e02e5d9bc2d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""4b210389-9d01-4628-a001-3e02e5d9bc2d""}\n2025-07-20 16:50:14.457 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][08914ed5-5356-418a-b550-a1ac29bf18ba] received connection request\n2025-07-20 16:50:14.457 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:50:14.482 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][08914ed5-5356-418a-b550-a1ac29bf18ba] socks forwarding established\n2025-07-20 16:50:14.521 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][08914ed5-5356-418a-b550-a1ac29bf18ba] socks connection closed\n2025-07-20 16:50:14.521 [info] [command][4b210389-9d01-4628-a001-3e02e5d9bc2d] Process exited with code 0\n2025-07-20 16:50:14.521 [info] [command][4b210389-9d01-4628-a001-3e02e5d9bc2d] Socket close event received\n2025-07-20 16:50:14.546 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56645 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:51:14.526 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:51:14.528 [info] [command][e1849c83-eacb-431a-b328-90e9ccc7851a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""e1849c83-eacb-431a-b328-90e9ccc7851a""}\n2025-07-20 16:51:14.528 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][1adb45b9-667f-4488-b727-b01ac95d9f57] received connection request\n2025-07-20 16:51:14.528 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:51:14.552 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][1adb45b9-667f-4488-b727-b01ac95d9f57] socks forwarding established\n2025-07-20 16:51:14.580 [info] [command][e1849c83-eacb-431a-b328-90e9ccc7851a] Process exited with code 0\n2025-07-20 16:51:14.580 [info] [command][e1849c83-eacb-431a-b328-90e9ccc7851a] Socket close event received\n2025-07-20 16:51:14.581 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][1adb45b9-667f-4488-b727-b01ac95d9f57] socks connection closed\n2025-07-20 16:51:14.608 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 
for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56701 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:52:14.580 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:52:14.581 [info] [command][4d1311a4-8854-48bc-9594-e2a0a6f1a147] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""4d1311a4-8854-48bc-9594-e2a0a6f1a147""}\n2025-07-20 16:52:14.581 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][cd5afa99-efd4-4ff3-bf4c-b9f70b6b7b20] received connection request\n2025-07-20 16:52:14.582 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:52:14.607 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][cd5afa99-efd4-4ff3-bf4c-b9f70b6b7b20] socks forwarding established\n2025-07-20 16:52:14.638 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][cd5afa99-efd4-4ff3-bf4c-b9f70b6b7b20] socks connection closed\n2025-07-20 16:52:14.638 [info] [command][4d1311a4-8854-48bc-9594-e2a0a6f1a147] Process exited with code 0\n2025-07-20 16:52:14.639 [info] [command][4d1311a4-8854-48bc-9594-e2a0a6f1a147] Socket close event received\n2025-07-20 16:52:14.688 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56775 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:53:14.639 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:53:14.641 [info] [command][43ce2b76-803d-448b-bc7e-7fa2827fb36c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""43ce2b76-803d-448b-bc7e-7fa2827fb36c""}\n2025-07-20 16:53:14.641 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][d9266f2f-f3f8-441e-b277-3c3a1f99c3a3] received connection request\n2025-07-20 16:53:14.641 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\n\n2025-07-20 16:53:14.641 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:53:14.666 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][d9266f2f-f3f8-441e-b277-3c3a1f99c3a3] socks forwarding established\n2025-07-20 16:53:14.710 [info] [command][43ce2b76-803d-448b-bc7e-7fa2827fb36c] Process exited with code 0\n2025-07-20 16:53:14.710 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][d9266f2f-f3f8-441e-b277-3c3a1f99c3a3] socks connection closed\n2025-07-20 16:53:14.710 [info] [command][43ce2b76-803d-448b-bc7e-7fa2827fb36c] Socket close event received\n2025-07-20 16:53:14.735 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56830 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:54:14.714 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:54:14.715 [info] [command][69fa2249-b69e-4f25-97fd-049a3ccd3d22] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""69fa2249-b69e-4f25-97fd-049a3ccd3d22""}\n2025-07-20 16:54:14.715 [info] [forwarding][multiplex][127.0.0.1:65286 -> 
127.0.0.1:40717][aea7dcc1-8861-44f4-8b0a-68a2f017c527] received connection request\n2025-07-20 16:54:14.716 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:54:14.749 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][aea7dcc1-8861-44f4-8b0a-68a2f017c527] socks forwarding established\n2025-07-20 16:54:14.778 [info] [command][69fa2249-b69e-4f25-97fd-049a3ccd3d22] Process exited with code 0\n2025-07-20 16:54:14.778 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][aea7dcc1-8861-44f4-8b0a-68a2f017c527] socks connection closed\n2025-07-20 16:54:14.778 [info] [command][69fa2249-b69e-4f25-97fd-049a3ccd3d22] Socket close event received\n2025-07-20 16:54:14.802 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56871 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:55:14.779 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:55:14.780 [info] [command][c6ae4da6-39b5-4778-9aeb-a954d432e2df] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""c6ae4da6-39b5-4778-9aeb-a954d432e2df""}\n2025-07-20 16:55:14.781 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][213895ab-61e3-4469-b833-d0a2e6b871e3] received connection request\n2025-07-20 16:55:14.781 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:55:14.806 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][213895ab-61e3-4469-b833-d0a2e6b871e3] socks forwarding established\n2025-07-20 16:55:14.833 [info] [command][c6ae4da6-39b5-4778-9aeb-a954d432e2df] Process exited with code 0\n2025-07-20 16:55:14.833 [info] [command][c6ae4da6-39b5-4778-9aeb-a954d432e2df] Socket close event received\n2025-07-20 16:55:14.834 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][213895ab-61e3-4469-b833-d0a2e6b871e3] socks connection closed\n2025-07-20 16:55:14.858 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56920 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:56:14.836 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:56:14.837 [info] [command][9f1c6a45-87dd-4da8-aec0-bda1d5188522] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""9f1c6a45-87dd-4da8-aec0-bda1d5188522""}\n2025-07-20 16:56:14.838 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][83d9db1a-2a45-410b-8ddc-7cf94e97631e] received connection request\n2025-07-20 16:56:14.838 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:56:14.957 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][83d9db1a-2a45-410b-8ddc-7cf94e97631e] socks forwarding established\n2025-07-20 16:56:14.990 [info] [command][9f1c6a45-87dd-4da8-aec0-bda1d5188522] Process exited with code 0\n2025-07-20 
16:56:14.990 [info] [command][9f1c6a45-87dd-4da8-aec0-bda1d5188522] Socket close event received\n2025-07-20 16:56:14.991 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][83d9db1a-2a45-410b-8ddc-7cf94e97631e] socks connection closed\n2025-07-20 16:56:15.020 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 56984 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:57:14.994 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:57:14.996 [info] [command][0165d3f2-b2c2-4e50-a087-7f5bdfd18a95] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""0165d3f2-b2c2-4e50-a087-7f5bdfd18a95""}\n2025-07-20 16:57:14.997 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][0b09a96e-a5f7-48bc-a567-d695b3f6d5c0] received connection request\n2025-07-20 16:57:14.998 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:57:15.022 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][0b09a96e-a5f7-48bc-a567-d695b3f6d5c0] socks forwarding established\n2025-07-20 16:57:15.049 [info] [command][0165d3f2-b2c2-4e50-a087-7f5bdfd18a95] Process exited with code 0\n2025-07-20 16:57:15.049 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][0b09a96e-a5f7-48bc-a567-d695b3f6d5c0] socks connection closed\n2025-07-20 16:57:15.049 [info] [command][0165d3f2-b2c2-4e50-a087-7f5bdfd18a95] Socket close event received\n2025-07-20 16:57:15.073 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 57053 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:58:15.055 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 16:58:15.056 [info] [command][87147407-9233-4d59-a4c6-ecb8bdcde081] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""87147407-9233-4d59-a4c6-ecb8bdcde081""}\n2025-07-20 16:58:15.057 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][17f63416-cb07-48f4-aa96-58977930e055] received connection request\n2025-07-20 16:58:15.057 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:58:15.090 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][17f63416-cb07-48f4-aa96-58977930e055] socks forwarding established\n2025-07-20 16:58:15.118 [info] [command][87147407-9233-4d59-a4c6-ecb8bdcde081] Process exited with code 0\n2025-07-20 16:58:15.119 [info] [command][87147407-9233-4d59-a4c6-ecb8bdcde081] Socket close event received\n2025-07-20 16:58:15.119 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][17f63416-cb07-48f4-aa96-58977930e055] socks connection closed\n2025-07-20 16:58:15.142 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 57110 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 16:59:15.124 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 
16:59:15.126 [info] [command][f1d0af75-d4ee-4170-8ce3-c58917de04d1] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""f1d0af75-d4ee-4170-8ce3-c58917de04d1""}\n2025-07-20 16:59:15.127 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][95dd3fcf-4ce6-4660-9bbf-b21b32302b59] received connection request\n2025-07-20 16:59:15.128 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 16:59:15.152 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][95dd3fcf-4ce6-4660-9bbf-b21b32302b59] socks forwarding established\n2025-07-20 16:59:15.182 [info] [command][f1d0af75-d4ee-4170-8ce3-c58917de04d1] Process exited with code 0\n2025-07-20 16:59:15.183 [info] [command][f1d0af75-d4ee-4170-8ce3-c58917de04d1] Socket close event received\n2025-07-20 16:59:15.183 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][95dd3fcf-4ce6-4660-9bbf-b21b32302b59] socks connection closed\n2025-07-20 16:59:15.207 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 57143 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:00:15.185 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:00:15.187 [info] [command][acfdbad1-2dca-4eeb-beb8-f78661a6987d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""acfdbad1-2dca-4eeb-beb8-f78661a6987d""}\n2025-07-20 17:00:15.188 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][7ff5f30c-22cf-4ef3-a5d4-40493ba468c9] received connection request\n2025-07-20 17:00:15.188 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:00:15.213 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][7ff5f30c-22cf-4ef3-a5d4-40493ba468c9] socks forwarding established\n2025-07-20 17:00:15.243 [info] [command][acfdbad1-2dca-4eeb-beb8-f78661a6987d] Process exited with code 0\n2025-07-20 17:00:15.243 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][7ff5f30c-22cf-4ef3-a5d4-40493ba468c9] socks connection closed\n2025-07-20 17:00:15.243 [info] [command][acfdbad1-2dca-4eeb-beb8-f78661a6987d] Socket close event received\n2025-07-20 17:00:15.268 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 57183 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:01:15.244 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:01:15.246 [info] [command][3c6eb969-447e-4456-9570-054b0431dcc2] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""3c6eb969-447e-4456-9570-054b0431dcc2""}\n2025-07-20 17:01:15.247 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][71831c45-9aae-4721-a69c-06089a89eb79] received connection request\n2025-07-20 17:01:15.248 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] 
(inactive timeout: 0)\n\n2025-07-20 17:01:15.344 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][71831c45-9aae-4721-a69c-06089a89eb79] socks forwarding established\n2025-07-20 17:01:15.374 [info] [command][3c6eb969-447e-4456-9570-054b0431dcc2] Process exited with code 0\n2025-07-20 17:01:15.375 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][71831c45-9aae-4721-a69c-06089a89eb79] socks connection closed\n2025-07-20 17:01:15.375 [info] [command][3c6eb969-447e-4456-9570-054b0431dcc2] Socket close event received\n2025-07-20 17:01:15.399 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 57225 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:02:15.378 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:02:15.381 [info] [command][30302e6d-8130-4c77-acfe-06552cf394aa] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""30302e6d-8130-4c77-acfe-06552cf394aa""}\n2025-07-20 17:02:15.382 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][4e5d1d07-1a8b-48b6-a1c9-dcc94b9f5b7e] received connection request\n2025-07-20 17:02:15.383 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:02:15.464 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][4e5d1d07-1a8b-48b6-a1c9-dcc94b9f5b7e] socks forwarding established\n2025-07-20 17:02:15.491 [info] [command][30302e6d-8130-4c77-acfe-06552cf394aa] Process exited with code 0\n2025-07-20 17:02:15.491 [info] [command][30302e6d-8130-4c77-acfe-06552cf394aa] Socket close event received\n2025-07-20 17:02:15.492 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][4e5d1d07-1a8b-48b6-a1c9-dcc94b9f5b7e] socks connection closed\n2025-07-20 17:02:15.516 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 57301 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:03:15.496 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:03:15.498 [info] [command][c6a553e0-9496-4720-8e6f-6d5c375dfc2a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""c6a553e0-9496-4720-8e6f-6d5c375dfc2a""}\n2025-07-20 17:03:15.500 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][f0ec5587-88ac-460d-83a0-54b78bbdbce3] received connection request\n2025-07-20 17:03:15.501 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:03:15.528 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][f0ec5587-88ac-460d-83a0-54b78bbdbce3] socks forwarding established\n2025-07-20 17:03:15.582 [info] [command][c6a553e0-9496-4720-8e6f-6d5c375dfc2a] Process exited with code 0\n2025-07-20 17:03:15.583 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][f0ec5587-88ac-460d-83a0-54b78bbdbce3] socks connection closed\n2025-07-20 17:03:15.583 [info] [command][c6a553e0-9496-4720-8e6f-6d5c375dfc2a] Socket close event 
received\n2025-07-20 17:03:15.608 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 57344 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:04:15.590 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:04:15.591 [info] [command][1a311bee-2e94-4514-9e46-98c01329166e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""1a311bee-2e94-4514-9e46-98c01329166e""}\n2025-07-20 17:04:15.592 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][f332d4d6-f0de-42cb-96f7-fb3330f0a7df] received connection request\n2025-07-20 17:04:15.593 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:04:15.621 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][f332d4d6-f0de-42cb-96f7-fb3330f0a7df] socks forwarding established\n2025-07-20 17:04:15.650 [info] [command][1a311bee-2e94-4514-9e46-98c01329166e] Process exited with code 0\n2025-07-20 17:04:15.650 [info] [command][1a311bee-2e94-4514-9e46-98c01329166e] Socket close event received\n2025-07-20 17:04:15.674 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 57375 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:04:15.674 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][f332d4d6-f0de-42cb-96f7-fb3330f0a7df] socks connection closed\n2025-07-20 17:05:15.658 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:05:15.659 [info] [command][30576d83-65ec-449b-8905-b254364fbcc5] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""30576d83-65ec-449b-8905-b254364fbcc5""}\n2025-07-20 17:05:15.660 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][b2b5c541-c028-4c31-a5de-f59a9402ccb8] received connection request\n2025-07-20 17:05:15.661 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:05:15.686 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][b2b5c541-c028-4c31-a5de-f59a9402ccb8] socks forwarding established\n2025-07-20 17:05:15.715 [info] [command][30576d83-65ec-449b-8905-b254364fbcc5] Process exited with code 0\n2025-07-20 17:05:15.715 [info] [command][30576d83-65ec-449b-8905-b254364fbcc5] Socket close event received\n2025-07-20 17:05:15.737 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][b2b5c541-c028-4c31-a5de-f59a9402ccb8] socks connection closed\n2025-07-20 17:05:15.739 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 57406 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:06:15.717 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:06:15.719 [info] [command][1b691979-fed6-4c42-bd15-1300d907505a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""1b691979-fed6-4c42-bd15-1300d907505a""}\n2025-07-20 
17:06:15.720 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][df653828-5327-4765-bc9b-441e81650020] received connection request\n2025-07-20 17:06:15.720 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:06:15.788 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][df653828-5327-4765-bc9b-441e81650020] socks forwarding established\n2025-07-20 17:06:15.818 [info] [command][1b691979-fed6-4c42-bd15-1300d907505a] Process exited with code 0\n2025-07-20 17:06:15.819 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][df653828-5327-4765-bc9b-441e81650020] socks connection closed\n2025-07-20 17:06:15.819 [info] [command][1b691979-fed6-4c42-bd15-1300d907505a] Socket close event received\n2025-07-20 17:06:15.846 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 57459 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:07:15.822 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:07:15.824 [info] [command][731766eb-8a42-497a-ac9a-2c5f5bbbf6a1] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""731766eb-8a42-497a-ac9a-2c5f5bbbf6a1""}\n2025-07-20 17:07:15.825 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][6208e557-d47d-417c-8c0c-1fde2b4148c6] received connection request\n2025-07-20 17:07:15.826 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:07:15.853 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][6208e557-d47d-417c-8c0c-1fde2b4148c6] socks forwarding established\n2025-07-20 17:07:15.879 [info] [command][731766eb-8a42-497a-ac9a-2c5f5bbbf6a1] Process exited with code 0\n2025-07-20 17:07:15.880 [info] [command][731766eb-8a42-497a-ac9a-2c5f5bbbf6a1] Socket close event received\n2025-07-20 17:07:15.880 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][6208e557-d47d-417c-8c0c-1fde2b4148c6] socks connection closed\n2025-07-20 17:07:15.904 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 57518 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:08:15.883 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:08:15.883 [info] [command][42eaf288-c684-49a1-9223-ffce356018df] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""42eaf288-c684-49a1-9223-ffce356018df""}\n2025-07-20 17:08:15.883 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][960a4689-d6fe-497a-b3ef-4812b21c92d5] received connection request\n2025-07-20 17:08:15.884 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:08:15.908 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][960a4689-d6fe-497a-b3ef-4812b21c92d5] socks forwarding established\n2025-07-20 17:08:15.934 [info] 
[command][42eaf288-c684-49a1-9223-ffce356018df] Process exited with code 0\n2025-07-20 17:08:15.934 [info] [command][42eaf288-c684-49a1-9223-ffce356018df] Socket close event received\n2025-07-20 17:08:15.936 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][960a4689-d6fe-497a-b3ef-4812b21c92d5] socks connection closed\n2025-07-20 17:08:15.959 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 57559 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:09:15.935 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:09:15.936 [info] [command][ca1b0b1e-96f6-4e25-9ebc-8a78f36b57c1] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""ca1b0b1e-96f6-4e25-9ebc-8a78f36b57c1""}\n2025-07-20 17:09:15.936 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][8f3d9624-13b6-4335-84ae-5f7f06d6c30b] received connection request\n2025-07-20 17:09:15.936 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:09:15.965 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][8f3d9624-13b6-4335-84ae-5f7f06d6c30b] socks forwarding established\n2025-07-20 17:09:15.995 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][8f3d9624-13b6-4335-84ae-5f7f06d6c30b] socks connection closed\n2025-07-20 17:09:15.995 [info] [command][ca1b0b1e-96f6-4e25-9ebc-8a78f36b57c1] Process exited with code 0\n2025-07-20 17:09:15.995 [info] [command][ca1b0b1e-96f6-4e25-9ebc-8a78f36b57c1] Socket close event received\n2025-07-20 17:09:16.019 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 57627 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:10:16.000 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:10:16.001 [info] [command][95fe3571-c769-4a07-90af-ca6893cafc52] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""95fe3571-c769-4a07-90af-ca6893cafc52""}\n2025-07-20 17:10:16.002 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][9d464fe5-e677-4223-9f1e-1a99814d0e85] received connection request\n2025-07-20 17:10:16.003 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:10:16.029 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][9d464fe5-e677-4223-9f1e-1a99814d0e85] socks forwarding established\n2025-07-20 17:10:16.058 [info] [command][95fe3571-c769-4a07-90af-ca6893cafc52] Process exited with code 0\n2025-07-20 17:10:16.058 [info] [command][95fe3571-c769-4a07-90af-ca6893cafc52] Socket close event received\n2025-07-20 17:10:16.059 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][9d464fe5-e677-4223-9f1e-1a99814d0e85] socks connection closed\n2025-07-20 17:10:16.084 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 57665 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 
17:11:16.063 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:11:16.066 [info] [command][f65a291b-d334-4be1-8180-5293aa2aa0c4] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""f65a291b-d334-4be1-8180-5293aa2aa0c4""}\n2025-07-20 17:11:16.066 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][6755ff2a-f6d2-4d09-b7fc-c965f553fcdc] received connection request\n2025-07-20 17:11:16.067 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:11:16.092 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][6755ff2a-f6d2-4d09-b7fc-c965f553fcdc] socks forwarding established\n2025-07-20 17:11:16.123 [info] [command][f65a291b-d334-4be1-8180-5293aa2aa0c4] Process exited with code 0\n2025-07-20 17:11:16.124 [info] [command][f65a291b-d334-4be1-8180-5293aa2aa0c4] Socket close event received\n2025-07-20 17:11:16.124 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][6755ff2a-f6d2-4d09-b7fc-c965f553fcdc] socks connection closed\n2025-07-20 17:11:16.151 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 57739 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:12:16.128 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:12:16.129 [info] [command][c3e9affb-57e8-4a43-97b8-d44ca4edbec5] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""c3e9affb-57e8-4a43-97b8-d44ca4edbec5""}\n2025-07-20 17:12:16.129 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][7a04b222-4a3d-4200-b694-d54bb201e3a2] received connection request\n2025-07-20 17:12:16.129 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\n\n2025-07-20 17:12:16.129 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:12:16.159 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][7a04b222-4a3d-4200-b694-d54bb201e3a2] socks forwarding established\n2025-07-20 17:12:16.191 [info] [command][c3e9affb-57e8-4a43-97b8-d44ca4edbec5] Process exited with code 0\n2025-07-20 17:12:16.191 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][7a04b222-4a3d-4200-b694-d54bb201e3a2] socks connection closed\n2025-07-20 17:12:16.191 [info] [command][c3e9affb-57e8-4a43-97b8-d44ca4edbec5] Socket close event received\n2025-07-20 17:12:16.240 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 57815 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:13:16.196 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:13:16.199 [info] [command][e30e42e1-e34b-4cd9-88ea-f370fc9b1195] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""e30e42e1-e34b-4cd9-88ea-f370fc9b1195""}\n2025-07-20 17:13:16.199 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][dade329a-8006-4619-a65a-78a24dc50bb6] received connection request\n2025-07-20 17:13:16.200 [info] 
(ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:13:16.298 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][dade329a-8006-4619-a65a-78a24dc50bb6] socks forwarding established\n2025-07-20 17:13:16.352 [info] [command][e30e42e1-e34b-4cd9-88ea-f370fc9b1195] Process exited with code 0\n2025-07-20 17:13:16.352 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][dade329a-8006-4619-a65a-78a24dc50bb6] socks connection closed\n2025-07-20 17:13:16.352 [info] [command][e30e42e1-e34b-4cd9-88ea-f370fc9b1195] Socket close event received\n2025-07-20 17:13:16.377 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 57895 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:14:16.354 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:14:16.356 [info] [command][7ea8aee6-550b-43de-8f14-f8d2c76e0331] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""7ea8aee6-550b-43de-8f14-f8d2c76e0331""}\n2025-07-20 17:14:16.356 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][5a391cca-7469-4757-a0af-43c523dc5240] received connection request\n2025-07-20 17:14:16.356 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:14:16.381 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][5a391cca-7469-4757-a0af-43c523dc5240] socks forwarding established\n2025-07-20 17:14:16.408 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][5a391cca-7469-4757-a0af-43c523dc5240] socks connection closed\n2025-07-20 17:14:16.408 [info] [command][7ea8aee6-550b-43de-8f14-f8d2c76e0331] Process exited with code 0\n2025-07-20 17:14:16.409 [info] [command][7ea8aee6-550b-43de-8f14-f8d2c76e0331] Socket close event received\n2025-07-20 17:14:16.433 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 57968 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:15:16.412 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:15:16.415 [info] [command][496dd24b-355e-4785-aca1-a2a3a75f1e4d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""496dd24b-355e-4785-aca1-a2a3a75f1e4d""}\n2025-07-20 17:15:16.415 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][dee6186d-5b99-4785-a4d0-7df65723beac] received connection request\n2025-07-20 17:15:16.416 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:15:16.441 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][dee6186d-5b99-4785-a4d0-7df65723beac] socks forwarding established\n2025-07-20 17:15:16.468 [info] [command][496dd24b-355e-4785-aca1-a2a3a75f1e4d] Process exited with code 0\n2025-07-20 17:15:16.469 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 
127.0.0.1:40717][dee6186d-5b99-4785-a4d0-7df65723beac] socks connection closed\n2025-07-20 17:15:16.469 [info] [command][496dd24b-355e-4785-aca1-a2a3a75f1e4d] Socket close event received\n2025-07-20 17:15:16.492 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 58013 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:16:16.470 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:16:16.471 [info] [command][a0978791-2454-46ca-9dfb-9eba79fc8a78] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""a0978791-2454-46ca-9dfb-9eba79fc8a78""}\n2025-07-20 17:16:16.472 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][bd099cb4-bdec-4457-93a7-42ced402a849] received connection request\n2025-07-20 17:16:16.472 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:16:16.496 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][bd099cb4-bdec-4457-93a7-42ced402a849] socks forwarding established\n2025-07-20 17:16:16.526 [info] [command][a0978791-2454-46ca-9dfb-9eba79fc8a78] Process exited with code 0\n2025-07-20 17:16:16.527 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][bd099cb4-bdec-4457-93a7-42ced402a849] socks connection closed\n2025-07-20 17:16:16.527 [info] [command][a0978791-2454-46ca-9dfb-9eba79fc8a78] Socket close event received\n2025-07-20 17:16:16.648 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 58067 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:17:16.528 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:17:16.529 [info] [command][f7e7775c-c77c-4b4c-be17-314be8f1e2ba] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""f7e7775c-c77c-4b4c-be17-314be8f1e2ba""}\n2025-07-20 17:17:16.530 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][5c33650a-0f02-4f53-9a51-0c0151136fc4] received connection request\n2025-07-20 17:17:16.530 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:17:16.556 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][5c33650a-0f02-4f53-9a51-0c0151136fc4] socks forwarding established\n2025-07-20 17:17:16.586 [info] [command][f7e7775c-c77c-4b4c-be17-314be8f1e2ba] Process exited with code 0\n2025-07-20 17:17:16.587 [info] [command][f7e7775c-c77c-4b4c-be17-314be8f1e2ba] Socket close event received\n2025-07-20 17:17:16.606 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][5c33650a-0f02-4f53-9a51-0c0151136fc4] socks connection closed\n2025-07-20 17:17:16.611 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 58153 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:18:16.592 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:18:16.594 [info] [command][f97c9948-ebce-4ca8-9d17-cdbed7b6a6b2] Sending command 
request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""f97c9948-ebce-4ca8-9d17-cdbed7b6a6b2""}\n2025-07-20 17:18:16.595 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][bca70358-8024-413a-be40-4e56644e33f7] received connection request\n2025-07-20 17:18:16.595 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:18:17.163 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][bca70358-8024-413a-be40-4e56644e33f7] socks forwarding established\n2025-07-20 17:18:17.202 [info] [command][f97c9948-ebce-4ca8-9d17-cdbed7b6a6b2] Process exited with code 0\n2025-07-20 17:18:17.203 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][bca70358-8024-413a-be40-4e56644e33f7] socks connection closed\n2025-07-20 17:18:17.203 [info] [command][f97c9948-ebce-4ca8-9d17-cdbed7b6a6b2] Socket close event received\n2025-07-20 17:18:17.270 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 58195 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:19:17.207 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:19:17.210 [info] [command][1b1fcca1-9f16-40ba-8b92-02aa23de4bd7] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""1b1fcca1-9f16-40ba-8b92-02aa23de4bd7""}\n2025-07-20 17:19:17.211 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][23f1ba83-f4de-4c86-be90-fd285105ade9] received connection request\n2025-07-20 17:19:17.211 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:19:17.316 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][23f1ba83-f4de-4c86-be90-fd285105ade9] socks forwarding established\n2025-07-20 17:19:17.346 [info] [command][1b1fcca1-9f16-40ba-8b92-02aa23de4bd7] Process exited with code 0\n2025-07-20 17:19:17.347 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][23f1ba83-f4de-4c86-be90-fd285105ade9] socks connection closed\n2025-07-20 17:19:17.347 [info] [command][1b1fcca1-9f16-40ba-8b92-02aa23de4bd7] Socket close event received\n2025-07-20 17:19:17.375 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 58231 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:20:17.349 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:20:17.353 [info] [command][cca43128-5427-4c83-8654-8254b49a7fe3] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""cca43128-5427-4c83-8654-8254b49a7fe3""}\n2025-07-20 17:20:17.354 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][6a4430c4-1968-4847-a68a-b11278e2b177] received connection request\n2025-07-20 17:20:17.354 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:20:17.381 [info] 
[forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][6a4430c4-1968-4847-a68a-b11278e2b177] socks forwarding established\n2025-07-20 17:20:17.409 [info] [command][cca43128-5427-4c83-8654-8254b49a7fe3] Process exited with code 0\n2025-07-20 17:20:17.410 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][6a4430c4-1968-4847-a68a-b11278e2b177] socks connection closed\n2025-07-20 17:20:17.410 [info] [command][cca43128-5427-4c83-8654-8254b49a7fe3] Socket close event received\n2025-07-20 17:20:17.435 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 58278 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:21:17.414 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:21:17.416 [info] [command][ca299cd2-6a25-4343-b35d-113ee4c8b400] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""ca299cd2-6a25-4343-b35d-113ee4c8b400""}\n2025-07-20 17:21:17.416 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][ea79e34e-85ce-4d4d-ae9e-8c983858fda9] received connection request\n2025-07-20 17:21:17.417 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:21:17.447 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][ea79e34e-85ce-4d4d-ae9e-8c983858fda9] socks forwarding established\n2025-07-20 17:21:17.479 [info] [command][ca299cd2-6a25-4343-b35d-113ee4c8b400] Process exited with code 0\n2025-07-20 17:21:17.479 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][ea79e34e-85ce-4d4d-ae9e-8c983858fda9] socks connection closed\n2025-07-20 17:21:17.479 [info] [command][ca299cd2-6a25-4343-b35d-113ee4c8b400] Socket close event received\n2025-07-20 17:21:17.506 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 58333 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:22:17.484 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:22:17.486 [info] [command][8bccf506-6bf9-4a49-a840-87493d9c1fbf] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""8bccf506-6bf9-4a49-a840-87493d9c1fbf""}\n2025-07-20 17:22:17.487 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][7d64adee-47f6-4107-a253-35f56ec98fe3] received connection request\n2025-07-20 17:22:17.487 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:22:17.512 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][7d64adee-47f6-4107-a253-35f56ec98fe3] socks forwarding established\n2025-07-20 17:22:17.543 [info] [command][8bccf506-6bf9-4a49-a840-87493d9c1fbf] Process exited with code 0\n2025-07-20 17:22:17.544 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][7d64adee-47f6-4107-a253-35f56ec98fe3] socks connection closed\n2025-07-20 17:22:17.544 [info] [command][8bccf506-6bf9-4a49-a840-87493d9c1fbf] Socket close event received\n2025-07-20 17:22:17.570 [info] (ssh_tunnel) stderr: 
debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 58392 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:23:17.547 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:23:17.549 [info] [command][f69d6be8-852f-49fb-9be6-3cf4ae9920bd] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""f69d6be8-852f-49fb-9be6-3cf4ae9920bd""}\n2025-07-20 17:23:17.550 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][08c86933-2a00-4623-8fc5-ecebeb50bd2a] received connection request\n2025-07-20 17:23:17.551 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:23:17.579 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][08c86933-2a00-4623-8fc5-ecebeb50bd2a] socks forwarding established\n2025-07-20 17:23:17.610 [info] [command][f69d6be8-852f-49fb-9be6-3cf4ae9920bd] Process exited with code 0\n2025-07-20 17:23:17.610 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][08c86933-2a00-4623-8fc5-ecebeb50bd2a] socks connection closed\n2025-07-20 17:23:17.611 [info] [command][f69d6be8-852f-49fb-9be6-3cf4ae9920bd] Socket close event received\n2025-07-20 17:23:17.635 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 58428 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:24:17.615 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:24:17.617 [info] [command][dd97008f-51f6-485f-9a7b-b48664f1ea6c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""dd97008f-51f6-485f-9a7b-b48664f1ea6c""}\n2025-07-20 17:24:17.618 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][0d69ebcd-197a-489c-81c0-472ff4f08be5] received connection request\n2025-07-20 17:24:17.618 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:24:17.642 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][0d69ebcd-197a-489c-81c0-472ff4f08be5] socks forwarding established\n2025-07-20 17:24:17.669 [info] [command][dd97008f-51f6-485f-9a7b-b48664f1ea6c] Process exited with code 0\n2025-07-20 17:24:17.669 [info] [command][dd97008f-51f6-485f-9a7b-b48664f1ea6c] Socket close event received\n2025-07-20 17:24:17.670 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][0d69ebcd-197a-489c-81c0-472ff4f08be5] socks connection closed\n2025-07-20 17:24:17.699 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 58463 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:25:17.674 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:25:17.676 [info] [command][7e303d22-1aea-486c-b2ff-277f94011a54] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""7e303d22-1aea-486c-b2ff-277f94011a54""}\n2025-07-20 17:25:17.677 [info] [forwarding][multiplex][127.0.0.1:65286 -> 
127.0.0.1:40717][60d70878-e4bb-46e5-9848-de9225c01a14] received connection request\n2025-07-20 17:25:17.677 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:25:17.706 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][60d70878-e4bb-46e5-9848-de9225c01a14] socks forwarding established\n2025-07-20 17:25:17.737 [info] [command][7e303d22-1aea-486c-b2ff-277f94011a54] Process exited with code 0\n2025-07-20 17:25:17.737 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][60d70878-e4bb-46e5-9848-de9225c01a14] socks connection closed\n2025-07-20 17:25:17.737 [info] [command][7e303d22-1aea-486c-b2ff-277f94011a54] Socket close event received\n2025-07-20 17:25:17.761 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 58491 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:26:17.742 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:26:17.744 [info] [command][d834942b-1fcd-4c2a-9075-bf2e17ca1ea6] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""d834942b-1fcd-4c2a-9075-bf2e17ca1ea6""}\n2025-07-20 17:26:17.745 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][de0b34c1-ef2c-4841-b7c7-b183daa48c8d] received connection request\n2025-07-20 17:26:17.746 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:26:17.774 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][de0b34c1-ef2c-4841-b7c7-b183daa48c8d] socks forwarding established\n2025-07-20 17:26:17.802 [info] [command][d834942b-1fcd-4c2a-9075-bf2e17ca1ea6] Process exited with code 0\n2025-07-20 17:26:17.802 [info] [command][d834942b-1fcd-4c2a-9075-bf2e17ca1ea6] Socket close event received\n2025-07-20 17:26:17.803 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][de0b34c1-ef2c-4841-b7c7-b183daa48c8d] socks connection closed\n2025-07-20 17:26:17.826 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 58541 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:27:17.808 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:27:17.809 [info] [command][92ae035a-7caf-4cbc-8a32-6dfa26a501ed] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""92ae035a-7caf-4cbc-8a32-6dfa26a501ed""}\n2025-07-20 17:27:17.810 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][94eadb1d-6c8a-47b0-bc8c-f6172fd67105] received connection request\n2025-07-20 17:27:17.810 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:27:17.834 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][94eadb1d-6c8a-47b0-bc8c-f6172fd67105] socks forwarding established\n2025-07-20 17:27:17.864 [info] [command][92ae035a-7caf-4cbc-8a32-6dfa26a501ed] Process exited with code 0\n2025-07-20 
17:27:17.865 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][94eadb1d-6c8a-47b0-bc8c-f6172fd67105] socks connection closed\n2025-07-20 17:27:17.865 [info] [command][92ae035a-7caf-4cbc-8a32-6dfa26a501ed] Socket close event received\n2025-07-20 17:27:17.888 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 58605 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:28:17.868 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:28:17.869 [info] [command][59f085ba-7016-4e18-9ea0-8647e9cf4684] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""59f085ba-7016-4e18-9ea0-8647e9cf4684""}\n2025-07-20 17:28:17.870 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][918baf50-0aeb-456b-b43c-d636ef264b1a] received connection request\n2025-07-20 17:28:17.870 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:28:17.897 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][918baf50-0aeb-456b-b43c-d636ef264b1a] socks forwarding established\n2025-07-20 17:28:17.923 [info] [command][59f085ba-7016-4e18-9ea0-8647e9cf4684] Process exited with code 0\n2025-07-20 17:28:17.923 [info] [command][59f085ba-7016-4e18-9ea0-8647e9cf4684] Socket close event received\n2025-07-20 17:28:17.924 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][918baf50-0aeb-456b-b43c-d636ef264b1a] socks connection closed\n2025-07-20 17:28:17.947 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 58635 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:29:17.929 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:29:17.931 [info] [command][e7f59cd5-2ce0-4d24-b8e0-18bb468e7ed8] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""e7f59cd5-2ce0-4d24-b8e0-18bb468e7ed8""}\n2025-07-20 17:29:17.932 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][419cc8ad-2dff-419b-88e2-4cdb017bb043] received connection request\n2025-07-20 17:29:17.932 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:29:17.960 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][419cc8ad-2dff-419b-88e2-4cdb017bb043] socks forwarding established\n2025-07-20 17:29:17.990 [info] [command][e7f59cd5-2ce0-4d24-b8e0-18bb468e7ed8] Process exited with code 0\n2025-07-20 17:29:17.990 [info] [command][e7f59cd5-2ce0-4d24-b8e0-18bb468e7ed8] Socket close event received\n2025-07-20 17:29:17.991 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][419cc8ad-2dff-419b-88e2-4cdb017bb043] socks connection closed\n2025-07-20 17:29:18.015 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 58667 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:30:17.991 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 
17:30:17.994 [info] [command][a3a444f5-da82-40a1-8925-69e882225ba2] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""a3a444f5-da82-40a1-8925-69e882225ba2""}\n2025-07-20 17:30:17.994 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][5582c371-0f76-4d53-9f94-3823bc262b74] received connection request\n2025-07-20 17:30:17.995 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:30:18.020 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][5582c371-0f76-4d53-9f94-3823bc262b74] socks forwarding established\n2025-07-20 17:30:18.050 [info] [command][a3a444f5-da82-40a1-8925-69e882225ba2] Process exited with code 0\n2025-07-20 17:30:18.050 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][5582c371-0f76-4d53-9f94-3823bc262b74] socks connection closed\n2025-07-20 17:30:18.051 [info] [command][a3a444f5-da82-40a1-8925-69e882225ba2] Socket close event received\n2025-07-20 17:30:18.079 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 58711 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:31:18.051 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:31:18.053 [info] [command][f31ca900-8b4e-4897-ba34-d10c558ed1e6] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""f31ca900-8b4e-4897-ba34-d10c558ed1e6""}\n2025-07-20 17:31:18.054 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][31d2090d-7548-449b-b579-c93fd6f23d7f] received connection request\n2025-07-20 17:31:18.054 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:31:18.079 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][31d2090d-7548-449b-b579-c93fd6f23d7f] socks forwarding established\n2025-07-20 17:31:18.110 [info] [command][f31ca900-8b4e-4897-ba34-d10c558ed1e6] Process exited with code 0\n2025-07-20 17:31:18.110 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][31d2090d-7548-449b-b579-c93fd6f23d7f] socks connection closed\n2025-07-20 17:31:18.110 [info] [command][f31ca900-8b4e-4897-ba34-d10c558ed1e6] Socket close event received\n2025-07-20 17:31:18.136 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 58756 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:32:18.110 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:32:18.112 [info] [command][f18a64ed-d873-4053-985d-e5941cc1ede1] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""f18a64ed-d873-4053-985d-e5941cc1ede1""}\n2025-07-20 17:32:18.113 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:40717][04a0c192-9ae8-472e-9362-58278b069449] received connection request\n2025-07-20 17:32:18.113 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] 
(inactive timeout: 0)\n\n2025-07-20 17:32:18.224 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][04a0c192-9ae8-472e-9362-58278b069449] socks forwarding established\n2025-07-20 17:32:18.262 [info] [command][f18a64ed-d873-4053-985d-e5941cc1ede1] Process exited with code 0\n2025-07-20 17:32:18.262 [info] [command][f18a64ed-d873-4053-985d-e5941cc1ede1] Socket close event received\n2025-07-20 17:32:18.274 [info] [forwarding][multiplex][127.0.0.1:65286 -> 127.0.0.1:65277 -> 127.0.0.1:40717][04a0c192-9ae8-472e-9362-58278b069449] socks connection closed\n2025-07-20 17:32:18.290 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 65277 for 127.0.0.1 port 40717, connect from 127.0.0.1 port 58822 to 127.0.0.1 port 65277, nchannels 6\n\n2025-07-20 17:33:13.375 [info] [forwarding][code][127.0.0.1:65285 -> 127.0.0.1:40083][fd18a6a9-edcc-4367-87b6-197879e60bad] received connection request\n2025-07-20 17:33:13.377 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:33:13.384 [info] Resolving ssh remote authority 'login.haicore.berlin' (Unparsed 'ssh-remote+7b22686f73744e616d65223a226c6f67696e2e686169636f72652e6265726c696e227d') (attempt #2)\n2025-07-20 17:33:13.384 [info] Received re-connection request; checking to see if existing connection is still valid\n2025-07-20 17:33:13.390 [info] [forwarding][code][127.0.0.1:65285 -> 127.0.0.1:40083][7199485d-5ef2-44fc-b661-b1b1aaa020d1] received connection request\n2025-07-20 17:33:13.395 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 6: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:33:16.397 [error] Unexpected error while checking if existing connection is still valid Timeout while checking if existing connection is still valid\n2025-07-20 17:33:16.397 [error] Failed to connect to Cursor server at http://127.0.0.1:65285, attempt 1 of 3 This operation was aborted\n2025-07-20 17:33:16.398 [info] [forwarding][code][127.0.0.1:65285 -> 127.0.0.1:40083][ebb347e4-30e9-4a55-b82f-c988775e97a8] received connection request\n2025-07-20 17:33:16.399 [info] (ssh_tunnel) stderr: debug1: Connection to port 65277 forwarding to socks port 0 requested.\ndebug1: channel 7: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-20 17:33:16.472 [info] Terminating existing SSH process with pid: 63787\n2025-07-20 17:33:16.472 [info] Using configured platform linux for remote host login.haicore.berlin\n2025-07-20 17:33:16.473 [info] (ssh_tunnel): exit: code=null signal=SIGKILL\n2025-07-20 17:33:16.473 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. 
Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-U49qJn/socket.sock\n2025-07-20 17:33:16.473 [error] [forwarding][code][127.0.0.1:65285 -> 127.0.0.1:65277 -> 127.0.0.1:40083][fd18a6a9-edcc-4367-87b6-197879e60bad] error while creating socks forwarding Socket closed\n2025-07-20 17:33:16.473 [error] [forwarding][code][127.0.0.1:65285 -> 127.0.0.1:65277 -> 127.0.0.1:40083][7199485d-5ef2-44fc-b661-b1b1aaa020d1] error while creating socks forwarding Socket closed\n2025-07-20 17:33:16.474 [error] [forwarding][code][127.0.0.1:65285 -> 127.0.0.1:65277 -> 127.0.0.1:40083][ebb347e4-30e9-4a55-b82f-c988775e97a8] error while creating socks forwarding Socket closed\n2025-07-20 17:33:16.474 [info] [forwarding][code][127.0.0.1:65285 -> 127.0.0.1:65277 -> 127.0.0.1:40083][bc77bcc5-6f69-47e3-be85-b11666e2e9a2] socks connection closed\n2025-07-20 17:33:16.474 [info] [forwarding][code][127.0.0.1:65285 -> 127.0.0.1:65277 -> 127.0.0.1:40083][5523ab7e-449d-41e2-bec3-6c71c0076cdf] socks connection closed\n2025-07-20 17:33:16.478 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_11114.sh"" | ssh -v -T -D 58866 login.haicore.berlin bash --login -c bash\n2025-07-20 17:33:16.478 [info] Started installation script. Waiting for it to finish...\n2025-07-20 17:33:16.478 [info] Waiting for server to install via process(68833)...\n2025-07-20 17:33:16.492 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-20 17:33:16.492 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-20 17:33:16.493 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-20 17:33:16.493 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-20 17:33:16.493 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-20 17:33:16.496 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-20 17:33:16.496 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-20 17:33:16.496 [error] Error installing server: Failed to install the Cursor Server. 
Please check the logs for more details.\n2025-07-20 17:33:16.496 [info] Retrying connection in 5 seconds...\n2025-07-20 17:33:17.407 [error] Failed to connect to Cursor server at http://127.0.0.1:65285, attempt 2 of 3 This operation was aborted\n2025-07-20 17:33:18.267 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:65286\n2025-07-20 17:33:18.268 [info] [command][dc36b6a7-835d-477f-ace2-8f15def3e7f9] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b95d4e43-7bf1-49d3-8caa-4997bd952c30"",""id"":""dc36b6a7-835d-477f-ace2-8f15def3e7f9""}\n2025-07-20 17:33:18.268 [error] [forwarding][multiplex][127.0.0.1:65286 -> unknown}][8aa098a2-fb53-441d-9d1a-1dcdc5cf85da] remote server not configured\n2025-07-20 17:33:18.269 [error] [command][dc36b6a7-835d-477f-ace2-8f15def3e7f9] Socket error: Error: read ECONNRESET\n2025-07-20 17:33:18.269 [info] [command][dc36b6a7-835d-477f-ace2-8f15def3e7f9] Socket close event received\n2025-07-20 17:33:18.418 [error] Failed to connect to Cursor server at http://127.0.0.1:65285, attempt 3 of 3 This operation was aborted\n2025-07-20 17:33:18.418 [error] Could not re-use existing SOCKS connection; attempting to re-establish SOCKS forwarding Failed to connect to Cursor code server. Ensure that your remote host ssh config has 'AllowTcpForwarding yes' in '/etc/ssh/sshd_config'. Please check the logs and try reinstalling the server.\n2025-07-20 17:33:18.418 [error] Could not re-establish SOCKS forwarding; re-establishing entire SSH connection Remote server is not set\n2025-07-20 17:34:47.947 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_11114.sh\n2025-07-20 17:34:47.966 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-U49qJn/socket.sock\n2025-07-20 17:34:47.981 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_18597.sh"" | ssh -v -T -D 58869 login.haicore.berlin bash --login -c bash\n2025-07-20 17:34:47.982 [info] Started installation script. 
Waiting for it to finish...\n2025-07-20 17:34:47.982 [info] Waiting for server to install via process(68844)...\n2025-07-20 17:34:47.991 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-20 17:34:47.991 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-20 17:34:47.991 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-20 17:34:47.991 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-20 17:34:47.991 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-20 17:34:47.993 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-20 17:34:47.994 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-20 17:34:47.994 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-20 17:34:47.994 [info] Retrying connection in 5 seconds...\n2025-07-20 17:34:53.002 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_18597.sh\n2025-07-20 17:34:53.002 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-U49qJn/socket.sock\n2025-07-20 17:34:53.005 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_28706.sh"" | ssh -v -T -D 58872 login.haicore.berlin bash --login -c bash\n2025-07-20 17:34:53.005 [info] Started installation script. Waiting for it to finish...\n2025-07-20 17:34:53.005 [info] Waiting for server to install via process(68859)...\n2025-07-20 17:34:53.017 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-20 17:34:53.017 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-20 17:34:53.017 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-20 17:34:53.017 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-20 17:34:53.017 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-20 17:34:53.019 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-20 17:34:53.020 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-20 17:34:53.020 [error] Error installing server: Failed to install the Cursor Server. 
Please check the logs for more details.\n2025-07-20 17:34:53.020 [info] Retrying connection in 5 seconds...\n2025-07-20 17:34:58.023 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_28706.sh\n2025-07-20 17:34:58.025 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-U49qJn/socket.sock\n2025-07-20 17:34:58.051 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_44158.sh"" | ssh -v -T -D 58875 login.haicore.berlin bash --login -c bash\n2025-07-20 17:34:58.051 [info] Started installation script. Waiting for it to finish...\n2025-07-20 17:34:58.051 [info] Waiting for server to install via process(68866)...\n2025-07-20 17:34:58.101 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-20 17:34:58.103 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\n\n2025-07-20 17:34:58.105 [info] (ssh_tunnel) stderr: debug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\ndebug1: Reading configuration data /etc/ssh/crypto.conf\ndebug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-20 17:34:58.119 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-20 17:34:58.120 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-20 17:34:58.121 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-20 17:34:58.121 [info] Retrying connection in 5 seconds...\n2025-07-20 17:35:17.601 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_44158.sh\n2025-07-20 17:35:17.602 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-U49qJn/socket.sock\n2025-07-20 17:35:17.673 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_79917.sh"" | ssh -v -T -D 58878 login.haicore.berlin bash --login -c bash\n2025-07-20 17:35:17.673 [info] Started installation script. 
Waiting for it to finish...\n2025-07-20 17:35:17.673 [info] Waiting for server to install via process(68875)...\n2025-07-20 17:35:17.682 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-20 17:35:17.682 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-20 17:35:17.682 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-20 17:35:17.682 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-20 17:35:17.682 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-20 17:35:17.683 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-20 17:35:17.684 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-20 17:35:17.684 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-20 17:35:17.684 [info] Retrying connection in 5 seconds...\n2025-07-20 17:35:22.692 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_79917.sh\n2025-07-20 17:35:22.692 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-U49qJn/socket.sock\n2025-07-20 17:35:22.694 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_56829.sh"" | ssh -v -T -D 58882 login.haicore.berlin bash --login -c bash\n2025-07-20 17:35:22.695 [info] Started installation script. Waiting for it to finish...\n2025-07-20 17:35:22.695 [info] Waiting for server to install via process(68886)...\n2025-07-20 17:35:22.712 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-20 17:35:22.712 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-20 17:35:22.712 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-20 17:35:22.712 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-20 17:35:22.712 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-20 17:35:22.718 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-20 17:35:22.722 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-20 17:35:22.722 [error] Error installing server: Failed to install the Cursor Server. 
Please check the logs for more details.\n2025-07-20 17:35:22.722 [info] Retrying connection in 5 seconds...\n2025-07-20 17:35:27.723 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_56829.sh\n2025-07-20 17:35:27.724 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-U49qJn/socket.sock\n2025-07-20 17:35:27.726 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_13029.sh"" | ssh -v -T -D 58883 login.haicore.berlin bash --login -c bash\n2025-07-20 17:35:27.726 [info] Started installation script. Waiting for it to finish...\n2025-07-20 17:35:27.726 [info] Waiting for server to install via process(68894)...\n2025-07-20 17:35:27.739 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-20 17:35:27.739 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-20 17:35:27.739 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-20 17:35:27.739 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-20 17:35:27.739 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-20 17:35:27.740 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-20 17:35:27.741 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-20 17:35:27.741 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-20 17:35:27.741 [info] Retrying connection in 5 seconds...\n2025-07-20 17:36:12.308 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_13029.sh\n2025-07-20 17:36:12.309 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-U49qJn/socket.sock\n2025-07-20 17:36:12.317 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_90893.sh"" | ssh -v -T -D 58885 login.haicore.berlin bash --login -c bash\n2025-07-20 17:36:12.317 [info] Started installation script. 
Waiting for it to finish...\n2025-07-20 17:36:12.317 [info] Waiting for server to install via process(68900)...\n2025-07-20 17:36:12.328 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-20 17:36:12.328 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-20 17:36:12.329 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-20 17:36:12.329 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-20 17:36:12.330 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-20 17:36:12.334 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-20 17:36:12.335 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-20 17:36:12.335 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-20 17:36:12.335 [info] Retrying connection in 5 seconds...\n2025-07-20 17:36:17.338 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_90893.sh\n2025-07-20 17:36:17.338 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-U49qJn/socket.sock\n2025-07-20 17:36:17.340 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_5731.sh"" | ssh -v -T -D 58888 login.haicore.berlin bash --login -c bash\n2025-07-20 17:36:17.340 [info] Started installation script. Waiting for it to finish...\n2025-07-20 17:36:17.340 [info] Waiting for server to install via process(68911)...\n2025-07-20 17:36:17.349 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-20 17:36:17.349 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-20 17:36:17.349 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-20 17:36:17.349 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-20 17:36:17.350 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-20 17:36:17.351 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-20 17:36:17.352 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-20 17:36:17.352 [error] Error installing server: Failed to install the Cursor Server. 
Please check the logs for more details.\n2025-07-20 17:36:17.352 [info] Retrying connection in 5 seconds...\n2025-07-20 17:36:22.361 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_5731.sh\n2025-07-20 17:36:22.362 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-U49qJn/socket.sock\n2025-07-20 17:36:22.364 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_48374.sh"" | ssh -v -T -D 58891 login.haicore.berlin bash --login -c bash\n2025-07-20 17:36:22.364 [info] Started installation script. Waiting for it to finish...\n2025-07-20 17:36:22.364 [info] Waiting for server to install via process(68919)...\n2025-07-20 17:36:22.372 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-20 17:36:22.372 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-20 17:36:22.372 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-20 17:36:22.372 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-20 17:36:22.373 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-20 17:36:22.374 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-20 17:36:22.375 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-20 17:36:22.375 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-20 17:36:22.375 [info] Retrying connection in 5 seconds...\n2025-07-20 17:36:27.406 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_48374.sh\n2025-07-20 17:36:27.410 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-U49qJn/socket.sock\n2025-07-20 17:36:27.413 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_71943.sh"" | ssh -v -T -D 58893 login.haicore.berlin bash --login -c bash\n2025-07-20 17:36:27.413 [info] Started installation script. 
Waiting for it to finish...\n2025-07-20 17:36:27.413 [info] Waiting for server to install via process(68927)...\n2025-07-20 17:36:27.421 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\n\n2025-07-20 17:36:27.421 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-20 17:36:27.421 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-20 17:36:27.421 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-20 17:36:27.421 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-20 17:36:27.422 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-20 17:36:27.422 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-20 17:36:27.422 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-20 17:36:27.422 [info] Retrying connection in 5 seconds...\n2025-07-20 17:52:17.539 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_71943.sh\n2025-07-20 17:52:17.542 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-U49qJn/socket.sock\n2025-07-20 17:52:17.547 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_73838.sh"" | ssh -v -T -D 58894 login.haicore.berlin bash --login -c bash\n2025-07-20 17:52:17.547 [info] Started installation script. Waiting for it to finish...\n2025-07-20 17:52:17.547 [info] Waiting for server to install via process(68934)...\n2025-07-20 17:52:17.558 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\n\n2025-07-20 17:52:17.558 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-20 17:52:17.558 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-20 17:52:17.558 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-20 17:52:17.559 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-20 17:52:17.560 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-20 17:52:17.561 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-20 17:52:17.561 [error] Error installing server: Failed to install the Cursor Server. 
Please check the logs for more details.\n2025-07-20 17:52:17.561 [error] Failed to connect after 12 attempts: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-20 17:52:17.561 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_73838.sh\n2025-07-20 17:52:17.562 [error] Error resolving SSH authority Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-21 18:09:34.231 [info] Resolving ssh remote authority 'login.haicore.berlin' (Unparsed 'ssh-remote+7b22686f73744e616d65223a226c6f67696e2e686169636f72652e6265726c696e227d') (attempt #1)\n2025-07-21 18:09:34.246 [info] SSH askpass server listening on /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-EBRMZl/socket.sock\n2025-07-21 18:09:34.247 [info] Using configured platform linux for remote host login.haicore.berlin\n2025-07-21 18:09:34.249 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-EBRMZl/socket.sock\n2025-07-21 18:09:34.251 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_87982.sh"" | ssh -v -T -D 51850 login.haicore.berlin bash --login -c bash\n2025-07-21 18:09:34.251 [info] Started installation script. Waiting for it to finish...\n2025-07-21 18:09:34.251 [info] Waiting for server to install via process(91396)...\n2025-07-21 18:09:34.257 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-21 18:09:34.258 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\n\n2025-07-21 18:09:34.258 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-21 18:09:34.260 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-21 18:09:34.260 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-21 18:09:34.261 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\n\n2025-07-21 18:09:34.261 [info] (ssh_tunnel) stderr: debug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-21 18:09:34.530 [info] (ssh_tunnel) stderr: debug1: Connection established.\n\n2025-07-21 18:09:34.532 [info] (ssh_tunnel) stderr: debug1: identity file /Users/franzsrambical/.ssh/id_ed25519 type 3\ndebug1: identity file /Users/franzsrambical/.ssh/id_ed25519-cert type -1\n\n2025-07-21 18:09:34.532 [info] (ssh_tunnel) stderr: debug1: Local version string SSH-2.0-OpenSSH_9.9\n\n2025-07-21 18:09:34.561 [info] (ssh_tunnel) stderr: debug1: Remote protocol version 2.0, remote software version OpenSSH_8.7\ndebug1: compat_banner: match: OpenSSH_8.7 pat OpenSSH* compat 0x04000000\ndebug1: Authenticating to login.haicore.berlin:22 as 'franz.srambical'\n\n2025-07-21 18:09:34.562 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /Users/franzsrambical/.ssh/known_hosts2: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory\ndebug1: load_hostkeys: 
fopen /etc/ssh/ssh_known_hosts2: No such file or directory\n\n2025-07-21 18:09:34.562 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEXINIT sent\n\n2025-07-21 18:09:34.583 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEXINIT received\ndebug1: kex: algorithm: ecdh-sha2-nistp256\ndebug1: kex: host key algorithm: ssh-ed25519\n\n2025-07-21 18:09:34.583 [info] (ssh_tunnel) stderr: debug1: kex: server->client cipher: aes128-gcm@openssh.com MAC: compression: none\ndebug1: kex: client->server cipher: aes128-gcm@openssh.com MAC: compression: none\n\n2025-07-21 18:09:34.584 [info] (ssh_tunnel) stderr: debug1: expecting SSH2_MSG_KEX_ECDH_REPLY\n\n2025-07-21 18:09:34.612 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEX_ECDH_REPLY received\ndebug1: Server host key: ssh-ed25519 SHA256:3/BGZ1UNXR9SufKdsZVtx4Yd+kZTnZzSvRH0l6rtbvo\n\n2025-07-21 18:09:34.614 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /Users/franzsrambical/.ssh/known_hosts2: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory\ndebug1: Host 'login.haicore.berlin' is known and matches the ED25519 host key.\n\n2025-07-21 18:09:34.614 [info] (ssh_tunnel) stderr: debug1: Found key in /Users/franzsrambical/.ssh/known_hosts:17\n\n2025-07-21 18:09:34.619 [info] (ssh_tunnel) stderr: debug1: ssh_packet_send2_wrapped: resetting send seqnr 3\ndebug1: rekey out after 4294967296 blocks\ndebug1: SSH2_MSG_NEWKEYS sent\ndebug1: expecting SSH2_MSG_NEWKEYS\n\n2025-07-21 18:09:34.619 [info] (ssh_tunnel) stderr: debug1: ssh_packet_read_poll2: resetting read seqnr 3\ndebug1: SSH2_MSG_NEWKEYS received\ndebug1: rekey in after 4294967296 blocks\n\n2025-07-21 18:09:34.619 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_EXT_INFO received\ndebug1: kex_ext_info_client_parse: server-sig-algs=\n\n2025-07-21 18:09:34.708 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_SERVICE_ACCEPT received\n\n2025-07-21 18:09:34.737 [info] (ssh_tunnel) stderr: debug1: Authentications that can continue: publickey,gssapi-keyex,gssapi-with-mic,password,keyboard-interactive\ndebug1: Next authentication method: publickey\n\n2025-07-21 18:09:34.740 [info] (ssh_tunnel) stderr: debug1: get_agent_identities: bound agent to hostkey\n\n2025-07-21 18:09:34.740 [info] (ssh_tunnel) stderr: debug1: get_agent_identities: ssh_fetch_identitylist: agent contains no identities\ndebug1: Will attempt key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\ndebug1: Offering public key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\n\n2025-07-21 18:09:34.811 [info] (ssh_tunnel) stderr: debug1: Server accepts key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\n\n2025-07-21 18:09:34.878 [info] (ssh_tunnel) stderr: Authenticated to login.haicore.berlin ([141.80.150.4]:22) using ""publickey"".\ndebug1: Local connections to LOCALHOST:51850 forwarded to remote address socks:0\n\n2025-07-21 18:09:34.879 [info] (ssh_tunnel) stderr: debug1: Local forwarding listening on ::1 port 51850.\ndebug1: channel 0: new port-listener [port listener] (inactive timeout: 0)\ndebug1: Local forwarding listening on 127.0.0.1 port 51850.\n\n2025-07-21 18:09:34.879 [info] (ssh_tunnel) stderr: debug1: channel 1: new port-listener [port listener] (inactive timeout: 0)\n\n2025-07-21 18:09:34.880 [info] (ssh_tunnel) stderr: 
debug1: channel 2: new session [client-session] (inactive timeout: 0)\n\n2025-07-21 18:09:34.880 [info] (ssh_tunnel) stderr: debug1: Requesting no-more-sessions@openssh.com\n\n2025-07-21 18:09:34.880 [info] (ssh_tunnel) stderr: debug1: Entering interactive session.\ndebug1: pledge: filesystem\n\n2025-07-21 18:09:35.078 [info] (ssh_tunnel) stderr: debug1: client_input_global_request: rtype hostkeys-00@openssh.com want_reply 0\n\n2025-07-21 18:09:35.079 [info] (ssh_tunnel) stderr: debug1: client_input_hostkeys: searching /Users/franzsrambical/.ssh/known_hosts for login.haicore.berlin / (none)\n\n2025-07-21 18:09:35.090 [info] (ssh_tunnel) stderr: debug1: client_input_hostkeys: searching /Users/franzsrambical/.ssh/known_hosts2 for login.haicore.berlin / (none)\ndebug1: client_input_hostkeys: hostkeys file /Users/franzsrambical/.ssh/known_hosts2 does not exist\ndebug1: client_input_hostkeys: no new or deprecated keys from server\ndebug1: Remote: /home/franz.srambical/.ssh/authorized_keys:1: key options: agent-forwarding port-forwarding pty user-rc x11-forwarding\n\n2025-07-21 18:09:35.101 [info] (ssh_tunnel) stderr: debug1: Remote: /home/franz.srambical/.ssh/authorized_keys:1: key options: agent-forwarding port-forwarding pty user-rc x11-forwarding\ndebug1: Sending environment.\ndebug1: Sending command: bash --login -c bash\ndebug1: pledge: network\n\n2025-07-21 18:09:35.489 [info] (ssh_tunnel) stdout: Using TMP_DIR: /run/user/961800067\n\n2025-07-21 18:09:35.528 [info] (ssh_tunnel) stdout: Locking /run/user/961800067/cursor-remote-lock.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-21 18:09:35.529 [info] (ssh_tunnel) stdout: Server script already installed in /home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/bin/cursor-server\nChecking node executable\n\n2025-07-21 18:09:35.535 [info] (ssh_tunnel) stdout: v20.18.2\n\n2025-07-21 18:09:35.548 [info] (ssh_tunnel) stdout: Checking for running multiplex server: /home/franz.srambical/.cursor-server/bin/multiplex-server/45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15.js\n\n2025-07-21 18:09:35.565 [info] (ssh_tunnel) stdout: Running multiplex server: \n\n2025-07-21 18:09:35.565 [info] (ssh_tunnel) stdout: Creating multiplex server token file /run/user/961800067/cursor-remote-multiplex.token.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\n\n2025-07-21 18:09:35.569 [info] (ssh_tunnel) stdout: Creating directory for multiplex server: /home/franz.srambical/.cursor-server/bin/multiplex-server\n\n2025-07-21 18:09:35.572 [info] (ssh_tunnel) stdout: Writing multiplex server script to /home/franz.srambical/.cursor-server/bin/multiplex-server/45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15.js\n\n2025-07-21 18:09:35.577 [info] (ssh_tunnel) stdout: Starting multiplex server: /home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/node /home/franz.srambical/.cursor-server/bin/multiplex-server/45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15.js b5322cab-ea7d-4ba2-9953-741fbc1d862f\n\n2025-07-21 18:09:35.580 [info] (ssh_tunnel) stdout: Multiplex server started with PID 1608886 and wrote pid to file /run/user/961800067/cursor-remote-multiplex.pid.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\nReading multiplex server token file 
/run/user/961800067/cursor-remote-multiplex.token.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\nMultiplex server token file found\nReading multiplex server log file /run/user/961800067/cursor-remote-multiplex.log.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\n\n2025-07-21 18:09:36.090 [info] (ssh_tunnel) stdout: Checking for code servers\n\n2025-07-21 18:09:36.106 [info] (ssh_tunnel) stdout: Code server script is not running\nCreating code server token file /run/user/961800067/cursor-remote-code.token.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-21 18:09:36.108 [info] (ssh_tunnel) stdout: Starting code server script /home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/bin/cursor-server --start-server --host=127.0.0.1 --port 0 --connection-token-file /run/user/961800067/cursor-remote-code.token.c403edc4db82e26fa41a0903d75ac6d0 --telemetry-level off --enable-remote-auto-shutdown --accept-server-license-terms &> /run/user/961800067/cursor-remote-code.log.c403edc4db82e26fa41a0903d75ac6d0 &\n\n2025-07-21 18:09:36.110 [info] (ssh_tunnel) stdout: Code server started with PID 1608910 and wrote pid to file /run/user/961800067/cursor-remote-code.pid.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-21 18:09:36.111 [info] (ssh_tunnel) stdout: Code server log file is /run/user/961800067/cursor-remote-code.log.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-21 18:09:36.680 [info] (ssh_tunnel) stdout: 52930ed763def12b0badc48b: start\nexitCode==0==\nnodeExecutable==/home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/node==\nerrorMessage====\nisFatalError==false==\nmultiplexListeningOn==38723==\nmultiplexConnectionToken==b5322cab-ea7d-4ba2-9953-741fbc1d862f==\ncodeListeningOn==34715==\ncodeConnectionToken==d56c0086-b507-48dc-bbeb-73bed3240052==\ndetectedPlatform==linux==\narch==x64==\nSSH_AUTH_SOCK====\n52930ed763def12b0badc48b: end\nUnlocking /run/user/961800067/cursor-remote-lock.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-21 18:09:36.680 [info] Server install command exit code: 0\n2025-07-21 18:09:36.680 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_87982.sh\n2025-07-21 18:09:36.682 [info] [forwarding][code] creating new forwarding server\n2025-07-21 18:09:36.682 [info] [forwarding][code] server listening on 127.0.0.1:51858\n2025-07-21 18:09:36.682 [info] [forwarding][code] Set up server\n2025-07-21 18:09:36.682 [info] [remote-ssh] codeListeningOn (remote=[object Object]; local=[object Object]) codeConnectionToken: d56c0086-b507-48dc-bbeb-73bed3240052\n2025-07-21 18:09:36.682 [info] [forwarding][multiplex] creating new forwarding server\n2025-07-21 18:09:36.682 [info] [forwarding][multiplex] server listening on 127.0.0.1:51859\n2025-07-21 18:09:36.682 [info] [forwarding][multiplex] Set up server\n2025-07-21 18:09:36.683 [info] [remote-ssh] multiplexListeningOn (remote=[object Object]; local=[object Object]) multiplexConnectionToken: b5322cab-ea7d-4ba2-9953-741fbc1d862f\n2025-07-21 18:09:36.683 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:09:36.686 [info] [command][6ad07db1-6133-4fc8-905c-9a9f389f3d8e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""6ad07db1-6133-4fc8-905c-9a9f389f3d8e""}\n2025-07-21 18:09:36.687 [info] [forwarding][multiplex][127.0.0.1:51859 -> 
127.0.0.1:38723][fbc903fb-b395-4725-b186-57f32893e8aa] received connection request\n2025-07-21 18:09:36.692 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 3: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:09:36.693 [info] [forwarding][code][127.0.0.1:51858 -> 127.0.0.1:34715][e83e6770-26d4-4ba5-8c40-76e90050618a] received connection request\n2025-07-21 18:09:36.694 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:09:36.718 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][fbc903fb-b395-4725-b186-57f32893e8aa] socks forwarding established\n2025-07-21 18:09:36.718 [info] [forwarding][code][127.0.0.1:51858 -> 127.0.0.1:51850 -> 127.0.0.1:34715][e83e6770-26d4-4ba5-8c40-76e90050618a] socks forwarding established\n2025-07-21 18:09:36.749 [info] Successfully connected to Cursor server at http://127.0.0.1:51858/version\n2025-07-21 18:09:36.749 [info] [execServer][spawn] command: echo, args: 1, options: {}\n2025-07-21 18:09:36.749 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][086cb395-ca48-4986-a127-f129f8ad3f18] received connection request\n2025-07-21 18:09:36.749 [info] [command][cd419a86-f3a4-4ec2-9aec-b30631b4f4f8] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""cd419a86-f3a4-4ec2-9aec-b30631b4f4f8""}\n2025-07-21 18:09:36.750 [info] [command][6ad07db1-6133-4fc8-905c-9a9f389f3d8e] Process exited with code 0\n2025-07-21 18:09:36.750 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 5: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:09:36.750 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][fbc903fb-b395-4725-b186-57f32893e8aa] socks connection closed\n2025-07-21 18:09:36.750 [info] [command][6ad07db1-6133-4fc8-905c-9a9f389f3d8e] Socket close event received\n2025-07-21 18:09:36.773 [info] (ssh_tunnel) stderr: debug1: channel 3: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 51861 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:09:36.774 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][086cb395-ca48-4986-a127-f129f8ad3f18] socks forwarding established\n2025-07-21 18:09:36.800 [info] [command][cd419a86-f3a4-4ec2-9aec-b30631b4f4f8] Process exited with code 0\n2025-07-21 18:09:36.800 [info] Successfully ran 'echo 1' against the multiplex server\n2025-07-21 18:09:36.801 [info] [remote-ssh] Resolved exec server. Socks port: 51850\n2025-07-21 18:09:36.801 [info] [remote-ssh] Resolved authority: {""host"":""127.0.0.1"",""port"":51858,""connectionToken"":""d56c0086-b507-48dc-bbeb-73bed3240052"",""extensionHostEnv"":{}}. 
Socks port: 51850\n2025-07-21 18:09:36.801 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][086cb395-ca48-4986-a127-f129f8ad3f18] socks connection closed\n2025-07-21 18:09:36.801 [info] [command][cd419a86-f3a4-4ec2-9aec-b30631b4f4f8] Socket close event received\n2025-07-21 18:09:36.818 [info] [forwarding][code][127.0.0.1:51858 -> 127.0.0.1:34715][212940ee-8b89-4d7f-be42-7b17f458dcbf] received connection request\n2025-07-21 18:09:36.818 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 3: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:09:36.824 [info] (ssh_tunnel) stderr: debug1: channel 5: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 51865 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:09:36.842 [info] [forwarding][code][127.0.0.1:51858 -> 127.0.0.1:51850 -> 127.0.0.1:34715][212940ee-8b89-4d7f-be42-7b17f458dcbf] socks forwarding established\n2025-07-21 18:09:36.883 [info] [forwarding][code][127.0.0.1:51858 -> 127.0.0.1:34715][d2130de6-f45b-4602-a7fa-a9d9c97b22d3] received connection request\n2025-07-21 18:09:36.883 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 5: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:09:36.906 [info] [forwarding][code][127.0.0.1:51858 -> 127.0.0.1:51850 -> 127.0.0.1:34715][d2130de6-f45b-4602-a7fa-a9d9c97b22d3] socks forwarding established\n2025-07-21 18:09:39.780 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 34715, connect from 127.0.0.1 port 51863 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:09:39.781 [info] [forwarding][code][127.0.0.1:51858 -> 127.0.0.1:51850 -> 127.0.0.1:34715][e83e6770-26d4-4ba5-8c40-76e90050618a] socks connection closed\n2025-07-21 18:09:43.081 [info] Saved platform linux for remote host login.haicore.berlin\n2025-07-21 18:10:36.755 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:10:36.756 [info] [command][8977e160-5ea6-42f5-a7ca-32ec6adce47d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""8977e160-5ea6-42f5-a7ca-32ec6adce47d""}\n2025-07-21 18:10:36.756 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][1b8999d9-88bb-4001-a508-4d4eddc3d549] received connection request\n2025-07-21 18:10:36.756 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:10:36.780 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][1b8999d9-88bb-4001-a508-4d4eddc3d549] socks forwarding established\n2025-07-21 18:10:36.810 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][1b8999d9-88bb-4001-a508-4d4eddc3d549] socks connection closed\n2025-07-21 18:10:36.810 [info] [command][8977e160-5ea6-42f5-a7ca-32ec6adce47d] Process exited with code 0\n2025-07-21 18:10:36.810 [info] [command][8977e160-5ea6-42f5-a7ca-32ec6adce47d] Socket close event received\n2025-07-21 18:10:36.834 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 51976 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:11:36.812 [info] 
[remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:11:36.813 [info] [command][ca7e79aa-1984-4ab2-9ddd-353e1824578b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""ca7e79aa-1984-4ab2-9ddd-353e1824578b""}\n2025-07-21 18:11:36.814 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][936dd636-ef13-4d9b-b703-7fb51938c98c] received connection request\n2025-07-21 18:11:36.814 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:11:36.839 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][936dd636-ef13-4d9b-b703-7fb51938c98c] socks forwarding established\n2025-07-21 18:11:36.865 [info] [command][ca7e79aa-1984-4ab2-9ddd-353e1824578b] Process exited with code 0\n2025-07-21 18:11:36.865 [info] [command][ca7e79aa-1984-4ab2-9ddd-353e1824578b] Socket close event received\n2025-07-21 18:11:36.866 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][936dd636-ef13-4d9b-b703-7fb51938c98c] socks connection closed\n2025-07-21 18:11:36.889 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52025 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:12:36.870 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:12:36.872 [info] [command][49cd1a30-532d-4fa4-beb6-7afc0f0ba674] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""49cd1a30-532d-4fa4-beb6-7afc0f0ba674""}\n2025-07-21 18:12:36.873 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][e2dce368-b1f1-4186-b7b6-c9761f4fad19] received connection request\n2025-07-21 18:12:36.873 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:12:36.897 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][e2dce368-b1f1-4186-b7b6-c9761f4fad19] socks forwarding established\n2025-07-21 18:12:36.922 [info] [command][49cd1a30-532d-4fa4-beb6-7afc0f0ba674] Process exited with code 0\n2025-07-21 18:12:36.923 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][e2dce368-b1f1-4186-b7b6-c9761f4fad19] socks connection closed\n2025-07-21 18:12:36.923 [info] [command][49cd1a30-532d-4fa4-beb6-7afc0f0ba674] Socket close event received\n2025-07-21 18:12:36.947 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52062 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:13:36.928 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:13:36.930 [info] [command][108189a1-b696-43d9-b2d8-acee1b965680] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""108189a1-b696-43d9-b2d8-acee1b965680""}\n2025-07-21 18:13:36.930 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][eb27b3ae-f625-4096-8f17-d6e465fb9c06] received connection request\n2025-07-21 18:13:36.931 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 
requested.\n\n2025-07-21 18:13:36.931 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:13:36.956 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][eb27b3ae-f625-4096-8f17-d6e465fb9c06] socks forwarding established\n2025-07-21 18:13:36.985 [info] [command][108189a1-b696-43d9-b2d8-acee1b965680] Process exited with code 0\n2025-07-21 18:13:36.985 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][eb27b3ae-f625-4096-8f17-d6e465fb9c06] socks connection closed\n2025-07-21 18:13:36.985 [info] [command][108189a1-b696-43d9-b2d8-acee1b965680] Socket close event received\n2025-07-21 18:13:37.009 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52086 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:14:36.990 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:14:36.993 [info] [command][9c570fcd-9e71-4174-ad59-c9ae8bd818e2] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""9c570fcd-9e71-4174-ad59-c9ae8bd818e2""}\n2025-07-21 18:14:36.994 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][ab6c20a7-3e9e-44e2-8b91-a7f327c94425] received connection request\n2025-07-21 18:14:36.994 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:14:37.019 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][ab6c20a7-3e9e-44e2-8b91-a7f327c94425] socks forwarding established\n2025-07-21 18:14:37.048 [info] [command][9c570fcd-9e71-4174-ad59-c9ae8bd818e2] Process exited with code 0\n2025-07-21 18:14:37.048 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][ab6c20a7-3e9e-44e2-8b91-a7f327c94425] socks connection closed\n2025-07-21 18:14:37.049 [info] [command][9c570fcd-9e71-4174-ad59-c9ae8bd818e2] Socket close event received\n2025-07-21 18:14:37.072 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52137 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:15:37.053 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:15:37.055 [info] [command][b46c7f91-0856-4594-ac70-e477092b686f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""b46c7f91-0856-4594-ac70-e477092b686f""}\n2025-07-21 18:15:37.055 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][fd4b6ac3-6e73-49b7-a6b3-4bc5b0d694e8] received connection request\n2025-07-21 18:15:37.056 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:15:37.080 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][fd4b6ac3-6e73-49b7-a6b3-4bc5b0d694e8] socks forwarding established\n2025-07-21 18:15:37.107 [info] [command][b46c7f91-0856-4594-ac70-e477092b686f] Process exited with code 0\n2025-07-21 18:15:37.107 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][fd4b6ac3-6e73-49b7-a6b3-4bc5b0d694e8] socks 
connection closed\n2025-07-21 18:15:37.107 [info] [command][b46c7f91-0856-4594-ac70-e477092b686f] Socket close event received\n2025-07-21 18:15:37.131 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52168 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:16:37.112 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:16:37.115 [info] [command][2db5dc0d-eb42-455d-bc12-ca95d2b27656] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""2db5dc0d-eb42-455d-bc12-ca95d2b27656""}\n2025-07-21 18:16:37.116 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][11e763c0-2ee3-484e-a0bf-e5858cd42813] received connection request\n2025-07-21 18:16:37.117 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:16:37.141 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][11e763c0-2ee3-484e-a0bf-e5858cd42813] socks forwarding established\n2025-07-21 18:16:37.171 [info] [command][2db5dc0d-eb42-455d-bc12-ca95d2b27656] Process exited with code 0\n2025-07-21 18:16:37.172 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][11e763c0-2ee3-484e-a0bf-e5858cd42813] socks connection closed\n2025-07-21 18:16:37.172 [info] [command][2db5dc0d-eb42-455d-bc12-ca95d2b27656] Socket close event received\n2025-07-21 18:16:37.196 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52222 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:17:37.177 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:17:37.180 [info] [command][48b56721-c4d8-47a6-a27a-d53709fb0a7a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""48b56721-c4d8-47a6-a27a-d53709fb0a7a""}\n2025-07-21 18:17:37.180 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][8c65fb8f-b4f5-426f-8ce4-fab38759bdd7] received connection request\n2025-07-21 18:17:37.181 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:17:37.206 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][8c65fb8f-b4f5-426f-8ce4-fab38759bdd7] socks forwarding established\n2025-07-21 18:17:37.233 [info] [command][48b56721-c4d8-47a6-a27a-d53709fb0a7a] Process exited with code 0\n2025-07-21 18:17:37.234 [info] [command][48b56721-c4d8-47a6-a27a-d53709fb0a7a] Socket close event received\n2025-07-21 18:17:37.234 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][8c65fb8f-b4f5-426f-8ce4-fab38759bdd7] socks connection closed\n2025-07-21 18:17:37.258 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52244 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:18:37.239 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:18:37.242 [info] [command][bbbf12bf-f0d3-40c6-991a-f07bb87a226f] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""bbbf12bf-f0d3-40c6-991a-f07bb87a226f""}\n2025-07-21 18:18:37.243 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][2cde3f80-2d3c-4c10-acaf-045b695b1229] received connection request\n2025-07-21 18:18:37.243 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:18:37.266 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][2cde3f80-2d3c-4c10-acaf-045b695b1229] socks forwarding established\n2025-07-21 18:18:37.293 [info] [command][bbbf12bf-f0d3-40c6-991a-f07bb87a226f] Process exited with code 0\n2025-07-21 18:18:37.293 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][2cde3f80-2d3c-4c10-acaf-045b695b1229] socks connection closed\n2025-07-21 18:18:37.293 [info] [command][bbbf12bf-f0d3-40c6-991a-f07bb87a226f] Socket close event received\n2025-07-21 18:18:37.317 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52265 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:19:37.299 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:19:37.301 [info] [command][b9dabf6f-05c2-4777-90f6-cd432360b9b8] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""b9dabf6f-05c2-4777-90f6-cd432360b9b8""}\n2025-07-21 18:19:37.302 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][023fea89-bcd5-49b5-bb4b-41db0b281ea2] received connection request\n2025-07-21 18:19:37.303 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:19:37.327 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][023fea89-bcd5-49b5-bb4b-41db0b281ea2] socks forwarding established\n2025-07-21 18:19:37.355 [info] [command][b9dabf6f-05c2-4777-90f6-cd432360b9b8] Process exited with code 0\n2025-07-21 18:19:37.356 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][023fea89-bcd5-49b5-bb4b-41db0b281ea2] socks connection closed\n2025-07-21 18:19:37.356 [info] [command][b9dabf6f-05c2-4777-90f6-cd432360b9b8] Socket close event received\n2025-07-21 18:19:37.379 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52316 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:20:37.360 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:20:37.361 [info] [command][271a8a28-0879-4c6f-a6c6-c09fbe8b7f49] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""271a8a28-0879-4c6f-a6c6-c09fbe8b7f49""}\n2025-07-21 18:20:37.362 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][0154a8bb-b746-4c89-9b6d-2773b6238744] received connection request\n2025-07-21 18:20:37.363 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:20:37.385 [info] [forwarding][multiplex][127.0.0.1:51859 -> 
127.0.0.1:51850 -> 127.0.0.1:38723][0154a8bb-b746-4c89-9b6d-2773b6238744] socks forwarding established\n2025-07-21 18:20:37.411 [info] [command][271a8a28-0879-4c6f-a6c6-c09fbe8b7f49] Process exited with code 0\n2025-07-21 18:20:37.411 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][0154a8bb-b746-4c89-9b6d-2773b6238744] socks connection closed\n2025-07-21 18:20:37.411 [info] [command][271a8a28-0879-4c6f-a6c6-c09fbe8b7f49] Socket close event received\n2025-07-21 18:20:37.434 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52341 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:21:37.416 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:21:37.419 [info] [command][e213db02-3b52-418a-8dae-3b9229851e01] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""e213db02-3b52-418a-8dae-3b9229851e01""}\n2025-07-21 18:21:37.420 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][e844ea91-67bc-4b96-a3fb-9419a27d91bc] received connection request\n2025-07-21 18:21:37.421 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:21:37.448 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][e844ea91-67bc-4b96-a3fb-9419a27d91bc] socks forwarding established\n2025-07-21 18:21:37.474 [info] [command][e213db02-3b52-418a-8dae-3b9229851e01] Process exited with code 0\n2025-07-21 18:21:37.474 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][e844ea91-67bc-4b96-a3fb-9419a27d91bc] socks connection closed\n2025-07-21 18:21:37.475 [info] [command][e213db02-3b52-418a-8dae-3b9229851e01] Socket close event received\n2025-07-21 18:21:37.498 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52381 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:22:37.480 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:22:37.482 [info] [command][9b63881d-61d1-419c-9be5-546563e588d1] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""9b63881d-61d1-419c-9be5-546563e588d1""}\n2025-07-21 18:22:37.483 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][0dbadd16-6955-4266-95ba-1ca96e580d82] received connection request\n2025-07-21 18:22:37.484 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:22:37.508 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][0dbadd16-6955-4266-95ba-1ca96e580d82] socks forwarding established\n2025-07-21 18:22:37.537 [info] [command][9b63881d-61d1-419c-9be5-546563e588d1] Process exited with code 0\n2025-07-21 18:22:37.538 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][0dbadd16-6955-4266-95ba-1ca96e580d82] socks connection closed\n2025-07-21 18:22:37.538 [info] [command][9b63881d-61d1-419c-9be5-546563e588d1] Socket close event received\n2025-07-21 18:22:37.562 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: 
listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52417 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:23:37.539 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:23:37.541 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][575a98ef-f414-4f6a-b616-b5368cfb0730] received connection request\n2025-07-21 18:23:37.542 [info] [command][c78b7feb-98a4-483a-9feb-47b03d7a1ebe] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""c78b7feb-98a4-483a-9feb-47b03d7a1ebe""}\n2025-07-21 18:23:37.543 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:23:37.567 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][575a98ef-f414-4f6a-b616-b5368cfb0730] socks forwarding established\n2025-07-21 18:23:37.594 [info] [command][c78b7feb-98a4-483a-9feb-47b03d7a1ebe] Process exited with code 0\n2025-07-21 18:23:37.594 [info] [command][c78b7feb-98a4-483a-9feb-47b03d7a1ebe] Socket close event received\n2025-07-21 18:23:37.594 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][575a98ef-f414-4f6a-b616-b5368cfb0730] socks connection closed\n2025-07-21 18:23:37.618 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52442 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:24:37.595 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:24:37.596 [info] [command][d401959b-d3b2-4fb4-990a-f80a037106d9] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""d401959b-d3b2-4fb4-990a-f80a037106d9""}\n2025-07-21 18:24:37.596 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][cfbb005c-0bee-4c64-97c0-ddedc3a8dbb7] received connection request\n2025-07-21 18:24:37.597 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:24:37.623 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][cfbb005c-0bee-4c64-97c0-ddedc3a8dbb7] socks forwarding established\n2025-07-21 18:24:37.649 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][cfbb005c-0bee-4c64-97c0-ddedc3a8dbb7] socks connection closed\n2025-07-21 18:24:37.649 [info] [command][d401959b-d3b2-4fb4-990a-f80a037106d9] Process exited with code 0\n2025-07-21 18:24:37.649 [info] [command][d401959b-d3b2-4fb4-990a-f80a037106d9] Socket close event received\n2025-07-21 18:24:37.674 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52499 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:25:37.653 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:25:37.655 [info] [command][8ce7d359-3b65-4fa9-98f7-dacd15bbc299] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""8ce7d359-3b65-4fa9-98f7-dacd15bbc299""}\n2025-07-21 18:25:37.656 [info] [forwarding][multiplex][127.0.0.1:51859 -> 
127.0.0.1:38723][ad1fb8ad-9924-4500-bb28-feed45c439ce] received connection request\n2025-07-21 18:25:37.657 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:25:37.682 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][ad1fb8ad-9924-4500-bb28-feed45c439ce] socks forwarding established\n2025-07-21 18:25:37.711 [info] [command][8ce7d359-3b65-4fa9-98f7-dacd15bbc299] Process exited with code 0\n2025-07-21 18:25:37.712 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][ad1fb8ad-9924-4500-bb28-feed45c439ce] socks connection closed\n2025-07-21 18:25:37.712 [info] [command][8ce7d359-3b65-4fa9-98f7-dacd15bbc299] Socket close event received\n2025-07-21 18:25:37.735 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52525 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:26:37.717 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:26:37.718 [info] [command][b3aa66d9-8c5d-4efb-aac2-7ebddcb2790f] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""b3aa66d9-8c5d-4efb-aac2-7ebddcb2790f""}\n2025-07-21 18:26:37.718 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][7b4ac7f8-47dd-49ae-ab86-2c59c7dbfda3] received connection request\n2025-07-21 18:26:37.718 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:26:37.741 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][7b4ac7f8-47dd-49ae-ab86-2c59c7dbfda3] socks forwarding established\n2025-07-21 18:26:37.767 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][7b4ac7f8-47dd-49ae-ab86-2c59c7dbfda3] socks connection closed\n2025-07-21 18:26:37.767 [info] [command][b3aa66d9-8c5d-4efb-aac2-7ebddcb2790f] Process exited with code 0\n2025-07-21 18:26:37.767 [info] [command][b3aa66d9-8c5d-4efb-aac2-7ebddcb2790f] Socket close event received\n2025-07-21 18:26:37.790 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52567 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:27:37.773 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:27:37.775 [info] [command][00be2e95-a94a-43bf-9707-b43c6b462366] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""00be2e95-a94a-43bf-9707-b43c6b462366""}\n2025-07-21 18:27:37.776 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][24d6d19e-11c1-4a40-8285-1311ea9c6171] received connection request\n2025-07-21 18:27:37.777 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:27:37.802 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][24d6d19e-11c1-4a40-8285-1311ea9c6171] socks forwarding established\n2025-07-21 18:27:37.831 [info] [command][00be2e95-a94a-43bf-9707-b43c6b462366] Process exited with code 0\n2025-07-21 
18:27:37.831 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][24d6d19e-11c1-4a40-8285-1311ea9c6171] socks connection closed\n2025-07-21 18:27:37.831 [info] [command][00be2e95-a94a-43bf-9707-b43c6b462366] Socket close event received\n2025-07-21 18:27:37.855 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52590 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:28:37.836 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:28:37.837 [info] [command][be2c9349-5be7-4a46-8b0c-79c685f99cc7] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""be2c9349-5be7-4a46-8b0c-79c685f99cc7""}\n2025-07-21 18:28:37.837 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][69b079d0-2ca9-4b58-9e8e-b3d8f87bbd3f] received connection request\n2025-07-21 18:28:37.837 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\n\n2025-07-21 18:28:37.837 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:28:37.865 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][69b079d0-2ca9-4b58-9e8e-b3d8f87bbd3f] socks forwarding established\n2025-07-21 18:28:37.891 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][69b079d0-2ca9-4b58-9e8e-b3d8f87bbd3f] socks connection closed\n2025-07-21 18:28:37.891 [info] [command][be2c9349-5be7-4a46-8b0c-79c685f99cc7] Process exited with code 0\n2025-07-21 18:28:37.891 [info] [command][be2c9349-5be7-4a46-8b0c-79c685f99cc7] Socket close event received\n2025-07-21 18:28:37.915 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52635 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:29:37.894 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:29:37.896 [info] [command][b04ab700-e608-4a39-be8b-468cf4b20ad4] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""b04ab700-e608-4a39-be8b-468cf4b20ad4""}\n2025-07-21 18:29:37.897 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][3363b10a-e537-4958-bf9a-d741df89fa85] received connection request\n2025-07-21 18:29:37.897 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:29:37.926 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][3363b10a-e537-4958-bf9a-d741df89fa85] socks forwarding established\n2025-07-21 18:29:37.952 [info] [command][b04ab700-e608-4a39-be8b-468cf4b20ad4] Process exited with code 0\n2025-07-21 18:29:37.952 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][3363b10a-e537-4958-bf9a-d741df89fa85] socks connection closed\n2025-07-21 18:29:37.952 [info] [command][b04ab700-e608-4a39-be8b-468cf4b20ad4] Socket close event received\n2025-07-21 18:29:37.974 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52689 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:30:37.956 [info] [remote-ssh] 
Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:30:37.958 [info] [command][4d040278-a01a-46fa-88a7-b824b74435aa] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""4d040278-a01a-46fa-88a7-b824b74435aa""}\n2025-07-21 18:30:37.959 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][ae557b9f-cdfa-403e-9c1d-c173d81390d2] received connection request\n2025-07-21 18:30:37.959 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:30:38.057 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][ae557b9f-cdfa-403e-9c1d-c173d81390d2] socks forwarding established\n2025-07-21 18:30:38.085 [info] [command][4d040278-a01a-46fa-88a7-b824b74435aa] Process exited with code 0\n2025-07-21 18:30:38.085 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][ae557b9f-cdfa-403e-9c1d-c173d81390d2] socks connection closed\n2025-07-21 18:30:38.085 [info] [command][4d040278-a01a-46fa-88a7-b824b74435aa] Socket close event received\n2025-07-21 18:30:38.112 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52720 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:31:38.089 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:31:38.091 [info] [command][38d83b89-344f-415c-a030-a393b0e4f5e7] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""38d83b89-344f-415c-a030-a393b0e4f5e7""}\n2025-07-21 18:31:38.092 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][9327a746-716d-4e70-a358-d765559d0500] received connection request\n2025-07-21 18:31:38.092 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:31:38.116 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][9327a746-716d-4e70-a358-d765559d0500] socks forwarding established\n2025-07-21 18:31:38.142 [info] [command][38d83b89-344f-415c-a030-a393b0e4f5e7] Process exited with code 0\n2025-07-21 18:31:38.142 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][9327a746-716d-4e70-a358-d765559d0500] socks connection closed\n2025-07-21 18:31:38.142 [info] [command][38d83b89-344f-415c-a030-a393b0e4f5e7] Socket close event received\n2025-07-21 18:31:38.166 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52770 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:32:38.147 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:32:38.149 [info] [command][0c9f5f00-103d-4421-be0e-f1e5cb677fd0] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""0c9f5f00-103d-4421-be0e-f1e5cb677fd0""}\n2025-07-21 18:32:38.150 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][11093a53-906d-42fc-bafe-83c45e77cbc6] received connection request\n2025-07-21 18:32:38.151 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 
requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:32:38.176 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][11093a53-906d-42fc-bafe-83c45e77cbc6] socks forwarding established\n2025-07-21 18:32:38.202 [info] [command][0c9f5f00-103d-4421-be0e-f1e5cb677fd0] Process exited with code 0\n2025-07-21 18:32:38.202 [info] [command][0c9f5f00-103d-4421-be0e-f1e5cb677fd0] Socket close event received\n2025-07-21 18:32:38.203 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][11093a53-906d-42fc-bafe-83c45e77cbc6] socks connection closed\n2025-07-21 18:32:38.226 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52797 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:33:38.203 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:33:38.206 [info] [command][81f25ca4-85a6-4429-ae60-460efa787f31] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""81f25ca4-85a6-4429-ae60-460efa787f31""}\n2025-07-21 18:33:38.207 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][ea98ad55-73e1-4949-aab9-0694165342bf] received connection request\n2025-07-21 18:33:38.207 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:33:38.232 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][ea98ad55-73e1-4949-aab9-0694165342bf] socks forwarding established\n2025-07-21 18:33:38.256 [info] [command][81f25ca4-85a6-4429-ae60-460efa787f31] Process exited with code 0\n2025-07-21 18:33:38.257 [info] [command][81f25ca4-85a6-4429-ae60-460efa787f31] Socket close event received\n2025-07-21 18:33:38.257 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][ea98ad55-73e1-4949-aab9-0694165342bf] socks connection closed\n2025-07-21 18:33:38.279 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52821 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:34:38.258 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:34:38.259 [info] [command][9e665ff9-55f1-4885-be75-e5a0d0f17998] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""9e665ff9-55f1-4885-be75-e5a0d0f17998""}\n2025-07-21 18:34:38.260 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][19dc0261-6ed7-47b2-9049-499cce042fb8] received connection request\n2025-07-21 18:34:38.260 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:34:38.285 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][19dc0261-6ed7-47b2-9049-499cce042fb8] socks forwarding established\n2025-07-21 18:34:38.313 [info] [command][9e665ff9-55f1-4885-be75-e5a0d0f17998] Process exited with code 0\n2025-07-21 18:34:38.313 [info] [command][9e665ff9-55f1-4885-be75-e5a0d0f17998] Socket close event received\n2025-07-21 18:34:38.314 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 
127.0.0.1:38723][19dc0261-6ed7-47b2-9049-499cce042fb8] socks connection closed\n2025-07-21 18:34:38.338 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52884 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:35:38.314 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:35:38.316 [info] [command][c964a3de-0949-4cf8-94e8-33f0117810e1] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""c964a3de-0949-4cf8-94e8-33f0117810e1""}\n2025-07-21 18:35:38.318 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][3fe2e4f3-fb67-4999-a4db-f98f0961d7e4] received connection request\n2025-07-21 18:35:38.319 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:35:38.342 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][3fe2e4f3-fb67-4999-a4db-f98f0961d7e4] socks forwarding established\n2025-07-21 18:35:38.369 [info] [command][c964a3de-0949-4cf8-94e8-33f0117810e1] Process exited with code 0\n2025-07-21 18:35:38.369 [info] [command][c964a3de-0949-4cf8-94e8-33f0117810e1] Socket close event received\n2025-07-21 18:35:38.370 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][3fe2e4f3-fb67-4999-a4db-f98f0961d7e4] socks connection closed\n2025-07-21 18:35:38.393 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52915 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:36:38.371 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:36:38.373 [info] [command][6efa15ec-3bb2-4dda-b59d-ca5f3c6aad78] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""6efa15ec-3bb2-4dda-b59d-ca5f3c6aad78""}\n2025-07-21 18:36:38.374 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][39fc8dfb-05ea-4585-aa00-5f4044908f97] received connection request\n2025-07-21 18:36:38.374 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:36:38.401 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][39fc8dfb-05ea-4585-aa00-5f4044908f97] socks forwarding established\n2025-07-21 18:36:38.431 [info] [command][6efa15ec-3bb2-4dda-b59d-ca5f3c6aad78] Process exited with code 0\n2025-07-21 18:36:38.431 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][39fc8dfb-05ea-4585-aa00-5f4044908f97] socks connection closed\n2025-07-21 18:36:38.431 [info] [command][6efa15ec-3bb2-4dda-b59d-ca5f3c6aad78] Socket close event received\n2025-07-21 18:36:38.457 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 52961 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:37:38.436 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:37:38.438 [info] [command][8ec7d5eb-57ec-4cf7-a9a2-89a33643cc22] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""8ec7d5eb-57ec-4cf7-a9a2-89a33643cc22""}\n2025-07-21 18:37:38.439 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][520969f7-e5b5-44c9-9868-bb431215d512] received connection request\n2025-07-21 18:37:38.439 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:37:38.465 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][520969f7-e5b5-44c9-9868-bb431215d512] socks forwarding established\n2025-07-21 18:37:38.494 [info] [command][8ec7d5eb-57ec-4cf7-a9a2-89a33643cc22] Process exited with code 0\n2025-07-21 18:37:38.495 [info] [command][8ec7d5eb-57ec-4cf7-a9a2-89a33643cc22] Socket close event received\n2025-07-21 18:37:38.495 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][520969f7-e5b5-44c9-9868-bb431215d512] socks connection closed\n2025-07-21 18:37:38.519 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53005 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:38:38.501 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:38:38.505 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][5588d954-b7fa-4714-b2e2-a25e6ad39d60] received connection request\n2025-07-21 18:38:38.506 [info] [command][052c3025-9aa4-4037-8f2d-9cf0ec5800f4] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""052c3025-9aa4-4037-8f2d-9cf0ec5800f4""}\n2025-07-21 18:38:38.509 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\n\n2025-07-21 18:38:38.510 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:38:38.554 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][5588d954-b7fa-4714-b2e2-a25e6ad39d60] socks forwarding established\n2025-07-21 18:38:38.584 [info] [command][052c3025-9aa4-4037-8f2d-9cf0ec5800f4] Process exited with code 0\n2025-07-21 18:38:38.584 [info] [command][052c3025-9aa4-4037-8f2d-9cf0ec5800f4] Socket close event received\n2025-07-21 18:38:38.585 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][5588d954-b7fa-4714-b2e2-a25e6ad39d60] socks connection closed\n2025-07-21 18:38:38.608 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53033 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:39:38.588 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:39:38.589 [info] [command][7b9cd19a-8022-4f97-8bbe-4fcaade84f1d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""7b9cd19a-8022-4f97-8bbe-4fcaade84f1d""}\n2025-07-21 18:39:38.590 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][514f368a-2650-4ce1-b97d-8bf75e032119] received connection request\n2025-07-21 18:39:38.590 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 
18:39:38.615 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][514f368a-2650-4ce1-b97d-8bf75e032119] socks forwarding established\n2025-07-21 18:39:38.640 [info] [command][7b9cd19a-8022-4f97-8bbe-4fcaade84f1d] Process exited with code 0\n2025-07-21 18:39:38.641 [info] [command][7b9cd19a-8022-4f97-8bbe-4fcaade84f1d] Socket close event received\n2025-07-21 18:39:38.641 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][514f368a-2650-4ce1-b97d-8bf75e032119] socks connection closed\n2025-07-21 18:39:38.664 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53083 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:40:38.643 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:40:38.646 [info] [command][a9e47790-c6a2-4d11-bbfd-e9cd743749ab] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""a9e47790-c6a2-4d11-bbfd-e9cd743749ab""}\n2025-07-21 18:40:38.646 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][ff473f07-a39c-41aa-bb17-d907cdcabbbc] received connection request\n2025-07-21 18:40:38.647 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:40:38.672 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][ff473f07-a39c-41aa-bb17-d907cdcabbbc] socks forwarding established\n2025-07-21 18:40:38.700 [info] [command][a9e47790-c6a2-4d11-bbfd-e9cd743749ab] Process exited with code 0\n2025-07-21 18:40:38.700 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][ff473f07-a39c-41aa-bb17-d907cdcabbbc] socks connection closed\n2025-07-21 18:40:38.700 [info] [command][a9e47790-c6a2-4d11-bbfd-e9cd743749ab] Socket close event received\n2025-07-21 18:40:38.724 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53112 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:41:38.702 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:41:38.704 [info] [command][d0df61f8-01b0-479f-ba54-35472ea591c8] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""d0df61f8-01b0-479f-ba54-35472ea591c8""}\n2025-07-21 18:41:38.705 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][6404e176-2147-4584-a8e2-3d27d53c7f4d] received connection request\n2025-07-21 18:41:38.705 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:41:38.730 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][6404e176-2147-4584-a8e2-3d27d53c7f4d] socks forwarding established\n2025-07-21 18:41:38.760 [info] [command][d0df61f8-01b0-479f-ba54-35472ea591c8] Process exited with code 0\n2025-07-21 18:41:38.760 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][6404e176-2147-4584-a8e2-3d27d53c7f4d] socks connection closed\n2025-07-21 18:41:38.761 [info] [command][d0df61f8-01b0-479f-ba54-35472ea591c8] Socket close event received\n2025-07-21 18:41:38.786 [info] 
(ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53152 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:42:38.762 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:42:38.764 [info] [command][40b9e2f0-f91f-4200-9440-9fba164766ff] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""40b9e2f0-f91f-4200-9440-9fba164766ff""}\n2025-07-21 18:42:38.765 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][333ad1d1-7c27-40bf-aa03-37631b9bbcf6] received connection request\n2025-07-21 18:42:38.766 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:42:38.790 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][333ad1d1-7c27-40bf-aa03-37631b9bbcf6] socks forwarding established\n2025-07-21 18:42:38.816 [info] [command][40b9e2f0-f91f-4200-9440-9fba164766ff] Process exited with code 0\n2025-07-21 18:42:38.817 [info] [command][40b9e2f0-f91f-4200-9440-9fba164766ff] Socket close event received\n2025-07-21 18:42:38.817 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][333ad1d1-7c27-40bf-aa03-37631b9bbcf6] socks connection closed\n2025-07-21 18:42:38.841 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53172 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:43:38.819 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:43:38.820 [info] [command][dafb4dcf-b0f2-4e5c-a76d-c30e25f24a73] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""dafb4dcf-b0f2-4e5c-a76d-c30e25f24a73""}\n2025-07-21 18:43:38.820 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][92bb703d-2766-4c33-a21c-bc693da4d780] received connection request\n2025-07-21 18:43:38.821 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:43:38.845 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][92bb703d-2766-4c33-a21c-bc693da4d780] socks forwarding established\n2025-07-21 18:43:38.872 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][92bb703d-2766-4c33-a21c-bc693da4d780] socks connection closed\n2025-07-21 18:43:38.872 [info] [command][dafb4dcf-b0f2-4e5c-a76d-c30e25f24a73] Process exited with code 0\n2025-07-21 18:43:38.873 [info] [command][dafb4dcf-b0f2-4e5c-a76d-c30e25f24a73] Socket close event received\n2025-07-21 18:43:38.897 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53203 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:44:38.875 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:44:38.877 [info] [command][71dd3482-49e7-49da-846d-9766566eecf8] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""71dd3482-49e7-49da-846d-9766566eecf8""}\n2025-07-21 18:44:38.878 [info] 
[forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][3fff2f14-3cb6-4e63-8414-1b5beb99a7a8] received connection request\n2025-07-21 18:44:38.878 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:44:38.905 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][3fff2f14-3cb6-4e63-8414-1b5beb99a7a8] socks forwarding established\n2025-07-21 18:44:38.931 [info] [command][71dd3482-49e7-49da-846d-9766566eecf8] Process exited with code 0\n2025-07-21 18:44:38.931 [info] [command][71dd3482-49e7-49da-846d-9766566eecf8] Socket close event received\n2025-07-21 18:44:38.932 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][3fff2f14-3cb6-4e63-8414-1b5beb99a7a8] socks connection closed\n2025-07-21 18:44:38.956 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53258 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:45:38.936 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:45:38.939 [info] [command][70185b83-a713-4562-a47d-8493090a4c45] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""70185b83-a713-4562-a47d-8493090a4c45""}\n2025-07-21 18:45:38.939 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][be3cbf05-ea69-4f0c-9e21-e08daf05486b] received connection request\n2025-07-21 18:45:38.940 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:45:38.965 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][be3cbf05-ea69-4f0c-9e21-e08daf05486b] socks forwarding established\n2025-07-21 18:45:38.991 [info] [command][70185b83-a713-4562-a47d-8493090a4c45] Process exited with code 0\n2025-07-21 18:45:38.991 [info] [command][70185b83-a713-4562-a47d-8493090a4c45] Socket close event received\n2025-07-21 18:45:38.992 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][be3cbf05-ea69-4f0c-9e21-e08daf05486b] socks connection closed\n2025-07-21 18:45:39.015 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53310 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:46:38.993 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:46:38.995 [info] [command][c2d599ab-14ef-4efa-8d86-c3cc69c12e2a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""c2d599ab-14ef-4efa-8d86-c3cc69c12e2a""}\n2025-07-21 18:46:38.995 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][90d741a1-c9c1-4bac-a2d5-2efa003afc23] received connection request\n2025-07-21 18:46:38.995 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:46:39.018 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][90d741a1-c9c1-4bac-a2d5-2efa003afc23] socks forwarding established\n2025-07-21 18:46:39.045 [info] [command][c2d599ab-14ef-4efa-8d86-c3cc69c12e2a] 
Process exited with code 0\n2025-07-21 18:46:39.045 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][90d741a1-c9c1-4bac-a2d5-2efa003afc23] socks connection closed\n2025-07-21 18:46:39.045 [info] [command][c2d599ab-14ef-4efa-8d86-c3cc69c12e2a] Socket close event received\n2025-07-21 18:46:39.070 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53349 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:47:39.050 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:47:39.053 [info] [command][e95c0b61-dfc8-4b66-b9af-7611c11cdbea] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""e95c0b61-dfc8-4b66-b9af-7611c11cdbea""}\n2025-07-21 18:47:39.054 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][0b2343e4-2ea5-47ce-a085-958c4fbefbed] received connection request\n2025-07-21 18:47:39.055 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:47:39.080 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][0b2343e4-2ea5-47ce-a085-958c4fbefbed] socks forwarding established\n2025-07-21 18:47:39.109 [info] [command][e95c0b61-dfc8-4b66-b9af-7611c11cdbea] Process exited with code 0\n2025-07-21 18:47:39.109 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][0b2343e4-2ea5-47ce-a085-958c4fbefbed] socks connection closed\n2025-07-21 18:47:39.109 [info] [command][e95c0b61-dfc8-4b66-b9af-7611c11cdbea] Socket close event received\n2025-07-21 18:47:39.133 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53370 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:48:39.112 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:48:39.114 [info] [command][b830a10d-5014-43f6-9c9d-fa77062bf689] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""b830a10d-5014-43f6-9c9d-fa77062bf689""}\n2025-07-21 18:48:39.115 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][41b5dc38-292d-45a4-8a27-0d31d019a910] received connection request\n2025-07-21 18:48:39.115 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:48:39.139 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][41b5dc38-292d-45a4-8a27-0d31d019a910] socks forwarding established\n2025-07-21 18:48:39.165 [info] [command][b830a10d-5014-43f6-9c9d-fa77062bf689] Process exited with code 0\n2025-07-21 18:48:39.166 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][41b5dc38-292d-45a4-8a27-0d31d019a910] socks connection closed\n2025-07-21 18:48:39.166 [info] [command][b830a10d-5014-43f6-9c9d-fa77062bf689] Socket close event received\n2025-07-21 18:48:39.190 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53398 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:49:39.171 [info] [remote-ssh] Pinging remote server 
on port 127.0.0.1:51859\n2025-07-21 18:49:39.173 [info] [command][bd3a5cb3-d363-4038-928d-934407ed3f3d] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""bd3a5cb3-d363-4038-928d-934407ed3f3d""}\n2025-07-21 18:49:39.174 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][c9fdb521-9160-4b9a-ba27-ae2a2fdec75d] received connection request\n2025-07-21 18:49:39.174 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:49:39.198 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][c9fdb521-9160-4b9a-ba27-ae2a2fdec75d] socks forwarding established\n2025-07-21 18:49:39.225 [info] [command][bd3a5cb3-d363-4038-928d-934407ed3f3d] Process exited with code 0\n2025-07-21 18:49:39.225 [info] [command][bd3a5cb3-d363-4038-928d-934407ed3f3d] Socket close event received\n2025-07-21 18:49:39.226 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][c9fdb521-9160-4b9a-ba27-ae2a2fdec75d] socks connection closed\n2025-07-21 18:49:39.249 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53451 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:50:39.230 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:50:39.231 [info] [command][36f5f854-d1b7-4405-9038-c3ea7382aa14] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""36f5f854-d1b7-4405-9038-c3ea7382aa14""}\n2025-07-21 18:50:39.232 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][e8e6235f-f334-44f8-8297-75e74f9bc32f] received connection request\n2025-07-21 18:50:39.232 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:50:39.260 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][e8e6235f-f334-44f8-8297-75e74f9bc32f] socks forwarding established\n2025-07-21 18:50:39.289 [info] [command][36f5f854-d1b7-4405-9038-c3ea7382aa14] Process exited with code 0\n2025-07-21 18:50:39.289 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][e8e6235f-f334-44f8-8297-75e74f9bc32f] socks connection closed\n2025-07-21 18:50:39.289 [info] [command][36f5f854-d1b7-4405-9038-c3ea7382aa14] Socket close event received\n2025-07-21 18:50:39.313 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53476 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:51:39.291 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:51:39.294 [info] [command][485b464a-fe99-45f6-b41c-26a5ddec739e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""485b464a-fe99-45f6-b41c-26a5ddec739e""}\n2025-07-21 18:51:39.295 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][382237df-93bc-443b-a282-4f623c1fdeb2] received connection request\n2025-07-21 18:51:39.297 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new 
dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:51:39.325 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][382237df-93bc-443b-a282-4f623c1fdeb2] socks forwarding established\n2025-07-21 18:51:39.352 [info] [command][485b464a-fe99-45f6-b41c-26a5ddec739e] Process exited with code 0\n2025-07-21 18:51:39.353 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][382237df-93bc-443b-a282-4f623c1fdeb2] socks connection closed\n2025-07-21 18:51:39.353 [info] [command][485b464a-fe99-45f6-b41c-26a5ddec739e] Socket close event received\n2025-07-21 18:51:39.376 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53520 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:52:39.357 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:52:39.360 [info] [command][7ff50ee7-a184-4001-8ce6-fd8db3a300b9] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""7ff50ee7-a184-4001-8ce6-fd8db3a300b9""}\n2025-07-21 18:52:39.360 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][f6a0bec5-f4b9-404a-996e-253a94dd5ec9] received connection request\n2025-07-21 18:52:39.361 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:52:39.389 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][f6a0bec5-f4b9-404a-996e-253a94dd5ec9] socks forwarding established\n2025-07-21 18:52:39.416 [info] [command][7ff50ee7-a184-4001-8ce6-fd8db3a300b9] Process exited with code 0\n2025-07-21 18:52:39.416 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][f6a0bec5-f4b9-404a-996e-253a94dd5ec9] socks connection closed\n2025-07-21 18:52:39.416 [info] [command][7ff50ee7-a184-4001-8ce6-fd8db3a300b9] Socket close event received\n2025-07-21 18:52:39.439 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53565 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:53:39.418 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:53:39.420 [info] [command][6dbb6950-f7f7-4598-ba8c-69d04e60d2ff] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""6dbb6950-f7f7-4598-ba8c-69d04e60d2ff""}\n2025-07-21 18:53:39.421 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][52470111-c3c5-41fb-9f9b-26b23d7926a9] received connection request\n2025-07-21 18:53:39.421 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:53:39.447 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][52470111-c3c5-41fb-9f9b-26b23d7926a9] socks forwarding established\n2025-07-21 18:53:39.476 [info] [command][6dbb6950-f7f7-4598-ba8c-69d04e60d2ff] Process exited with code 0\n2025-07-21 18:53:39.476 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][52470111-c3c5-41fb-9f9b-26b23d7926a9] socks connection closed\n2025-07-21 18:53:39.476 [info] 
[command][6dbb6950-f7f7-4598-ba8c-69d04e60d2ff] Socket close event received\n2025-07-21 18:53:39.501 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53588 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:54:39.481 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:54:39.484 [info] [command][5d8a8330-c6f5-4cc7-bdf5-6bd49197b655] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""5d8a8330-c6f5-4cc7-bdf5-6bd49197b655""}\n2025-07-21 18:54:39.485 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][f53c4419-c432-46ad-9df4-3fe8635361b7] received connection request\n2025-07-21 18:54:39.486 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:54:39.514 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][f53c4419-c432-46ad-9df4-3fe8635361b7] socks forwarding established\n2025-07-21 18:54:39.542 [info] [command][5d8a8330-c6f5-4cc7-bdf5-6bd49197b655] Process exited with code 0\n2025-07-21 18:54:39.543 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][f53c4419-c432-46ad-9df4-3fe8635361b7] socks connection closed\n2025-07-21 18:54:39.543 [info] [command][5d8a8330-c6f5-4cc7-bdf5-6bd49197b655] Socket close event received\n2025-07-21 18:54:39.568 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53639 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:55:39.547 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:55:39.549 [info] [command][177b9a74-f2a6-4174-b6dd-264871832962] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""177b9a74-f2a6-4174-b6dd-264871832962""}\n2025-07-21 18:55:39.549 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][e7e2ba37-e676-4b13-8d4c-a25fd2207582] received connection request\n2025-07-21 18:55:39.550 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:55:39.573 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][e7e2ba37-e676-4b13-8d4c-a25fd2207582] socks forwarding established\n2025-07-21 18:55:39.600 [info] [command][177b9a74-f2a6-4174-b6dd-264871832962] Process exited with code 0\n2025-07-21 18:55:39.600 [info] [command][177b9a74-f2a6-4174-b6dd-264871832962] Socket close event received\n2025-07-21 18:55:39.601 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][e7e2ba37-e676-4b13-8d4c-a25fd2207582] socks connection closed\n2025-07-21 18:55:39.624 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53689 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:56:39.606 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:56:39.607 [info] [command][42acc039-f772-4d7e-a7e3-23bc0096be54] Sending command request: 
{""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""42acc039-f772-4d7e-a7e3-23bc0096be54""}\n2025-07-21 18:56:39.608 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][4bc9c563-4949-4b80-b106-de8ece1f3315] received connection request\n2025-07-21 18:56:39.608 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:56:39.633 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][4bc9c563-4949-4b80-b106-de8ece1f3315] socks forwarding established\n2025-07-21 18:56:39.662 [info] [command][42acc039-f772-4d7e-a7e3-23bc0096be54] Process exited with code 0\n2025-07-21 18:56:39.662 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][4bc9c563-4949-4b80-b106-de8ece1f3315] socks connection closed\n2025-07-21 18:56:39.662 [info] [command][42acc039-f772-4d7e-a7e3-23bc0096be54] Socket close event received\n2025-07-21 18:56:39.687 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53746 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:57:39.667 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:57:39.670 [info] [command][de835a75-9ffe-4f9b-a01f-a2b8c1fa812c] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""de835a75-9ffe-4f9b-a01f-a2b8c1fa812c""}\n2025-07-21 18:57:39.670 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][91572d76-5f8d-4465-8588-9a5a8407a914] received connection request\n2025-07-21 18:57:39.671 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:57:39.695 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][91572d76-5f8d-4465-8588-9a5a8407a914] socks forwarding established\n2025-07-21 18:57:39.721 [info] [command][de835a75-9ffe-4f9b-a01f-a2b8c1fa812c] Process exited with code 0\n2025-07-21 18:57:39.722 [info] [command][de835a75-9ffe-4f9b-a01f-a2b8c1fa812c] Socket close event received\n2025-07-21 18:57:39.722 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][91572d76-5f8d-4465-8588-9a5a8407a914] socks connection closed\n2025-07-21 18:57:39.747 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53780 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:58:39.725 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:58:39.728 [info] [command][ebc9b897-db2b-4fc9-be2c-6352abf68b86] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""ebc9b897-db2b-4fc9-be2c-6352abf68b86""}\n2025-07-21 18:58:39.728 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][524c67d9-c6d3-4aa5-b882-ae7931a203c5] received connection request\n2025-07-21 18:58:39.729 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:58:39.754 [info] [forwarding][multiplex][127.0.0.1:51859 -> 
127.0.0.1:51850 -> 127.0.0.1:38723][524c67d9-c6d3-4aa5-b882-ae7931a203c5] socks forwarding established\n2025-07-21 18:58:39.783 [info] [command][ebc9b897-db2b-4fc9-be2c-6352abf68b86] Process exited with code 0\n2025-07-21 18:58:39.783 [info] [command][ebc9b897-db2b-4fc9-be2c-6352abf68b86] Socket close event received\n2025-07-21 18:58:39.784 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][524c67d9-c6d3-4aa5-b882-ae7931a203c5] socks connection closed\n2025-07-21 18:58:39.808 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53805 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 18:59:39.786 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 18:59:39.788 [info] [command][38445a8e-5d39-484c-9f01-2aa788e850a4] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""38445a8e-5d39-484c-9f01-2aa788e850a4""}\n2025-07-21 18:59:39.789 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][db8b92c3-23da-4de4-b65d-88f96a2d2295] received connection request\n2025-07-21 18:59:39.790 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 18:59:39.814 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][db8b92c3-23da-4de4-b65d-88f96a2d2295] socks forwarding established\n2025-07-21 18:59:39.844 [info] [command][38445a8e-5d39-484c-9f01-2aa788e850a4] Process exited with code 0\n2025-07-21 18:59:39.844 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][db8b92c3-23da-4de4-b65d-88f96a2d2295] socks connection closed\n2025-07-21 18:59:39.845 [info] [command][38445a8e-5d39-484c-9f01-2aa788e850a4] Socket close event received\n2025-07-21 18:59:39.869 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53859 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 19:00:39.848 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 19:00:39.849 [info] [command][f3792944-32c0-4575-a3a8-28f037d38417] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""f3792944-32c0-4575-a3a8-28f037d38417""}\n2025-07-21 19:00:39.849 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][3eb2dee4-5563-40f7-a2d4-0352b10ffcaa] received connection request\n2025-07-21 19:00:39.849 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 19:00:39.872 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][3eb2dee4-5563-40f7-a2d4-0352b10ffcaa] socks forwarding established\n2025-07-21 19:00:39.898 [info] [command][f3792944-32c0-4575-a3a8-28f037d38417] Process exited with code 0\n2025-07-21 19:00:39.898 [info] [command][f3792944-32c0-4575-a3a8-28f037d38417] Socket close event received\n2025-07-21 19:00:39.899 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][3eb2dee4-5563-40f7-a2d4-0352b10ffcaa] socks connection closed\n2025-07-21 19:00:39.921 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: 
listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53883 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 19:01:39.904 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 19:01:39.906 [info] [command][14f7ab07-1e4e-4558-a7fb-fbf138c83a78] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""14f7ab07-1e4e-4558-a7fb-fbf138c83a78""}\n2025-07-21 19:01:39.907 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][06da131f-80b9-48f2-8954-c3e012e8b05e] received connection request\n2025-07-21 19:01:39.907 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 19:01:39.931 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][06da131f-80b9-48f2-8954-c3e012e8b05e] socks forwarding established\n2025-07-21 19:01:39.958 [info] [command][14f7ab07-1e4e-4558-a7fb-fbf138c83a78] Process exited with code 0\n2025-07-21 19:01:39.958 [info] [command][14f7ab07-1e4e-4558-a7fb-fbf138c83a78] Socket close event received\n2025-07-21 19:01:39.959 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][06da131f-80b9-48f2-8954-c3e012e8b05e] socks connection closed\n2025-07-21 19:01:39.982 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53928 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 19:02:39.963 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 19:02:39.965 [info] [command][75589afd-c9e7-4e6d-9121-8364dec620fa] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""75589afd-c9e7-4e6d-9121-8364dec620fa""}\n2025-07-21 19:02:39.966 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][265ca550-d861-49f4-bd41-1f48c15cc440] received connection request\n2025-07-21 19:02:39.967 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 19:02:39.991 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][265ca550-d861-49f4-bd41-1f48c15cc440] socks forwarding established\n2025-07-21 19:02:40.020 [info] [command][75589afd-c9e7-4e6d-9121-8364dec620fa] Process exited with code 0\n2025-07-21 19:02:40.021 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][265ca550-d861-49f4-bd41-1f48c15cc440] socks connection closed\n2025-07-21 19:02:40.021 [info] [command][75589afd-c9e7-4e6d-9121-8364dec620fa] Socket close event received\n2025-07-21 19:02:40.044 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53951 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 19:03:40.026 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 19:03:40.028 [info] [command][b6a36eb4-1af1-4165-9176-cc1f6cf1d29b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""b6a36eb4-1af1-4165-9176-cc1f6cf1d29b""}\n2025-07-21 19:03:40.029 [info] [forwarding][multiplex][127.0.0.1:51859 -> 
127.0.0.1:38723][d9abf4a2-3c53-46e8-b0f5-ca16be89f368] received connection request\n2025-07-21 19:03:40.029 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 19:03:40.054 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][d9abf4a2-3c53-46e8-b0f5-ca16be89f368] socks forwarding established\n2025-07-21 19:03:40.080 [info] [command][b6a36eb4-1af1-4165-9176-cc1f6cf1d29b] Process exited with code 0\n2025-07-21 19:03:40.080 [info] [command][b6a36eb4-1af1-4165-9176-cc1f6cf1d29b] Socket close event received\n2025-07-21 19:03:40.081 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][d9abf4a2-3c53-46e8-b0f5-ca16be89f368] socks connection closed\n2025-07-21 19:03:40.105 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 53974 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 19:04:40.086 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 19:04:40.089 [info] [command][736b53ea-60f9-4229-90ef-ee23df272589] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""736b53ea-60f9-4229-90ef-ee23df272589""}\n2025-07-21 19:04:40.090 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][f2006758-535f-4a0f-8af8-f0091a0fb3ed] received connection request\n2025-07-21 19:04:40.091 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 19:04:40.113 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][f2006758-535f-4a0f-8af8-f0091a0fb3ed] socks forwarding established\n2025-07-21 19:04:40.140 [info] [command][736b53ea-60f9-4229-90ef-ee23df272589] Process exited with code 0\n2025-07-21 19:04:40.141 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][f2006758-535f-4a0f-8af8-f0091a0fb3ed] socks connection closed\n2025-07-21 19:04:40.141 [info] [command][736b53ea-60f9-4229-90ef-ee23df272589] Socket close event received\n2025-07-21 19:04:40.164 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 54027 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 19:05:40.145 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 19:05:40.148 [info] [command][0fdb1cab-659a-4e79-9d0b-d99eb982d5c1] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""0fdb1cab-659a-4e79-9d0b-d99eb982d5c1""}\n2025-07-21 19:05:40.148 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][13651fcf-b221-4778-bdb6-bfbfbc17c3ee] received connection request\n2025-07-21 19:05:40.149 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 19:05:40.173 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][13651fcf-b221-4778-bdb6-bfbfbc17c3ee] socks forwarding established\n2025-07-21 19:05:40.202 [info] [command][0fdb1cab-659a-4e79-9d0b-d99eb982d5c1] Process exited with code 0\n2025-07-21 
19:05:40.203 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][13651fcf-b221-4778-bdb6-bfbfbc17c3ee] socks connection closed\n2025-07-21 19:05:40.203 [info] [command][0fdb1cab-659a-4e79-9d0b-d99eb982d5c1] Socket close event received\n2025-07-21 19:05:40.227 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 54050 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 19:06:40.208 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 19:06:40.210 [info] [command][c3fcce5b-628c-49d9-af71-dfef8aabc381] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""c3fcce5b-628c-49d9-af71-dfef8aabc381""}\n2025-07-21 19:06:40.211 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][d8bd82d4-6e7d-4ea0-add9-ef58f61cfc40] received connection request\n2025-07-21 19:06:40.211 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 19:06:40.239 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][d8bd82d4-6e7d-4ea0-add9-ef58f61cfc40] socks forwarding established\n2025-07-21 19:06:40.265 [info] [command][c3fcce5b-628c-49d9-af71-dfef8aabc381] Process exited with code 0\n2025-07-21 19:06:40.265 [info] [command][c3fcce5b-628c-49d9-af71-dfef8aabc381] Socket close event received\n2025-07-21 19:06:40.266 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][d8bd82d4-6e7d-4ea0-add9-ef58f61cfc40] socks connection closed\n2025-07-21 19:06:40.288 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 54091 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 19:07:40.271 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 19:07:40.272 [info] [command][fadf5015-353f-4c61-9c61-1c9bb6134252] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""fadf5015-353f-4c61-9c61-1c9bb6134252""}\n2025-07-21 19:07:40.273 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][eb950c70-a18b-4173-8cc6-4648fdb73378] received connection request\n2025-07-21 19:07:40.273 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 19:07:40.298 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][eb950c70-a18b-4173-8cc6-4648fdb73378] socks forwarding established\n2025-07-21 19:07:40.327 [info] [command][fadf5015-353f-4c61-9c61-1c9bb6134252] Process exited with code 0\n2025-07-21 19:07:40.328 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][eb950c70-a18b-4173-8cc6-4648fdb73378] socks connection closed\n2025-07-21 19:07:40.328 [info] [command][fadf5015-353f-4c61-9c61-1c9bb6134252] Socket close event received\n2025-07-21 19:07:40.353 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 54119 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 19:08:40.328 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 
19:08:40.330 [info] [command][40205226-9b5f-4f66-8f71-0504bd875095] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""40205226-9b5f-4f66-8f71-0504bd875095""}\n2025-07-21 19:08:40.331 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][c5fd51b2-8936-4059-bac5-2d9ea1e227f7] received connection request\n2025-07-21 19:08:40.331 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 19:08:40.358 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][c5fd51b2-8936-4059-bac5-2d9ea1e227f7] socks forwarding established\n2025-07-21 19:08:40.388 [info] [command][40205226-9b5f-4f66-8f71-0504bd875095] Process exited with code 0\n2025-07-21 19:08:40.388 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][c5fd51b2-8936-4059-bac5-2d9ea1e227f7] socks connection closed\n2025-07-21 19:08:40.388 [info] [command][40205226-9b5f-4f66-8f71-0504bd875095] Socket close event received\n2025-07-21 19:08:40.412 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 54149 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 19:09:40.394 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 19:09:40.395 [info] [command][a4733820-5ce2-475d-bf88-599840d2e873] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""a4733820-5ce2-475d-bf88-599840d2e873""}\n2025-07-21 19:09:40.396 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][1cf8f806-7c4a-4d95-889b-86847a862b7f] received connection request\n2025-07-21 19:09:40.396 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 19:09:40.423 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][1cf8f806-7c4a-4d95-889b-86847a862b7f] socks forwarding established\n2025-07-21 19:09:40.450 [info] [command][a4733820-5ce2-475d-bf88-599840d2e873] Process exited with code 0\n2025-07-21 19:09:40.450 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][1cf8f806-7c4a-4d95-889b-86847a862b7f] socks connection closed\n2025-07-21 19:09:40.450 [info] [command][a4733820-5ce2-475d-bf88-599840d2e873] Socket close event received\n2025-07-21 19:09:40.473 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 54203 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 19:10:40.455 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 19:10:40.459 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:38723][989fe616-5467-462d-85a4-fb64b5f5568b] received connection request\n2025-07-21 19:10:40.459 [info] [command][1dcb83e9-a7d9-4ba9-82c5-934049edf46b] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""1dcb83e9-a7d9-4ba9-82c5-934049edf46b""}\n2025-07-21 19:10:40.460 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] 
(inactive timeout: 0)\n\n2025-07-21 19:10:40.484 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][989fe616-5467-462d-85a4-fb64b5f5568b] socks forwarding established\n2025-07-21 19:10:40.513 [info] [command][1dcb83e9-a7d9-4ba9-82c5-934049edf46b] Process exited with code 0\n2025-07-21 19:10:40.513 [info] [forwarding][multiplex][127.0.0.1:51859 -> 127.0.0.1:51850 -> 127.0.0.1:38723][989fe616-5467-462d-85a4-fb64b5f5568b] socks connection closed\n2025-07-21 19:10:40.513 [info] [command][1dcb83e9-a7d9-4ba9-82c5-934049edf46b] Socket close event received\n2025-07-21 19:10:40.539 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 38723, connect from 127.0.0.1 port 54231 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 19:11:26.808 [info] Resolving ssh remote authority 'login.haicore.berlin' (Unparsed 'ssh-remote+7b22686f73744e616d65223a226c6f67696e2e686169636f72652e6265726c696e227d') (attempt #2)\n2025-07-21 19:11:26.808 [info] Received re-connection request; checking to see if existing connection is still valid\n2025-07-21 19:11:26.848 [info] [forwarding][code][127.0.0.1:51858 -> 127.0.0.1:34715][271640d9-11e7-4d80-b6a9-de2c93b90aea] received connection request\n2025-07-21 19:11:26.875 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 19:11:26.944 [info] (ssh_tunnel) stderr: debug1: channel 5: free: direct-tcpip: listening port 51850 for 127.0.0.1 port 34715, connect from 127.0.0.1 port 51870 to 127.0.0.1 port 51850, nchannels 6\n\n2025-07-21 19:11:26.944 [info] [forwarding][code][127.0.0.1:51858 -> 127.0.0.1:51850 -> 127.0.0.1:34715][d2130de6-f45b-4602-a7fa-a9d9c97b22d3] socks connection closed\n2025-07-21 19:11:29.822 [error] Unexpected error while checking if existing connection is still valid Timeout while checking if existing connection is still valid\n2025-07-21 19:11:29.822 [error] Failed to connect to Cursor server at http://127.0.0.1:51858, attempt 1 of 3 This operation was aborted\n2025-07-21 19:11:29.825 [info] [forwarding][code][127.0.0.1:51858 -> 127.0.0.1:34715][e7ee0db0-e158-4154-956e-d976ebfb2aa1] received connection request\n2025-07-21 19:11:29.826 [info] (ssh_tunnel) stderr: debug1: Connection to port 51850 forwarding to socks port 0 requested.\ndebug1: channel 5: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 19:11:29.932 [info] Terminating existing SSH process with pid: 91396\n2025-07-21 19:11:29.932 [info] Using configured platform linux for remote host login.haicore.berlin\n2025-07-21 19:11:29.933 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. 
Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-EBRMZl/socket.sock\n2025-07-21 19:11:29.946 [info] (ssh_tunnel): exit: code=null signal=SIGKILL\n2025-07-21 19:11:29.947 [error] [forwarding][code][127.0.0.1:51858 -> 127.0.0.1:51850 -> 127.0.0.1:34715][271640d9-11e7-4d80-b6a9-de2c93b90aea] error while creating socks forwarding Socket closed\n2025-07-21 19:11:29.947 [error] [forwarding][code][127.0.0.1:51858 -> 127.0.0.1:51850 -> 127.0.0.1:34715][e7ee0db0-e158-4154-956e-d976ebfb2aa1] error while creating socks forwarding Socket closed\n2025-07-21 19:11:29.947 [info] [forwarding][code][127.0.0.1:51858 -> 127.0.0.1:51850 -> 127.0.0.1:34715][212940ee-8b89-4d7f-be42-7b17f458dcbf] socks connection closed\n2025-07-21 19:11:29.950 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_76170.sh"" | ssh -v -T -D 54279 login.haicore.berlin bash --login -c bash\n2025-07-21 19:11:29.950 [info] Started installation script. Waiting for it to finish...\n2025-07-21 19:11:29.950 [info] Waiting for server to install via process(93153)...\n2025-07-21 19:11:29.956 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-21 19:11:29.956 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-21 19:11:29.956 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-21 19:11:29.956 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-21 19:11:29.956 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-21 19:11:29.957 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-21 19:11:29.958 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-21 19:11:29.958 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-21 19:11:29.958 [info] Retrying connection in 5 seconds...\n2025-07-21 19:11:30.831 [error] Failed to connect to Cursor server at http://127.0.0.1:51858, attempt 2 of 3 This operation was aborted\n2025-07-21 19:11:31.836 [error] Failed to connect to Cursor server at http://127.0.0.1:51858, attempt 3 of 3 This operation was aborted\n2025-07-21 19:11:31.836 [error] Could not re-use existing SOCKS connection; attempting to re-establish SOCKS forwarding Failed to connect to Cursor code server. Ensure that your remote host ssh config has 'AllowTcpForwarding yes' in '/etc/ssh/sshd_config'. 
Please check the logs and try reinstalling the server.\n2025-07-21 19:11:31.836 [error] Could not re-establish SOCKS forwarding; re-establishing entire SSH connection Remote server is not set\n2025-07-21 19:15:33.015 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:51859\n2025-07-21 19:15:33.016 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_76170.sh\n2025-07-21 19:15:33.022 [error] [forwarding][multiplex][127.0.0.1:51859 -> unknown}][4f18d06c-e5ea-4eeb-ab90-9c9963f8e8d7] remote server not configured\n2025-07-21 19:15:33.023 [info] [command][d93cf5e1-bbb9-4b93-83fe-5a43aefda107] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""b5322cab-ea7d-4ba2-9953-741fbc1d862f"",""id"":""d93cf5e1-bbb9-4b93-83fe-5a43aefda107""}\n2025-07-21 19:15:33.024 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-EBRMZl/socket.sock\n2025-07-21 19:15:33.024 [error] [command][d93cf5e1-bbb9-4b93-83fe-5a43aefda107] Socket error: Error: read ECONNRESET\n2025-07-21 19:15:33.024 [info] [command][d93cf5e1-bbb9-4b93-83fe-5a43aefda107] Socket close event received\n2025-07-21 19:15:33.032 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_2845.sh"" | ssh -v -T -D 54283 login.haicore.berlin bash --login -c bash\n2025-07-21 19:15:33.032 [info] Started installation script. Waiting for it to finish...\n2025-07-21 19:15:33.032 [info] Waiting for server to install via process(93159)...\n2025-07-21 19:15:33.055 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-21 19:15:33.055 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-21 19:15:33.055 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-21 19:15:33.055 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-21 19:15:33.056 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-21 19:15:33.058 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-21 19:15:33.059 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-21 19:15:33.060 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-21 19:15:33.060 [info] Retrying connection in 5 seconds...\n2025-07-21 19:15:38.061 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_2845.sh\n2025-07-21 19:15:38.062 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. 
Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-EBRMZl/socket.sock\n2025-07-21 19:15:38.065 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_19206.sh"" | ssh -v -T -D 54285 login.haicore.berlin bash --login -c bash\n2025-07-21 19:15:38.065 [info] Started installation script. Waiting for it to finish...\n2025-07-21 19:15:38.065 [info] Waiting for server to install via process(93168)...\n2025-07-21 19:15:38.079 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-21 19:15:38.079 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-21 19:15:38.079 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-21 19:15:38.079 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-21 19:15:38.079 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-21 19:15:38.081 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-21 19:15:38.082 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-21 19:15:38.082 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-21 19:15:38.082 [info] Retrying connection in 5 seconds...\n2025-07-21 19:15:43.083 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_19206.sh\n2025-07-21 19:15:43.084 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-EBRMZl/socket.sock\n2025-07-21 19:15:43.088 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_29809.sh"" | ssh -v -T -D 54288 login.haicore.berlin bash --login -c bash\n2025-07-21 19:15:43.088 [info] Started installation script. 
Waiting for it to finish...\n2025-07-21 19:15:43.089 [info] Waiting for server to install via process(93176)...\n2025-07-21 19:15:43.124 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\n\n2025-07-21 19:15:43.125 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-21 19:15:43.126 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\ndebug1: Reading configuration data /etc/ssh/crypto.conf\ndebug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-21 19:15:43.132 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-21 19:15:43.135 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-21 19:15:43.135 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-21 19:15:43.135 [info] Retrying connection in 5 seconds...\n2025-07-21 19:15:48.144 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_29809.sh\n2025-07-21 19:15:48.145 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-EBRMZl/socket.sock\n2025-07-21 19:15:48.149 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_8833.sh"" | ssh -v -T -D 54290 login.haicore.berlin bash --login -c bash\n2025-07-21 19:15:48.149 [info] Started installation script. Waiting for it to finish...\n2025-07-21 19:15:48.149 [info] Waiting for server to install via process(93183)...\n2025-07-21 19:15:48.191 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\n\n2025-07-21 19:15:48.191 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-21 19:15:48.192 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\ndebug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-21 19:15:48.192 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-21 19:15:48.193 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-21 19:15:48.194 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-21 19:15:48.194 [error] Error installing server: Failed to install the Cursor Server. 
Please check the logs for more details.\n2025-07-21 19:15:48.194 [info] Retrying connection in 5 seconds...\n2025-07-21 19:15:53.208 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_8833.sh\n2025-07-21 19:15:53.218 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-EBRMZl/socket.sock\n2025-07-21 19:15:53.267 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_29959.sh"" | ssh -v -T -D 54294 login.haicore.berlin bash --login -c bash\n2025-07-21 19:15:53.268 [info] Started installation script. Waiting for it to finish...\n2025-07-21 19:15:53.268 [info] Waiting for server to install via process(93196)...\n2025-07-21 19:15:53.384 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-21 19:15:53.385 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-21 19:15:53.388 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\ndebug1: Reading configuration data /etc/ssh/crypto.conf\ndebug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-21 19:15:53.409 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-21 19:15:53.421 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-21 19:15:53.422 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-21 19:15:53.422 [info] Retrying connection in 5 seconds...\n2025-07-21 19:16:15.162 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_29959.sh\n2025-07-21 19:16:15.164 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-EBRMZl/socket.sock\n2025-07-21 19:16:15.177 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_2702.sh"" | ssh -v -T -D 54296 login.haicore.berlin bash --login -c bash\n2025-07-21 19:16:15.178 [info] Started installation script. 
Waiting for it to finish...\n2025-07-21 19:16:15.178 [info] Waiting for server to install via process(93209)...\n2025-07-21 19:16:15.216 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-21 19:16:15.216 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-21 19:16:15.216 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-21 19:16:15.216 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-21 19:16:15.216 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-21 19:16:15.217 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-21 19:16:15.218 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-21 19:16:15.218 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-21 19:16:15.218 [info] Retrying connection in 5 seconds...\n2025-07-21 19:16:20.226 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_2702.sh\n2025-07-21 19:16:20.227 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-EBRMZl/socket.sock\n2025-07-21 19:16:20.231 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_41306.sh"" | ssh -v -T -D 54298 login.haicore.berlin bash --login -c bash\n2025-07-21 19:16:20.231 [info] Started installation script. Waiting for it to finish...\n2025-07-21 19:16:20.231 [info] Waiting for server to install via process(93218)...\n2025-07-21 19:16:20.242 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-21 19:16:20.242 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-21 19:16:20.242 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-21 19:16:20.242 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-21 19:16:20.243 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-21 19:16:20.244 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-21 19:16:20.245 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-21 19:16:20.245 [error] Error installing server: Failed to install the Cursor Server. 
Please check the logs for more details.\n2025-07-21 19:16:20.245 [info] Retrying connection in 5 seconds...\n2025-07-21 19:16:25.254 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_41306.sh\n2025-07-21 19:16:25.255 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-EBRMZl/socket.sock\n2025-07-21 19:16:25.260 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_4792.sh"" | ssh -v -T -D 54300 login.haicore.berlin bash --login -c bash\n2025-07-21 19:16:25.260 [info] Started installation script. Waiting for it to finish...\n2025-07-21 19:16:25.261 [info] Waiting for server to install via process(93228)...\n2025-07-21 19:16:25.275 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\n\n2025-07-21 19:16:25.275 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-21 19:16:25.276 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-21 19:16:25.276 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-21 19:16:25.276 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-21 19:16:25.278 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-21 19:16:25.279 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-21 19:16:25.279 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-21 19:16:25.279 [info] Retrying connection in 5 seconds...\n2025-07-21 19:16:30.288 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_4792.sh\n2025-07-21 19:16:30.289 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-EBRMZl/socket.sock\n2025-07-21 19:16:30.290 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_20924.sh"" | ssh -v -T -D 54303 login.haicore.berlin bash --login -c bash\n2025-07-21 19:16:30.290 [info] Started installation script. 
Waiting for it to finish...\n2025-07-21 19:16:30.290 [info] Waiting for server to install via process(93236)...\n2025-07-21 19:16:30.297 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\n\n2025-07-21 19:16:30.297 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-21 19:16:30.297 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-21 19:16:30.297 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-21 19:16:30.297 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-21 19:16:30.298 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-21 19:16:30.299 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-21 19:16:30.299 [error] Error installing server: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-21 19:16:30.299 [error] Failed to connect after 10 attempts: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-21 19:16:30.299 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_20924.sh\n2025-07-21 19:16:30.299 [error] Error resolving SSH authority Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-21 19:29:27.687 [info] Resolving ssh remote authority 'login.haicore.berlin' (Unparsed 'ssh-remote+7b22686f73744e616d65223a226c6f67696e2e686169636f72652e6265726c696e227d') (attempt #1)\n2025-07-21 19:29:27.703 [info] SSH askpass server listening on /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-tIR4fK/socket.sock\n2025-07-21 19:29:27.704 [info] Using configured platform linux for remote host login.haicore.berlin\n2025-07-21 19:29:27.706 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-tIR4fK/socket.sock\n2025-07-21 19:29:27.708 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_82390.sh"" | ssh -v -T -D 55024 login.haicore.berlin bash --login -c bash\n2025-07-21 19:29:27.708 [info] Started installation script. 
Waiting for it to finish...\n2025-07-21 19:29:27.708 [info] Waiting for server to install via process(93437)...\n2025-07-21 19:29:27.714 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\n\n2025-07-21 19:29:27.714 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-21 19:29:27.714 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\n\n2025-07-21 19:29:27.714 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-21 19:29:27.714 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-21 19:29:27.715 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-21 19:29:27.715 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\n\n2025-07-21 19:29:27.715 [info] (ssh_tunnel) stderr: debug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-21 19:29:27.926 [info] (ssh_tunnel) stderr: debug1: Connection established.\n\n2025-07-21 19:29:27.927 [info] (ssh_tunnel) stderr: debug1: identity file /Users/franzsrambical/.ssh/id_ed25519 type 3\ndebug1: identity file /Users/franzsrambical/.ssh/id_ed25519-cert type -1\n\n2025-07-21 19:29:27.927 [info] (ssh_tunnel) stderr: debug1: Local version string SSH-2.0-OpenSSH_9.9\n\n2025-07-21 19:29:28.064 [info] (ssh_tunnel) stderr: debug1: Remote protocol version 2.0, remote software version OpenSSH_8.7\ndebug1: compat_banner: match: OpenSSH_8.7 pat OpenSSH* compat 0x04000000\ndebug1: Authenticating to login.haicore.berlin:22 as 'franz.srambical'\n\n2025-07-21 19:29:28.065 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /Users/franzsrambical/.ssh/known_hosts2: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory\n\n2025-07-21 19:29:28.065 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory\ndebug1: SSH2_MSG_KEXINIT sent\n\n2025-07-21 19:29:28.170 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEXINIT received\ndebug1: kex: algorithm: ecdh-sha2-nistp256\ndebug1: kex: host key algorithm: ssh-ed25519\ndebug1: kex: server->client cipher: aes128-gcm@openssh.com MAC: compression: none\ndebug1: kex: client->server cipher: aes128-gcm@openssh.com MAC: compression: none\n\n2025-07-21 19:29:28.171 [info] (ssh_tunnel) stderr: debug1: expecting SSH2_MSG_KEX_ECDH_REPLY\n\n2025-07-21 19:29:28.289 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEX_ECDH_REPLY received\ndebug1: Server host key: ssh-ed25519 SHA256:3/BGZ1UNXR9SufKdsZVtx4Yd+kZTnZzSvRH0l6rtbvo\n\n2025-07-21 19:29:28.290 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /Users/franzsrambical/.ssh/known_hosts2: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory\n\n2025-07-21 19:29:28.290 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory\ndebug1: Host 'login.haicore.berlin' is known and matches the ED25519 host key.\ndebug1: Found key in /Users/franzsrambical/.ssh/known_hosts:17\n\n2025-07-21 19:29:28.293 [info] (ssh_tunnel) stderr: debug1: ssh_packet_send2_wrapped: resetting send seqnr 3\ndebug1: rekey out after 4294967296 blocks\ndebug1: SSH2_MSG_NEWKEYS sent\ndebug1: expecting 
SSH2_MSG_NEWKEYS\n\n2025-07-21 19:29:28.293 [info] (ssh_tunnel) stderr: debug1: ssh_packet_read_poll2: resetting read seqnr 3\ndebug1: SSH2_MSG_NEWKEYS received\ndebug1: rekey in after 4294967296 blocks\n\n2025-07-21 19:29:28.293 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_EXT_INFO received\ndebug1: kex_ext_info_client_parse: server-sig-algs=\n\n2025-07-21 19:29:28.552 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_SERVICE_ACCEPT received\n\n2025-07-21 19:29:28.655 [info] (ssh_tunnel) stderr: debug1: Authentications that can continue: publickey,gssapi-keyex,gssapi-with-mic,password,keyboard-interactive\ndebug1: Next authentication method: publickey\n\n2025-07-21 19:29:28.660 [info] (ssh_tunnel) stderr: debug1: get_agent_identities: bound agent to hostkey\n\n2025-07-21 19:29:28.660 [info] (ssh_tunnel) stderr: debug1: get_agent_identities: ssh_fetch_identitylist: agent contains no identities\ndebug1: Will attempt key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\ndebug1: Offering public key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\n\n2025-07-21 19:29:28.788 [info] (ssh_tunnel) stderr: debug1: Server accepts key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\n\n2025-07-21 19:29:28.911 [info] (ssh_tunnel) stderr: Authenticated to login.haicore.berlin ([141.80.150.4]:22) using ""publickey"".\ndebug1: Local connections to LOCALHOST:55024 forwarded to remote address socks:0\n\n2025-07-21 19:29:28.911 [info] (ssh_tunnel) stderr: debug1: Local forwarding listening on ::1 port 55024.\ndebug1: channel 0: new port-listener [port listener] (inactive timeout: 0)\ndebug1: Local forwarding listening on 127.0.0.1 port 55024.\ndebug1: channel 1: new port-listener [port listener] (inactive timeout: 0)\n\n2025-07-21 19:29:28.911 [info] (ssh_tunnel) stderr: debug1: channel 2: new session [client-session] (inactive timeout: 0)\ndebug1: Requesting no-more-sessions@openssh.com\n\n2025-07-21 19:29:28.911 [info] (ssh_tunnel) stderr: debug1: Entering interactive session.\ndebug1: pledge: filesystem\n\n2025-07-21 19:29:29.091 [info] (ssh_tunnel) stderr: debug1: client_input_global_request: rtype hostkeys-00@openssh.com want_reply 0\n\n2025-07-21 19:29:29.092 [info] (ssh_tunnel) stderr: debug1: client_input_hostkeys: searching /Users/franzsrambical/.ssh/known_hosts for login.haicore.berlin / (none)\n\n2025-07-21 19:29:29.095 [info] (ssh_tunnel) stderr: debug1: client_input_hostkeys: searching /Users/franzsrambical/.ssh/known_hosts2 for login.haicore.berlin / (none)\ndebug1: client_input_hostkeys: hostkeys file /Users/franzsrambical/.ssh/known_hosts2 does not exist\ndebug1: client_input_hostkeys: no new or deprecated keys from server\ndebug1: Remote: /home/franz.srambical/.ssh/authorized_keys:1: key options: agent-forwarding port-forwarding pty user-rc x11-forwarding\n\n2025-07-21 19:29:29.194 [info] (ssh_tunnel) stderr: debug1: Remote: /home/franz.srambical/.ssh/authorized_keys:1: key options: agent-forwarding port-forwarding pty user-rc x11-forwarding\ndebug1: Sending environment.\ndebug1: Sending command: bash --login -c bash\ndebug1: pledge: network\n\n2025-07-21 19:29:29.661 [info] (ssh_tunnel) stdout: Using TMP_DIR: /run/user/961800067\n\n2025-07-21 19:29:29.700 [info] (ssh_tunnel) stdout: Locking /run/user/961800067/cursor-remote-lock.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-21 19:29:29.711 [info] (ssh_tunnel) stdout: Server 
script already installed in /home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/bin/cursor-server\nChecking node executable\n\n2025-07-21 19:29:29.715 [info] (ssh_tunnel) stdout: v20.18.2\n\n2025-07-21 19:29:29.718 [info] (ssh_tunnel) stdout: Checking for running multiplex server: /home/franz.srambical/.cursor-server/bin/multiplex-server/45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15.js\n\n2025-07-21 19:29:29.736 [info] (ssh_tunnel) stdout: Running multiplex server: \n\n2025-07-21 19:29:29.740 [info] (ssh_tunnel) stdout: Creating multiplex server token file /run/user/961800067/cursor-remote-multiplex.token.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\n\n2025-07-21 19:29:29.740 [info] (ssh_tunnel) stdout: Creating directory for multiplex server: /home/franz.srambical/.cursor-server/bin/multiplex-server\n\n2025-07-21 19:29:29.747 [info] (ssh_tunnel) stdout: Writing multiplex server script to /home/franz.srambical/.cursor-server/bin/multiplex-server/45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15.js\n\n2025-07-21 19:29:29.766 [info] (ssh_tunnel) stdout: Starting multiplex server: /home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/node /home/franz.srambical/.cursor-server/bin/multiplex-server/45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15.js 07bc5132-c661-4376-9ce0-90133aaa97fa\nMultiplex server started with PID 3568075 and wrote pid to file /run/user/961800067/cursor-remote-multiplex.pid.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\nReading multiplex server token file /run/user/961800067/cursor-remote-multiplex.token.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\nMultiplex server token file found\nReading multiplex server log file /run/user/961800067/cursor-remote-multiplex.log.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\n\n2025-07-21 19:29:30.279 [info] (ssh_tunnel) stdout: Checking for code servers\n\n2025-07-21 19:29:30.318 [info] (ssh_tunnel) stdout: Code server script is not running\nCreating code server token file /run/user/961800067/cursor-remote-code.token.c403edc4db82e26fa41a0903d75ac6d0\nStarting code server script /home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/bin/cursor-server --start-server --host=127.0.0.1 --port 0 --connection-token-file /run/user/961800067/cursor-remote-code.token.c403edc4db82e26fa41a0903d75ac6d0 --telemetry-level off --enable-remote-auto-shutdown --accept-server-license-terms &> /run/user/961800067/cursor-remote-code.log.c403edc4db82e26fa41a0903d75ac6d0 &\nCode server started with PID 3568099 and wrote pid to file /run/user/961800067/cursor-remote-code.pid.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-21 19:29:30.323 [info] (ssh_tunnel) stdout: Code server log file is /run/user/961800067/cursor-remote-code.log.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-21 19:29:30.806 [info] (ssh_tunnel) stdout: 83d89d581064c1883974e752: 
start\nexitCode==0==\nnodeExecutable==/home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/node==\nerrorMessage====\nisFatalError==false==\nmultiplexListeningOn==42953==\nmultiplexConnectionToken==07bc5132-c661-4376-9ce0-90133aaa97fa==\ncodeListeningOn==33437==\ncodeConnectionToken==c9e111c6-ce55-491e-bda3-4c28c96cbecd==\ndetectedPlatform==linux==\narch==x64==\nSSH_AUTH_SOCK====\n83d89d581064c1883974e752: end\n\n2025-07-21 19:29:30.808 [info] Server install command exit code: 0\n2025-07-21 19:29:30.808 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_82390.sh\n2025-07-21 19:29:30.810 [info] [forwarding][code] creating new forwarding server\n2025-07-21 19:29:30.810 [info] [forwarding][code] server listening on 127.0.0.1:55038\n2025-07-21 19:29:30.810 [info] [forwarding][code] Set up server\n2025-07-21 19:29:30.810 [info] [remote-ssh] codeListeningOn (remote=[object Object]; local=[object Object]) codeConnectionToken: c9e111c6-ce55-491e-bda3-4c28c96cbecd\n2025-07-21 19:29:30.810 [info] [forwarding][multiplex] creating new forwarding server\n2025-07-21 19:29:30.810 [info] [forwarding][multiplex] server listening on 127.0.0.1:55039\n2025-07-21 19:29:30.811 [info] [forwarding][multiplex] Set up server\n2025-07-21 19:29:30.812 [info] [remote-ssh] multiplexListeningOn (remote=[object Object]; local=[object Object]) multiplexConnectionToken: 07bc5132-c661-4376-9ce0-90133aaa97fa\n2025-07-21 19:29:30.812 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:55039\n2025-07-21 19:29:30.816 [info] (ssh_tunnel) stdout: Unlocking /run/user/961800067/cursor-remote-lock.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-21 19:29:30.817 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:42953][d373bf2f-497c-4f3e-bc49-e78a7b75401a] received connection request\n2025-07-21 19:29:30.818 [info] [command][77587c05-0678-45b4-b7bd-f7c3461e8418] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""07bc5132-c661-4376-9ce0-90133aaa97fa"",""id"":""77587c05-0678-45b4-b7bd-f7c3461e8418""}\n2025-07-21 19:29:30.819 [info] (ssh_tunnel) stderr: debug1: Connection to port 55024 forwarding to socks port 0 requested.\ndebug1: channel 3: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 19:29:30.841 [info] [forwarding][code][127.0.0.1:55038 -> 127.0.0.1:33437][4fb64fb2-5382-4d91-a417-ee831511dac5] received connection request\n2025-07-21 19:29:30.860 [info] (ssh_tunnel) stderr: debug1: Connection to port 55024 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 19:29:30.929 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:55024 -> 127.0.0.1:42953][d373bf2f-497c-4f3e-bc49-e78a7b75401a] socks forwarding established\n2025-07-21 19:29:30.993 [info] [forwarding][code][127.0.0.1:55038 -> 127.0.0.1:55024 -> 127.0.0.1:33437][4fb64fb2-5382-4d91-a417-ee831511dac5] socks forwarding established\n2025-07-21 19:29:31.055 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:55024 -> 127.0.0.1:42953][d373bf2f-497c-4f3e-bc49-e78a7b75401a] socks connection closed\n2025-07-21 19:29:31.055 [info] [command][77587c05-0678-45b4-b7bd-f7c3461e8418] Process exited with code 0\n2025-07-21 19:29:31.056 [info] [command][77587c05-0678-45b4-b7bd-f7c3461e8418] Socket close event received\n2025-07-21 19:29:31.110 [info] Successfully connected to Cursor server at http://127.0.0.1:55038/version\n2025-07-21 19:29:31.110 [info] 
[execServer][spawn] command: echo, args: 1, options: {}\n2025-07-21 19:29:31.110 [info] [command][7b118bc6-859c-475c-b902-50c31a108e96] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""07bc5132-c661-4376-9ce0-90133aaa97fa"",""id"":""7b118bc6-859c-475c-b902-50c31a108e96""}\n2025-07-21 19:29:31.111 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:42953][c0699cd7-0d88-48d5-875e-90ff7e520ffb] received connection request\n2025-07-21 19:29:31.111 [info] (ssh_tunnel) stderr: debug1: Connection to port 55024 forwarding to socks port 0 requested.\ndebug1: channel 5: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 19:29:31.175 [info] (ssh_tunnel) stderr: debug1: channel 3: free: direct-tcpip: listening port 55024 for 127.0.0.1 port 42953, connect from 127.0.0.1 port 55041 to 127.0.0.1 port 55024, nchannels 6\n\n2025-07-21 19:29:31.207 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:55024 -> 127.0.0.1:42953][c0699cd7-0d88-48d5-875e-90ff7e520ffb] socks forwarding established\n2025-07-21 19:29:31.337 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:55024 -> 127.0.0.1:42953][c0699cd7-0d88-48d5-875e-90ff7e520ffb] socks connection closed\n2025-07-21 19:29:31.337 [info] [command][7b118bc6-859c-475c-b902-50c31a108e96] Process exited with code 0\n2025-07-21 19:29:31.337 [info] Successfully ran 'echo 1' against the multiplex server\n2025-07-21 19:29:31.338 [info] [remote-ssh] Resolved exec server. Socks port: 55024\n2025-07-21 19:29:31.338 [info] [remote-ssh] Resolved authority: {""host"":""127.0.0.1"",""port"":55038,""connectionToken"":""c9e111c6-ce55-491e-bda3-4c28c96cbecd"",""extensionHostEnv"":{}}. Socks port: 55024\n2025-07-21 19:29:31.338 [info] [command][7b118bc6-859c-475c-b902-50c31a108e96] Socket close event received\n2025-07-21 19:29:31.360 [info] [forwarding][code][127.0.0.1:55038 -> 127.0.0.1:33437][75c7eafc-7840-4024-91e3-0cbe7eb8cf5e] received connection request\n2025-07-21 19:29:31.360 [info] (ssh_tunnel) stderr: debug1: Connection to port 55024 forwarding to socks port 0 requested.\n\n2025-07-21 19:29:31.361 [info] (ssh_tunnel) stderr: debug1: channel 3: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 19:29:31.478 [info] (ssh_tunnel) stderr: debug1: channel 5: free: direct-tcpip: listening port 55024 for 127.0.0.1 port 42953, connect from 127.0.0.1 port 55045 to 127.0.0.1 port 55024, nchannels 6\n\n2025-07-21 19:29:31.478 [info] [forwarding][code][127.0.0.1:55038 -> 127.0.0.1:55024 -> 127.0.0.1:33437][75c7eafc-7840-4024-91e3-0cbe7eb8cf5e] socks forwarding established\n2025-07-21 19:29:31.596 [info] [forwarding][code][127.0.0.1:55038 -> 127.0.0.1:33437][f2f510c1-3b57-492b-9a11-93c99a27dfff] received connection request\n2025-07-21 19:29:31.597 [info] (ssh_tunnel) stderr: debug1: Connection to port 55024 forwarding to socks port 0 requested.\ndebug1: channel 5: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 19:29:31.695 [info] [forwarding][code][127.0.0.1:55038 -> 127.0.0.1:55024 -> 127.0.0.1:33437][f2f510c1-3b57-492b-9a11-93c99a27dfff] socks forwarding established\n2025-07-21 19:29:32.128 [info] Saved platform linux for remote host login.haicore.berlin\n2025-07-21 19:29:34.306 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 55024 for 127.0.0.1 port 33437, connect from 127.0.0.1 port 55043 to 127.0.0.1 port 55024, nchannels 6\n\n2025-07-21 19:29:34.306 [info] [forwarding][code][127.0.0.1:55038 -> 127.0.0.1:55024 -> 
127.0.0.1:33437][4fb64fb2-5382-4d91-a417-ee831511dac5] socks connection closed\n[... repeated remote-ssh keepalive cycles omitted: roughly once per minute from 19:30:31 through 20:15:10 the client pings the multiplex server on 127.0.0.1:55039, sends an ""echo 1"" command request over the SOCKS forward (127.0.0.1:55039 -> 127.0.0.1:55024 -> 127.0.0.1:42953), the command exits with code 0, and the connection is closed; the corresponding ssh_tunnel debug1 channel open/free entries repeat identically ...]\n2025-07-21 20:15:10.214 [info] 
(ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 55024 for 127.0.0.1 port 42953, connect from 127.0.0.1 port 57822 to 127.0.0.1 port 55024, nchannels 6\n\n2025-07-21 20:16:10.123 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:55039\n2025-07-21 20:16:10.126 [info] [command][f7b87f50-e263-4547-a125-376377aa65d6] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""07bc5132-c661-4376-9ce0-90133aaa97fa"",""id"":""f7b87f50-e263-4547-a125-376377aa65d6""}\n2025-07-21 20:16:10.126 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:42953][2db85107-8ca2-4602-a2e9-df8fbce1a32f] received connection request\n2025-07-21 20:16:10.127 [info] (ssh_tunnel) stderr: debug1: Connection to port 55024 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 20:16:10.224 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:55024 -> 127.0.0.1:42953][2db85107-8ca2-4602-a2e9-df8fbce1a32f] socks forwarding established\n2025-07-21 20:16:10.339 [info] [command][f7b87f50-e263-4547-a125-376377aa65d6] Process exited with code 0\n2025-07-21 20:16:10.340 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:55024 -> 127.0.0.1:42953][2db85107-8ca2-4602-a2e9-df8fbce1a32f] socks connection closed\n2025-07-21 20:16:10.340 [info] [command][f7b87f50-e263-4547-a125-376377aa65d6] Socket close event received\n2025-07-21 20:16:10.440 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 55024 for 127.0.0.1 port 42953, connect from 127.0.0.1 port 57870 to 127.0.0.1 port 55024, nchannels 6\n\n2025-07-21 20:17:10.347 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:55039\n2025-07-21 20:17:10.349 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:42953][e54540b8-cc52-471b-8c55-d7b7b224c91f] received connection request\n2025-07-21 20:17:10.350 [info] [command][360c40df-2ff0-4255-b1c7-cce9eccf7b06] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""07bc5132-c661-4376-9ce0-90133aaa97fa"",""id"":""360c40df-2ff0-4255-b1c7-cce9eccf7b06""}\n2025-07-21 20:17:10.350 [info] (ssh_tunnel) stderr: debug1: Connection to port 55024 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 20:17:10.454 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:55024 -> 127.0.0.1:42953][e54540b8-cc52-471b-8c55-d7b7b224c91f] socks forwarding established\n2025-07-21 20:17:10.550 [info] [command][360c40df-2ff0-4255-b1c7-cce9eccf7b06] Process exited with code 0\n2025-07-21 20:17:10.550 [info] [command][360c40df-2ff0-4255-b1c7-cce9eccf7b06] Socket close event received\n2025-07-21 20:17:10.550 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:55024 -> 127.0.0.1:42953][e54540b8-cc52-471b-8c55-d7b7b224c91f] socks connection closed\n2025-07-21 20:17:10.647 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 55024 for 127.0.0.1 port 42953, connect from 127.0.0.1 port 57887 to 127.0.0.1 port 55024, nchannels 6\n\n2025-07-21 20:18:10.556 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:55039\n2025-07-21 20:18:10.558 [info] [command][49e808bd-06c6-4069-b166-65b989a0f592] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""07bc5132-c661-4376-9ce0-90133aaa97fa"",""id"":""49e808bd-06c6-4069-b166-65b989a0f592""}\n2025-07-21 20:18:10.559 [info] 
[forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:42953][6845df49-ad8d-4bdb-aa2b-8202d140b039] received connection request\n2025-07-21 20:18:10.559 [info] (ssh_tunnel) stderr: debug1: Connection to port 55024 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 20:18:10.652 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:55024 -> 127.0.0.1:42953][6845df49-ad8d-4bdb-aa2b-8202d140b039] socks forwarding established\n2025-07-21 20:18:10.769 [info] [command][49e808bd-06c6-4069-b166-65b989a0f592] Process exited with code 0\n2025-07-21 20:18:10.769 [info] [command][49e808bd-06c6-4069-b166-65b989a0f592] Socket close event received\n2025-07-21 20:18:10.770 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:55024 -> 127.0.0.1:42953][6845df49-ad8d-4bdb-aa2b-8202d140b039] socks connection closed\n2025-07-21 20:18:10.879 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 55024 for 127.0.0.1 port 42953, connect from 127.0.0.1 port 57912 to 127.0.0.1 port 55024, nchannels 6\n\n2025-07-21 20:19:10.769 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:55039\n2025-07-21 20:19:10.771 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:42953][8f3d3a48-9680-40dd-911e-3bbcbf9d4573] received connection request\n2025-07-21 20:19:10.771 [info] [command][212dc51e-1eb7-403a-9e87-780c7bde1950] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""07bc5132-c661-4376-9ce0-90133aaa97fa"",""id"":""212dc51e-1eb7-403a-9e87-780c7bde1950""}\n2025-07-21 20:19:10.771 [info] (ssh_tunnel) stderr: debug1: Connection to port 55024 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 20:19:10.893 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:55024 -> 127.0.0.1:42953][8f3d3a48-9680-40dd-911e-3bbcbf9d4573] socks forwarding established\n2025-07-21 20:19:11.011 [info] [command][212dc51e-1eb7-403a-9e87-780c7bde1950] Process exited with code 0\n2025-07-21 20:19:11.011 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:55024 -> 127.0.0.1:42953][8f3d3a48-9680-40dd-911e-3bbcbf9d4573] socks connection closed\n2025-07-21 20:19:11.011 [info] [command][212dc51e-1eb7-403a-9e87-780c7bde1950] Socket close event received\n2025-07-21 20:19:11.130 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 55024 for 127.0.0.1 port 42953, connect from 127.0.0.1 port 57963 to 127.0.0.1 port 55024, nchannels 6\n\n2025-07-21 20:20:11.012 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:55039\n2025-07-21 20:20:11.014 [info] [command][2304a4bd-24a2-4f9d-939e-0e230247386a] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""07bc5132-c661-4376-9ce0-90133aaa97fa"",""id"":""2304a4bd-24a2-4f9d-939e-0e230247386a""}\n2025-07-21 20:20:11.014 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:42953][d4590bd1-c507-4d81-8d09-3c9de813f0cb] received connection request\n2025-07-21 20:20:11.014 [info] (ssh_tunnel) stderr: debug1: Connection to port 55024 forwarding to socks port 0 requested.\n\n2025-07-21 20:20:11.014 [info] (ssh_tunnel) stderr: debug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 20:20:11.103 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:55024 -> 127.0.0.1:42953][d4590bd1-c507-4d81-8d09-3c9de813f0cb] socks forwarding established\n2025-07-21 20:20:11.249 
[info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:55024 -> 127.0.0.1:42953][d4590bd1-c507-4d81-8d09-3c9de813f0cb] socks connection closed\n2025-07-21 20:20:11.249 [info] [command][2304a4bd-24a2-4f9d-939e-0e230247386a] Process exited with code 0\n2025-07-21 20:20:11.249 [info] [command][2304a4bd-24a2-4f9d-939e-0e230247386a] Socket close event received\n2025-07-21 20:20:11.341 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 55024 for 127.0.0.1 port 42953, connect from 127.0.0.1 port 58022 to 127.0.0.1 port 55024, nchannels 6\n\n2025-07-21 20:21:11.251 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:55039\n2025-07-21 20:21:11.253 [info] [command][0e38ede0-2b08-4afe-8fda-8fb522528b67] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""07bc5132-c661-4376-9ce0-90133aaa97fa"",""id"":""0e38ede0-2b08-4afe-8fda-8fb522528b67""}\n2025-07-21 20:21:11.254 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:42953][f8063f8a-db57-4bdd-8a45-497c0c2cf1bb] received connection request\n2025-07-21 20:21:11.254 [info] (ssh_tunnel) stderr: debug1: Connection to port 55024 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 20:21:11.342 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:55024 -> 127.0.0.1:42953][f8063f8a-db57-4bdd-8a45-497c0c2cf1bb] socks forwarding established\n2025-07-21 20:21:11.435 [info] [command][0e38ede0-2b08-4afe-8fda-8fb522528b67] Process exited with code 0\n2025-07-21 20:21:11.435 [info] [forwarding][multiplex][127.0.0.1:55039 -> 127.0.0.1:55024 -> 127.0.0.1:42953][f8063f8a-db57-4bdd-8a45-497c0c2cf1bb] socks connection closed\n2025-07-21 20:21:11.435 [info] [command][0e38ede0-2b08-4afe-8fda-8fb522528b67] Socket close event received\n2025-07-21 20:21:11.524 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 55024 for 127.0.0.1 port 42953, connect from 127.0.0.1 port 58056 to 127.0.0.1 port 55024, nchannels 6\n\n2025-07-21 20:21:35.221 [info] [forwarding][code][127.0.0.1:55038 -> 127.0.0.1:33437][e9c14141-d65e-4a92-9b97-f8715bbedf72] received connection request\n2025-07-21 20:21:35.221 [info] (ssh_tunnel) stderr: debug1: Connection to port 55024 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 20:21:35.223 [info] Resolving ssh remote authority 'login.haicore.berlin' (Unparsed 'ssh-remote+7b22686f73744e616d65223a226c6f67696e2e686169636f72652e6265726c696e227d') (attempt #2)\n2025-07-21 20:21:35.223 [info] Received re-connection request; checking to see if existing connection is still valid\n2025-07-21 20:21:35.232 [info] [forwarding][code][127.0.0.1:55038 -> 127.0.0.1:33437][2a96a86f-de78-4dfa-8337-ab3f112f2072] received connection request\n2025-07-21 20:21:35.236 [info] (ssh_tunnel) stderr: debug1: Connection to port 55024 forwarding to socks port 0 requested.\ndebug1: channel 6: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 20:21:38.229 [error] Unexpected error while checking if existing connection is still valid Timeout while checking if existing connection is still valid\n2025-07-21 20:21:38.229 [error] Failed to connect to Cursor server at http://127.0.0.1:55038, attempt 1 of 3 This operation was aborted\n2025-07-21 20:21:38.230 [info] [forwarding][code][127.0.0.1:55038 -> 127.0.0.1:33437][d2b45ca6-99cd-4d3c-84cb-ea5652d1ceea] received connection request\n2025-07-21 
20:21:38.230 [info] (ssh_tunnel) stderr: debug1: Connection to port 55024 forwarding to socks port 0 requested.\ndebug1: channel 7: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 20:21:38.306 [info] Terminating existing SSH process with pid: 93437\n2025-07-21 20:21:38.306 [info] Using configured platform linux for remote host login.haicore.berlin\n2025-07-21 20:21:38.306 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-tIR4fK/socket.sock\n2025-07-21 20:21:38.307 [info] (ssh_tunnel): exit: code=null signal=SIGKILL\n2025-07-21 20:21:38.307 [error] [forwarding][code][127.0.0.1:55038 -> 127.0.0.1:55024 -> 127.0.0.1:33437][e9c14141-d65e-4a92-9b97-f8715bbedf72] error while creating socks forwarding Socket closed\n2025-07-21 20:21:38.307 [error] [forwarding][code][127.0.0.1:55038 -> 127.0.0.1:55024 -> 127.0.0.1:33437][2a96a86f-de78-4dfa-8337-ab3f112f2072] error while creating socks forwarding Socket closed\n2025-07-21 20:21:38.307 [error] [forwarding][code][127.0.0.1:55038 -> 127.0.0.1:55024 -> 127.0.0.1:33437][d2b45ca6-99cd-4d3c-84cb-ea5652d1ceea] error while creating socks forwarding Socket closed\n2025-07-21 20:21:38.307 [info] [forwarding][code][127.0.0.1:55038 -> 127.0.0.1:55024 -> 127.0.0.1:33437][75c7eafc-7840-4024-91e3-0cbe7eb8cf5e] socks connection closed\n2025-07-21 20:21:38.307 [info] [forwarding][code][127.0.0.1:55038 -> 127.0.0.1:55024 -> 127.0.0.1:33437][f2f510c1-3b57-492b-9a11-93c99a27dfff] socks connection closed\n2025-07-21 20:21:38.309 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_19145.sh"" | ssh -v -T -D 58074 login.haicore.berlin bash --login -c bash\n2025-07-21 20:21:38.310 [info] Started installation script. Waiting for it to finish...\n2025-07-21 20:21:38.310 [info] Waiting for server to install via process(94940)...\n2025-07-21 20:21:38.332 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\n\n2025-07-21 20:21:38.332 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-21 20:21:38.333 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-21 20:21:38.333 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-21 20:21:38.334 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-21 20:21:38.335 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-21 20:21:38.336 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-21 20:21:38.336 [error] Error installing server: Failed to install the Cursor Server. 
Please check the logs for more details.\n2025-07-21 20:21:38.336 [info] Retrying connection in 5 seconds...\n2025-07-21 20:21:39.239 [error] Failed to connect to Cursor server at http://127.0.0.1:55038, attempt 2 of 3 This operation was aborted\n2025-07-21 20:21:40.250 [error] Failed to connect to Cursor server at http://127.0.0.1:55038, attempt 3 of 3 This operation was aborted\n2025-07-21 20:21:40.250 [error] Could not re-use existing SOCKS connection; attempting to re-establish SOCKS forwarding Failed to connect to Cursor code server. Ensure that your remote host ssh config has 'AllowTcpForwarding yes' in '/etc/ssh/sshd_config'. Please check the logs and try reinstalling the server.\n2025-07-21 20:21:40.250 [error] Could not re-establish SOCKS forwarding; re-establishing entire SSH connection Remote server is not set\n2025-07-21 20:37:22.654 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_19145.sh\n2025-07-21 20:37:22.655 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:55039\n2025-07-21 20:37:22.656 [error] [forwarding][multiplex][127.0.0.1:55039 -> unknown}][b2ab37d0-fb38-4fb9-afc9-1f6d4e2abad4] remote server not configured\n2025-07-21 20:37:22.656 [info] [command][76daa7dc-5d1d-449a-b91d-86a0331d71d4] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""07bc5132-c661-4376-9ce0-90133aaa97fa"",""id"":""76daa7dc-5d1d-449a-b91d-86a0331d71d4""}\n2025-07-21 20:37:22.656 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-tIR4fK/socket.sock\n2025-07-21 20:37:22.657 [error] [command][76daa7dc-5d1d-449a-b91d-86a0331d71d4] Socket error: Error: read ECONNRESET\n2025-07-21 20:37:22.657 [info] [command][76daa7dc-5d1d-449a-b91d-86a0331d71d4] Socket close event received\n2025-07-21 20:37:22.727 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_99084.sh"" | ssh -v -T -D 58076 login.haicore.berlin bash --login -c bash\n2025-07-21 20:37:22.733 [info] Started installation script. Waiting for it to finish...\n2025-07-21 20:37:22.733 [info] Waiting for server to install via process(94948)...\n2025-07-21 20:37:22.738 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-21 20:37:22.741 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\ndebug1: Reading configuration data /etc/ssh/crypto.conf\ndebug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-21 20:37:22.745 [info] (ssh_tunnel) stderr: ssh: connect to host login.haicore.berlin port 22: Undefined error: 0\n\n2025-07-21 20:37:22.745 [info] (ssh_tunnel): exit: code=255 signal=null\n2025-07-21 20:37:22.746 [error] Error installing server: Failed to install the Cursor Server. 
Please check the logs for more details.\n2025-07-21 20:37:22.746 [error] Failed to connect after 2 attempts: Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-21 20:37:22.746 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_99084.sh\n2025-07-21 20:37:22.746 [error] Error resolving SSH authority Failed to install the Cursor Server. Please check the logs for more details.\n2025-07-21 22:03:24.352 [info] Resolving ssh remote authority 'login.haicore.berlin' (Unparsed 'ssh-remote+7b22686f73744e616d65223a226c6f67696e2e686169636f72652e6265726c696e227d') (attempt #1)\n2025-07-21 22:03:24.365 [info] SSH askpass server listening on /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-5fNZW2/socket.sock\n2025-07-21 22:03:24.366 [info] Using configured platform linux for remote host login.haicore.berlin\n2025-07-21 22:03:24.368 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-5fNZW2/socket.sock\n2025-07-21 22:03:24.370 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_76620.sh"" | ssh -v -T -D 58451 login.haicore.berlin bash --login -c bash\n2025-07-21 22:03:24.370 [info] Started installation script. Waiting for it to finish...\n2025-07-21 22:03:24.370 [info] Waiting for server to install via process(95154)...\n2025-07-21 22:03:24.376 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\n\n2025-07-21 22:03:24.376 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /Users/franzsrambical/.ssh/config\ndebug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-21 22:03:24.377 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-21 22:03:24.377 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-21 22:03:24.377 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-21 22:03:24.511 [info] (ssh_tunnel) stderr: debug1: Connection established.\n\n2025-07-21 22:03:24.511 [info] (ssh_tunnel) stderr: debug1: identity file /Users/franzsrambical/.ssh/id_ed25519 type 3\ndebug1: identity file /Users/franzsrambical/.ssh/id_ed25519-cert type -1\n\n2025-07-21 22:03:24.511 [info] (ssh_tunnel) stderr: debug1: Local version string SSH-2.0-OpenSSH_9.9\n\n2025-07-21 22:03:24.561 [info] (ssh_tunnel) stderr: debug1: Remote protocol version 2.0, remote software version OpenSSH_8.7\ndebug1: compat_banner: match: OpenSSH_8.7 pat OpenSSH* compat 0x04000000\ndebug1: Authenticating to login.haicore.berlin:22 as 'franz.srambical'\n\n2025-07-21 22:03:24.562 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /Users/franzsrambical/.ssh/known_hosts2: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory\n\n2025-07-21 22:03:24.562 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or 
directory\ndebug1: SSH2_MSG_KEXINIT sent\n\n2025-07-21 22:03:24.603 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEXINIT received\ndebug1: kex: algorithm: ecdh-sha2-nistp256\n\n2025-07-21 22:03:24.603 [info] (ssh_tunnel) stderr: debug1: kex: host key algorithm: ssh-ed25519\ndebug1: kex: server->client cipher: aes128-gcm@openssh.com MAC: compression: none\ndebug1: kex: client->server cipher: aes128-gcm@openssh.com MAC: compression: none\n\n2025-07-21 22:03:24.604 [info] (ssh_tunnel) stderr: debug1: expecting SSH2_MSG_KEX_ECDH_REPLY\n\n2025-07-21 22:03:24.660 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEX_ECDH_REPLY received\ndebug1: Server host key: ssh-ed25519 SHA256:3/BGZ1UNXR9SufKdsZVtx4Yd+kZTnZzSvRH0l6rtbvo\n\n2025-07-21 22:03:24.660 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /Users/franzsrambical/.ssh/known_hosts2: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory\n\n2025-07-21 22:03:24.660 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory\ndebug1: Host 'login.haicore.berlin' is known and matches the ED25519 host key.\ndebug1: Found key in /Users/franzsrambical/.ssh/known_hosts:17\n\n2025-07-21 22:03:24.662 [info] (ssh_tunnel) stderr: debug1: ssh_packet_send2_wrapped: resetting send seqnr 3\ndebug1: rekey out after 4294967296 blocks\ndebug1: SSH2_MSG_NEWKEYS sent\ndebug1: expecting SSH2_MSG_NEWKEYS\n\n2025-07-21 22:03:24.662 [info] (ssh_tunnel) stderr: debug1: ssh_packet_read_poll2: resetting read seqnr 3\ndebug1: SSH2_MSG_NEWKEYS received\ndebug1: rekey in after 4294967296 blocks\n\n2025-07-21 22:03:24.662 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_EXT_INFO received\ndebug1: kex_ext_info_client_parse: server-sig-algs=\n\n2025-07-21 22:03:24.995 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_SERVICE_ACCEPT received\n\n2025-07-21 22:03:25.065 [info] (ssh_tunnel) stderr: debug1: Authentications that can continue: publickey,gssapi-keyex,gssapi-with-mic,password,keyboard-interactive\ndebug1: Next authentication method: publickey\n\n2025-07-21 22:03:25.070 [info] (ssh_tunnel) stderr: debug1: get_agent_identities: bound agent to hostkey\n\n2025-07-21 22:03:25.071 [info] (ssh_tunnel) stderr: debug1: get_agent_identities: ssh_fetch_identitylist: agent contains no identities\ndebug1: Will attempt key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\ndebug1: Offering public key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\n\n2025-07-21 22:03:25.127 [info] (ssh_tunnel) stderr: debug1: Authentications that can continue: publickey,gssapi-keyex,gssapi-with-mic,password,keyboard-interactive\ndebug1: Next authentication method: keyboard-interactive\n\n2025-07-21 22:03:25.263 [info] (ssh_tunnel) stderr: debug1: read_passphrase: requested to askpass\n\n2025-07-21 22:03:25.648 [info] Askpass server received request: POST /\n2025-07-21 22:03:25.648 [info] Askpass server received request body: {""request"":""(franz.srambical@login.haicore.berlin) Password: ""}\n2025-07-21 22:03:25.648 [info] Received SSH askpass request: (franz.srambical@login.haicore.berlin) Password: \n2025-07-21 22:03:30.142 [error] Password authentication cancelled\n2025-07-21 22:03:30.150 [info] (ssh_tunnel) stderr: Server returned status code: 500\n\n2025-07-21 22:03:32.809 [info] Resolving ssh remote authority 'login.haicore.berlin' (Unparsed 
'ssh-remote+7b22686f73744e616d65223a226c6f67696e2e686169636f72652e6265726c696e227d') (attempt #1)\n2025-07-21 22:03:32.819 [info] SSH askpass server listening on /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-QgRGHx/socket.sock\n2025-07-21 22:03:32.820 [info] Using configured platform linux for remote host login.haicore.berlin\n2025-07-21 22:03:32.822 [info] Using askpass script: /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/launchSSHAskpass.sh with javascript file /Users/franzsrambical/.cursor/extensions/anysphere.remote-ssh-1.0.22/dist/scripts/sshAskClient.js. Askpass handle: /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor-ssh-QgRGHx/socket.sock\n2025-07-21 22:03:32.825 [info] Launching SSH server via shell with command: cat ""/var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_13803.sh"" | ssh -v -T -D 58475 login.haicore.berlin bash --login -c bash\n2025-07-21 22:03:32.825 [info] Started installation script. Waiting for it to finish...\n2025-07-21 22:03:32.825 [info] Waiting for server to install via process(95179)...\n2025-07-21 22:03:32.834 [info] (ssh_tunnel) stderr: OpenSSH_9.9p1, LibreSSL 3.3.6\ndebug1: Reading configuration data /Users/franzsrambical/.ssh/config\n\n2025-07-21 22:03:32.835 [info] (ssh_tunnel) stderr: debug1: /Users/franzsrambical/.ssh/config line 22: Applying options for login.haicore.berlin\ndebug1: Reading configuration data /etc/ssh/ssh_config\n\n2025-07-21 22:03:32.835 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/ssh_config.d/100-macos.conf\ndebug1: /etc/ssh/ssh_config.d/100-macos.conf line 1: Applying options for *\n\n2025-07-21 22:03:32.835 [info] (ssh_tunnel) stderr: debug1: Reading configuration data /etc/ssh/crypto.conf\n\n2025-07-21 22:03:32.835 [info] (ssh_tunnel) stderr: debug1: Authenticator provider $SSH_SK_PROVIDER did not resolve; disabling\ndebug1: Connecting to login.haicore.berlin port 22.\n\n2025-07-21 22:03:32.883 [info] (ssh_tunnel) stderr: debug1: Connection established.\n\n2025-07-21 22:03:32.883 [info] (ssh_tunnel) stderr: debug1: identity file /Users/franzsrambical/.ssh/id_ed25519 type 3\ndebug1: identity file /Users/franzsrambical/.ssh/id_ed25519-cert type -1\n\n2025-07-21 22:03:32.883 [info] (ssh_tunnel) stderr: debug1: Local version string SSH-2.0-OpenSSH_9.9\n\n2025-07-21 22:03:32.946 [info] (ssh_tunnel) stderr: debug1: Remote protocol version 2.0, remote software version OpenSSH_8.7\ndebug1: compat_banner: match: OpenSSH_8.7 pat OpenSSH* compat 0x04000000\ndebug1: Authenticating to login.haicore.berlin:22 as 'franz.srambical'\n\n2025-07-21 22:03:32.946 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /Users/franzsrambical/.ssh/known_hosts2: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory\n\n2025-07-21 22:03:32.946 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory\ndebug1: SSH2_MSG_KEXINIT sent\n\n2025-07-21 22:03:32.988 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_KEXINIT received\ndebug1: kex: algorithm: ecdh-sha2-nistp256\ndebug1: kex: host key algorithm: ssh-ed25519\ndebug1: kex: server->client cipher: aes128-gcm@openssh.com MAC: compression: none\ndebug1: kex: client->server cipher: aes128-gcm@openssh.com MAC: compression: none\n\n2025-07-21 22:03:32.988 [info] (ssh_tunnel) stderr: debug1: expecting SSH2_MSG_KEX_ECDH_REPLY\n\n2025-07-21 22:03:33.057 [info] (ssh_tunnel) stderr: debug1: 
SSH2_MSG_KEX_ECDH_REPLY received\ndebug1: Server host key: ssh-ed25519 SHA256:3/BGZ1UNXR9SufKdsZVtx4Yd+kZTnZzSvRH0l6rtbvo\n\n2025-07-21 22:03:33.057 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /Users/franzsrambical/.ssh/known_hosts2: No such file or directory\ndebug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts: No such file or directory\n\n2025-07-21 22:03:33.057 [info] (ssh_tunnel) stderr: debug1: load_hostkeys: fopen /etc/ssh/ssh_known_hosts2: No such file or directory\ndebug1: Host 'login.haicore.berlin' is known and matches the ED25519 host key.\ndebug1: Found key in /Users/franzsrambical/.ssh/known_hosts:17\n\n2025-07-21 22:03:33.059 [info] (ssh_tunnel) stderr: debug1: ssh_packet_send2_wrapped: resetting send seqnr 3\ndebug1: rekey out after 4294967296 blocks\n\n2025-07-21 22:03:33.059 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_NEWKEYS sent\ndebug1: expecting SSH2_MSG_NEWKEYS\ndebug1: ssh_packet_read_poll2: resetting read seqnr 3\ndebug1: SSH2_MSG_NEWKEYS received\ndebug1: rekey in after 4294967296 blocks\n\n2025-07-21 22:03:33.059 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_EXT_INFO received\ndebug1: kex_ext_info_client_parse: server-sig-algs=\n\n2025-07-21 22:03:33.396 [info] (ssh_tunnel) stderr: debug1: SSH2_MSG_SERVICE_ACCEPT received\n\n2025-07-21 22:03:33.454 [info] (ssh_tunnel) stderr: debug1: Authentications that can continue: publickey,gssapi-keyex,gssapi-with-mic,password,keyboard-interactive\ndebug1: Next authentication method: publickey\n\n2025-07-21 22:03:33.456 [info] (ssh_tunnel) stderr: debug1: get_agent_identities: bound agent to hostkey\n\n2025-07-21 22:03:33.457 [info] (ssh_tunnel) stderr: debug1: get_agent_identities: ssh_fetch_identitylist: agent contains no identities\ndebug1: Will attempt key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\ndebug1: Offering public key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\n\n2025-07-21 22:03:33.514 [info] (ssh_tunnel) stderr: debug1: Server accepts key: /Users/franzsrambical/.ssh/id_ed25519 ED25519 SHA256:Iq/4xZ7XCaNSqbQ9N2LsZ+UYwGSzaQ+kLJHcOv/9ogQ explicit\n\n2025-07-21 22:03:33.592 [info] (ssh_tunnel) stderr: Authenticated to login.haicore.berlin ([141.80.150.4]:22) using ""publickey"".\n\n2025-07-21 22:03:33.592 [info] (ssh_tunnel) stderr: debug1: Local connections to LOCALHOST:58475 forwarded to remote address socks:0\ndebug1: Local forwarding listening on ::1 port 58475.\n\n2025-07-21 22:03:33.592 [info] (ssh_tunnel) stderr: debug1: channel 0: new port-listener [port listener] (inactive timeout: 0)\ndebug1: Local forwarding listening on 127.0.0.1 port 58475.\ndebug1: channel 1: new port-listener [port listener] (inactive timeout: 0)\n\n2025-07-21 22:03:33.593 [info] (ssh_tunnel) stderr: debug1: channel 2: new session [client-session] (inactive timeout: 0)\ndebug1: Requesting no-more-sessions@openssh.com\ndebug1: Entering interactive session.\ndebug1: pledge: filesystem\n\n2025-07-21 22:03:33.660 [info] (ssh_tunnel) stderr: debug1: client_input_global_request: rtype hostkeys-00@openssh.com want_reply 0\n\n2025-07-21 22:03:33.662 [info] (ssh_tunnel) stderr: debug1: client_input_hostkeys: searching /Users/franzsrambical/.ssh/known_hosts for login.haicore.berlin / (none)\n\n2025-07-21 22:03:33.673 [info] (ssh_tunnel) stderr: debug1: client_input_hostkeys: searching /Users/franzsrambical/.ssh/known_hosts2 for login.haicore.berlin / (none)\ndebug1: client_input_hostkeys: hostkeys file 
/Users/franzsrambical/.ssh/known_hosts2 does not exist\ndebug1: client_input_hostkeys: no new or deprecated keys from server\ndebug1: Remote: /home/franz.srambical/.ssh/authorized_keys:1: key options: agent-forwarding port-forwarding pty user-rc x11-forwarding\ndebug1: Remote: /home/franz.srambical/.ssh/authorized_keys:1: key options: agent-forwarding port-forwarding pty user-rc x11-forwarding\n\n2025-07-21 22:03:33.839 [info] (ssh_tunnel) stderr: debug1: Sending environment.\n\n2025-07-21 22:03:33.839 [info] (ssh_tunnel) stderr: debug1: Sending command: bash --login -c bash\ndebug1: pledge: network\n\n2025-07-21 22:03:34.362 [info] (ssh_tunnel) stdout: Using TMP_DIR: /run/user/961800067\nLocking /run/user/961800067/cursor-remote-lock.c403edc4db82e26fa41a0903d75ac6d0\nServer script already installed in /home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/bin/cursor-server\nChecking node executable\nv20.18.2\nChecking for running multiplex server: /home/franz.srambical/.cursor-server/bin/multiplex-server/45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15.js\nRunning multiplex server: \nCreating multiplex server token file /run/user/961800067/cursor-remote-multiplex.token.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\nCreating directory for multiplex server: /home/franz.srambical/.cursor-server/bin/multiplex-server\nWriting multiplex server script to /home/franz.srambical/.cursor-server/bin/multiplex-server/45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15.js\n\n2025-07-21 22:03:34.412 [info] (ssh_tunnel) stdout: Starting multiplex server: /home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/node /home/franz.srambical/.cursor-server/bin/multiplex-server/45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15.js 11b30778-0571-4c98-9c78-895aa2e4f200\nMultiplex server started with PID 1699749 and wrote pid to file /run/user/961800067/cursor-remote-multiplex.pid.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\nReading multiplex server token file /run/user/961800067/cursor-remote-multiplex.token.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\nMultiplex server token file found\nReading multiplex server log file /run/user/961800067/cursor-remote-multiplex.log.c403edc4db82e26fa41a0903d75ac6d0.45e440a0fc5a5d12380c7a83a49ab82c55f715a5d60292da31f8d75730a9ee15\n\n2025-07-21 22:03:34.888 [info] (ssh_tunnel) stdout: Checking for code servers\nCode server script is already running /home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/bin/cursor-server. 
Running processes are 1608910 sh /home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/bin/cursor-server --start-server --host=127.0.0.1 --port 0 --connection-token-file /run/user/961800067/cursor-remote-code.token.c403edc4db82e26fa41a0903d75ac6d0 --telemetry-level off --enable-remote-auto-shutdown --accept-server-license-terms\nCode server log file is /run/user/961800067/cursor-remote-code.log.c403edc4db82e26fa41a0903d75ac6d0\nd82a941ee1490ccd6082ee76: start\nexitCode==0==\nnodeExecutable==/home/franz.srambical/.cursor-server/bin/a8e95743c5268be73767c46944a71f4465d05c90/node==\nerrorMessage====\nisFatalError==false==\nmultiplexListeningOn==45441==\nmultiplexConnectionToken==11b30778-0571-4c98-9c78-895aa2e4f200==\ncodeListeningOn==34715==\ncodeConnectionToken==d56c0086-b507-48dc-bbeb-73bed3240052==\ndetectedPlatform==linux==\narch==x64==\nSSH_AUTH_SOCK====\nd82a941ee1490ccd6082ee76: end\nUnlocking /run/user/961800067/cursor-remote-lock.c403edc4db82e26fa41a0903d75ac6d0\n\n2025-07-21 22:03:34.891 [info] Server install command exit code: 0\n2025-07-21 22:03:34.891 [info] Deleting local script /var/folders/nn/241fnlwx03d7k7qt2jg98txr0000gn/T/cursor_remote_install_13803.sh\n2025-07-21 22:03:34.893 [info] [forwarding][code] creating new forwarding server\n2025-07-21 22:03:34.893 [info] [forwarding][code] server listening on 127.0.0.1:58479\n2025-07-21 22:03:34.893 [info] [forwarding][code] Set up server\n2025-07-21 22:03:34.894 [info] [remote-ssh] codeListeningOn (remote=[object Object]; local=[object Object]) codeConnectionToken: d56c0086-b507-48dc-bbeb-73bed3240052\n2025-07-21 22:03:34.894 [info] [forwarding][multiplex] creating new forwarding server\n2025-07-21 22:03:34.895 [info] [forwarding][multiplex] server listening on 127.0.0.1:58480\n2025-07-21 22:03:34.895 [info] [forwarding][multiplex] Set up server\n2025-07-21 22:03:34.898 [info] [remote-ssh] multiplexListeningOn (remote=[object Object]; local=[object Object]) multiplexConnectionToken: 11b30778-0571-4c98-9c78-895aa2e4f200\n2025-07-21 22:03:34.898 [info] [remote-ssh] Pinging remote server on port 127.0.0.1:58480\n2025-07-21 22:03:34.902 [info] [command][ed808478-286a-43df-b4b3-f8dcd46b6faa] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""11b30778-0571-4c98-9c78-895aa2e4f200"",""id"":""ed808478-286a-43df-b4b3-f8dcd46b6faa""}\n2025-07-21 22:03:34.904 [info] [forwarding][multiplex][127.0.0.1:58480 -> 127.0.0.1:45441][ebda891d-0f5d-49b7-9e27-b094c2d5ce18] received connection request\n2025-07-21 22:03:34.905 [info] (ssh_tunnel) stderr: debug1: Connection to port 58475 forwarding to socks port 0 requested.\ndebug1: channel 3: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 22:03:34.919 [info] [forwarding][code][127.0.0.1:58479 -> 127.0.0.1:34715][ab09c3f9-4700-43f8-a28e-737aadc98cb9] received connection request\n2025-07-21 22:03:34.919 [info] (ssh_tunnel) stderr: debug1: Connection to port 58475 forwarding to socks port 0 requested.\ndebug1: channel 4: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 22:03:34.949 [info] [forwarding][multiplex][127.0.0.1:58480 -> 127.0.0.1:58475 -> 127.0.0.1:45441][ebda891d-0f5d-49b7-9e27-b094c2d5ce18] socks forwarding established\n2025-07-21 22:03:34.977 [info] [forwarding][code][127.0.0.1:58479 -> 127.0.0.1:58475 -> 127.0.0.1:34715][ab09c3f9-4700-43f8-a28e-737aadc98cb9] socks forwarding established\n2025-07-21 22:03:34.999 [info] [command][ed808478-286a-43df-b4b3-f8dcd46b6faa] Process exited with 
code 0\n2025-07-21 22:03:34.999 [info] [forwarding][multiplex][127.0.0.1:58480 -> 127.0.0.1:58475 -> 127.0.0.1:45441][ebda891d-0f5d-49b7-9e27-b094c2d5ce18] socks connection closed\n2025-07-21 22:03:34.999 [info] [command][ed808478-286a-43df-b4b3-f8dcd46b6faa] Socket close event received\n2025-07-21 22:03:35.159 [info] (ssh_tunnel) stderr: debug1: channel 3: free: direct-tcpip: listening port 58475 for 127.0.0.1 port 45441, connect from 127.0.0.1 port 58482 to 127.0.0.1 port 58475, nchannels 5\n\n2025-07-21 22:03:35.162 [info] Successfully connected to Cursor server at http://127.0.0.1:58479/version\n2025-07-21 22:03:35.162 [info] [execServer][spawn] command: echo, args: 1, options: {}\n2025-07-21 22:03:35.163 [info] [forwarding][multiplex][127.0.0.1:58480 -> 127.0.0.1:45441][f006c83e-1c37-4fe6-92fb-b70b8f701f94] received connection request\n2025-07-21 22:03:35.163 [info] [command][36942475-88e4-4c4f-8ea7-b444a73dde7e] Sending command request: {""command"":""echo"",""args"":[""1""],""env"":{},""token"":""11b30778-0571-4c98-9c78-895aa2e4f200"",""id"":""36942475-88e4-4c4f-8ea7-b444a73dde7e""}\n2025-07-21 22:03:35.163 [info] (ssh_tunnel) stderr: debug1: Connection to port 58475 forwarding to socks port 0 requested.\ndebug1: channel 3: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 22:03:35.227 [info] [forwarding][multiplex][127.0.0.1:58480 -> 127.0.0.1:58475 -> 127.0.0.1:45441][f006c83e-1c37-4fe6-92fb-b70b8f701f94] socks forwarding established\n2025-07-21 22:03:35.276 [info] [forwarding][multiplex][127.0.0.1:58480 -> 127.0.0.1:58475 -> 127.0.0.1:45441][f006c83e-1c37-4fe6-92fb-b70b8f701f94] socks connection closed\n2025-07-21 22:03:35.276 [info] [command][36942475-88e4-4c4f-8ea7-b444a73dde7e] Process exited with code 0\n2025-07-21 22:03:35.276 [info] Successfully ran 'echo 1' against the multiplex server\n2025-07-21 22:03:35.277 [info] [remote-ssh] Resolved exec server. Socks port: 58475\n2025-07-21 22:03:35.277 [info] [remote-ssh] Resolved authority: {""host"":""127.0.0.1"",""port"":58479,""connectionToken"":""d56c0086-b507-48dc-bbeb-73bed3240052"",""extensionHostEnv"":{}}. 
Socks port: 58475\n2025-07-21 22:03:35.277 [info] [command][36942475-88e4-4c4f-8ea7-b444a73dde7e] Socket close event received\n2025-07-21 22:03:35.300 [info] [forwarding][code][127.0.0.1:58479 -> 127.0.0.1:34715][eecdefd3-4775-432d-9e64-7cd34c881553] received connection request\n2025-07-21 22:03:35.300 [info] (ssh_tunnel) stderr: debug1: Connection to port 58475 forwarding to socks port 0 requested.\ndebug1: channel 5: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 22:03:35.319 [info] (ssh_tunnel) stderr: debug1: channel 3: free: direct-tcpip: listening port 58475 for 127.0.0.1 port 45441, connect from 127.0.0.1 port 58486 to 127.0.0.1 port 58475, nchannels 6\n\n2025-07-21 22:03:35.340 [info] [forwarding][code][127.0.0.1:58479 -> 127.0.0.1:58475 -> 127.0.0.1:34715][eecdefd3-4775-432d-9e64-7cd34c881553] socks forwarding established\n2025-07-21 22:03:35.395 [info] [forwarding][code][127.0.0.1:58479 -> 127.0.0.1:34715][56554075-115e-42c0-a609-13941726c2c1] received connection request\n2025-07-21 22:03:35.395 [info] (ssh_tunnel) stderr: debug1: Connection to port 58475 forwarding to socks port 0 requested.\ndebug1: channel 3: new dynamic-tcpip [dynamic-tcpip] (inactive timeout: 0)\n\n2025-07-21 22:03:35.436 [info] [forwarding][code][127.0.0.1:58479 -> 127.0.0.1:58475 -> 127.0.0.1:34715][56554075-115e-42c0-a609-13941726c2c1] socks forwarding established\n2025-07-21 22:03:35.588 [info] Saved platform linux for remote host login.haicore.berlin\n2025-07-21 22:03:38.205 [info] (ssh_tunnel) stderr: debug1: channel 4: free: direct-tcpip: listening port 58475 for 127.0.0.1 port 34715, connect from 127.0.0.1 port 58484 to 127.0.0.1 port 58475, nchannels 6\n\n2025-07-21 22:03:38.205 [info] [forwarding][code][127.0.0.1:58479 -> 127.0.0.1:58475 -> 127.0.0.1:34715][ab09c3f9-4700-43f8-a28e-737aadc98cb9] socks connection closed\n",log,tab +3,93,"experiments/sample.sh",0,0,"",shellscript,tab +4,206,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"10:03:39 PM [info] Activating crowd-code\n10:03:39 PM [info] Recording started\n10:03:39 PM [info] Initializing git provider using file system watchers...\n",Log,tab +5,251,"extension-output-pdoom-org.crowd-code-#1-crowd-code",153,0,"10:03:39 PM [info] Git repository found\n10:03:39 PM [info] Git provider initialized successfully\n10:03:39 PM [info] Initial git state: [object Object]\n",Log,content +6,490,"TERMINAL",0,0,"",,terminal_focus +7,491,"TERMINAL",0,0,"",,terminal_focus +8,662,"TERMINAL",0,0,"",,terminal_focus +9,662,"experiments/sample.sh",0,0,"",shellscript,tab +10,720,"TERMINAL",0,0,"",,terminal_focus +11,1052,"TERMINAL",0,0,"source /home/franz.srambical/jafar/.venv/bin/activate",,terminal_command +12,1080,"TERMINAL",0,0,"source /home/franz.srambical/jafar/.venv/bin/activate",,terminal_command +13,5412,"TERMINAL",0,0,"squeue --me",,terminal_command +14,5425,"TERMINAL",0,0,"]633;C JOBID USER PARTITION NODES CPUS ST SUBMIT_TIME START_TIME TIME TIME_LIMIT NODELIST(REASON)\r\n 13784 franz.sram interacti 1 8 R 2025-07-21T19:45:03 2025-07-21T19:45:03 2:18:41 1-00:00:00 hai004\r\n]0;franz.srambical@hai-login1:~/jafar",,terminal_output +15,10357,"TERMINAL",0,0,"bash",,terminal_focus +16,15412,"TERMINAL",0,0,"scancel --me",,terminal_command +17,15440,"TERMINAL",0,0,"]633;C]0;franz.srambical@hai-login1:~/jafar",,terminal_output +18,17029,"TERMINAL",0,0,"bash",,terminal_focus +19,20266,"TERMINAL",0,0,"squeue --me",,terminal_command +20,20273,"TERMINAL",0,0,"]633;C JOBID USER PARTITION NODES CPUS ST SUBMIT_TIME START_TIME TIME 
TIME_LIMIT NODELIST(REASON)\r\n 13784 franz.sram interacti 1 8 CG 2025-07-21T19:45:03 2025-07-21T19:45:03 2:18:51 1-00:00:00 hai004\r\n]0;franz.srambical@hai-login1:~/jafar",,terminal_output +21,23962,"TERMINAL",0,0,"bash",,terminal_focus +22,25013,"TERMINAL",0,0,"",,terminal_focus +23,25506,"TERMINAL",0,0,"source /home/franz.srambical/jafar/.venv/bin/activate",,terminal_command +24,25544,"TERMINAL",0,0,"]633;C]0;franz.srambical@hai-login1:~/jafar",,terminal_output +25,25910,"TERMINAL",0,0,"bash",,terminal_focus +26,26249,"TERMINAL",0,0,"bash",,terminal_focus +27,34160,"TERMINAL",0,0,"salloc --gpus=8 --ntasks-per-node=8 --cpus-per-task=1 --mem=100G",,terminal_command +28,34235,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 13785\r\nsalloc: Nodes hai005 are ready for job\r\n",,terminal_output +29,34658,"TERMINAL",0,0,"Running inside SLURM, Job ID 13785.\r\n",,terminal_output +30,34721,"TERMINAL",0,0,"]0;franz.srambical@hai-login1:~/jafar[?2004h[franz.srambical@hai005.haicore.berlin:~/jafar] $ ",,terminal_output +31,42637,"TERMINAL",0,0,"\r[franz.srambical@hai005.haicore.berlin:~/jafar] $ ",,terminal_output +32,46619,"TERMINAL",0,0,"\r[franz.srambical@hai005.haicore.berlin:~/jafar] $ ",,terminal_output +33,48892,"sample.py",0,0,"from dataclasses import dataclass\nfrom typing import Optional\nimport time\nimport os\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nfrom flax.training.train_state import TrainState\nimport grain\nimport orbax.checkpoint as ocp\nimport optax\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n checkpoint_step: Optional[int] = None\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_co_train: bool = True\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=args.lam_co_train,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n use_maskgit=False,\n 
param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n is_training=False,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\n\ndummy_train_state = TrainState.create(\n apply_fn=genie.apply,\n params=params,\n tx=optax.adamw(\n optax.warmup_cosine_decay_schedule(\n 0, 0, 1, 2 # dummy values\n )\n ), \n)\nhandler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\nhandler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\ncheckpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=ocp.CheckpointManagerOptions(step_format_fixed_length=6),\n handler_registry=handler_registry\n)\nabstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, dummy_train_state\n)\n\nrestored = checkpoint_manager.restore(\n args.checkpoint_step or checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n ),\n)\nrestored_train_state = restored[""model_state""]\nparams = restored_train_state.params\n\n\ndef _sampling_wrapper(module, batch):\n # return module.sample_maskgit(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n return module.sample_causal(batch, args.seq_len, args.temperature, args.sample_argmax)\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n # sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n sampling_fn = nn.apply(_sampling_wrapper, genie)\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return generated_vid\n\ndef _get_dataloader_iterator():\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n# --- Get video + latent actions ---\ngrain_iterator = _get_dataloader_iterator()\nvideo_batch = next(grain_iterator)\n# video_batch = np.load(""overfit_dir/single_sample_corner.npy"")\n\nvideo_batch = video_batch.astype(args.dtype) / 255.0\n# Get latent actions for all videos in the batch\nbatch = dict(videos=video_batch[:,:args.seq_len])\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(video_batch.shape[0], args.seq_len - 1, 1)\n\n# --- Sample + evaluate video ---\nprint(""autoreg sampling..."")\nvid = _autoreg_sample(rng, video_batch, action_batch)\nprint(""autoreg sampling done. 
calculating ssim and saving video"")\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\ntrue_videos = (video_batch * 255).astype(np.uint8)\npred_videos = (vid * 255).astype(np.uint8)\nvideo_comparison = np.zeros((2, *vid.shape), dtype=np.uint8)\nvideo_comparison[0] = true_videos[:, :args.seq_len]\nvideo_comparison[1] = pred_videos\nframes = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n# --- Save video --- \nimgs = [Image.fromarray(img) for img in frames]\n# Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\nfor t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(action_batch.shape[0]):\n action = action_batch[row, t, 0]\n y_offset = row * video_batch.shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +34,52811,"TERMINAL",0,0,"b",,terminal_output +35,53009,"TERMINAL",0,0,"as",,terminal_output +36,53066,"TERMINAL",0,0,"h",,terminal_output +37,53138,"TERMINAL",0,0," ",,terminal_output +38,53394,"TERMINAL",0,0,"e",,terminal_output +39,53592,"TERMINAL",0,0,"p",,terminal_output +40,53649,"TERMINAL",0,0,"x",,terminal_output +41,53922,"TERMINAL",0,0,"",,terminal_output +42,54058,"TERMINAL",0,0,"",,terminal_output +43,54160,"TERMINAL",0,0,"",,terminal_output +44,54483,"TERMINAL",0,0,"e",,terminal_output +45,54692,"TERMINAL",0,0,"xp",,terminal_output +46,54749,"TERMINAL",0,0,"e",,terminal_output +47,54909,"TERMINAL",0,0,"i",,terminal_output +48,54971,"TERMINAL",0,0,"m",,terminal_output +49,55245,"TERMINAL",0,0,"",,terminal_output +50,55432,"TERMINAL",0,0,"",,terminal_output +51,55629,"TERMINAL",0,0,"ri",,terminal_output +52,55734,"TERMINAL",0,0,"ments/",,terminal_output +53,55865,"TERMINAL",0,0,"s",,terminal_output +54,56022,"TERMINAL",0,0,"a",,terminal_output +55,56153,"TERMINAL",0,0,"p",,terminal_output +56,56255,"TERMINAL",0,0,"ml",,terminal_output +57,56309,"TERMINAL",0,0,"",,terminal_output +58,56844,"TERMINAL",0,0,"",,terminal_output +59,57077,"TERMINAL",0,0,"",,terminal_output +60,57201,"TERMINAL",0,0,"",,terminal_output +61,57354,"TERMINAL",0,0,"mp",,terminal_output +62,57591,"TERMINAL",0,0,"l",,terminal_output +63,57791,"TERMINAL",0,0,"e.sh ",,terminal_output +64,58121,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +65,63532,"sample.py",793,0,"",python,selection_command +66,64011,"sample.py",3800,0,"",python,selection_command +67,67058,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +68,67854,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport grain\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n lam_co_train: bool\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n use_maskgit: bool\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n is_training: bool\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n is_training=self.is_training,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n is_training=self.is_training,\n )\n\n if self.use_maskgit:\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n ) \n else:\n self.dynamics = DynamicsAutoregressive(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""])\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = 
lam_outputs[""indices""]\n return outputs\n\n\n def sample_causal(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n temperature: float = 1.0,\n sample_argmax: bool = False,\n ):\n """"""\n Autoregressively samples with KV caching.\n """"""\n # This method assumes the model was initialized with decode=True\n assert self.decode, ""sample_causal with KV cache requires model to be in decode mode.""\n\n # --- Encode context frames ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n\n # --- Get latent actions ---\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) # (B, S-1, D)\n\n # --- Initialize cache with context frames ---\n # The first call to the dynamics model will populate the KV cache.\n # We only care about the logits for the very last token of the context\n # to predict the first new token.\n dyna_inputs = {\n ""video_tokens"": token_idxs,\n ""latent_actions"": action_tokens[:, :T-1] \n }\n # Note: We are now calling self.dynamics directly\n logits = self.dynamics(dyna_inputs, training=False)[""token_logits""]\n \n # Logits for the first token of the first generated frame (t=T)\n next_token_logits = logits[:, T-1, N-1, :]\n \n # The full sequence of tokens to be filled in\n generated_tokens = jnp.zeros((B, seq_len, N), dtype=token_idxs.dtype)\n generated_tokens = generated_tokens.at[:, :T].set(token_idxs)\n\n # --- Autoregressive generation loop ---\n rng = batch[""rng""]\n \n # We have N tokens per frame. We start generating from frame T.\n # The total number of tokens to generate is (seq_len - T) * N\n for t_step in range(T * N, seq_len * N):\n frame_idx = t_step // N\n patch_idx = t_step % N\n\n # Sample or argmax\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1)\n else:\n rng, step_rng = jax.random.split(rng)\n next_token = jax.random.categorical(\n step_rng, next_token_logits / temperature, axis=-1\n )\n \n # Place the new token in the sequence\n generated_tokens = generated_tokens.at[:, frame_idx, patch_idx].set(next_token)\n\n # If we just generated the last token, break\n if frame_idx == seq_len - 1 and patch_idx == N - 1:\n break\n \n # Prepare input for the next step: just the token we generated\n # The model will use its cache for all previous tokens.\n next_input_token = next_token[:, None, None] # Reshape to (B, 1, 1)\n \n # Get the action for the current frame\n # Action for frame `i` influences transition from `i-1` to `i`\n current_action = action_tokens[:, frame_idx-1:frame_idx] # (B, 1, D)\n\n next_dyna_inputs = {\n ""video_tokens"": next_input_token,\n ""latent_actions"": current_action,\n }\n\n # This call uses and updates the cache, and is very fast!\n next_token_logits = self.dynamics(next_dyna_inputs, training=False)[""token_logits""]\n next_token_logits = next_token_logits[:, 0, 0, :] # Squeeze singleton dims\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n generated_tokens, video_hw=batch[""videos""].shape[2:4]\n )\n return final_frames\n\n @nn.compact\n def sample_maskgit(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n 
Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) \n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new 
tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: jax.sharding.NamedSharding,\n grain_iterator: grain.DataLoaderIterator,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\n \n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n is_training=True,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\n ),\n )[""model_state""]\n restored_tokenizer_params = restored_tokenizer.params[""params""]\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n 
codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n is_training=True,\n )\n lam_init_params = dummy_lam.init(_rng, inputs)\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n abstract_sharded_lam_state = _create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n restored_lam = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\n ),\n )[""model_state""]\n restored_lam_params = restored_lam.params[""params""]\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately dicarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n train_state.params[""params""][""lam""].update(restored_lam_params)\n lam_checkpoint_manager.close()\n\n return train_state\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)",python,tab +69,71846,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +70,72843,"genie.py",4168,0,"",python,selection_command +71,76618,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 742, in backends\r\n backend = _init_backend(platform)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 828, in _init_backend\r\n backend = registration.factory()\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 518, in factory\r\n return xla_client.make_c_api_client(plugin_name, updated_options, None)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jaxlib/xla_client.py"", line 153, in make_c_api_client\r\n return _xla.get_c_api_client(plugin_name, options, distributed_client)\r\nRuntimeError: Bad StatusOr access: RESOURCE_EXHAUSTED: : CUDA_ERROR_OUT_OF_MEMORY: out of memory\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 65, in \r\n rng = jax.random.PRNGKey(args.seed)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/random.py"", line 249, in PRNGKey\r\n return _return_prng_keys(True, _key('PRNGKey', seed, impl))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/random.py"", line 201, in _key\r\n return prng.random_seed(seed, impl=impl)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/prng.py"", line 551, in random_seed\r\n seeds_arr = jnp.asarray(np.int64(seeds))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array.py"", line 383, in asarray\r\n return array(a, dtype=dtype, copy=bool(copy), order=order, device=device)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array.py"", line 268, in array\r\n out_array: Array = lax._convert_element_type(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 1726, in _convert_element_type\r\n return convert_element_type_p.bind(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 536, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 552, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 4902, in _convert_element_type_bind_with_trace\r\n operand = core.Primitive.bind_with_trace(convert_element_type_p, trace, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 562, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1066, in 
process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 91, in apply_primitive\r\n outs = fun(*args)\r\nRuntimeError: Unable to initialize backend 'cuda': Bad StatusOr access: RESOURCE_EXHAUSTED: : CUDA_ERROR_OUT_OF_MEMORY: out of memory (you may need to uninstall the failing plugin package, or set JAX_PLATFORMS=cpu to skip this backend.)\r\nF0721 22:04:55.653346 2046733 pjrt_stream_executor_client.cc:925] Non-OK-status: local_device->ThenExecuteCallback( local_device->host_to_device_stream(), [staging_buffer{std::move(staging_buffer)}, on_done_with_host_buffer{ std::move(on_done_with_host_buffer)}]() mutable { if (on_done_with_host_buffer) { std::move (*on_done_with_host_buffer)(); } })\r\nStatus: RESOURCE_EXHAUSTED: : CUDA_ERROR_OUT_OF_MEMORY: out of memory\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 65, in \r\n rng = jax.random.PRNGKey(args.seed)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/random.py"", line 249, in PRNGKey\r\n return _return_prng_keys(True, _key('PRNGKey', seed, impl))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/random.py"", line 201, in _key\r\n return prng.random_seed(seed, impl=impl)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/prng.py"", line 551, in random_seed\r\n seeds_arr = jnp.asarray(np.int64(seeds))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array.py"", line 383, in asarray\r\n return array(a, dtype=dtype, copy=bool(copy), order=order, device=device)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array.py"", line 268, in array\r\n out_array: Array = lax._convert_element_type(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 1726, in _convert_element_type\r\n return convert_element_type_p.bind(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 536, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 552, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 4902, in _convert_element_type_bind_with_trace\r\n operand = core.Primitive.bind_with_trace(convert_element_type_p, trace, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 562, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1066, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 91, in apply_primitive\r\n outs = fun(*args)\r\nRuntimeError: Bad StatusOr access: RESOURCE_EXHAUSTED: : CUDA_ERROR_OUT_OF_MEMORY: 
out of memory\r\n",,terminal_output +72,76759,"TERMINAL",0,0,"F0721 22:04:55.781534 2046725 pjrt_stream_executor_client.cc:925] Non-OK-status: local_device->ThenExecuteCallback( local_device->host_to_device_stream(), [staging_buffer{std::move(staging_buffer)}, on_done_with_host_buffer{ std::move(on_done_with_host_buffer)}]() mutable { if (on_done_with_host_buffer) { std::move (*on_done_with_host_buffer)(); } })\r\nStatus: RESOURCE_EXHAUSTED: : CUDA_ERROR_OUT_OF_MEMORY: out of memory\r\n*** Check failure stack trace: ***\r\n @ 0x7fca71e58754 absl::lts_20230802::log_internal::LogMessage::SendToLog()\r\n @ 0x7fca71e585c4 absl::lts_20230802::log_internal::LogMessage::Flush()\r\n @ 0x7fca71e58af9 absl::lts_20230802::log_internal::LogMessageFatal::~LogMessageFatal()\r\n @ 0x7fca69bbbf9b std::_Function_handler<>::_M_invoke()\r\n @ 0x7fca71ce4bd4 Eigen::ThreadPoolTempl<>::WorkerLoop()\r\n @ 0x7fca71ce4a61 std::__invoke_impl<>()\r\n @ 0x7fca71cd262f tsl::(anonymous namespace)::PThread::ThreadFn()\r\n @ 0x7fcd4fe89c02 start_thread\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 65, in \r\n rng = jax.random.PRNGKey(args.seed)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/random.py"", line 249, in PRNGKey\r\n return _return_prng_keys(True, _key('PRNGKey', seed, impl))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/random.py"", line 201, in _key\r\n return prng.random_seed(seed, impl=impl)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/prng.py"", line 551, in random_seed\r\n seeds_arr = jnp.asarray(np.int64(seeds))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array.py"", line 383, in asarray\r\n return array(a, dtype=dtype, copy=bool(copy), order=order, device=device)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array.py"", line 268, in array\r\n out_array: Array = lax._convert_element_type(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 1726, in _convert_element_type\r\n return convert_element_type_p.bind(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 536, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 552, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 4902, in _convert_element_type_bind_with_trace\r\n operand = core.Primitive.bind_with_trace(convert_element_type_p, trace, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 562, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1066, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File 
""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 91, in apply_primitive\r\n outs = fun(*args)\r\nRuntimeError: Bad StatusOr access: RESOURCE_EXHAUSTED: : CUDA_ERROR_OUT_OF_MEMORY: out of memory\r\n",,terminal_output +73,76837,"TERMINAL",0,0,"*** Check failure stack trace: ***\r\n @ 0x7f5691e58754 absl::lts_20230802::log_internal::LogMessage::SendToLog()\r\n @ 0x7f5691e585c4 absl::lts_20230802::log_internal::LogMessage::Flush()\r\n @ 0x7f5691e58af9 absl::lts_20230802::log_internal::LogMessageFatal::~LogMessageFatal()\r\n @ 0x7f5689bbbf9b std::_Function_handler<>::_M_invoke()\r\n @ 0x7f5691ce4bd4 Eigen::ThreadPoolTempl<>::WorkerLoop()\r\n @ 0x7f5691ce4a61 std::__invoke_impl<>()\r\n @ 0x7f5691cd262f tsl::(anonymous namespace)::PThread::ThreadFn()\r\n @ 0x7f596fa89c02 start_thread\r\nE0721 22:04:55.891581 2044478 cuda_dnn.cc:535] Could not create cudnn handle: CUDNN_STATUS_INTERNAL_ERROR\r\nE0721 22:04:55.891829 2044478 cuda_dnn.cc:539] Memory usage: 851968 bytes free, 85029158912 bytes total.\r\nE0721 22:04:55.903993 2044478 cuda_dnn.cc:535] Could not create cudnn handle: CUDNN_STATUS_INTERNAL_ERROR\r\nE0721 22:04:55.904587 2044478 cuda_dnn.cc:539] Memory usage: 851968 bytes free, 85029158912 bytes total.\r\n",,terminal_output +74,76939,"TERMINAL",0,0,"E0721 22:04:55.954999 2044478 cuda_dnn.cc:535] Could not create cudnn handle: CUDNN_STATUS_INTERNAL_ERROR\r\nE0721 22:04:55.955632 2044478 cuda_dnn.cc:539] Memory usage: 851968 bytes free, 85029158912 bytes total.\r\nE0721 22:04:55.976102 2044478 cuda_dnn.cc:535] Could not create cudnn handle: CUDNN_STATUS_INTERNAL_ERROR\r\nE0721 22:04:55.976588 2044478 cuda_dnn.cc:539] Memory usage: 851968 bytes free, 85029158912 bytes total.\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 65, in \r\n rng = jax.random.PRNGKey(args.seed)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/random.py"", line 249, in PRNGKey\r\n return _return_prng_keys(True, _key('PRNGKey', seed, impl))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/random.py"", line 201, in _key\r\n return prng.random_seed(seed, impl=impl)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/prng.py"", line 551, in random_seed\r\n seeds_arr = jnp.asarray(np.int64(seeds))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array.py"", line 383, in asarray\r\n return array(a, dtype=dtype, copy=bool(copy), order=order, device=device)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array.py"", line 268, in array\r\n out_array: Array = lax._convert_element_type(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 1726, in _convert_element_type\r\n return convert_element_type_p.bind(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 536, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 552, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 4902, in _convert_element_type_bind_with_trace\r\n operand = core.Primitive.bind_with_trace(convert_element_type_p, trace, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 562, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1066, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 91, in apply_primitive\r\n outs = fun(*args)\r\njaxlib._jax.XlaRuntimeError: FAILED_PRECONDITION: DNN library initialization failed. Look at the errors above for more details.\r\n",,terminal_output +75,77332,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +76,77869,"TERMINAL",0,0,"2025-07-21 22:04:56.931743: W external/xla/xla/service/platform_util.cc:220] unable to create StreamExecutor for CUDA:0: : CUDA_ERROR_OUT_OF_MEMORY: out of memory\r\n",,terminal_output +77,77921,"TERMINAL",0,0,"2025-07-21 22:04:56.983610: W external/xla/xla/service/platform_util.cc:220] unable to create StreamExecutor for CUDA:0: : CUDA_ERROR_OUT_OF_MEMORY: out of memory\r\n",,terminal_output +78,78858,"TERMINAL",0,0,"srun: error: hai005: task 4: Exited with exit code 1\r\n",,terminal_output +79,79814,"TERMINAL",0,0,"srun: error: hai005: task 1: Exited with exit code 1\r\n",,terminal_output +80,80826,"TERMINAL",0,0,"2025-07-21 22:04:59.886685: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +81,84533,"TERMINAL",0,0,"srun: error: hai005: task 2: Aborted (core dumped)\r\n",,terminal_output +82,85281,"TERMINAL",0,0,"2025-07-21 22:05:04.346351: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +83,85943,"TERMINAL",0,0,"2025-07-21 22:05:04.972882: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +84,86613,"TERMINAL",0,0,"2025-07-21 22:05:05.643815: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\nsrun: error: hai005: task 5: Aborted (core dumped)\r\n",,terminal_output +85,89161,"TERMINAL",0,0,"2025-07-21 22:05:08.225243: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +86,91788,"TERMINAL",0,0,"2025-07-21 22:05:10.852386: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +87,92426,"TERMINAL",0,0,"2025-07-21 22:05:11.492234: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +88,92616,"TERMINAL",0,0,"2025-07-21 22:05:11.676956: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +89,95776,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 102, in \r\n params = genie.init(_rng, dummy_inputs)\r\n File ""/fast/home/franz.srambical/jafar/genie.py"", line 117, in __call__\r\n dyna_outputs = self.dynamics(outputs, training)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics.py"", line 80, in setup\r\n self.dynamics = STTransformer(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'is_training'\r\n",,terminal_output +90,98062,"TERMINAL",0,0,"srun: error: hai005: task 3: Exited with exit code 1\r\n",,terminal_output +91,98579,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 102, in \r\n params = genie.init(_rng, dummy_inputs)\r\n File ""/fast/home/franz.srambical/jafar/genie.py"", line 117, in __call__\r\n dyna_outputs = self.dynamics(outputs, training)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics.py"", line 80, in setup\r\n self.dynamics = STTransformer(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'is_training'\r\n",,terminal_output +92,98922,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 102, in \r\n params = genie.init(_rng, dummy_inputs)\r\n File ""/fast/home/franz.srambical/jafar/genie.py"", line 117, in __call__\r\n dyna_outputs = self.dynamics(outputs, training)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics.py"", line 80, in setup\r\n self.dynamics = STTransformer(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'is_training'\r\n",,terminal_output +93,99045,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 102, in \r\n params = genie.init(_rng, dummy_inputs)\r\n File ""/fast/home/franz.srambical/jafar/genie.py"", line 117, in __call__\r\n dyna_outputs = self.dynamics(outputs, training)\r\n File ""/fast/home/franz.srambical/jafar/models/dynamics.py"", line 80, in setup\r\n self.dynamics = STTransformer(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/linen/kw_only_dataclasses.py"", line 235, in init_wrapper\r\n dataclass_init(self, *args, **kwargs)\r\nTypeError: STTransformer.__init__() missing 1 required positional argument: 'is_training'\r\n",,terminal_output +94,100836,"TERMINAL",0,0,"srun: error: hai005: task 6: Exited with exit code 1\r\n",,terminal_output +95,101219,"TERMINAL",0,0,"srun: error: hai005: task 7: Exited with exit code 1\r\n",,terminal_output +96,101762,"TERMINAL",0,0,"srun: error: hai005: task 0: Exited with exit code 1\r\n]0;franz.srambical@hai-login1:~/jafar[?2004h[franz.srambical@hai005.haicore.berlin:~/jafar] $ ",,terminal_output +97,106640,"utils/nn.py",0,0,"import math\nfrom typing import Tuple\nfrom functools import partial\n\nfrom flax import linen as nn\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\nclass PositionalEncoding(nn.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n d_model: int # Hidden dimensionality of the input.\n max_len: int = 5000 # Maximum length of a sequence to expect.\n\n def setup(self):\n # Create matrix of [SeqLen, HiddenDim] representing the positional encoding for max_len inputs\n self.pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def __call__(self, x):\n x = x + self.pe[: x.shape[2]]\n return x\n\n# class STBlock2(nn.Module):\n # dim: int\n # num_heads: int\n # dropout: float\n # param_dtype: jnp.dtype\n # dtype: jnp.dtype\n\n # @nn.remat\n # @nn.compact\n # def __call__(self, x: jax.Array) -> jax.Array:\n # # --- Spatial attention ---\n # z = PositionalEncoding(self.dim)(x)\n # z = nn.LayerNorm(\n # 
param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # causal_mask = jnp.tri(z.shape[-2])\n # z = nn.MultiHeadAttention(\n # num_heads=self.num_heads,\n # qkv_features=self.dim,\n # dropout_rate=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z, mask=causal_mask)\n # x = x + z\n\n # # --- Temporal attention ---\n # x = x.swapaxes(1, 2)\n # z = PositionalEncoding(self.dim)(x)\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # causal_mask = jnp.tri(z.shape[-2])\n # z = nn.MultiHeadAttention(\n # num_heads=self.num_heads,\n # qkv_features=self.dim,\n # dropout_rate=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z, mask=causal_mask)\n # x = x + z\n # x = x.swapaxes(1, 2)\n\n # # --- Feedforward ---\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n # # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. Usually, FFN hidden dimension is 4x model_dim\n # z = nn.Dense(\n # self.dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # z = nn.gelu(z)\n # x = x + z\n\n # return x\n\n# class CausalTransformer(nn.Module):\n # model_dim: int\n # out_dim: int\n # num_blocks: int\n # num_heads: int\n # dropout: float\n # param_dtype: jnp.dtype\n # dtype: jnp.dtype\n\n # @nn.compact\n # def __call__(self, x: jax.Array) -> jax.Array:\n # # Input projection and normalization\n # x = nn.Sequential(\n # [\n # nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # nn.Dense(self.model_dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # ]\n # )(x)\n # # Causal transformer blocks\n # for _ in range(self.num_blocks):\n # x = STBlock2(\n # dim=self.model_dim,\n # num_heads=self.num_heads,\n # dropout=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n\n # # Output projection\n # x = nn.Dense(\n # self.out_dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n # return x # (B, T, E)\n\n\nclass STBlock(nn.Module):\n dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n is_training: bool\n spatial_bert: bool = True\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(self.use_flash_attention, is_causal=not self.spatial_bert),\n decode=self.is_training,\n )(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(self.use_flash_attention, is_causal=True),\n decode=self.is_training,\n # FIXME (f.srambical): check whether we should still pass the mask if we set is_causal=True\n )(z)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n 
param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. Usually, FFN hidden dimension is 4x model_dim\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nn.Module):\n model_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n is_training: bool\n spatial_bert: bool = True\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = STBlock(\n dim=self.model_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n spatial_bert=self.spatial_bert,\n use_flash_attention=self.use_flash_attention,\n is_training=self.is_training,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\ndef normalize(x):\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nn.Module):\n latent_dim: int\n num_latents: int\n dropout: float\n\n def setup(self):\n self.codebook = normalize(\n self.param(\n ""codebook"",\n nn.initializers.lecun_uniform(),\n (self.num_latents, self.latent_dim),\n )\n )\n self.drop = nn.Dropout(self.dropout, deterministic=False)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n codebook = normalize(self.codebook)\n distance = -jnp.matmul(x, codebook.T)\n if training:\n dropout_key = self.make_rng(""dropout"")\n distance = self.drop(distance, rng=dropout_key)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array):\n return self.codebook[indices]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool):\n """"""\n Create an attention function that uses flash attention if enabled.\n\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\n\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\n multiple of 4 and mask accordingly.\n """"""\n \n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\n implementation = 'cudnn' if use_flash_attention else None\n\n def _rearrange(x):\n return einops.rearrange(x, '... l h d -> (...) 
l h d')\n def _pad(x):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False)\n return jnp.logical_and(attention_mask, expanded_mask)\n \n original_shape = query.shape\n original_seq_len = query.shape[-3]\n \n # Pad to nearest multiple of 4\n target_seq_len = ((original_seq_len + 3) // 4) * 4\n pad_size = target_seq_len - original_seq_len\n\n query_4d = _pad(_rearrange(query))\n key_4d = _pad(_rearrange(key))\n value_4d = _pad(_rearrange(value))\n \n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n mask_4d = _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n \n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\n \n output_4d = jax.nn.dot_product_attention(\n query=query_4d,\n key=key_4d,\n value=value_4d,\n bias=bias_4d,\n mask=mask_4d,\n implementation=implementation,\n is_causal=is_causal,\n **kwargs\n )\n return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n \n return attention_fn\n\n",python,tab +98,116801,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport einops\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spatial_bert=True,\n use_flash_attention=self.use_flash_attention,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n\n\nclass DynamicsAutoregressive(nn.Module):\n """"""Autoregressive (causal) dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n 
self.dropout,\n self.param_dtype,\n self.dtype,\n self.use_flash_attention,\n spatial_bert=False,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n # vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n # logits = self.dynamics(vid_embed_padded)[:, :-1, :-1]\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, :, :-1]\n mask = jnp.ones(vid_embed.shape[:-1])\n # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)\n # jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,tab +99,116802,"models/dynamics.py",453,13,"STTransformer",python,selection_command +100,117251,"models/dynamics.py",465,0,"",python,selection_command +101,117511,"models/dynamics.py",468,0,"",python,selection_command +102,117754,"models/dynamics.py",496,0,"",python,selection_command +103,117788,"models/dynamics.py",526,0,"",python,selection_command +104,117813,"models/dynamics.py",555,0,"",python,selection_command +105,117846,"models/dynamics.py",583,0,"",python,selection_command +106,117896,"models/dynamics.py",609,0,"",python,selection_command +107,117922,"models/dynamics.py",639,0,"",python,selection_command +108,118061,"models/dynamics.py",663,0,"",python,selection_command +109,118314,"models/dynamics.py",694,0,"",python,selection_command +110,118528,"models/dynamics.py",751,0,"\n ",python,content +111,119398,"models/dynamics.py",764,0,"is_training=training,",python,content +112,119724,"models/dynamics.py",784,0,"",python,selection_command +113,120664,"models/dynamics.py",776,0,"",python,selection_command +114,121086,"models/dynamics.py",776,0,"-",python,content +115,121086,"models/dynamics.py",777,0,"",python,selection_keyboard +116,121179,"models/dynamics.py",777,0,"s",python,content +117,121180,"models/dynamics.py",778,0,"",python,selection_keyboard +118,121863,"models/dynamics.py",777,1,"",python,content +119,121960,"models/dynamics.py",776,1,"",python,content +120,122190,"models/dynamics.py",776,0,"s",python,content +121,122190,"models/dynamics.py",777,0,"",python,selection_keyboard +122,122272,"models/dynamics.py",777,0,"e",python,content +123,122272,"models/dynamics.py",778,0,"",python,selection_keyboard +124,122320,"models/dynamics.py",778,0,"l",python,content +125,122320,"models/dynamics.py",779,0,"",python,selection_keyboard +126,122410,"models/dynamics.py",779,0,"f",python,content +127,122410,"models/dynamics.py",780,0,"",python,selection_keyboard +128,122615,"models/dynamics.py",780,0,".",python,content +129,122615,"models/dynamics.py",781,0,"",python,selection_keyboard +130,122800,"models/dynamics.py",781,0,"i",python,content +131,122800,"models/dynamics.py",782,0,"",python,selection_keyboard +132,122879,"models/dynamics.py",782,0,"s",python,content +133,122879,"models/dynamics.py",783,0,"",python,selection_keyboard +134,123134,"models/dynamics.py",783,0,"_",python,content +135,123135,"models/dynamics.py",784,0,"",python,selection_keyboard +136,123487,"models/dynamics.py",783,0,"",python,selection_command +137,133686,"models/dynamics.py",2342,0,"",python,selection_command 
+138,134409,"models/dynamics.py",2381,0,"",python,selection_command +139,134658,"models/dynamics.py",2409,0,"",python,selection_command +140,134677,"models/dynamics.py",2439,0,"",python,selection_command +141,134711,"models/dynamics.py",2468,0,"",python,selection_command +142,134745,"models/dynamics.py",2496,0,"",python,selection_command +143,134794,"models/dynamics.py",2522,0,"",python,selection_command +144,134811,"models/dynamics.py",2550,0,"",python,selection_command +145,134843,"models/dynamics.py",2576,0,"",python,selection_command +146,134985,"models/dynamics.py",2614,0,"",python,selection_command +147,135664,"models/dynamics.py",2621,0,"\n is_training=self.is_training,",python,content +148,136875,"models/dynamics.py",2615,0,"",python,selection_command +149,137145,"models/dynamics.py",2577,0,"",python,selection_command +150,137222,"models/dynamics.py",2588,0,"",python,selection_command +151,137663,"models/dynamics.py",2270,0,"",python,selection_command +152,139520,"models/dynamics.py",2295,0,"\n is_training: bool",python,content +153,140283,"models/dynamics.py",2249,0,"",python,selection_command +154,140540,"models/dynamics.py",2222,0,"",python,selection_command +155,140565,"models/dynamics.py",2203,0,"",python,selection_command +156,140596,"models/dynamics.py",2184,0,"",python,selection_command +157,140631,"models/dynamics.py",2164,0,"",python,selection_command +158,140667,"models/dynamics.py",2143,0,"",python,selection_command +159,140804,"models/dynamics.py",2124,0,"",python,selection_command +160,140962,"models/dynamics.py",2119,0,"",python,selection_command +161,141118,"models/dynamics.py",2074,0,"",python,selection_command +162,141274,"models/dynamics.py",2033,0,"",python,selection_command +163,141720,"models/dynamics.py",2035,0,"",python,selection_command +164,144549,"models/dynamics.py",0,0,"",python,selection_command +165,145398,"models/dynamics.py",29,0,"",python,selection_command +166,145646,"models/dynamics.py",30,0,"",python,selection_command +167,145665,"models/dynamics.py",41,0,"",python,selection_command +168,145701,"models/dynamics.py",65,0,"",python,selection_command +169,145731,"models/dynamics.py",89,0,"",python,selection_command +170,145771,"models/dynamics.py",103,0,"",python,selection_command +171,145805,"models/dynamics.py",104,0,"",python,selection_command +172,145836,"models/dynamics.py",139,0,"",python,selection_command +173,145870,"models/dynamics.py",140,0,"",python,selection_command +174,145903,"models/dynamics.py",141,0,"",python,selection_command +175,145936,"models/dynamics.py",175,0,"",python,selection_command +176,145969,"models/dynamics.py",208,0,"",python,selection_command +177,146004,"models/dynamics.py",209,0,"",python,selection_command +178,146038,"models/dynamics.py",228,0,"",python,selection_command +179,146085,"models/dynamics.py",249,0,"",python,selection_command +180,146103,"models/dynamics.py",269,0,"",python,selection_command +181,146136,"models/dynamics.py",288,0,"",python,selection_command +182,146170,"models/dynamics.py",307,0,"",python,selection_command +183,146203,"models/dynamics.py",329,0,"",python,selection_command +184,146238,"models/dynamics.py",356,0,"",python,selection_command +185,146270,"models/dynamics.py",377,0,"",python,selection_command +186,146305,"models/dynamics.py",407,0,"",python,selection_command +187,146554,"models/dynamics.py",377,0,"",python,selection_command +188,147056,"models/dynamics.py",406,0,"\n is_training: bool",python,content +189,147804,"models/dynamics.py",356,0,"",python,selection_command 
+190,148046,"models/dynamics.py",329,0,"",python,selection_command +191,148105,"models/dynamics.py",307,0,"",python,selection_command +192,148124,"models/dynamics.py",288,0,"",python,selection_command +193,148146,"models/dynamics.py",269,0,"",python,selection_command +194,148177,"models/dynamics.py",249,0,"",python,selection_command +195,148210,"models/dynamics.py",228,0,"",python,selection_command +196,148243,"models/dynamics.py",209,0,"",python,selection_command +197,148277,"models/dynamics.py",208,0,"",python,selection_command +198,148399,"models/dynamics.py",175,0,"",python,selection_command +199,148563,"models/dynamics.py",141,0,"",python,selection_command +200,148672,"models/dynamics.py",147,0,"",python,selection_command +201,148830,"models/dynamics.py",162,0,"",python,selection_command +202,149084,"models/dynamics.py",147,0,"",python,selection_command +203,151425,"genie.py",0,0,"",python,tab +204,151425,"genie.py",2283,15,"DynamicsMaskGIT",python,selection_command +205,151938,"genie.py",2297,0,"",python,selection_command +206,152259,"genie.py",2304,0,"",python,selection_command +207,152497,"genie.py",2345,0,"",python,selection_command +208,152531,"genie.py",2397,0,"",python,selection_command +209,152567,"genie.py",2446,0,"",python,selection_command +210,152594,"genie.py",2493,0,"",python,selection_command +211,152626,"genie.py",2531,0,"",python,selection_command +212,152660,"genie.py",2575,0,"",python,selection_command +213,152698,"genie.py",2621,0,"",python,selection_command +214,153020,"genie.py",2655,0,"",python,selection_command +215,153613,"genie.py",2712,0,"\n is_training=self.is_training,",python,content +216,156142,"genie.py",3208,0,"\n is_training=self.is_training,",python,content +217,157043,"genie.py",3221,0,"",python,selection_command +218,157169,"genie.py",3225,0,"",python,selection_command +219,157339,"genie.py",3236,0,"",python,selection_command +220,157490,"genie.py",3237,0,"",python,selection_command +221,157655,"genie.py",3241,0,"",python,selection_command +222,157888,"genie.py",3242,0,"",python,selection_command +223,158246,"genie.py",972,0,"",python,selection_command +224,160661,"genie.py",1613,0,"",python,selection_command +225,161753,"genie.py",1630,0,"",python,selection_command +226,161930,"genie.py",2185,0,"",python,selection_command +227,162093,"genie.py",2202,0,"",python,selection_command +228,162264,"genie.py",2729,0,"",python,selection_command +229,163035,"genie.py",2746,0,"",python,selection_command +230,163198,"genie.py",3225,0,"",python,selection_command +231,163622,"genie.py",3242,0,"",python,selection_command +232,165671,"TERMINAL",0,0,"[franz.srambical@hai005.haicore.berlin:~/jafar] $ ",,terminal_output +233,165746,"TERMINAL",0,0,"bash experiments/sample.sh ",,terminal_output +234,165983,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +235,168558,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +236,173098,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +237,173172,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +238,174534,"TERMINAL",0,0,"2025-07-21 22:06:33.592546: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +239,182995,"TERMINAL",0,0,"2025-07-21 22:06:42.058067: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +240,183156,"TERMINAL",0,0,"2025-07-21 22:06:42.214189: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +241,183542,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +242,183657,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +243,183714,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +244,183869,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +245,186087,"TERMINAL",0,0,"2025-07-21 22:06:45.154779: W external/xla/xla/service/platform_util.cc:220] unable to create StreamExecutor for CUDA:0: : CUDA_ERROR_OUT_OF_MEMORY: out of memory\r\n",,terminal_output +246,186336,"TERMINAL",0,0,"2025-07-21 22:06:45.401230: W external/xla/xla/service/platform_util.cc:220] unable to create StreamExecutor for CUDA:0: : CUDA_ERROR_OUT_OF_MEMORY: out of memory\r\n",,terminal_output +247,188648,"TERMINAL",0,0,"2025-07-21 22:06:47.711757: W external/xla/xla/service/platform_util.cc:220] unable to create StreamExecutor for CUDA:0: : CUDA_ERROR_OUT_OF_MEMORY: out of memory\r\n",,terminal_output +248,190777,"TERMINAL",0,0,"2025-07-21 22:06:49.840958: W external/xla/xla/service/platform_util.cc:220] unable to create StreamExecutor for CUDA:0: : CUDA_ERROR_OUT_OF_MEMORY: out of memory\r\n",,terminal_output +249,191048,"TERMINAL",0,0,"2025-07-21 22:06:50.111436: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +250,191119,"TERMINAL",0,0,"2025-07-21 22:06:50.182092: W external/xla/xla/service/platform_util.cc:220] unable to create StreamExecutor for CUDA:0: : CUDA_ERROR_OUT_OF_MEMORY: out of memory\r\n",,terminal_output +251,191956,"TERMINAL",0,0,"E0721 22:06:50.937631 2056985 cuda_dnn.cc:535] Could not create cudnn handle: CUDNN_STATUS_INTERNAL_ERROR\r\nE0721 22:06:50.937868 2056985 cuda_dnn.cc:539] Memory usage: 131072 bytes free, 85029158912 bytes total.\r\nE0721 22:06:50.941979 2056985 cuda_dnn.cc:535] Could not create cudnn handle: CUDNN_STATUS_INTERNAL_ERROR\r\nE0721 22:06:50.942162 2056985 cuda_dnn.cc:539] Memory usage: 131072 bytes free, 85029158912 bytes total.\r\nF0721 22:06:50.950825 2059565 pjrt_stream_executor_client.cc:925] Non-OK-status: local_device->ThenExecuteCallback( local_device->host_to_device_stream(), [staging_buffer{std::move(staging_buffer)}, on_done_with_host_buffer{ std::move(on_done_with_host_buffer)}]() mutable { if (on_done_with_host_buffer) { std::move (*on_done_with_host_buffer)(); } })\r\nStatus: RESOURCE_EXHAUSTED: : CUDA_ERROR_OUT_OF_MEMORY: out of memory\r\nF0721 22:06:50.955774 2059844 pjrt_stream_executor_client.cc:925] Non-OK-status: local_device->ThenExecuteCallback( local_device->host_to_device_stream(), [staging_buffer{std::move(staging_buffer)}, on_done_with_host_buffer{ std::move(on_done_with_host_buffer)}]() mutable { if (on_done_with_host_buffer) { std::move (*on_done_with_host_buffer)(); } })\r\nStatus: RESOURCE_EXHAUSTED: : CUDA_ERROR_OUT_OF_MEMORY: out of memory\r\nE0721 22:06:50.991143 2056985 cuda_dnn.cc:535] Could not create cudnn handle: CUDNN_STATUS_INTERNAL_ERROR\r\nE0721 22:06:50.991342 2056985 cuda_dnn.cc:539] Memory usage: 131072 bytes free, 85029158912 bytes total.\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 65, in \r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 65, in \r\n rng = jax.random.PRNGKey(args.seed)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/random.py"", line 249, in PRNGKey\r\n rng = jax.random.PRNGKey(args.seed)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/random.py"", line 249, in PRNGKey\r\n return _return_prng_keys(True, _key('PRNGKey', seed, impl))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/random.py"", line 201, in _key\r\n return _return_prng_keys(True, _key('PRNGKey', seed, impl))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/random.py"", line 201, in _key\r\n return prng.random_seed(seed, impl=impl)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/prng.py"", line 551, in random_seed\r\n return prng.random_seed(seed, impl=impl)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/prng.py"", line 551, in random_seed\r\nE0721 22:06:50.995229 2056985 cuda_dnn.cc:535] Could not create cudnn handle: CUDNN_STATUS_INTERNAL_ERROR\r\n seeds_arr = jnp.asarray(np.int64(seeds))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array.py"", line 383, in asarray\r\nE0721 22:06:50.995411 2056985 cuda_dnn.cc:539] Memory usage: 131072 bytes free, 85029158912 bytes total.\r\n seeds_arr = jnp.asarray(np.int64(seeds))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array.py"", line 383, in asarray\r\n return array(a, dtype=dtype, copy=bool(copy), order=order, device=device)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array.py"", line 268, in array\r\n return array(a, dtype=dtype, copy=bool(copy), order=order, device=device)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array.py"", line 268, in array\r\n out_array: Array = lax._convert_element_type(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 1726, in _convert_element_type\r\n out_array: Array = lax._convert_element_type(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 1726, in _convert_element_type\r\n return convert_element_type_p.bind(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 536, in bind\r\n return convert_element_type_p.bind(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 536, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 552, in _true_bind\r\n return self._true_bind(*args, **params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 552, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 4902, in _convert_element_type_bind_with_trace\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File 
""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 4902, in _convert_element_type_bind_with_trace\r\n operand = core.Primitive.bind_with_trace(convert_element_type_p, trace, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 562, in bind_with_trace\r\n operand = core.Primitive.bind_with_trace(convert_element_type_p, trace, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 562, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1066, in process_primitive\r\n return trace.process_primitive(self, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1066, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 91, in apply_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 91, in apply_primitive\r\n outs = fun(*args)\r\nRuntimeError: Bad StatusOr access: RESOURCE_EXHAUSTED: : CUDA_ERROR_OUT_OF_MEMORY: out of memory\r\n outs = fun(*args)\r\nRuntimeError: Bad StatusOr access: RESOURCE_EXHAUSTED: : CUDA_ERROR_OUT_OF_MEMORY: out of memory\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 65, in \r\n rng = jax.random.PRNGKey(args.seed)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/random.py"", line 249, in PRNGKey\r\n return _return_prng_keys(True, _key('PRNGKey', seed, impl))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/random.py"", line 201, in _key\r\n return prng.random_seed(seed, impl=impl)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/prng.py"", line 551, in random_seed\r\n seeds_arr = jnp.asarray(np.int64(seeds))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array.py"", line 383, in asarray\r\n return array(a, dtype=dtype, copy=bool(copy), order=order, device=device)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array.py"", line 268, in array\r\n out_array: Array = lax._convert_element_type(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 1726, in _convert_element_type\r\n return convert_element_type_p.bind(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 536, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 552, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 4902, in _convert_element_type_bind_with_trace\r\n operand = 
core.Primitive.bind_with_trace(convert_element_type_p, trace, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 562, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1066, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 91, in apply_primitive\r\n outs = fun(*args)\r\njaxlib._jax.XlaRuntimeError: FAILED_PRECONDITION: DNN library initialization failed. Look at the errors above for more details.\r\n",,terminal_output +252,192072,"TERMINAL",0,0,"*** Check failure stack trace: ***\r\n @ 0x7fc4bf079754 absl::lts_20230802::log_internal::LogMessage::SendToLog()\r\n @ 0x7fc4bf0795c4 absl::lts_20230802::log_internal::LogMessage::Flush()\r\n @ 0x7fc4bf079af9 absl::lts_20230802::log_internal::LogMessageFatal::~LogMessageFatal()\r\n @ 0x7fc4b6ddcf9b std::_Function_handler<>::_M_invoke()\r\n @ 0x7fc4bef05bd4 Eigen::ThreadPoolTempl<>::WorkerLoop()\r\n @ 0x7fc4bef05a61 std::__invoke_impl<>()\r\n @ 0x7fc4beef362f tsl::(anonymous namespace)::PThread::ThreadFn()\r\n @ 0x7fc79c689c02 start_thread\r\n*** Check failure stack trace: ***\r\n @ 0x7f2ec429a754 absl::lts_20230802::log_internal::LogMessage::SendToLog()\r\n @ 0x7f2ec429a5c4 absl::lts_20230802::log_internal::LogMessage::Flush()\r\n @ 0x7f2ec429aaf9 absl::lts_20230802::log_internal::LogMessageFatal::~LogMessageFatal()\r\n @ 0x7f2ebbffdf9b std::_Function_handler<>::_M_invoke()\r\n @ 0x7f2ec4126bd4 Eigen::ThreadPoolTempl<>::WorkerLoop()\r\n @ 0x7f2ec4126a61 std::__invoke_impl<>()\r\n @ 0x7f2ec411462f tsl::(anonymous namespace)::PThread::ThreadFn()\r\n @ 0x7f31a1689c02 start_thread\r\n",,terminal_output +253,192183,"TERMINAL",0,0,"F0721 22:06:51.203948 2059572 pjrt_stream_executor_client.cc:925] Non-OK-status: local_device->ThenExecuteCallback( local_device->host_to_device_stream(), [staging_buffer{std::move(staging_buffer)}, on_done_with_host_buffer{ std::move(on_done_with_host_buffer)}]() mutable { if (on_done_with_host_buffer) { std::move (*on_done_with_host_buffer)(); } })\r\nStatus: RESOURCE_EXHAUSTED: : CUDA_ERROR_OUT_OF_MEMORY: out of memory\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 65, in \r\n rng = jax.random.PRNGKey(args.seed)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/random.py"", line 249, in PRNGKey\r\n return _return_prng_keys(True, _key('PRNGKey', seed, impl))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/random.py"", line 201, in _key\r\n return prng.random_seed(seed, impl=impl)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/prng.py"", line 551, in random_seed\r\n seeds_arr = jnp.asarray(np.int64(seeds))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array.py"", line 383, in asarray\r\n return array(a, dtype=dtype, copy=bool(copy), order=order, device=device)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array.py"", line 268, in array\r\n out_array: Array = lax._convert_element_type(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 1726, in _convert_element_type\r\n return convert_element_type_p.bind(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 536, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 552, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 4902, in _convert_element_type_bind_with_trace\r\n operand = core.Primitive.bind_with_trace(convert_element_type_p, trace, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 562, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1066, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 91, in apply_primitive\r\n outs = fun(*args)\r\nRuntimeError: Bad StatusOr access: RESOURCE_EXHAUSTED: : CUDA_ERROR_OUT_OF_MEMORY: out of memory\r\n",,terminal_output +254,192262,"TERMINAL",0,0,"*** Check failure stack trace: ***\r\n @ 0x7fd294c37754 absl::lts_20230802::log_internal::LogMessage::SendToLog()\r\n @ 0x7fd294c375c4 absl::lts_20230802::log_internal::LogMessage::Flush()\r\n @ 0x7fd294c37af9 absl::lts_20230802::log_internal::LogMessageFatal::~LogMessageFatal()\r\n @ 0x7fd28c99af9b std::_Function_handler<>::_M_invoke()\r\n @ 0x7fd294ac3bd4 Eigen::ThreadPoolTempl<>::WorkerLoop()\r\n @ 0x7fd294ac3a61 std::__invoke_impl<>()\r\n @ 0x7fd294ab162f tsl::(anonymous namespace)::PThread::ThreadFn()\r\n @ 0x7fd572689c02 start_thread\r\n",,terminal_output +255,194172,"TERMINAL",0,0,"srun: error: hai005: task 3: Exited with exit code 1\r\n",,terminal_output +256,194259,"TERMINAL",0,0,"2025-07-21 22:06:53.322809: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +257,195322,"TERMINAL",0,0,"2025-07-21 22:06:54.365352: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +258,196037,"TERMINAL",0,0,"2025-07-21 22:06:55.094962: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +259,198602,"TERMINAL",0,0,"srun: error: hai005: task 7: Aborted (core dumped)\r\n",,terminal_output +260,199471,"TERMINAL",0,0,"srun: error: hai005: task 2: Aborted (core dumped)\r\n",,terminal_output +261,201111,"TERMINAL",0,0,"srun: error: hai005: task 5: Aborted (core dumped)\r\n",,terminal_output +262,204127,"TERMINAL",0,0,"2025-07-21 22:07:03.192799: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +263,210155,"TERMINAL",0,0,"2025-07-21 22:07:09.216976: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +264,210739,"TERMINAL",0,0,"2025-07-21 22:07:09.780761: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +265,211702,"TERMINAL",0,0,"2025-07-21 22:07:10.764308: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +266,225226,"TERMINAL",0,0,"2025-07-21 22:07:24.281404: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +267,226783,"TERMINAL",0,0,"2025-07-21 22:07:25.843203: W external/xla/xla/tsl/framework/bfc_allocator.cc:501] Allocator (GPU_0_bfc) ran out of memory trying to allocate 1.00MiB (rounded to 1048576)requested by op \r\nIf the cause is memory fragmentation maybe the environment variable 'TF_GPU_ALLOCATOR=cuda_malloc_async' will improve the situation. 
\r\nCurrent allocation summary follows.\r\nCurrent allocation summary follows.\r\n2025-07-21 22:07:25.844859: W external/xla/xla/tsl/framework/bfc_allocator.cc:512] ****************************************************************************************************\r\n",,terminal_output +268,231627,"TERMINAL",0,0,"autoreg sampling...\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 185, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 145, in _autoreg_sample\r\n generated_vid = sampling_fn(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n y = fn(root, *args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3023, in scope_fn\r\n return fn(module.clone(parent=scope, _deep_clone=True), *args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 136, in _sampling_wrapper\r\n return module.sample_causal(batch, args.seq_len, args.temperature, args.sample_argmax)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/genie.py"", line 140, in sample_causal\r\n assert self.decode, ""sample_causal with KV cache requires model to be in decode mode.""\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1314, in __getattr__\r\n raise AttributeError(msg)\r\nAttributeError: ""Genie"" object has no attribute ""decode"".\r\n",,terminal_output +269,234453,"TERMINAL",0,0,"srun: error: hai005: task 0: Exited with exit code 1\r\n",,terminal_output +270,237513,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 124, in \r\n restored = checkpoint_manager.restore(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/checkpoint_manager.py"", line 1647, in restore\r\n restored = self._checkpointer.restore(restore_directory, args=args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/async_checkpointer.py"", line 562, in restore\r\n return super().restore(directory, *args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 304, in restore\r\n restored = self._restore(directory, args=ckpt_args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 323, in _restore\r\n return self._handler.restore(directory, args=args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/composite_checkpoint_handler.py"", line 859, in restore\r\n restored[item_name] = handler.restore(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/standard_checkpoint_handler.py"", line 259, in restore\r\n return self._impl.restore(\r\n File 
""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/pytree_checkpoint_handler.py"", line 816, in restore\r\n return self._handler_impl.restore(directory, args=args)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/base_pytree_checkpoint_handler.py"", line 815, in restore\r\n tree_memory_size, restored_item = asyncio_utils.run_sync(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/asyncio_utils.py"", line 50, in run_sync\r\n return asyncio.run(coro)\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/asyncio/runners.py"", line 44, in run\r\n return loop.run_until_complete(main)\r\n File ""/home/franz.srambical/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/asyncio/base_events.py"", line 649, in run_until_complete\r\n return future.result()\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/base_pytree_checkpoint_handler.py"", line 632, in _maybe_deserialize\r\n deserialized_batches += await asyncio.gather(*deserialized_batches_ops)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py"", line 1319, in deserialize\r\n *ret, array_metadatas = await asyncio.gather(*deserialize_ops)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/serialization.py"", line 555, in async_deserialize\r\n return await read_and_create_array(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/serialization.py"", line 509, in read_and_create_array\r\n dbs = sum(await asyncio.gather(*read_array_coros), [])\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/serialization.py"", line 463, in _read_array_index_and_device_put\r\n result.append(jax.device_put(shard, Format(dll, sharding))) # pytype: disable=wrong-arg-types\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/api.py"", line 2613, in device_put\r\n out_flat = dispatch.device_put_p.bind(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 536, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 552, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 562, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1066, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 583, in _batched_device_put_impl\r\n shard_arg_results = pxla.shard_args(dsa_shardings, [None] * len(dsa_xs),\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n return func(*args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 137, in 
shard_args\r\n return shard_arg_handlers[type(arg)]([arg], shardings, layouts,\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 219, in _shard_np_array\r\n results.append(batched_device_put(aval, sharding, shards, devices))\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 244, in batched_device_put\r\n return xc.batched_device_put(aval, sharding, xs, list(devices), committed)\r\njaxlib._jax.XlaRuntimeError: RESOURCE_EXHAUSTED: Out of memory while trying to allocate 1048576 bytes.\r\n",,terminal_output +271,237860,"TERMINAL",0,0,"autoreg sampling...\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 185, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 145, in _autoreg_sample\r\n generated_vid = sampling_fn(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n y = fn(root, *args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3023, in scope_fn\r\n return fn(module.clone(parent=scope, _deep_clone=True), *args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 136, in _sampling_wrapper\r\n return module.sample_causal(batch, args.seq_len, args.temperature, args.sample_argmax)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/genie.py"", line 140, in sample_causal\r\n assert self.decode, ""sample_causal with KV cache requires model to be in decode mode.""\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1314, in __getattr__\r\n raise AttributeError(msg)\r\nAttributeError: ""Genie"" object has no attribute ""decode"".\r\n",,terminal_output +272,240132,"TERMINAL",0,0,"srun: error: hai005: task 1: Exited with exit code 1\r\n",,terminal_output +273,241223,"TERMINAL",0,0,"srun: error: hai005: task 6: Exited with exit code 1\r\n",,terminal_output +274,242535,"TERMINAL",0,0,"autoreg sampling...\r\nTraceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 185, in \r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 145, in _autoreg_sample\r\n generated_vid = sampling_fn(\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n y = fn(root, *args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3023, in scope_fn\r\n return fn(module.clone(parent=scope, _deep_clone=True), *args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/sample.py"", line 136, in _sampling_wrapper\r\n return module.sample_causal(batch, args.seq_len, args.temperature, args.sample_argmax)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return 
self._call_wrapped_method(fun, args, kwargs)\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/fast/home/franz.srambical/jafar/genie.py"", line 140, in sample_causal\r\n assert self.decode, ""sample_causal with KV cache requires model to be in decode mode.""\r\n File ""/fast/home/franz.srambical/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1314, in __getattr__\r\n raise AttributeError(msg)\r\nAttributeError: ""Genie"" object has no attribute ""decode"".\r\n",,terminal_output +275,245311,"TERMINAL",0,0,"srun: error: hai005: task 4: Exited with exit code 1\r\n]0;franz.srambical@hai-login1:~/jafar[?2004h[franz.srambical@hai005.haicore.berlin:~/jafar] $ ",,terminal_output +276,270923,"TERMINAL",0,0,"\r[franz.srambical@hai005.haicore.berlin:~/jafar] $ \r[franz.srambical@hai005.haicore.berlin:~/jafar] $ \r[franz.srambical@hai005.haicore.berlin:~/jafar] $ \r[franz.srambical@hai005.haicore.berlin:~/jafar] $ ",,terminal_output +277,271067,"TERMINAL",0,0,"\r[franz.srambical@hai005.haicore.berlin:~/jafar] $ \r[franz.srambical@hai005.haicore.berlin:~/jafar] $ \r[franz.srambical@hai005.haicore.berlin:~/jafar] $ \r[franz.srambical@hai005.haicore.berlin:~/jafar] $ \r[franz.srambical@hai005.haicore.berlin:~/jafar] $ \r[franz.srambical@hai005.haicore.berlin:~/jafar] $ \r[franz.srambical@hai005.haicore.berlin:~/jafar] $ \r[franz.srambical@hai005.haicore.berlin:~/jafar] $ ",,terminal_output +278,272719,"TERMINAL",0,0,"\r[franz.srambical@hai005.haicore.berlin:~/jafar] $ \r[franz.srambical@hai005.haicore.berlin:~/jafar] $ \r[franz.srambical@hai005.haicore.berlin:~/jafar] $ \r[franz.srambical@hai005.haicore.berlin:~/jafar] $ ",,terminal_output +279,272770,"TERMINAL",0,0,"\r[franz.srambical@hai005.haicore.berlin:~/jafar] $ ",,terminal_output +280,292014,"sample.py",0,0,"",python,tab +281,298615,"experiments/sample.sh",0,0,"",shellscript,tab diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-0f5513f7-8bc9-4c5d-856d-79d92f75113d1751284706913-2025_06_30-13.59.01.459/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-0f5513f7-8bc9-4c5d-856d-79d92f75113d1751284706913-2025_06_30-13.59.01.459/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..171b3582dd2ace11ccfbad07d3edb23ec11a8ec3 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-0f5513f7-8bc9-4c5d-856d-79d92f75113d1751284706913-2025_06_30-13.59.01.459/source.csv @@ -0,0 +1,52 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,263,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"1:59:01 PM [info] Activating crowd-code\n1:59:01 PM [info] Recording started\n1:59:01 PM [info] Initializing git provider using file system watchers...\n1:59:01 PM [info] Git repository found\n1:59:01 PM [info] Git provider initialized successfully\n1:59:01 PM [info] Initial git state: [object Object]\n",Log,tab +3,3941,"TERMINAL",0,0,"/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_command +4,3980,"TERMINAL",0,0,"]633;E;2025-06-30 13:59:05 /bin/python3 
/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt;ae710212-bbd7-466b-8215-e56dfc4f7a88]633;C",,terminal_output +5,4004,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash]633;D;0",,terminal_output +6,31538,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_command +7,33235,"TERMINAL",0,0,"bash",,terminal_focus +8,35303,"scripts_horeka/train_tokenizer.sh",0,0,"#!/usr/bin/env bash\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=""debug""\nslurm_job_id=""0000""\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=16 \\n --min_lr=4.24e-4 \\n --max_lr=4.24e-4 \\n --log_image_interval=100 \\n --log \\n --name=test-wandb-tags-$slurm_job_id \\n --tags test tokenizer debug \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir",shellscript,tab +9,35920,"TERMINAL",0,0,"bash",,terminal_focus +10,39024,"TERMINAL",0,0,"queue",,terminal_command +11,39105,"TERMINAL",0,0,"]633;E;2025-06-30 13:59:40 queue;79ec3af7-6a10-4dac-bb07-e3b50f56ded4]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Mon Jun 30 13:59:40 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)",,terminal_output +12,39625,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +13,1168748,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=accelerated --nodes=4 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G --mail-user=mihir@pdoom.org --mail-type=ALL",,terminal_command +14,1168797,"TERMINAL",0,0,"]633;E;2025-06-30 14:18:30 salloc --time=01:00:00 --partition=accelerated --nodes=4 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G --mail-user=mihir@pdoom.org --mail-type=ALL;79ec3af7-6a10-4dac-bb07-e3b50f56ded4]633;C",,terminal_output +15,1168893,"TERMINAL",0,0,"salloc: Pending job allocation 3306136\r\nsalloc: job 3306136 queued and waiting for resources\r\n",,terminal_output +16,1169921,"TERMINAL",0,0,"bash",,terminal_focus +17,1170955,"TERMINAL",0,0,"queue",,terminal_command +18,1171010,"TERMINAL",0,0,"]633;E;2025-06-30 14:18:32 queue;ead59344-49db-4336-9336-47fae706e637]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Mon Jun 30 14:18:32 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3306136 accelerat interact tum_cte0 PD\t0:00\t 4 (Resources)",,terminal_output +19,1172154,"TERMINAL",0,0,"3\t ",,terminal_output +20,1173084,"TERMINAL",0,0,"4\t ",,terminal_output +21,1174201,"TERMINAL",0,0,"5\t ",,terminal_output +22,1174691,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +23,1175459,"TERMINAL",0,0,"idle",,terminal_command +24,1175485,"TERMINAL",0,0,"]633;E;2025-06-30 14:18:36 idle;ead59344-49db-4336-9336-47fae706e637]633;CPartition dev_cpuonly : 10 nodes idle\r\nPartition cpuonly : 154 nodes idle\r\nPartition 
dev_accelerated : 0 nodes idle\r\nPartition accelerated : 1 nodes idle\r\nPartition dev_accelerated-h100 : 0 nodes idle\r\nPartition accelerated-h100 : 0 nodes idle\r\nPartition large : 8 nodes idle\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +25,1181058,"TERMINAL",0,0,"^C",,terminal_command +26,1181077,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]633;E;;ead59344-49db-4336-9336-47fae706e637]633;C]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D",,terminal_output +27,1183520,"TERMINAL",0,0,"idle",,terminal_command +28,1183538,"TERMINAL",0,0,"]633;E;2025-06-30 14:18:44 idle;ead59344-49db-4336-9336-47fae706e637]633;CPartition dev_cpuonly : 10 nodes idle\r\nPartition cpuonly : 154 nodes idle\r\nPartition dev_accelerated : 2 nodes idle\r\nPartition accelerated : 1 nodes idle\r\nPartition dev_accelerated-h100 : 0 nodes idle\r\nPartition accelerated-h100 : 0 nodes idle\r\nPartition large : 8 nodes idle\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +29,1185175,"TERMINAL",0,0,"^C",,terminal_command +30,1185191,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]633;E;;ead59344-49db-4336-9336-47fae706e637]633;C]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D",,terminal_output +31,1186579,"TERMINAL",0,0,"idle",,terminal_command +32,1186594,"TERMINAL",0,0,"]633;E;2025-06-30 14:18:47 idle;ead59344-49db-4336-9336-47fae706e637]633;CPartition dev_cpuonly : 10 nodes idle\r\nPartition cpuonly : 154 nodes idle\r\nPartition dev_accelerated : 2 nodes idle\r\nPartition accelerated : 1 nodes idle\r\nPartition dev_accelerated-h100 : 0 nodes idle\r\nPartition accelerated-h100 : 0 nodes idle\r\nPartition large : 8 nodes idle\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +33,1187256,"TERMINAL",0,0,"idle",,terminal_command +34,1187270,"TERMINAL",0,0,"]633;E;2025-06-30 14:18:48 idle;ead59344-49db-4336-9336-47fae706e637]633;CPartition dev_cpuonly : 10 nodes idle\r\nPartition cpuonly : 154 nodes idle\r\nPartition dev_accelerated : 2 nodes idle\r\nPartition accelerated : 1 nodes idle\r\nPartition dev_accelerated-h100 : 0 nodes idle\r\nPartition accelerated-h100 : 0 nodes idle\r\nPartition large : 8 nodes idle\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +35,1187998,"TERMINAL",0,0,"idle",,terminal_command +36,1188014,"TERMINAL",0,0,"]633;E;2025-06-30 14:18:49 idle;ead59344-49db-4336-9336-47fae706e637]633;CPartition dev_cpuonly : 10 nodes idle\r\nPartition cpuonly : 154 nodes idle\r\nPartition dev_accelerated : 2 nodes idle\r\nPartition accelerated : 1 nodes idle\r\nPartition dev_accelerated-h100 : 0 nodes idle\r\nPartition accelerated-h100 : 0 nodes idle\r\nPartition large : 8 nodes idle\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +37,1188374,"TERMINAL",0,0,"idle",,terminal_command +38,1188379,"TERMINAL",0,0,"]633;E;2025-06-30 14:18:49 idle;ead59344-49db-4336-9336-47fae706e637]633;CPartition dev_cpuonly : 10 nodes idle\r\nPartition cpuonly : 154 nodes idle\r\nPartition dev_accelerated : 2 nodes idle\r\nPartition accelerated : 1 nodes idle\r\nPartition dev_accelerated-h100 : 0 nodes idle\r\nPartition accelerated-h100 : 0 nodes idle\r\nPartition large : 8 nodes 
idle\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +39,1188713,"TERMINAL",0,0,"idle",,terminal_command +40,1188724,"TERMINAL",0,0,"]633;E;2025-06-30 14:18:50 idle;ead59344-49db-4336-9336-47fae706e637]633;CPartition dev_cpuonly : 10 nodes idle\r\nPartition cpuonly : 154 nodes idle\r\nPartition dev_accelerated : 2 nodes idle\r\nPartition accelerated : 1 nodes idle\r\nPartition dev_accelerated-h100 : 0 nodes idle\r\nPartition accelerated-h100 : 0 nodes idle\r\nPartition large : 8 nodes idle\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +41,1189024,"TERMINAL",0,0,"idle",,terminal_command +42,1189039,"TERMINAL",0,0,"]633;E;2025-06-30 14:18:50 idle;ead59344-49db-4336-9336-47fae706e637]633;CPartition dev_cpuonly : 10 nodes idle\r\nPartition cpuonly : 154 nodes idle\r\nPartition dev_accelerated : 2 nodes idle\r\nPartition accelerated : 1 nodes idle\r\nPartition dev_accelerated-h100 : 0 nodes idle\r\nPartition accelerated-h100 : 0 nodes idle\r\nPartition large : 8 nodes idle\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +43,1189680,"TERMINAL",0,0,"",,terminal_command +44,1189694,"TERMINAL",0,0,"\r\n[?2004l\r]633;E;;ead59344-49db-4336-9336-47fae706e637]633;C]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D",,terminal_output +45,1190195,"TERMINAL",0,0,"idle",,terminal_command +46,1190210,"TERMINAL",0,0,"]633;E;2025-06-30 14:18:51 idle;ead59344-49db-4336-9336-47fae706e637]633;CPartition dev_cpuonly : 10 nodes idle\r\nPartition cpuonly : 154 nodes idle\r\nPartition dev_accelerated : 1 nodes idle\r\nPartition accelerated : 1 nodes idle\r\nPartition dev_accelerated-h100 : 0 nodes idle\r\nPartition accelerated-h100 : 0 nodes idle\r\nPartition large : 8 nodes idle\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +47,1192901,"TERMINAL",0,0,"idle",,terminal_command +48,1192917,"TERMINAL",0,0,"]633;E;2025-06-30 14:18:54 idle;ead59344-49db-4336-9336-47fae706e637]633;CPartition dev_cpuonly : 10 nodes idle\r\nPartition cpuonly : 152 nodes idle\r\nPartition dev_accelerated : 1 nodes idle\r\nPartition accelerated : 1 nodes idle\r\nPartition dev_accelerated-h100 : 0 nodes idle\r\nPartition accelerated-h100 : 0 nodes idle\r\nPartition large : 8 nodes idle\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +49,1194207,"TERMINAL",0,0,"salloc",,terminal_focus +50,1198907,"TERMINAL",0,0,"^Csalloc: Job allocation 3306136 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;1",,terminal_output +51,1199160,"TERMINAL",0,0,"^C",,terminal_command +52,1199175,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]633;E;;79ec3af7-6a10-4dac-bb07-e3b50f56ded4]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-17a23500-007e-4825-8127-4f0062137ef91759750602496-2025_10_06-13.37.19.164/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-17a23500-007e-4825-8127-4f0062137ef91759750602496-2025_10_06-13.37.19.164/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..0657ca5427691ae729b1de6fc13792229e6abdbe --- /dev/null +++ 
b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-17a23500-007e-4825-8127-4f0062137ef91759750602496-2025_10_06-13.37.19.164/source.csv @@ -0,0 +1,3941 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,4,"dataset_duplicates.ipynb",0,0,"import os\nimport pickle\nimport hashlib\nimport numpy as np\nfrom collections import defaultdict\nfrom array_record.python.array_record_module import ArrayRecordReader\nimport multiprocessing\nfrom tqdm import tqdm # Using tqdm for a nice progress bar\n",python,tab +2,406,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"1:37:19 PM [info] Activating crowd-code\n1:37:19 PM [info] Recording started\n1:37:19 PM [info] Initializing git provider using file system watchers...\n",Log,tab +3,626,"extension-output-pdoom-org.crowd-code-#1-crowd-code",150,0,"1:37:19 PM [info] Git repository found\n1:37:19 PM [info] Git provider initialized successfully\n1:37:19 PM [info] Initial git state: [object Object]\n",Log,content +4,703,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",0,0,"# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #\n# /$$ /$$ /$$ /$$$$$$$ /$$$$$$$ /$$$$$$$ /$$$$$$ #\n# | $$ | $$|__/ | $$__ $$ | $$__ $$| $$__ $$ /$$__ $$ #\n# | $$ | $$ /$$ /$$$$$$$$| $$ \ $$ /$$$$$$ /$$$$$$ /$$$$$$/$$$$ | $$ \ $$| $$ \ $$| $$ \ $$ #\n# | $$ / $$/| $$|____ /$$/| $$ | $$ /$$__ $$ /$$__ $$| $$_ $$_ $$ | $$$$$$$/| $$$$$$$/| $$ | $$ #\n# \ $$ $$/ | $$ /$$$$/ | $$ | $$| $$ \ $$| $$ \ $$| $$ \ $$ \ $$ | $$____/ | $$____/ | $$ | $$ #\n# \ $$$/ | $$ /$$__/ | $$ | $$| $$ | $$| $$ | $$| $$ | $$ | $$ | $$ | $$ | $$ | $$ #\n# \ $/ | $$ /$$$$$$$$| $$$$$$$/| $$$$$$/| $$$$$$/| $$ | $$ | $$ | $$ | $$ | $$$$$$/ #\n# \_/ |__/|________/|_______/ \______/ \______/ |__/ |__/ |__/ |__/ |__/ \______/ #\n# #\n# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #\n\n# FORK OF LEANDRO KIELIGER'S DOOM PPO TUTORIAL: https://lkieliger.medium.com/deep-reinforcement-learning-in-practice-by-playing-doom-part-1-getting-started-618c99075c77\n\n# SCRIPT TO RUN PPO AGENT AND GENERATE DATASET FOR DOOM ENVIRONMENT.\n\nfrom dataclasses import dataclass\nimport imageio\nfrom common import envs\nimport torch\nimport json\nfrom vizdoom.vizdoom import GameVariable\nimport os\nfrom PIL import Image\n\nimport numpy as np\nfrom train_ppo_parallel import DoomWithBotsCurriculum, game_instance\nfrom stable_baselines3.common.vec_env import VecTransposeImage, DummyVecEnv, SubprocVecEnv\n\nfrom loguru import logger\nimport tyro\nfrom jasmine_data.utils import save_chunks\n\n# To replicate frame_skip in the environment\nACTION_REPEAT = 4\n\n\n@dataclass\nclass Args:\n num_episodes_train: int = 1000\n num_episodes_val: int = 100\n num_episodes_test: int = 100\n min_episode_length: int = 100\n max_episode_length: int = 1000\n num_parallel_envs: int = 100\n target_width: int = 320\n target_height: int = 240\n chunk_size: int = 160\n chunks_per_file: int = 100\n agent_path: str = """"\n seed: int = 0\n output_dir: str = ""data/vizdoom_episodes""\n generate_gif: bool = False\n\n\nargs = tyro.cli(Args)\ndevice = torch.device(""cuda"" if torch.cuda.is_available() else ""cpu"")\nlogger.info(f""Using device: {device}"")\n\n\ndef dummy_vec_env_with_bots_curriculum(n_envs=1, **kwargs) -> VecTransposeImage:\n """"""Wraps a Doom game instance in a vectorized environment with shaped rewards and curriculum.""""""\n scenario = kwargs.pop(""scenario"") 
# Remove 'scenario' from kwargs\n return VecTransposeImage(\n DummyVecEnv(\n [lambda: DoomWithBotsCurriculum(game_instance(scenario), **kwargs)] * n_envs\n )\n )\n\n\n# TODO move to utils\ndef downsample_resolution(img):\n if img.shape[:2] != (args.target_height, args.target_width):\n resample_filter = Image.LANCZOS\n img = Image.fromarray(img)\n img = img.resize(\n (args.target_width, args.target_height), resample=resample_filter\n )\n img = np.array(img)\n return img\n\n\ndef make_gif(agent, eval_env_args):\n """"""Generate a GIF by running the agent in the environment.\n\n Args:\n agent: The trained PPO agent.\n file_path (str): Path to save the generated GIF.\n eval_env_args (dict): Arguments for the evaluation environment.\n num_episodes (int): Number of episodes to run.\n\n Returns:\n list: Collected health values for analysis.\n """"""\n # Set frame_skip to 1 to capture all frames\n eval_env_args[""frame_skip""] = 1\n env = dummy_vec_env_with_bots_curriculum(1, **eval_env_args)\n\n images = []\n actions = []\n health_values = []\n current_action = None\n frame_counter = 0\n\n obs = env.reset()\n\n done = False\n while not done and frame_counter < args.max_episode_length:\n if frame_counter % ACTION_REPEAT == 0:\n current_action, _ = agent.predict(obs)\n\n obs, _, done, _ = env.step(current_action)\n\n # Get the raw screen buffer from the Doom game instance\n screen = env.venv.envs[0].game.get_state().screen_buffer\n screen = downsample_resolution(screen)\n\n # Get the current health value\n health = env.venv.envs[0].game.get_game_variable(GameVariable.HEALTH)\n health_values.append(health) # Store the health value\n\n actions.append(current_action)\n images.append(screen)\n\n frame_counter += 1\n\n print(""Health values:"", health_values)\n print(""Number of health values:"", len(health_values))\n print(""Number of actions:"", len(actions))\n print(""Number of images:"", len(images))\n\n # Save only the first 1000 frames to avoid large file size\n output_path = os.path.join(args.output_dir, ""output.gif"")\n imageio.mimsave(output_path, images, fps=20)\n env.close()\n logger.info(f""GIF saved to {args.output_dir}"")\n\n return health_values\n\n\ndef make_array_records_dataset(agent, eval_env_args, num_episodes, split):\n """"""Generate a dataset by running the agent in the environment and saving the data as array record files.\n\n Args:\n agent: The trained PPO agent.\n output_dir (str): Directory to save the array record files.\n eval_env_args (dict): Arguments for the evaluation environment.\n num_episodes (int): Number of episodes to run.\n """"""\n # Set frame_skip to 1 to capture all frames\n eval_env_args[""frame_skip""] = 1\n env = dummy_vec_env_with_bots_curriculum(args.num_parallel_envs, **eval_env_args)\n\n current_action_B = None\n episode_idx = 0\n episode_metadata = []\n obs_chunks = []\n act_chunks = []\n file_idx = 0\n output_dir_split = os.path.join(args.output_dir, split)\n os.makedirs(output_dir_split, exist_ok=True)\n env.venv.render_mode = ""rgb_array""\n\n while episode_idx < num_episodes // args.num_parallel_envs:\n obs = env.reset()\n done = np.array(False)\n frame_counter = 0\n\n observations_seq_TBHWC = []\n actions_seq_TB = []\n health_values_seq_TB = []\n episode_obs_chunks_NTBHWC = []\n episode_act_chunks_NTB = []\n\n # --- Run episode ---\n while not done.any() and frame_counter < args.max_episode_length:\n screen_BHWC = [\n downsample_resolution(env_i.game.get_state().screen_buffer)\n for env_i in env.venv.envs\n ]\n health_B = [\n 
env_i.game.get_game_variable(GameVariable.HEALTH)\n for env_i in env.venv.envs\n ]\n if frame_counter % ACTION_REPEAT == 0:\n current_action_B, _ = agent.predict(obs)\n\n obs, _, done, _ = env.step(current_action_B)\n\n observations_seq_TBHWC.append(screen_BHWC)\n actions_seq_TB.append(current_action_B)\n health_values_seq_TB.append(health_B)\n\n while len(observations_seq_TBHWC) >= args.chunk_size:\n episode_obs_chunks_NTBHWC.append(observations_seq_TBHWC[: args.chunk_size])\n episode_act_chunks_NTB.append(actions_seq_TB[: args.chunk_size])\n observations_seq_TBHWC = observations_seq_TBHWC[args.chunk_size :]\n actions_seq_TB = actions_seq_TB[args.chunk_size :]\n\n frame_counter += 1\n\n # --- Save episode ---\n if frame_counter >= args.min_episode_length:\n if observations_seq_TBHWC:\n if len(observations_seq_TBHWC) < args.chunk_size:\n print(\n f""Warning: Inconsistent chunk_sizes. Episode has {len(observations_seq_TBHWC)} frames, ""\n f""which is smaller than the requested chunk_size: {args.chunk_size}. ""\n ""This might lead to performance degradation during training.""\n )\n episode_obs_chunks_NTBHWC.append(observations_seq_TBHWC)\n episode_act_chunks_NTB.append(actions_seq_TB)\n episode_obs_chunks_NBTHWC = [\n np.transpose(seq, (1,0,2,3,4)).astype(np.uint8) \n for seq in episode_obs_chunks_NTBHWC\n ]\n obs_chunks_data = [\n chunk for batch in episode_obs_chunks_NBTHWC for chunk in batch\n ]\n episode_act_chunks_NBT = [\n np.transpose(seq).astype(np.uint8) \n for seq in episode_act_chunks_NTB\n ]\n act_chunks_data = [\n chunk for batch in episode_act_chunks_NBT for chunk in batch\n ]\n obs_chunks.extend(obs_chunks_data)\n act_chunks.extend(act_chunks_data)\n\n ep_metadata, file_idx, obs_chunks, act_chunks = save_chunks(\n file_idx, args.chunks_per_file, output_dir_split, obs_chunks, act_chunks\n )\n episode_metadata.extend(ep_metadata)\n\n print(f""Episode {episode_idx} completed, length: {frame_counter}."")\n print(f""Total number of frames until now: {file_idx * args.chunk_size * args.chunks_per_file}"")\n episode_idx += 1\n else:\n print(f""Episode too short ({frame_counter}), resampling..."")\n env.close()\n return episode_metadata\n\n\ndef main():\n assert (\n args.num_episodes_train % args.num_parallel_envs == 0\n and args.num_episodes_train >= args.num_parallel_envs\n )\n assert (\n args.num_episodes_val % args.num_parallel_envs == 0\n and args.num_episodes_val >= args.num_parallel_envs\n )\n assert (\n args.num_episodes_test % args.num_parallel_envs == 0\n and args.num_episodes_test >= args.num_parallel_envs\n )\n scenario = ""deathmatch_simple""\n\n env_args = {\n ""scenario"": scenario,\n ""frame_skip"": 1,\n ""frame_processor"": envs.default_frame_processor,\n ""n_bots"": 8,\n ""shaping"": True,\n ""initial_level"": 5,\n ""max_level"": 5,\n ""rolling_mean_length"": 10,\n }\n\n eval_env_args = dict(env_args)\n new_env = dummy_vec_env_with_bots_curriculum(1, **env_args)\n agent = envs.load_model(\n args.agent_path,\n new_env,\n )\n\n if args.generate_gif:\n make_gif(agent, eval_env_args)\n return\n\n train_episode_metadata = make_array_records_dataset(\n agent,\n num_episodes=args.num_episodes_train,\n eval_env_args=eval_env_args,\n split=""train"",\n )\n # val_episode_metadata = make_array_records_dataset(\n # agent,\n # num_episodes=args.num_episodes_val,\n # eval_env_args=eval_env_args,\n # split=""val"",\n # )\n # test_episode_metadata = make_array_records_dataset(\n # agent,\n # num_episodes=args.num_episodes_test,\n # eval_env_args=eval_env_args,\n # split=""test"",\n # 
)\n # --- Save metadata ---\n metadata = {\n ""env"": ""coinrun"",\n ""num_actions"": 18, # TODO mihir\n ""num_episodes_train"": args.num_episodes_train,\n ""num_episodes_val"": args.num_episodes_val,\n ""num_episodes_test"": args.num_episodes_test,\n ""avg_episode_len_train"": np.mean(\n [ep[""avg_seq_len""] for ep in train_episode_metadata]\n ),\n # ""avg_episode_len_val"": np.mean(\n # [ep[""avg_seq_len""] for ep in val_episode_metadata]\n # ),\n # ""avg_episode_len_test"": np.mean(\n # [ep[""avg_seq_len""] for ep in test_episode_metadata]\n # ),\n # ""episode_metadata_train"": train_episode_metadata,\n # ""episode_metadata_val"": val_episode_metadata,\n # ""episode_metadata_test"": test_episode_metadata,\n }\n with open(os.path.join(args.output_dir, ""metadata.json""), ""w"") as f:\n json.dump(metadata, f)\n\n print(f""Done generating dataset."")\n\n\nif __name__ == ""__main__"":\n main()\n",python,tab +5,3791,"data/jasmine_data/ViZDoomPPO/.venv/lib/python3.10/site-packages/stable_baselines3/common/vec_env/dummy_vec_env.py",0,0,"import warnings\nfrom collections import OrderedDict\nfrom copy import deepcopy\nfrom typing import Any, Callable, Dict, List, Optional, Sequence, Type\n\nimport gymnasium as gym\nimport numpy as np\n\nfrom stable_baselines3.common.vec_env.base_vec_env import VecEnv, VecEnvIndices, VecEnvObs, VecEnvStepReturn\nfrom stable_baselines3.common.vec_env.patch_gym import _patch_env\nfrom stable_baselines3.common.vec_env.util import copy_obs_dict, dict_to_obs, obs_space_info\n\n\nclass DummyVecEnv(VecEnv):\n """"""\n Creates a simple vectorized wrapper for multiple environments, calling each environment in sequence on the current\n Python process. This is useful for computationally simple environment such as ``Cartpole-v1``,\n as the overhead of multiprocess or multithread outweighs the environment computation time.\n This can also be used for RL methods that\n require a vectorized environment, but that you want a single environments to train with.\n\n :param env_fns: a list of functions\n that return environments to vectorize\n :raises ValueError: If the same environment instance is passed as the output of two or more different env_fn.\n """"""\n\n actions: np.ndarray\n\n def __init__(self, env_fns: List[Callable[[], gym.Env]]):\n self.envs = [_patch_env(fn()) for fn in env_fns]\n if len(set([id(env.unwrapped) for env in self.envs])) != len(self.envs):\n raise ValueError(\n ""You tried to create multiple environments, but the function to create them returned the same instance ""\n ""instead of creating different objects. ""\n ""You are probably using `make_vec_env(lambda: env)` or `DummyVecEnv([lambda: env] * n_envs)`. ""\n ""You should replace `lambda: env` by a `make_env` function that ""\n ""creates a new instance of the environment at every call ""\n ""(using `gym.make()` for instance). You can take a look at the documentation for an example. 
""\n ""Please read https://github.com/DLR-RM/stable-baselines3/issues/1151 for more information.""\n )\n env = self.envs[0]\n super().__init__(len(env_fns), env.observation_space, env.action_space)\n obs_space = env.observation_space\n self.keys, shapes, dtypes = obs_space_info(obs_space)\n\n self.buf_obs = OrderedDict([(k, np.zeros((self.num_envs, *tuple(shapes[k])), dtype=dtypes[k])) for k in self.keys])\n self.buf_dones = np.zeros((self.num_envs,), dtype=bool)\n self.buf_rews = np.zeros((self.num_envs,), dtype=np.float32)\n self.buf_infos: List[Dict[str, Any]] = [{} for _ in range(self.num_envs)]\n self.metadata = env.metadata\n\n def step_async(self, actions: np.ndarray) -> None:\n self.actions = actions\n\n def step_wait(self) -> VecEnvStepReturn:\n # Avoid circular imports\n for env_idx in range(self.num_envs):\n obs, self.buf_rews[env_idx], terminated, truncated, self.buf_infos[env_idx] = self.envs[env_idx].step(\n self.actions[env_idx]\n )\n # convert to SB3 VecEnv api\n self.buf_dones[env_idx] = terminated or truncated\n # See https://github.com/openai/gym/issues/3102\n # Gym 0.26 introduces a breaking change\n self.buf_infos[env_idx][""TimeLimit.truncated""] = truncated and not terminated\n\n if self.buf_dones[env_idx]:\n # save final observation where user can get it, then reset\n self.buf_infos[env_idx][""terminal_observation""] = obs\n obs, self.reset_infos[env_idx] = self.envs[env_idx].reset()\n self._save_obs(env_idx, obs)\n return (self._obs_from_buf(), np.copy(self.buf_rews), np.copy(self.buf_dones), deepcopy(self.buf_infos))\n\n def reset(self) -> VecEnvObs:\n for env_idx in range(self.num_envs):\n maybe_options = {""options"": self._options[env_idx]} if self._options[env_idx] else {}\n obs, self.reset_infos[env_idx] = self.envs[env_idx].reset(seed=self._seeds[env_idx], **maybe_options)\n self._save_obs(env_idx, obs)\n # Seeds and options are only used once\n self._reset_seeds()\n self._reset_options()\n return self._obs_from_buf()\n\n def close(self) -> None:\n for env in self.envs:\n env.close()\n\n def get_images(self) -> Sequence[Optional[np.ndarray]]:\n if self.render_mode != ""rgb_array"":\n warnings.warn(\n f""The render mode is {self.render_mode}, but this method assumes it is `rgb_array` to obtain images.""\n )\n return [None for _ in self.envs]\n return [env.render() for env in self.envs] # type: ignore[misc]\n\n def render(self, mode: Optional[str] = None) -> Optional[np.ndarray]:\n """"""\n Gym environment rendering. 
If there are multiple environments then\n they are tiled together in one image via ``BaseVecEnv.render()``.\n\n :param mode: The rendering type.\n """"""\n return super().render(mode=mode)\n\n def _save_obs(self, env_idx: int, obs: VecEnvObs) -> None:\n for key in self.keys:\n if key is None:\n self.buf_obs[key][env_idx] = obs\n else:\n self.buf_obs[key][env_idx] = obs[key] # type: ignore[call-overload]\n\n def _obs_from_buf(self) -> VecEnvObs:\n return dict_to_obs(self.observation_space, copy_obs_dict(self.buf_obs))\n\n def get_attr(self, attr_name: str, indices: VecEnvIndices = None) -> List[Any]:\n """"""Return attribute from vectorized environment (see base class).""""""\n target_envs = self._get_target_envs(indices)\n return [getattr(env_i, attr_name) for env_i in target_envs]\n\n def set_attr(self, attr_name: str, value: Any, indices: VecEnvIndices = None) -> None:\n """"""Set attribute inside vectorized environments (see base class).""""""\n target_envs = self._get_target_envs(indices)\n for env_i in target_envs:\n setattr(env_i, attr_name, value)\n\n def env_method(self, method_name: str, *method_args, indices: VecEnvIndices = None, **method_kwargs) -> List[Any]:\n """"""Call instance methods of vectorized environments.""""""\n target_envs = self._get_target_envs(indices)\n return [getattr(env_i, method_name)(*method_args, **method_kwargs) for env_i in target_envs]\n\n def env_is_wrapped(self, wrapper_class: Type[gym.Wrapper], indices: VecEnvIndices = None) -> List[bool]:\n """"""Check if worker environments are wrapped with a given wrapper""""""\n target_envs = self._get_target_envs(indices)\n # Import here to avoid a circular import\n from stable_baselines3.common import env_util\n\n return [env_util.is_wrapped(env_i, wrapper_class) for env_i in target_envs]\n\n def _get_target_envs(self, indices: VecEnvIndices) -> List[gym.Env]:\n indices = self._get_indices(indices)\n return [self.envs[i] for i in indices]\n",python,tab +6,6524,"TERMINAL",0,0,"",,terminal_focus +7,8659,"TERMINAL",0,0,"source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/.venv/bin/activate",,terminal_command +8,9876,"TERMINAL",0,0,"bash",,terminal_focus +9,9953,"TERMINAL",0,0,"bash",,terminal_focus +10,12796,"TERMINAL",0,0,"queue",,terminal_command +11,12851,"TERMINAL",0,0,"]633;C",,terminal_output +12,12945,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Mon Oct 6 13:37:31 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3543728 accelerat preproce tum_cte0 R 18:00:01\t 1 hkn04253543729 accelerat preproce tum_cte0 R 18:00:01\t 1 hkn06323543730 accelerat preproce tum_cte0 R 18:00:01\t 1 hkn0632",,terminal_output +13,13975,"TERMINAL",0,0,"2222",,terminal_output +14,14849,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +15,16329,"TERMINAL",0,0,"fqueue",,terminal_command +16,16408,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue -o ""%.10i %.16P %.30j %.8u %.8T %.... 
hkn1991.localdomain: Mon Oct 6 13:37:35 2025JOBIDPARTITIONNAME USER STATE\t TIME TIME_LIMI NODES NODELIST(REASON)3543728\taccelerated preprocess_doom_50m_120x160 tum_cte0 RUNNING 18:00:05 1-18:00:00\r 1 hkn04253543729\taccelerated\t preprocess_doom_50m_60x80 tum_cte0 RUNNING 18:00:05 1-18:00:00\r 1 hkn06323543730\tacceleratedpreprocess_doom_50m tum_cte0 RUNNING 18:00:05 1-18:00:00\r 1 hkn0632",,terminal_output +17,17436,"TERMINAL",0,0,"6666",,terminal_output +18,18512,"TERMINAL",0,0,"7777",,terminal_output +19,19479,"TERMINAL",0,0,"8888",,terminal_output +20,20533,"TERMINAL",0,0,"9999",,terminal_output +21,21615,"TERMINAL",0,0,"40101010",,terminal_output +22,21770,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +23,25295,"TERMINAL",0,0,"cd $ws_dir",,terminal_command +24,25705,"TERMINAL",0,0,"ls",,terminal_command +25,25735,"TERMINAL",0,0,"]633;Ccheckpoints data_atari data_doom huggingface scripts\r\ncount_items.sh data_breakout data_minecraft logs\r\ndata data_coinrun data_new possibly_corrupt_files_in_this_workspace.txt\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared",,terminal_output +26,28468,"TERMINAL",0,0,"cd data_doom/",,terminal_command +27,28995,"TERMINAL",0,0,"ls",,terminal_command +28,29023,"TERMINAL",0,0,"]633;Cdev doom_episodes_10m_low_res_bak doom_episodes_50m\r\ndev_bak doom_episodes_1m doom_episodes_50m_120x160_fixed\r\ndoom_episodes_10m doom_episodes_1m_120x160_fixed doom_episodes_50m_60x80_fixed\r\ndoom_episodes_10m_120x160_fixed doom_episodes_1m_60x80 doom_episodes_50m_bak\r\ndoom_episodes_10m_60x80_fixed doom_episodes_1m_bak doom_episodes_50m_low_res_bak\r\ndoom_episodes_10m_bak doom_episodes_1m_low_res_bak\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom",,terminal_output +29,49926,"TERMINAL",0,0,"ls -l doom_episodes_1m/train/ | wc -l",,terminal_command +30,49981,"TERMINAL",0,0,"]633;C",,terminal_output +31,50032,"TERMINAL",0,0,"71\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom",,terminal_output +32,54766,"TERMINAL",0,0,"ls -l doom_episodes_10m/train/ | wc -l",,terminal_command +33,54825,"TERMINAL",0,0,"]633;C",,terminal_output +34,55283,"TERMINAL",0,0,"701\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom",,terminal_output +35,103321,"TERMINAL",0,0,"dev",,terminal_command +36,104106,"TERMINAL",0,0,"ls",,terminal_command +37,106386,"TERMINAL",0,0,"deactivate",,terminal_command +38,110246,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_command +39,130042,"slurm/jobs/mihir/horeka/doom/train_tokenizer_default_1gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=05:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_default_single_gpu\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/val\n\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_tokenizer.py \\n --save_ckpt \\n --image_height=240 \\n --image_width=320 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=8 \\n --init_lr=0 \\n --log \\n --name=doom-tokenizer-default-$slurm_job_id \\n --tags tokenizer doom default \\n --entity instant-uv \\n --project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val\n",shellscript,tab +40,151272,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=05:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_default_single_gpu\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/val\n\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_tokenizer.py \\n --save_ckpt \\n --image_height=240 \\n --image_width=320 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=8 \\n --init_lr=0 \\n --log \\n --name=doom-tokenizer-default-$slurm_job_id \\n --tags tokenizer doom default \\n --entity instant-uv \\n --project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val\n",shellscript,tab +41,156121,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",450,0,"",shellscript,selection_mouse +42,156164,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",449,0,"",shellscript,selection_command +43,158284,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",450,0,"",shellscript,selection_command +44,159015,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",450,0,"_",shellscript,content +45,159017,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",451,0,"",shellscript,selection_keyboard +46,160467,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",451,0,"8",shellscript,content +47,160469,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",452,0,"",shellscript,selection_keyboard +48,160537,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",452,0,"0",shellscript,content 
+49,160538,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",453,0,"",shellscript,selection_keyboard +50,161016,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",453,0,"x",shellscript,content +51,161017,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",454,0,"",shellscript,selection_keyboard +52,161537,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",454,0,"6",shellscript,content +53,161539,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",455,0,"",shellscript,selection_keyboard +54,161568,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",455,0,"0",shellscript,content +55,161570,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",456,0,"",shellscript,selection_keyboard +56,162072,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",455,0,"",shellscript,selection_command +57,166251,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1247,0,"",shellscript,selection_mouse +58,166774,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1246,0,"",shellscript,selection_mouse +59,167882,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1246,0,".",shellscript,content +60,167884,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1247,0,"",shellscript,selection_keyboard +61,168297,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1246,1,"",shellscript,content +62,168537,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1246,0,"-",shellscript,content +63,168538,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1247,0,"",shellscript,selection_keyboard +64,170197,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1247,0,"8",shellscript,content +65,170198,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1248,0,"",shellscript,selection_keyboard +66,170220,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1248,0,"0",shellscript,content +67,170221,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1249,0,"",shellscript,selection_keyboard +68,170691,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1249,0,"x",shellscript,content +69,170693,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1250,0,"",shellscript,selection_keyboard +70,171347,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1250,0,"6",shellscript,content +71,171348,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1251,0,"",shellscript,selection_keyboard +72,171432,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1251,0,"0",shellscript,content +73,171433,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1252,0,"",shellscript,selection_keyboard +74,171962,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1251,0,"",shellscript,selection_command +75,172642,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1297,0,"",shellscript,selection_mouse +76,173326,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1304,0,"",shellscript,selection_mouse 
+77,173327,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1303,0,"",shellscript,selection_command +78,175877,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1303,0,"8",shellscript,content +79,175879,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1304,0,"",shellscript,selection_keyboard +80,175952,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1304,0,"0",shellscript,content +81,175953,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1305,0,"",shellscript,selection_keyboard +82,176415,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1305,0,"x",shellscript,content +83,176416,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1306,0,"",shellscript,selection_keyboard +84,176773,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1306,0,"6",shellscript,content +85,176774,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1307,0,"",shellscript,selection_keyboard +86,176848,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1307,0,"0",shellscript,content +87,176849,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1308,0,"",shellscript,selection_keyboard +88,177436,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1308,0," ",shellscript,content +89,177437,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1309,0,"",shellscript,selection_keyboard +90,177786,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1308,0,"",shellscript,selection_command +91,179866,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1100,0,"",shellscript,selection_mouse +92,180040,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1099,3,"240",shellscript,selection_mouse +93,182344,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1099,3,"",shellscript,content +94,183857,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1099,0,"6",shellscript,content +95,183859,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1100,0,"",shellscript,selection_keyboard +96,184060,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1100,0,"0",shellscript,content +97,184061,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1101,0,"",shellscript,selection_keyboard +98,184227,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1125,0,"",shellscript,selection_command +99,184713,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1122,3,"",shellscript,content +100,185851,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1122,0,"8",shellscript,content +101,185852,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1123,0,"",shellscript,selection_keyboard +102,185915,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1123,0,"0",shellscript,content +103,185918,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1124,0,"",shellscript,selection_keyboard +104,191761,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",683,0,"",shellscript,selection_mouse 
+105,192493,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",796,0,"",shellscript,selection_command +106,192935,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",683,0,"",shellscript,selection_command +107,193140,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",682,0,"",shellscript,selection_command +108,193904,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",682,0,"0",shellscript,content +109,193906,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",683,0,"",shellscript,selection_keyboard +110,194135,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",797,0,"",shellscript,selection_command +111,194411,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",796,0,"",shellscript,selection_command +112,194594,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",795,0,"",shellscript,selection_command +113,194737,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",794,0,"",shellscript,selection_command +114,195113,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",794,0,"0",shellscript,content +115,195114,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",795,0,"",shellscript,selection_keyboard +116,196152,"TERMINAL",0,0,"bash",,terminal_focus +117,198626,"TERMINAL",0,0,"undefined[tum_cte0515@hkn1991 jasmine]$ cd $ws_dir",,terminal_command +118,199028,"TERMINAL",0,0,"ls",,terminal_command +119,199087,"TERMINAL",0,0,"]633;Ccheckpoints data_atari data_doom huggingface scripts\r\ncount_items.sh data_breakout data_minecraft logs\r\ndata data_coinrun data_new possibly_corrupt_files_in_this_workspace.txt\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared",,terminal_output +120,200939,"TERMINAL",0,0,"bash",,terminal_focus +121,211652,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +122,211706,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 3546199\r\n",,terminal_output +123,211829,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +124,214062,"slurm/jobs/mihir/horeka/doom/train_tokenizer_default_1gpu.sh",0,0,"",shellscript,tab +125,225807,"TERMINAL",0,0,"bash",,terminal_focus +126,229277,"TERMINAL",0,0,"cd data_doom/",,terminal_command +127,229619,"TERMINAL",0,0,"ls",,terminal_command +128,229625,"TERMINAL",0,0,"]633;Cdev doom_episodes_10m_low_res_bak doom_episodes_50m\r\ndev_bak doom_episodes_1m doom_episodes_50m_120x160_fixed\r\ndoom_episodes_10m doom_episodes_1m_120x160_fixed doom_episodes_50m_60x80_fixed\r\ndoom_episodes_10m_120x160_fixed doom_episodes_1m_60x80 doom_episodes_50m_bak\r\ndoom_episodes_10m_60x80_fixed doom_episodes_1m_bak doom_episodes_50m_low_res_bak\r\ndoom_episodes_10m_bak doom_episodes_1m_low_res_bak\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom",,terminal_output +129,238875,"TERMINAL",0,0,"salloc: Nodes hkn0401 are ready for job\r\n",,terminal_output +130,239744,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h[tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +131,240781,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",0,0,"",shellscript,tab +132,248494,"TERMINAL",0,0,"cd doom_episodes_10m_60x80_fixed/",,terminal_command 
+133,250036,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1432,0,"",shellscript,selection_mouse +134,254990,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",451,1,"8",shellscript,selection_command +135,255029,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",451,2,"80",shellscript,selection_command +136,255449,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",451,3,"80x",shellscript,selection_command +137,255873,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",451,4,"80x6",shellscript,selection_command +138,255952,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",451,5,"80x60",shellscript,selection_command +139,260029,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",451,5,"60x80",shellscript,content +140,260034,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1247,5,"80x60",shellscript,selection_command +141,260188,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1247,5,"60x80",shellscript,content +142,260192,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1303,5,"80x60",shellscript,selection_command +143,260571,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1303,5,"60x80",shellscript,content +144,263376,"TERMINAL",0,0,"pwd",,terminal_command +145,267449,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,0,"",shellscript,selection_mouse +146,267532,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,2,"/h",shellscript,selection_mouse +147,267550,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,3,"/hk",shellscript,selection_mouse +148,267561,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,5,"/hkfs",shellscript,selection_mouse +149,267576,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,121,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m/train\narray_records_dir_val=/hkfs/wor",shellscript,selection_mouse +150,267608,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,123,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m/train\narray_records_dir_val=/hkfs/work/",shellscript,selection_mouse +151,267609,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,125,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m/train\narray_records_dir_val=/hkfs/work/wo",shellscript,selection_mouse +152,267637,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,127,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m/train\narray_records_dir_val=/hkfs/work/work",shellscript,selection_mouse +153,267665,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,128,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m/train\narray_records_dir_val=/hkfs/work/works",shellscript,selection_mouse 
+154,267666,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,130,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m/train\narray_records_dir_val=/hkfs/work/workspa",shellscript,selection_mouse +155,267693,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,133,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m/train\narray_records_dir_val=/hkfs/work/workspace/",shellscript,selection_mouse +156,267724,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,137,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m/train\narray_records_dir_val=/hkfs/work/workspace/scra",shellscript,selection_mouse +157,267725,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,140,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m/train\narray_records_dir_val=/hkfs/work/workspace/scratch",shellscript,selection_mouse +158,267760,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,143,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tu",shellscript,selection_mouse +159,267761,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,146,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_i",shellscript,selection_mouse +160,267803,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,148,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind",shellscript,selection_mouse +161,267804,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,151,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind369",shellscript,selection_mouse +162,267805,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,41,"/hkfs/work/workspace/scratch/tum_ind3695-",shellscript,selection_mouse +163,267812,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,45,"/hkfs/work/workspace/scratch/tum_ind3695-jafa",shellscript,selection_mouse +164,267841,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",576,25,"\narray_records_dir_train=",shellscript,selection_mouse +165,268148,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,89,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m/train",shellscript,selection_mouse +166,268593,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,87,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m/tra",shellscript,selection_mouse +167,268626,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,86,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m/tr",shellscript,selection_mouse +168,268901,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,85,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m/t",shellscript,selection_mouse 
+169,269046,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,84,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m/",shellscript,selection_mouse +170,270186,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,84,"",shellscript,content +171,271317,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",606,0,"\n",shellscript,content +172,272263,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",606,1,"",shellscript,content +173,273319,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",601,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed",shellscript,content +174,273918,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",696,0,"/",shellscript,content +175,273919,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",697,0,"",shellscript,selection_keyboard +176,275742,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",808,0,"",shellscript,selection_mouse +177,275935,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",806,2,"0m",shellscript,selection_mouse +178,275943,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",797,11,"pisodes_10m",shellscript,selection_mouse +179,275965,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",808,5,"/val\n",shellscript,selection_mouse +180,276039,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",743,65,"ce/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m",shellscript,selection_mouse +181,276040,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",738,70,"rkspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m",shellscript,selection_mouse +182,276042,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",735,73,"/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m",shellscript,selection_mouse +183,276059,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",734,74,"k/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m",shellscript,selection_mouse +184,276088,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",733,75,"rk/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m",shellscript,selection_mouse +185,276145,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",732,76,"ork/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m",shellscript,selection_mouse +186,276146,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",731,77,"work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m",shellscript,selection_mouse +187,276185,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",729,79,"s/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m",shellscript,selection_mouse +188,276217,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",808,5,"/val\n",shellscript,selection_mouse +189,276387,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",725,83,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m",shellscript,selection_mouse 
+190,276388,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",724,84,"=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m",shellscript,selection_mouse +191,276547,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",725,83,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m",shellscript,selection_mouse +192,276619,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",726,82,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m",shellscript,selection_mouse +193,277120,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",725,83,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m",shellscript,selection_mouse +194,278022,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",725,83,"",shellscript,content +195,278278,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",725,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed",shellscript,content +196,280038,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1027,0,"",shellscript,selection_mouse +197,289341,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1203,0,"",shellscript,selection_mouse +198,290357,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1203,1,"",shellscript,content +199,290928,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1203,0,"4",shellscript,content +200,290929,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1204,0,"",shellscript,selection_keyboard +201,290998,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1204,0,"8",shellscript,content +202,290999,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1205,0,"",shellscript,selection_keyboard +203,291876,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1225,0,"",shellscript,selection_mouse +204,292212,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1237,0,"",shellscript,selection_mouse +205,292579,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",1262,0,"",shellscript,selection_mouse +206,293335,"TERMINAL",0,0,"srun",,terminal_focus +207,294401,"TERMINAL",0,0,"s",,terminal_output +208,294468,"TERMINAL",0,0,"o",,terminal_output +209,294556,"TERMINAL",0,0,"u",,terminal_output +210,294612,"TERMINAL",0,0,"r",,terminal_output +211,294785,"TERMINAL",0,0,"c",,terminal_output +212,294934,"TERMINAL",0,0,"e",,terminal_output +213,295080,"TERMINAL",0,0," ",,terminal_output +214,295179,"TERMINAL",0,0,".",,terminal_output +215,295235,"TERMINAL",0,0,"v",,terminal_output +216,295331,"TERMINAL",0,0,"env/",,terminal_output +217,295559,"TERMINAL",0,0,"b",,terminal_output +218,295638,"TERMINAL",0,0,"in/",,terminal_output +219,295875,"TERMINAL",0,0,"ac",,terminal_output +220,296031,"TERMINAL",0,0,"tivate",,terminal_output +221,296292,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +222,300117,"TERMINAL",0,0,"g",,terminal_output +223,300239,"TERMINAL",0,0,"it",,terminal_output +224,300362,"TERMINAL",0,0," ",,terminal_output +225,300508,"TERMINAL",0,0,"b",,terminal_output 
+226,300578,"TERMINAL",0,0,"r",,terminal_output +227,300751,"TERMINAL",0,0,"a",,terminal_output +228,300839,"TERMINAL",0,0,"n",,terminal_output +229,300893,"TERMINAL",0,0,"c",,terminal_output +230,301019,"TERMINAL",0,0,"h",,terminal_output +231,301243,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r",,terminal_output +232,301812,"TERMINAL",0,0," ablation/full-precision-training\r\n ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n:",,terminal_output +233,303692,"TERMINAL",0,0,"\r\r:",,terminal_output +234,304134,"TERMINAL",0,0,"\r hotfix/eval-full-frame-fix\r\n:",,terminal_output +235,304940,"TERMINAL",0,0,"\r hotfix/fix-val-loss-maskgit-masking\r\n:\r hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n:\r hotfix/sampling-shapes-error\r\n:\r input_pipeline/add-npy2array_record\r\n:\r logging-variants\r\n:\r lr-schedules\r\n:\r main\r\n:\r maskgit-different-maskprob-per-sample\r\n:\r maskgit-sampling-iterative-unmasking-fix\r\n:\r metrics-logging-for-dynamics-model\r\n:\r monkey-patch\r\n:\r new-arch-sampling\r\n:\r prepend-action-maskgit\r\n:",,terminal_output +236,305017,"TERMINAL",0,0,"\r preprocess_video\r\n:\r refactor-full-frame-val-loss\r\n:",,terminal_output +237,305126,"TERMINAL",0,0,"\r refactor-tmp\r\n:\r remove-restore-branching\r\n:\r revised-dataloader\r\n:\r runner\r\n:",,terminal_output +238,305217,"TERMINAL",0,0,"\r runner-grain\r\n:\r sample-ali-branch\r\n:\r sample-from-different-topologies\r\n:",,terminal_output +239,305304,"TERMINAL",0,0,"\r sampling-script-add-metrics\r\n:\r sampling-startframe-indexing-fix\r\n:\r seeding-data-generation\r\n:",,terminal_output +240,305721,"TERMINAL",0,0,"\r speedup-tfrecord-preprocessing\r\n:\r train_lam_coinrun_ablation_wsd_3e-6_28747\r\n:\r val-loss\r\n:\r* vizdoom-dataset\r\n:\r z-loss\r\n:\r zloss-runs\r\n:\r\r(END)\r\r(END)\r\r(END)\r\r(END)\r\r(END)\r\r(END)\r\r(END)\r\r(END)",,terminal_output +241,305920,"TERMINAL",0,0,"\r\r(END)\r\r(END)\r\r(END)\r\r(END)\r\r(END)\r\r(END)",,terminal_output +242,306484,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +243,309438,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",0,0,"",shellscript,tab +244,312557,"TERMINAL",0,0,"s",,terminal_output +245,312633,"TERMINAL",0,0,"h",,terminal_output +246,312762,"TERMINAL",0,0," ",,terminal_output +247,312932,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",,terminal_output +248,313205,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh\r\n[?2004l\r",,terminal_output +249,313385,"TERMINAL",0,0,"#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH 
--ntasks-per-node=1\r\n#SBATCH --time=05:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\r\n#SBATCH --job-name=train_tokenizer_default_single_gpu_60x80\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/val\r\n\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\nsrun python jasmine/train_tokenizer.py \\r\n --save_ckpt \\r\n --image_height=60 \\r\n --image_width=80 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=48 \\r\n --init_lr=0 \\r\n --log \\r\n --name=doom-tokenizer-default-60x80-$slurm_job_id \\r\n --tags tokenizer doom default 60x80 \\r\n --entity instant-uv \\r\n --project jafar \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val\r\n",,terminal_output +250,313676,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=310924\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1759750850\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1759754450\r\nSLURM_PMI2_SRUN_PORT=42403\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3546199\r\nSLURM_PTY_PORT=35939\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=31\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=103\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=40421\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3546199\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=40421\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +251,313805,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +252,343670,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +253,344760,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.22.0\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20251006_134302-1fdola37\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run doom-tokenizer-default-60x80-3546199\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/1fdola37\r\n",,terminal_output +254,347556,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['decoder', 'encoder', 'vq']\r\nParameter counts:\r\n{'decoder': 16858736, 'encoder': 16858752, 'vq': 32768, 'total': 33750256}\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_tokenizer.py"", line 574, in \r\n main(args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_tokenizer.py"", line 332, in main\r\n val_iterator = build_dataloader(args, args.val_data_dir)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_tokenizer.py"", line 162, in build_dataloader\r\n for x in os.listdir(data_dir)\r\n ^^^^^^^^^^^^^^^^^^^^\r\nFileNotFoundError: [Errno 2] No such file or directory: '/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/val'\r\n",,terminal_output +255,348333,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run doom-tokenizer-default-60x80-3546199 at: https://wandb.ai/instant-uv/jafar/runs/1fdola37\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20251006_134302-1fdola37/logs\r\n",,terminal_output +256,348412,"TERMINAL",0,0,"W1006 13:43:07.487927 311292 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugproto job_name: ""jax_worker"": CANCELLED: CANCELLED\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_message:""CANCELLED"", grpc_status:1} [type.googleapis.com/tensorflow.DerivedStatus='']\r\n",,terminal_output +257,349187,"TERMINAL",0,0,"srun: error: hkn0401: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +258,390939,"TERMINAL",0,0,"bash",,terminal_focus +259,392509,"TERMINAL",0,0,"\r(jasmine) [tum_cte0515@hkn0401 jasmine]$ \r(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +260,392565,"TERMINAL",0,0,"\r(jasmine) [tum_cte0515@hkn0401 jasmine]$ \r(jasmine) [tum_cte0515@hkn0401 jasmine]$ \r(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +261,394601,"TERMINAL",0,0,"ls",,terminal_command +262,399664,"TERMINAL",0,0,"cd ..",,terminal_command +263,400275,"TERMINAL",0,0,"ls",,terminal_command +264,406306,"TERMINAL",0,0,"cd doom_episodes_1m",,terminal_command +265,407009,"TERMINAL",0,0,"l",,terminal_command +266,407036,"TERMINAL",0,0,"]633;Cbash: l: command not found...\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m",,terminal_output +267,408269,"TERMINAL",0,0,"ls",,terminal_command +268,408295,"TERMINAL",0,0,"]633;Cmetadata.json 
train\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m",,terminal_output +269,410088,"TERMINAL",0,0,"cd ..",,terminal_command +270,410716,"TERMINAL",0,0,"ls",,terminal_command +271,415724,"TERMINAL",0,0,"ls dev_bak/",,terminal_command +272,416098,"TERMINAL",0,0,"ls",,terminal_command +273,474008,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",0,0,"",python,tab +274,478182,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",6098,0,"",python,selection_mouse +275,478274,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",6095,12,"num_episodes",python,selection_mouse +276,478838,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",6091,0,"",python,selection_mouse +277,478994,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",6081,11,"episode_idx",python,selection_mouse +278,479555,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",6120,0,"",python,selection_mouse +279,479690,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",6116,17,"num_parallel_envs",python,selection_mouse +280,480573,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",6102,0,"",python,selection_mouse +281,480752,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",6095,12,"num_episodes",python,selection_mouse +282,481288,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",6086,0,"",python,selection_mouse +283,481424,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",6081,11,"episode_idx",python,selection_mouse +284,489076,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11041,0,"",python,selection_mouse +285,489078,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11040,0,"",python,selection_command +286,489105,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11040,1,")",python,selection_mouse +287,489106,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11041,0,"",python,selection_command +288,491021,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11040,0,"",python,selection_command +289,491735,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11034,7," # )",python,selection_command +290,491967,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11010,31," # split=""test"",\n # )",python,selection_command +291,492100,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10971,70," # eval_env_args=eval_env_args,\n # split=""test"",\n # )",python,selection_command +292,492260,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10924,117," # num_episodes=args.num_episodes_test,\n # eval_env_args=eval_env_args,\n # split=""test"",\n # )",python,selection_command +293,492414,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10907,134," # agent,\n # num_episodes=args.num_episodes_test,\n # eval_env_args=eval_env_args,\n # split=""test"",\n # )",python,selection_command +294,492676,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10849,192," # test_episode_metadata = make_array_records_dataset(\n # agent,\n # num_episodes=args.num_episodes_test,\n # eval_env_args=eval_env_args,\n # split=""test"",\n # )",python,selection_command +295,492848,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10841,200," # )\n # test_episode_metadata = make_array_records_dataset(\n # agent,\n # num_episodes=args.num_episodes_test,\n # eval_env_args=eval_env_args,\n # split=""test"",\n # )",python,selection_command 
+296,492990,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10818,223," # split=""val"",\n # )\n # test_episode_metadata = make_array_records_dataset(\n # agent,\n # num_episodes=args.num_episodes_test,\n # eval_env_args=eval_env_args,\n # split=""test"",\n # )",python,selection_command +297,493380,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10779,262," # eval_env_args=eval_env_args,\n # split=""val"",\n # )\n # test_episode_metadata = make_array_records_dataset(\n # agent,\n # num_episodes=args.num_episodes_test,\n # eval_env_args=eval_env_args,\n # split=""test"",\n # )",python,selection_command +298,493557,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10733,308," # num_episodes=args.num_episodes_val,\n # eval_env_args=eval_env_args,\n # split=""val"",\n # )\n # test_episode_metadata = make_array_records_dataset(\n # agent,\n # num_episodes=args.num_episodes_test,\n # eval_env_args=eval_env_args,\n # split=""test"",\n # )",python,selection_command +299,493699,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10716,325," # agent,\n # num_episodes=args.num_episodes_val,\n # eval_env_args=eval_env_args,\n # split=""val"",\n # )\n # test_episode_metadata = make_array_records_dataset(\n # agent,\n # num_episodes=args.num_episodes_test,\n # eval_env_args=eval_env_args,\n # split=""test"",\n # )",python,selection_command +300,493830,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10659,382," # val_episode_metadata = make_array_records_dataset(\n # agent,\n # num_episodes=args.num_episodes_val,\n # eval_env_args=eval_env_args,\n # split=""val"",\n # )\n # test_episode_metadata = make_array_records_dataset(\n # agent,\n # num_episodes=args.num_episodes_test,\n # eval_env_args=eval_env_args,\n # split=""test"",\n # )",python,selection_command +301,494078,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10663,0,"",python,selection_command +302,494790,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11038,1,"",python,content +303,494790,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11018,1,"",python,content +304,494790,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10979,1,"",python,content +305,494790,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10932,1,"",python,content +306,494791,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10915,1,"",python,content +307,494791,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10853,1,"",python,content +308,494791,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10845,1,"",python,content +309,494791,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10826,1,"",python,content +310,494791,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10787,1,"",python,content +311,494791,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10741,1,"",python,content +312,494791,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10724,1,"",python,content +313,494791,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10663,1,"",python,content +314,494913,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11027,1,"",python,content +315,494913,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11008,1,"",python,content +316,494913,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10970,1,"",python,content +317,494913,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10924,1,"",python,content 
+318,494914,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10908,1,"",python,content +319,494914,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10847,1,"",python,content +320,494914,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10840,1,"",python,content +321,494914,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10822,1,"",python,content +322,494914,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10784,1,"",python,content +323,494914,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10739,1,"",python,content +324,494914,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10723,1,"",python,content +325,494914,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10663,1,"",python,content +326,495171,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10662,0,"",python,selection_command +327,496444,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10656,0,"",python,selection_command +328,497086,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10653,5," )",python,selection_command +329,497310,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10630,28," split=""train"",\n )",python,selection_command +330,497440,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10593,65," eval_env_args=eval_env_args,\n split=""train"",\n )",python,selection_command +331,497597,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10547,111," num_episodes=args.num_episodes_train,\n eval_env_args=eval_env_args,\n split=""train"",\n )",python,selection_command +332,497744,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10532,126," agent,\n num_episodes=args.num_episodes_train,\n eval_env_args=eval_env_args,\n split=""train"",\n )",python,selection_command +333,497903,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10475,183," train_episode_metadata = make_array_records_dataset(\n agent,\n num_episodes=args.num_episodes_train,\n eval_env_args=eval_env_args,\n split=""train"",\n )",python,selection_command +334,498122,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10479,0,"",python,selection_command +335,498879,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10657,0,"#",python,content +336,498879,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10638,0,"#",python,content +337,498880,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10601,0,"#",python,content +338,498880,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10555,0,"#",python,content +339,498880,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10540,0,"#",python,content +340,498880,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10479,0,"#",python,content +341,498881,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10480,0,"",python,selection_keyboard +342,499170,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10663,0," ",python,content +343,499170,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10643,0," ",python,content +344,499170,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10605,0," ",python,content +345,499170,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10558,0," ",python,content +346,499171,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10542,0," ",python,content +347,499171,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10480,0," ",python,content 
+348,499171,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10481,0,"",python,selection_keyboard +349,499411,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",10480,0,"",python,selection_command +350,504127,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11433,0,"",python,selection_mouse +351,505254,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11419,41," # ""avg_episode_len_val"": np.mean(",python,selection_command +352,505482,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11419,106," # ""avg_episode_len_val"": np.mean(\n # [ep[""avg_seq_len""] for ep in val_episode_metadata]",python,selection_command +353,505614,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11419,119," # ""avg_episode_len_val"": np.mean(\n # [ep[""avg_seq_len""] for ep in val_episode_metadata]\n # ),",python,selection_command +354,505791,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11419,162," # ""avg_episode_len_val"": np.mean(\n # [ep[""avg_seq_len""] for ep in val_episode_metadata]\n # ),\n # ""avg_episode_len_test"": np.mean(",python,selection_command +355,505922,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11419,228," # ""avg_episode_len_val"": np.mean(\n # [ep[""avg_seq_len""] for ep in val_episode_metadata]\n # ),\n # ""avg_episode_len_test"": np.mean(\n # [ep[""avg_seq_len""] for ep in test_episode_metadata]",python,selection_command +356,506065,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11419,241," # ""avg_episode_len_val"": np.mean(\n # [ep[""avg_seq_len""] for ep in val_episode_metadata]\n # ),\n # ""avg_episode_len_test"": np.mean(\n # [ep[""avg_seq_len""] for ep in test_episode_metadata]\n # ),",python,selection_command +357,506213,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11419,301," # ""avg_episode_len_val"": np.mean(\n # [ep[""avg_seq_len""] for ep in val_episode_metadata]\n # ),\n # ""avg_episode_len_test"": np.mean(\n # [ep[""avg_seq_len""] for ep in test_episode_metadata]\n # ),\n # ""episode_metadata_train"": train_episode_metadata,",python,selection_command +358,506364,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11419,357," # ""avg_episode_len_val"": np.mean(\n # [ep[""avg_seq_len""] for ep in val_episode_metadata]\n # ),\n # ""avg_episode_len_test"": np.mean(\n # [ep[""avg_seq_len""] for ep in test_episode_metadata]\n # ),\n # ""episode_metadata_train"": train_episode_metadata,\n # ""episode_metadata_val"": val_episode_metadata,",python,selection_command +359,506690,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11419,415," # ""avg_episode_len_val"": np.mean(\n # [ep[""avg_seq_len""] for ep in val_episode_metadata]\n # ),\n # ""avg_episode_len_test"": np.mean(\n # [ep[""avg_seq_len""] for ep in test_episode_metadata]\n # ),\n # ""episode_metadata_train"": train_episode_metadata,\n # ""episode_metadata_val"": val_episode_metadata,\n # ""episode_metadata_test"": test_episode_metadata,",python,selection_command +360,506903,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11427,0,"",python,selection_command +361,507622,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11785,1,"",python,content +362,507622,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11729,1,"",python,content +363,507622,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11669,1,"",python,content +364,507622,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11656,1,"",python,content 
+365,507622,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11594,1,"",python,content +366,507622,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11547,1,"",python,content +367,507622,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11534,1,"",python,content +368,507623,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11473,1,"",python,content +369,507623,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11427,1,"",python,content +370,507765,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11777,1,"",python,content +371,507765,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11722,1,"",python,content +372,507765,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11663,1,"",python,content +373,507765,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11651,1,"",python,content +374,507766,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11590,1,"",python,content +375,507766,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11544,1,"",python,content +376,507766,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11532,1,"",python,content +377,507766,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11472,1,"",python,content +378,507766,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11427,1,"",python,content +379,507860,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11426,0,"",python,selection_command +380,508297,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11415,0,"",python,selection_command +381,508478,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11350,0,"",python,selection_command +382,509135,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11308,0,"",python,selection_command +383,509803,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11301,41," ""avg_episode_len_train"": np.mean(",python,selection_command +384,510800,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11301,106," ""avg_episode_len_train"": np.mean(\n [ep[""avg_seq_len""] for ep in train_episode_metadata]",python,selection_command +385,510930,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11301,117," ""avg_episode_len_train"": np.mean(\n [ep[""avg_seq_len""] for ep in train_episode_metadata]\n ),",python,selection_command +386,511133,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11309,0,"",python,selection_command +387,511892,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11416,0,"#",python,content +388,511892,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11355,0,"#",python,content +389,511893,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11309,0,"#",python,content +390,511894,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11310,0,"",python,selection_keyboard +391,512115,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11419,0," ",python,content +392,512115,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11357,0," ",python,content +393,512115,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11310,0," ",python,content +394,512116,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11311,0,"",python,selection_keyboard +395,512333,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11310,0,"",python,selection_command 
+396,512722,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11354,0,"",python,selection_command +397,512861,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11421,0,"",python,selection_command +398,512999,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11434,0,"",python,selection_command +399,513531,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11474,0,"",python,selection_command +400,513612,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11537,0,"",python,selection_command +401,513753,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11548,0,"",python,selection_command +402,513888,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11589,0,"",python,selection_command +403,514025,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11653,0,"",python,selection_command +404,514210,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11664,0,"",python,selection_command +405,514434,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11663,0,"",python,selection_command +406,515000,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11663,0,"#",python,content +407,515002,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11664,0,"",python,selection_keyboard +408,515154,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11664,0," ",python,content +409,515155,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11665,0,"",python,selection_keyboard +410,515280,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11664,0,"",python,selection_command +411,536529,"TERMINAL",0,0,"cd doom_episodes_10m",,terminal_command +412,537081,"TERMINAL",0,0,"ls",,terminal_command +413,537108,"TERMINAL",0,0,"]633;Cmetadata.json train\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m",,terminal_output +414,547715,"TERMINAL",0,0,"mv metadata.json metadata_train.json",,terminal_command +415,558712,"TERMINAL",0,0,"cd ../doom_episodes_10m_60x80_fixed/",,terminal_command +416,560759,"TERMINAL",0,0,"mv metadata.json metadata_train.json",,terminal_command +417,569350,"TERMINAL",0,0,"cd ../doom_episodes_10m_120x160_fixed/",,terminal_command +418,570958,"TERMINAL",0,0,"mv metadata.json metadata_train.json",,terminal_command +419,571428,"TERMINAL",0,0,"ls",,terminal_command +420,573132,"TERMINAL",0,0,"cd ..",,terminal_command +421,574320,"TERMINAL",0,0,"ls",,terminal_command +422,631148,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_60x80.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --partition=accelerated\n#SBATCH --gres=gpu:1\n#SBATCH --cpus-per-task=16\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/doom/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/doom/%x_%j.log\n#SBATCH --job-name=preprocess_doom_10m_60x80\n\n# export PYTHONUNBUFFERED=1\n\n\n# source .venv/bin/activate\n\nsource .venv/bin/activate\n\npython jasmine_data/ViZDoomPPO/load_model_generate_dataset.py \\n --output_dir /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed \\n --agent_path /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip \\n --num_episodes_train 10_000 \\n --target_width 80 \\n --target_height 60 \\n 
\n\n",shellscript,tab +423,646894,"TERMINAL",0,0,"srun",,terminal_focus +424,648008,"TERMINAL",0,0,"s",,terminal_output +425,648107,"TERMINAL",0,0,"h",,terminal_output +426,648200,"TERMINAL",0,0," ",,terminal_output +427,648499,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_60x80.sh",,terminal_output +428,649458,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_60x80.sh\r\n[?2004l\rpython: can't open file '/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py': [Errno 2] No such file or directory\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +429,651146,"TERMINAL",0,0,"c",,terminal_output +430,651240,"TERMINAL",0,0,"d",,terminal_output +431,651514,"TERMINAL",0,0," ",,terminal_output +432,651569,"TERMINAL",0,0,"d",,terminal_output +433,651731,"TERMINAL",0,0,"a",,terminal_output +434,652004,"TERMINAL",0,0,"ta/",,terminal_output +435,652474,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine/data[?2004h(jasmine) [tum_cte0515@hkn0401 data]$ ",,terminal_output +436,653001,"TERMINAL",0,0,"d",,terminal_output +437,653171,"TERMINAL",0,0,"e",,terminal_output +438,653340,"TERMINAL",0,0,"a",,terminal_output +439,653420,"TERMINAL",0,0,"c",,terminal_output +440,653609,"TERMINAL",0,0,"t",,terminal_output +441,654392,"TERMINAL",0,0,"i",,terminal_output +442,654684,"TERMINAL",0,0,"va",,terminal_output +443,654737,"TERMINAL",0,0,"t",,terminal_output +444,654858,"TERMINAL",0,0,"e",,terminal_output +445,655162,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine/data[?2004h[tum_cte0515@hkn0401 data]$ ",,terminal_output +446,656381,"TERMINAL",0,0,"deactivate",,terminal_output +447,656441,"TERMINAL",0,0,"cd data/",,terminal_output +448,657043,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_60x80.sh",,terminal_output +449,658021,"TERMINAL",0,0,"",,terminal_output +450,658170,"TERMINAL",0,0,"",,terminal_output +451,658835,"TERMINAL",0,0,"",,terminal_output +452,658990,"TERMINAL",0,0,"",,terminal_output +453,659215,"TERMINAL",0,0,"",,terminal_output +454,659469,"TERMINAL",0,0,"",,terminal_output +455,659760,"TERMINAL",0,0,"",,terminal_output +456,660465,"TERMINAL",0,0,".slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_60x80.sh\r",,terminal_output +457,660611,"TERMINAL",0,0,".slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_60x80.sh\r",,terminal_output +458,661431,"TERMINAL",0,0,"/slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_60x80.sh\r",,terminal_output +459,662120,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +460,671553,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py:596: UserWarning: WARN: plugin: shimmy.registration:register_gymnasium_envs raised Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 594, in load_plugin_envs\r\n fn()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 304, in register_gymnasium_envs\r\n _register_atari_envs()\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 205, in _register_atari_envs\r\n import ale_py\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/__init__.py"", line 66, in \r\n register_v0_v4_envs()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/registration.py"", line 176, in register_v0_v4_envs\r\n _register_rom_configs(legacy_games, obs_types, versions)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/registration.py"", line 62, in _register_rom_configs\r\n gymnasium.register(\r\nAttributeError: partially initialized module 'gymnasium' has no attribute 'register' (most likely due to a circular import)\r\n\r\n logger.warn(f""plugin: {plugin.value} raised {traceback.format_exc()}"")\r\n",,terminal_output +461,676159,"TERMINAL",0,0,"Using device: cuda\r\n",,terminal_output +462,676774,"TERMINAL",0,0,"2025-10-06 13:48:35.753 | INFO  | __main__::58 - Using device: cuda\r\n",,terminal_output +463,676924,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/vec_env/patch_gym.py:49: UserWarning: You provided an OpenAI Gym environment. We strongly recommend transitioning to Gymnasium environments. Stable-Baselines3 is automatically wrapping your environments in a compatibility layer, which could potentially cause issues.\r\n warnings.warn(\r\n",,terminal_output +464,677017,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/save_util.py:167: UserWarning: Could not deserialize object clip_range. Consider using `custom_objects` argument to replace this object.\r\nException: code expected at most 16 arguments, got 18\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/save_util.py:167: UserWarning: Could not deserialize object lr_schedule. Consider using `custom_objects` argument to replace this object.\r\nException: code expected at most 16 arguments, got 18\r\n warnings.warn(\r\n",,terminal_output +465,681559,"TERMINAL",0,0,"Model loaded from /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip\r\n",,terminal_output +466,681749,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/vec_env/patch_gym.py:49: UserWarning: You provided an OpenAI Gym environment. We strongly recommend transitioning to Gymnasium environments. 
Stable-Baselines3 is automatically wrapping your environments in a compatibility layer, which could potentially cause issues.\r\n warnings.warn(\r\n",,terminal_output +467,681930,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +468,682117,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +469,682298,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +470,682480,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +471,682654,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +472,682837,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +473,683015,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +474,683199,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +475,683382,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +476,683559,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +477,683742,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +478,683929,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +479,684110,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +480,684289,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +481,684478,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +482,684656,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +483,684839,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +484,685036,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +485,685229,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +486,685409,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend 
""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +487,685568,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +488,685754,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +489,685933,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +490,686112,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +491,686298,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +492,686482,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +493,686670,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +494,686846,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +495,687020,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +496,687205,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +497,687389,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +498,687568,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +499,687763,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +500,687933,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +501,688114,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +502,688297,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +503,688488,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +504,688661,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +505,688848,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +506,689029,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output 
+507,689225,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +508,689424,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +509,689588,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +510,689766,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +511,689949,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +512,690173,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +513,690340,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +514,690500,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +515,690683,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +516,690867,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +517,691052,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +518,691234,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +519,691440,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +520,691600,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +521,691781,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +522,691976,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +523,692146,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +524,692356,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +525,692523,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +526,692699,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +527,692883,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend 
""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +528,693066,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +529,693250,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +530,693441,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +531,693619,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +532,693802,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +533,693989,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +534,694176,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +535,694375,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +536,694543,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +537,694724,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +538,694912,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +539,695106,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +540,695284,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +541,695466,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +542,695649,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +543,695832,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +544,696017,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +545,696198,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +546,696387,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +547,696570,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output 
+548,696793,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +549,696940,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +550,697123,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +551,697333,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +552,697500,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +553,697681,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +554,697866,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +555,698049,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +556,698235,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +557,698442,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +558,698602,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +559,698785,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +560,698975,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +561,699160,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +562,699351,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +563,699528,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +564,699724,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +565,699898,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +566,883504,"TERMINAL",0,0,"Warning: Inconsistent chunk_sizes. Episode has 40 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +567,886747,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/val/data_0000.array_record with 100 video chunks\r\n",,terminal_output +568,889153,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/val/data_0001.array_record with 100 video chunks\r\n",,terminal_output +569,891603,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/val/data_0002.array_record with 100 video chunks\r\n",,terminal_output +570,894035,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/val/data_0003.array_record with 100 video chunks\r\n",,terminal_output +571,896475,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/val/data_0004.array_record with 100 video chunks\r\n",,terminal_output +572,898879,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/val/data_0005.array_record with 100 video chunks\r\n",,terminal_output +573,899493,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/val/data_0006.array_record with 100 video chunks\r\nEpisode 0 completed, length: 1000.\r\nTotal number of frames until now: 112000\r\n",,terminal_output +574,900479,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +575,900655,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +576,900859,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +577,901056,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +578,901255,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +579,901457,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +580,901657,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +581,901857,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +582,902062,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +583,902257,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +584,902455,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +585,902656,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to 
initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +586,902860,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +587,903059,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +588,903254,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +589,903480,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +590,903651,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +591,903854,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +592,904055,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +593,904257,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +594,904479,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +595,904654,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +596,904856,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +597,905059,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +598,905258,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +599,905482,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +600,905734,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +601,905860,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +602,906108,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +603,906271,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +604,906494,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +605,906701,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n 
]\r\n",,terminal_output +606,906867,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +607,907068,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +608,907280,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +609,907474,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +610,907673,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +611,907876,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +612,908075,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +613,908275,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +614,908480,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +615,908695,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +616,908882,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +617,909083,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +618,909286,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +619,909485,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +620,909694,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +621,909894,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +622,910091,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +623,910319,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +624,910497,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +625,910697,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +626,910903,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to 
initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +627,911099,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +628,911302,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +629,911577,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +630,911716,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +631,911906,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +632,912105,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +633,912319,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +634,912506,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +635,912710,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +636,912913,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +637,913114,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +638,913322,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +639,913523,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +640,913723,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +641,913935,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +642,914127,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +643,914339,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +644,914535,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +645,914736,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +646,914936,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n 
]\r\n",,terminal_output +647,915142,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +648,915375,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +649,915542,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +650,915776,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +651,915943,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +652,916151,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +653,916370,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +654,916553,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +655,916753,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +656,916955,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +657,917155,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +658,917377,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +659,917562,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +660,917765,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +661,917970,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +662,918173,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +663,918377,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +664,918583,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +665,918785,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +666,918986,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +667,919193,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to 
initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +668,919418,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +669,919596,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +670,919880,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +671,920007,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +672,920223,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +673,920412,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +674,1104589,"TERMINAL",0,0,"Warning: Inconsistent chunk_sizes. Episode has 40 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +675,1107804,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/test/data_0000.array_record with 100 video chunks\r\n",,terminal_output +676,1110243,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/test/data_0001.array_record with 100 video chunks\r\n",,terminal_output +677,1112692,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/test/data_0002.array_record with 100 video chunks\r\n",,terminal_output +678,1115107,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/test/data_0003.array_record with 100 video chunks\r\n",,terminal_output +679,1117528,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/test/data_0004.array_record with 100 video chunks\r\n",,terminal_output +680,1119963,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/test/data_0005.array_record with 100 video chunks\r\n",,terminal_output +681,1120575,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/test/data_0006.array_record with 100 video chunks\r\nEpisode 0 completed, length: 1000.\r\nTotal number of frames until now: 112000\r\n",,terminal_output +682,1121323,"TERMINAL",0,0,"Done generating dataset.\r\n",,terminal_output +683,1122230,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine/data[?2004h[tum_cte0515@hkn0401 data]$ ",,terminal_output +684,1794290,"TERMINAL",0,0,"sh ../slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_60x80.sh",,terminal_output +685,1795253,"TERMINAL",0,0,"",,terminal_output +686,1795816,"TERMINAL",0,0,"",,terminal_output +687,1796192,"TERMINAL",0,0,"",,terminal_output +688,1796274,"TERMINAL",0,0,"",,terminal_output +689,1796463,"TERMINAL",0,0,"",,terminal_output +690,1796542,"TERMINAL",0,0,"",,terminal_output +691,1796693,"TERMINAL",0,0,"",,terminal_output 
+692,1798775,"TERMINAL",0,0,"1",,terminal_output +693,1798920,"TERMINAL",0,0,"2",,terminal_output +694,1799095,"TERMINAL",0,0,"0x160.sh ",,terminal_output +695,1803622,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +696,1805092,"TERMINAL",0,0,"bash",,terminal_focus +697,1805093,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py:596: UserWarning: WARN: plugin: shimmy.registration:register_gymnasium_envs raised Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 594, in load_plugin_envs\r\n fn()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 304, in register_gymnasium_envs\r\n _register_atari_envs()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 205, in _register_atari_envs\r\n import ale_py\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/__init__.py"", line 66, in \r\n register_v0_v4_envs()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/registration.py"", line 176, in register_v0_v4_envs\r\n _register_rom_configs(legacy_games, obs_types, versions)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/registration.py"", line 62, in _register_rom_configs\r\n gymnasium.register(\r\nAttributeError: partially initialized module 'gymnasium' has no attribute 'register' (most likely due to a circular import)\r\n\r\n logger.warn(f""plugin: {plugin.value} raised {traceback.format_exc()}"")\r\n",,terminal_output +698,1806279,"TERMINAL",0,0,"queue",,terminal_command +699,1806310,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Mon Oct 6 14:07:25 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3543728 accelerat preproce tum_cte0 R 18:29:55\t 1 hkn04253543729 accelerat preproce tum_cte0 R 18:29:55\t 1 hkn06323543730 accelerat preproce tum_cte0 R 18:29:55\t 1 hkn06323546199 dev_accel interact tum_cte0 R26:35\t 1 hkn0401",,terminal_output +700,1806311,"TERMINAL",0,0,"Using device: cuda\r\n2025-10-06 14:07:25.031 | INFO  | __main__::58 - Using device: cuda\r\nAL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/vec_env/patch_gym.py:49: UserWarning: You provided an OpenAI Gym environment. We strongly recommend transitioning to Gymnasium environments. Stable-Baselines3 is automatically wrapping your environments in a compatibility layer, which could potentially cause issues.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/save_util.py:167: UserWarning: Could not deserialize object clip_range. 
Consider using `custom_objects` argument to replace this object.\r\nException: code expected at most 16 arguments, got 18\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/save_util.py:167: UserWarning: Could not deserialize object lr_schedule. Consider using `custom_objects` argument to replace this object.\r\nException: code expected at most 16 arguments, got 18\r\n warnings.warn(\r\n",,terminal_output +701,1807014,"TERMINAL",0,0,"Model loaded from /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip\r\n",,terminal_output +702,1807198,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/vec_env/patch_gym.py:49: UserWarning: You provided an OpenAI Gym environment. We strongly recommend transitioning to Gymnasium environments. Stable-Baselines3 is automatically wrapping your environments in a compatibility layer, which could potentially cause issues.\r\n warnings.warn(\r\n",,terminal_output +703,1807273,"TERMINAL",0,0,"66666",,terminal_output +704,1807425,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +705,1807483,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom",,terminal_output +706,1807570,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +707,1807785,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +708,1807953,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +709,1808120,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +710,1808287,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +711,1808465,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +712,1808644,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +713,1808845,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +714,1809010,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +715,1809391,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\nAL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output 
+716,1809598,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +717,1809754,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +718,1809927,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +719,1810116,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +720,1810280,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +721,1810487,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +722,1810638,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +723,1810865,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +724,1811001,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +725,1811191,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +726,1811364,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +727,1811547,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +728,1811731,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +729,1811907,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +730,1812091,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +731,1812275,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +732,1812465,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +733,1812645,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +734,1812844,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +735,1813030,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +736,1813972,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to 
initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\nAL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\nAL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\nAL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\nAL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +737,1814124,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +738,1814276,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +739,1814456,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +740,1814639,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +741,1814846,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +742,1815002,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +743,1815220,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +744,1815376,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +745,1815557,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +746,1815741,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +747,1815939,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +748,1816108,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +749,1816287,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +750,1816489,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +751,1816652,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +752,1816844,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +753,1817038,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n 
]\r\n",,terminal_output +754,1817204,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +755,1817381,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +756,1817567,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +757,1817756,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +758,1817931,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +759,1818120,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +760,1818299,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +761,1818496,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +762,1818690,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +763,1818856,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +764,1819038,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +765,1819221,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +766,1819409,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +767,1819605,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +768,1819816,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +769,1820187,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\nAL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +770,1820345,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +771,1820515,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +772,1820702,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +773,1820899,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend 
""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +774,1821072,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +775,1821254,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +776,1821443,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +777,1821631,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +778,1821817,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +779,1822007,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +780,1822179,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +781,1822457,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +782,1822587,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +783,1822875,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +784,1822977,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +785,1823109,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +786,1823290,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +787,1823473,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +788,1823654,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +789,1823840,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +790,1824027,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +791,1824205,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +792,1824393,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +793,1824597,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n 
]\r\n",,terminal_output +794,1824768,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +795,1824978,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +796,1825135,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +797,1825327,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +798,1909010,"TERMINAL",0,0,"srun",,terminal_focus +799,1910127,"TERMINAL",0,0,"bash",,terminal_focus +800,1923963,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_120x160.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --partition=accelerated\n#SBATCH --gres=gpu:1\n#SBATCH --cpus-per-task=16\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/doom/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/doom/%x_%j.log\n#SBATCH --job-name=preprocess_doom_10m_120x160\n\n# export PYTHONUNBUFFERED=1\n\n\n# source .venv/bin/activate\n\nsource .venv/bin/activate\n\npython jasmine_data/ViZDoomPPO/load_model_generate_dataset.py \\n --output_dir /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed \\n --agent_path /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip \\n --num_episodes_train 10_000 \\n --target_width 160 \\n --target_height 120 \\n \n\n",shellscript,tab +801,1925344,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --partition=accelerated\n#SBATCH --gres=gpu:1\n#SBATCH --cpus-per-task=16\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/doom/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/doom/%x_%j.log\n#SBATCH --job-name=preprocess_doom_10m\n\n# export PYTHONUNBUFFERED=1\n\n\n# source .venv/bin/activate\n\nsource .venv/bin/activate\n\npython jasmine_data/ViZDoomPPO/load_model_generate_dataset.py \\n --output_dir /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m \\n --agent_path /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip \\n --num_episodes_train 10_000 \\n \n\n",shellscript,tab +802,1928165,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_60x80.sh",0,0,"",shellscript,tab +803,2020487,"TERMINAL",0,0,"srun",,terminal_focus +804,2025348,"TERMINAL",0,0,"bash",,terminal_focus +805,2027215,"TERMINAL",0,0,"ls",,terminal_command +806,2027253,"TERMINAL",0,0,"]633;C",,terminal_output +807,2027349,"TERMINAL",0,0,"dev doom_episodes_10m_low_res_bak doom_episodes_50m\r\ndev_bak doom_episodes_1m doom_episodes_50m_120x160_fixed\r\ndoom_episodes_10m doom_episodes_1m_120x160_fixed doom_episodes_50m_60x80_fixed\r\ndoom_episodes_10m_120x160_fixed doom_episodes_1m_60x80 doom_episodes_50m_bak\r\ndoom_episodes_10m_60x80_fixed 
doom_episodes_1m_bak doom_episodes_50m_low_res_bak\r\ndoom_episodes_10m_bak doom_episodes_1m_low_res_bak\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom",,terminal_output +808,2035097,"TERMINAL",0,0,"Warning: Inconsistent chunk_sizes. Episode has 40 frames, which is smaller than the requested chunk_size: 160. This might lead to performance degradation during training.\r\n",,terminal_output +809,2047815,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/val/data_0000.array_record with 100 video chunks\r\n",,terminal_output +810,2057338,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/val/data_0001.array_record with 100 video chunks\r\n",,terminal_output +811,2066844,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/val/data_0002.array_record with 100 video chunks\r\n",,terminal_output +812,2076465,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/val/data_0003.array_record with 100 video chunks\r\n",,terminal_output +813,2080677,"TERMINAL",0,0,"cp -r doom_episodes_10m_60x80_fixed/val doom_episodes_1m_60x80/",,terminal_command +814,2080746,"TERMINAL",0,0,"]633;C",,terminal_output +815,2082658,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom",,terminal_output +816,2086122,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/val/data_0004.array_record with 100 video chunks\r\n",,terminal_output +817,2095741,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/val/data_0005.array_record with 100 video chunks\r\n",,terminal_output +818,2097204,"TERMINAL",0,0,"ls doom_episodes_1m_60x80/",,terminal_command +819,2097995,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/val/data_0006.array_record with 100 video chunks\r\nEpisode 0 completed, length: 1000.\r\nTotal number of frames until now: 112000\r\n",,terminal_output +820,2099004,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +821,2099340,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +822,2099492,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +823,2099750,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +824,2099974,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +825,2100257,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +826,2100477,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n 
]\r\n",,terminal_output +827,2100719,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +828,2100950,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +829,2101170,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\n",,terminal_output +830,2101222,"TERMINAL",0,0,"Built action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +831,2101428,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +832,2101665,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +833,2101900,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +834,2102144,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +835,2102410,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +836,2102628,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +837,2102913,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +838,2103141,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +839,2103350,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +840,2103604,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +841,2103833,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +842,2104105,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +843,2104344,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +844,2104555,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +845,2104790,"TERMINAL",0,0,"cp -r doom_episodes_10m_60x80_fixed/test doom_episodes_1m_60x80/",,terminal_command +846,2104840,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +847,2104840,"TERMINAL",0,0,"]633;C",,terminal_output +848,2105076,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output 
+849,2105282,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +850,2105526,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +851,2105770,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +852,2106008,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +853,2106245,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +854,2106491,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +855,2106653,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom",,terminal_output +856,2106732,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +857,2107016,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +858,2107215,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +859,2107460,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +860,2107693,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +861,2108030,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +862,2108179,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +863,2108417,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +864,2108662,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +865,2108980,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +866,2109146,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +867,2109403,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +868,2109637,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +869,2109873,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action 
space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +870,2110122,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +871,2110384,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +872,2110600,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +873,2110845,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +874,2111105,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +875,2111416,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +876,2111565,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +877,2111821,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +878,2112062,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +879,2112289,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +880,2112531,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +881,2112838,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +882,2113031,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +883,2113257,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +884,2113499,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +885,2113763,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +886,2114003,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +887,2114220,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +888,2114476,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +889,2114720,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output 
+890,2114945,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +891,2115220,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +892,2115431,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +893,2115706,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +894,2115914,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +895,2116163,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +896,2116422,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +897,2116642,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +898,2116894,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +899,2117150,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +900,2117451,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +901,2117623,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +902,2117848,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +903,2118116,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +904,2118383,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +905,2118581,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +906,2118824,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +907,2119091,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +908,2119305,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +909,2119601,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +910,2119787,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to 
initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +911,2120060,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +912,2120272,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +913,2120526,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +914,2120756,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +915,2121038,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +916,2121239,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +917,2121506,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +918,2121764,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +919,2121968,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +920,2122238,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +921,2122458,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +922,2122710,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +923,2122957,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n",,terminal_output +924,2134827,"TERMINAL",0,0,"cp -r doom_episodes_10m_120x160_fixed/val doom_episodes_1m_120x160_fixed/",,terminal_command +925,2134890,"TERMINAL",0,0,"]633;C",,terminal_output +926,2140551,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom",,terminal_output +927,2146999,"TERMINAL",0,0,"cp -r doom_episodes_10m_120x160_fixed/test doom_episodes_1m_120x160_fixed/",,terminal_command +928,2331672,"TERMINAL",0,0,"Warning: Inconsistent chunk_sizes. Episode has 40 frames, which is smaller than the requested chunk_size: 160. 
This might lead to performance degradation during training.\r\n",,terminal_output +929,2344488,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/test/data_0000.array_record with 100 video chunks\r\n",,terminal_output +930,2354087,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/test/data_0001.array_record with 100 video chunks\r\n",,terminal_output +931,2360480,"TERMINAL",0,0,"ls doom_episodes_1m_120x160_fixed/",,terminal_command +932,2363647,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/test/data_0002.array_record with 100 video chunks\r\n",,terminal_output +933,2363817,"TERMINAL",0,0,"ls doom_episodes_1m_120x160_fixed/val/",,terminal_command +934,2367024,"TERMINAL",0,0,"ls doom_episodes_1m_120x160_fixed/test/",,terminal_command +935,2373215,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/test/data_0003.array_record with 100 video chunks\r\n",,terminal_output +936,2382873,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/test/data_0004.array_record with 100 video chunks\r\n",,terminal_output +937,2392355,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/test/data_0005.array_record with 100 video chunks\r\n",,terminal_output +938,2394761,"TERMINAL",0,0,"Created /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/test/data_0006.array_record with 100 video chunks\r\nEpisode 0 completed, length: 1000.\r\nTotal number of frames until now: 112000\r\n",,terminal_output +939,2395477,"TERMINAL",0,0,"Done generating dataset.\r\n",,terminal_output +940,2396241,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine/data[?2004h[tum_cte0515@hkn0401 data]$ ",,terminal_output +941,2422039,"TERMINAL",0,0,"cp -r doom_episodes_10m_120x160_fixed/test doom_episodes_1m_120x160_fixed/",,terminal_command +942,2422089,"TERMINAL",0,0,"]633;C",,terminal_output +943,2427980,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom",,terminal_output +944,2433726,"TERMINAL",0,0,"srun",,terminal_focus +945,2434727,"TERMINAL",0,0,"sh ../slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_120x160.sh ",,terminal_output +946,2437527,"TERMINAL",0,0,"",,terminal_output +947,2437676,"TERMINAL",0,0,"\r",,terminal_output +948,2437922,"TERMINAL",0,0,"\r\n\r",,terminal_output +949,2438107,"TERMINAL",0,0,"",,terminal_output +950,2438385,"TERMINAL",0,0,"",,terminal_output +951,2438573,"TERMINAL",0,0,"",,terminal_output +952,2438699,"TERMINAL",0,0,"",,terminal_output +953,2439013,"TERMINAL",0,0,"",,terminal_output +954,2439145,"TERMINAL",0,0,"",,terminal_output +955,2439278,"TERMINAL",0,0,"",,terminal_output +956,2439432,"TERMINAL",0,0,"",,terminal_output +957,2439882,"TERMINAL",0,0,"",,terminal_output +958,2440110,"TERMINAL",0,0,".",,terminal_output +959,2440258,"TERMINAL",0,0,"sh ",,terminal_output +960,2440904,"TERMINAL",0,0,"",,terminal_output +961,2441462,"TERMINAL",0,0,"",,terminal_output +962,2441686,"TERMINAL",0,0,"",,terminal_output +963,2442291,"TERMINAL",0,0,"",,terminal_output +964,2442622,"TERMINAL",0,0,"",,terminal_output +965,2442705,"TERMINAL",0,0,"",,terminal_output 
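The preprocessing run above logs a chunk-size warning ("Episode has 40 frames, which is smaller than the requested chunk_size: 160"). Below is a minimal, hypothetical sketch of the consistency check behind that message; the function name and call site are assumptions for illustration, not the actual jasmine data-pipeline code.

```python
# Hypothetical sketch of the chunk-size consistency check seen in the log above.
def check_chunk_size(episode_len: int, chunk_size: int) -> None:
    """Warn when an episode is too short to fill a single chunk."""
    if episode_len < chunk_size:
        print(
            f"Warning: Inconsistent chunk_sizes. Episode has {episode_len} frames, "
            f"which is smaller than the requested chunk_size: {chunk_size}. "
            "This might lead to performance degradation during training."
        )


check_chunk_size(40, 160)  # reproduces the message seen in the terminal output
```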
+966,2443191,"TERMINAL",0,0,"[1@s",,terminal_output +967,2443273,"TERMINAL",0,0,"[1@b",,terminal_output +968,2443386,"TERMINAL",0,0,"[1@a",,terminal_output +969,2443493,"TERMINAL",0,0,"[1@t",,terminal_output +970,2443612,"TERMINAL",0,0,"[1@c",,terminal_output +971,2443668,"TERMINAL",0,0,"[1@h",,terminal_output +972,2443969,"TERMINAL",0,0,"\r\n[?2004l\rSubmitted batch job 3546292\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine/data[?2004h[tum_cte0515@hkn0401 data]$ ",,terminal_output +973,2444638,"TERMINAL",0,0,"q",,terminal_output +974,2444801,"TERMINAL",0,0,"u",,terminal_output +975,2444982,"TERMINAL",0,0,"eu",,terminal_output +976,2445136,"TERMINAL",0,0,"e",,terminal_output +977,2445316,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0401.localdomain: Mon Oct 6 14:18:04 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3546292 accelerat preproce tum_cte0 PD\t0:00\t 1 (Priority)3543728 accelerat preproce tum_cte0 R 18:40:34\t 1 hkn04253543729 accelerat preproce tum_cte0 R 18:40:34\t 1 hkn06323543730 accelerat preproce tum_cte0 R 18:40:34\t 1 hkn06323546199 dev_accel interact tum_cte0 R37:14\t 1 hkn0401",,terminal_output +978,2446297,"TERMINAL",0,0,"55555",,terminal_output +979,2447314,"TERMINAL",0,0,"66666",,terminal_output +980,2448232,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0401:~/Projects/jasmine/data[?2004h[tum_cte0515@hkn0401 data]$ ",,terminal_output +981,2450409,"TERMINAL",0,0,"c",,terminal_output +982,2450919,"TERMINAL",0,0,"d",,terminal_output +983,2451077,"TERMINAL",0,0," ",,terminal_output +984,2451142,"TERMINAL",0,0,".",,terminal_output +985,2451291,"TERMINAL",0,0,".",,terminal_output +986,2451469,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h[tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +987,2451624,"TERMINAL",0,0,"s",,terminal_output +988,2451824,"TERMINAL",0,0,"o",,terminal_output +989,2451935,"TERMINAL",0,0,"u",,terminal_output +990,2452015,"TERMINAL",0,0,"r",,terminal_output +991,2452238,"TERMINAL",0,0,"c",,terminal_output +992,2452577,"TERMINAL",0,0,"e",,terminal_output +993,2452658,"TERMINAL",0,0," ",,terminal_output +994,2452836,"TERMINAL",0,0,".",,terminal_output +995,2452993,"TERMINAL",0,0,"",,terminal_output +996,2453299,"TERMINAL",0,0,"b",,terminal_output +997,2453449,"TERMINAL",0,0,"",,terminal_output +998,2453992,"TERMINAL",0,0,"",,terminal_output +999,2454490,"TERMINAL",0,0,"v",,terminal_output +1000,2454569,"TERMINAL",0,0,"env/",,terminal_output +1001,2454923,"TERMINAL",0,0,"",,terminal_output +1002,2455002,"TERMINAL",0,0,"b",,terminal_output +1003,2455495,"TERMINAL",0,0,"",,terminal_output +1004,2455776,"TERMINAL",0,0,"b",,terminal_output +1005,2455855,"TERMINAL",0,0,"in/",,terminal_output +1006,2456175,"TERMINAL",0,0,"a",,terminal_output +1007,2456348,"TERMINAL",0,0,"c",,terminal_output +1008,2456498,"TERMINAL",0,0,"tivate",,terminal_output +1009,2457763,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1010,2458623,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +1011,2458910,"TERMINAL",0,0,"s': source .venv/bin/activate\r",,terminal_output +1012,2458988,"TERMINAL",0,0,"h': sbatch ../slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh ",,terminal_output +1013,2459567,"TERMINAL",0,0,"\r ': sbatch ../slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh ",,terminal_output +1014,2460115,"TERMINAL",0,0,"\r[4@h 
../slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_120x160",,terminal_output +1015,2461322,"TERMINAL",0,0,"\rsh ../slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_120x160.sh \r",,terminal_output +1016,2461759,"TERMINAL",0,0,"60x80.sh\r",,terminal_output +1017,2462176,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_60x80.sh\r",,terminal_output +1018,2463554,"TERMINAL",0,0,"doom/resolution80x60/train_tokenizer_default_1gpu.sh",,terminal_output +1019,2465191,"TERMINAL",0,0,"\rjasmine) [tum_cte0515@hkn0401 jasmine]$ sh slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh\r\n\r\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=05:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\r\n#SBATCH --job-name=train_tokenizer_default_single_gpu_60x80\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/val\r\n\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\nsrun python jasmine/train_tokenizer.py \\r\n --save_ckpt \\r\n --image_height=60 \\r\n --image_width=80 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=48 \\r\n --init_lr=0 \\r\n --log \\r\n --name=doom-tokenizer-default-60x80-$slurm_job_id \\r\n --tags tokenizer doom default 60x80 \\r\n --entity instant-uv \\r\n --project jafar \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val\r\n",,terminal_output 
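The tokenizer run launched by the sbatch script above later prints per-component parameter totals ("Parameter counts: {'decoder': ..., 'encoder': ..., 'vq': ..., 'total': ...}"). A minimal sketch of how such a breakdown can be computed over arbitrary parameter pytrees is shown below; the component dict and array shapes are placeholders, not the real tokenizer modules.

```python
import jax
import jax.numpy as jnp


def count_params(pytree) -> int:
    """Sum array sizes across all leaves of a parameter pytree."""
    return sum(int(x.size) for x in jax.tree_util.tree_leaves(pytree))


# Placeholder component pytrees; the real run reports
# {'decoder': 16858736, 'encoder': 16858752, 'vq': 32768, 'total': 33750256}.
components = {
    "encoder": {"w": jnp.zeros((512, 512)), "b": jnp.zeros((512,))},
    "decoder": {"w": jnp.zeros((512, 512)), "b": jnp.zeros((512,))},
    "vq": {"codebook": jnp.zeros((1024, 32))},
}
counts = {name: count_params(p) for name, p in components.items()}
counts["total"] = sum(counts.values())
print("Parameter counts:")
print(counts)
```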
+1020,2465318,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=310924\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1759750850\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1759754450\r\nSLURM_PMI2_SRUN_PORT=42403\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3546199\r\nSLURM_PTY_PORT=35939\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=31\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=103\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=40421\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3546199\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=40421\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +1021,2465443,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +1022,2472697,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +1023,2473442,"TERMINAL",0,0,"wandb: creating run\r\n",,terminal_output +1024,2473575,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.22.0\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20251006_141831-nvb9t0a1\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run doom-tokenizer-default-60x80-3546199\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/nvb9t0a1\r\n",,terminal_output +1025,2474950,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['decoder', 'encoder', 'vq']\r\nParameter counts:\r\n{'decoder': 16858736, 'encoder': 16858752, 'vq': 32768, 'total': 33750256}\r\n",,terminal_output +1026,2655352,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3546199.1 task 0: running\r\n",,terminal_output +1027,2655684,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3546199.1\r\nsrun: forcing job termination\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-4:\r\nTraceback (most recent call last):\r\nTraceback (most recent call last):\r\nTraceback (most recent call last):\r\nTraceback (most recent call last):\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nKeyboardInterrupt\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nKeyboardInterrupt\r\nTraceback (most recent 
call last):\r\nTraceback (most recent call last):\r\nTraceback (most recent call last):\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nKeyboardInterrupt\r\nKeyboardInterrupt\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nKeyboardInterrupt\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\nTraceback (most recent call last):\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nKeyboardInterrupt\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nKeyboardInterrupt\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nKeyboardInterrupt\r\nTotal memory size: 13.1 GB, Output size: 0.4 GB, Temp size: 12.6 GB, Argument size: 0.4 GB, Host temp size: 0.0 GB.\r\nFLOPs: 8.200e+12, Bytes: 3.600e+11 (335.2 GB), Intensity: 22.8 FLOPs/byte\r\nStarting training from step 0...\r\n\r\nMemstats: After params initialized:\r\n\tUsing (GB) 0.45 / 35.55 (1.265823%) on cuda:0\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_tokenizer.py"", line 574, in \r\n main(args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_tokenizer.py"", line 488, in main\r\n wandb.log(log_dict)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/wandb/sdk/wandb_run.py"", line 399, in 
wrapper\r\n return func(self, *args, **kwargs)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/wandb/sdk/wandb_run.py"", line 457, in wrapper_fn\r\n return func(self, *args, **kwargs)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/wandb/sdk/wandb_run.py"", line 444, in wrapper\r\n return func(self, *args, **kwargs)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/wandb/sdk/wandb_run.py"", line 2028, in log\r\n self._log(data=data, step=step, commit=commit)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/wandb/sdk/wandb_run.py"", line 1739, in _log\r\n self._partial_history_callback(data, step, commit)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/wandb/sdk/wandb_run.py"", line 399, in wrapper\r\n return func(self, *args, **kwargs)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/wandb/sdk/wandb_run.py"", line 1566, in _partial_history_callback\r\n self._backend.interface.publish_partial_history(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/wandb/sdk/interface/interface.py"", line 694, in publish_partial_history\r\n item.value_json = json_dumps_safer_history(v)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/wandb/util.py"", line 874, in json_dumps_safer_history\r\n return dumps(obj, cls=WandBHistoryJSONEncoder, **kwargs)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/usr/lib64/python3.12/json/__init__.py"", line 238, in dumps\r\n **kw).encode(obj)\r\n ^^^^^^^^^^^\r\n File ""/usr/lib64/python3.12/json/encoder.py"", line 200, in encode\r\n chunks = self.iterencode(o, _one_shot=True)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/usr/lib64/python3.12/json/encoder.py"", line 258, in iterencode\r\n return _iterencode(o, 0)\r\n ^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/wandb/util.py"", line 833, in default\r\n obj, converted = json_friendly(obj)\r\n ^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/wandb/util.py"", line 647, in json_friendly\r\n obj = get_jax_tensor(obj)\r\n ^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/wandb/util.py"", line 462, in get_jax_tensor\r\n return jax.device_get(obj)\r\n ^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/api.py"", line 2931, in device_get\r\n return tree_map(_device_get, x)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/tree_util.py"", line 361, in tree_map\r\n return treedef.unflatten(f(*xs) for xs in zip(*all_leaves))\r\n 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/tree_util.py"", line 361, in \r\n return treedef.unflatten(f(*xs) for xs in zip(*all_leaves))\r\n ^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/api.py"", line 2892, in _device_get\r\n return toarray()\r\n ^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/array.py"", line 447, in __array__\r\n return np.asarray(self._value, dtype=dtype, **kwds)\r\n ^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/profiler.py"", line 364, in wrapper\r\n return func(*args, **kwargs)\r\n ^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/array.py"", line 647, in _value\r\n npy_value, did_copy = self._single_device_array_to_np_array_did_copy()\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nKeyboardInterrupt\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +1028,2655761,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n^Csrun: sending Ctrl-C to 
StepId=3546199.1\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nsrun: job abort in progress\r\nslurmstepd: error: *** STEP 3546199.1 ON hkn0401 CANCELLED AT 2025-10-06T14:21:34 ***\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +1029,2655942,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3546199.1\r\n",,terminal_output +1030,2656093,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1031,2656919,"TERMINAL",0,0,"g",,terminal_output +1032,2656971,"TERMINAL",0,0,"i",,terminal_output +1033,2657065,"TERMINAL",0,0,"t",,terminal_output +1034,2657118,"TERMINAL",0,0," ",,terminal_output +1035,2657310,"TERMINAL",0,0,"s",,terminal_output +1036,2657462,"TERMINAL",0,0,"t",,terminal_output +1037,2657614,"TERMINAL",0,0,"a",,terminal_output +1038,2657714,"TERMINAL",0,0,"t",,terminal_output +1039,2657814,"TERMINAL",0,0,"u",,terminal_output +1040,2657937,"TERMINAL",0,0,"s",,terminal_output +1041,2658016,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1042,2658431,"TERMINAL",0,0,"On branch vizdoom-dataset\r\nYour branch is up to date with 'origin/vizdoom-dataset'.\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py\r\n\tmodified: jasmine/train_tokenizer.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\t checklist.md\r\n\tali-old-branch.diff\r\n\tdata/_vizdoom.ini\r\n\tdata/jasmine_data/ViZDoomPPO/_vizdoom.ini\r\n\tdata/jasmine_data/ViZDoomPPO/load_model_generate_dataset_fast.py\r\n\tdata/jasmine_data/ViZDoomPPO/logs/tensorboard/\r\n\tdata/jasmine_data/_vizdoom/\r\n\tdata/uv.lock\r\n\tdataset_duplicates.ipynb\r\n\tdiff.diff\r\n\tdiff2.diff\r\n\tgifs/\r\n\tinput_pipeline/\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\tmessage.md\r\n\toverfit_dir.zip\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\ttest.py\r\n\tutils/\r\n\tuv.lock\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1043,2662811,"TERMINAL",0,0,"f",,terminal_output +1044,2662864,"TERMINAL",0,0,"i",,terminal_output +1045,2663011,"TERMINAL",0,0,"r",,terminal_output +1046,2663203,"TERMINAL",0,0," ",,terminal_output +1047,2663518,"TERMINAL",0,0,"d",,terminal_output 
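The interrupted traceback above ends inside `wandb.log`, where wandb's JSON encoder calls `jax.device_get` on a logged JAX array and blocks on the device-to-host copy. A hedged workaround sketch: convert metrics to plain Python floats before handing them to wandb, so the transfer happens explicitly in the training loop rather than inside wandb's encoder. The helper name and usage are illustrative, not the train_tokenizer.py implementation.

```python
import jax
import jax.numpy as jnp


def to_python_scalars(metrics: dict) -> dict:
    """Pull JAX scalars to host and convert them to plain floats before logging.

    Doing the device-to-host transfer explicitly keeps wandb.log() from blocking
    inside its JSON encoder (the jax.device_get call visible in the traceback).
    """
    return {
        k: float(jax.device_get(v)) if isinstance(v, jax.Array) else v
        for k, v in metrics.items()
    }


log_dict = to_python_scalars({"loss": jnp.asarray(0.123), "step": 42})
print(log_dict)
# hypothetical usage inside the training loop:
# wandb.log(to_python_scalars({"loss": loss, ...}))
```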
+1048,2663778,"TERMINAL",0,0,"",,terminal_output +1049,2663840,"TERMINAL",0,0,"",,terminal_output +1050,2663988,"TERMINAL",0,0,"",,terminal_output +1051,2664153,"TERMINAL",0,0,"",,terminal_output +1052,2664274,"TERMINAL",0,0,"",,terminal_output +1053,2664396,"TERMINAL",0,0,"",,terminal_output +1054,2664780,"TERMINAL",0,0,"g",,terminal_output +1055,2664833,"TERMINAL",0,0,"i",,terminal_output +1056,2664913,"TERMINAL",0,0,"t",,terminal_output +1057,2665138,"TERMINAL",0,0," ",,terminal_output +1058,2665314,"TERMINAL",0,0,"d",,terminal_output +1059,2665504,"TERMINAL",0,0,"i",,terminal_output +1060,2665658,"TERMINAL",0,0,"f",,terminal_output +1061,2665895,"TERMINAL",0,0,"f",,terminal_output +1062,2665993,"TERMINAL",0,0," ",,terminal_output +1063,2666719,"TERMINAL",0,0,"j",,terminal_output +1064,2666771,"TERMINAL",0,0,"a",,terminal_output +1065,2667509,"TERMINAL",0,0,"s",,terminal_output +1066,2667771,"TERMINAL",0,0,"mine/",,terminal_output +1067,2668659,"TERMINAL",0,0,"t",,terminal_output +1068,2668737,"TERMINAL",0,0,"r",,terminal_output +1069,2668874,"TERMINAL",0,0,"ain_",,terminal_output +1070,2669763,"TERMINAL",0,0,"t",,terminal_output +1071,2669841,"TERMINAL",0,0,"o",,terminal_output +1072,2670019,"TERMINAL",0,0,"kenizer.py",,terminal_output +1073,2670516,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\rdiff --git a/jasmine/train_tokenizer.py b/jasmine/train_tokenizer.py\r\nindex 33206b4..19645c6 100644\r\n--- a/jasmine/train_tokenizer.py\r\n+++ b/jasmine/train_tokenizer.py\r\n@@ -1,6 +1,6 @@\r\n import os\r\n \r\n-os.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\r\n+os.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.90"")\r\n \r\n from dataclasses import dataclass, field\r\n from typing import cast, Optional\r\n\r[?1l>]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1074,2672198,"TERMINAL",0,0,"g",,terminal_output +1075,2672295,"TERMINAL",0,0,"it",,terminal_output +1076,2672430,"TERMINAL",0,0," ",,terminal_output +1077,2672511,"TERMINAL",0,0,"c",,terminal_output +1078,2672563,"TERMINAL",0,0,"o",,terminal_output +1079,2672711,"TERMINAL",0,0,"m",,terminal_output +1080,2672911,"TERMINAL",0,0,"m",,terminal_output +1081,2672990,"TERMINAL",0,0,"i",,terminal_output +1082,2673135,"TERMINAL",0,0,"t",,terminal_output +1083,2673249,"TERMINAL",0,0," ",,terminal_output +1084,2673392,"TERMINAL",0,0,"-",,terminal_output +1085,2673480,"TERMINAL",0,0,"a",,terminal_output +1086,2673680,"TERMINAL",0,0,"m ",,terminal_output +1087,2675475,"TERMINAL",0,0,"""",,terminal_output +1088,2681883,"TERMINAL",0,0,"m",,terminal_output +1089,2682178,"TERMINAL",0,0,"o",,terminal_output +1090,2683337,"TERMINAL",0,0,"d",,terminal_output +1091,2683416,"TERMINAL",0,0,"i",,terminal_output +1092,2683928,"TERMINAL",0,0,"f",,terminal_output +1093,2683982,"TERMINAL",0,0,"i",,terminal_output +1094,2684185,"TERMINAL",0,0,"e",,terminal_output +1095,2684343,"TERMINAL",0,0,"d",,terminal_output +1096,2684423,"TERMINAL",0,0," ",,terminal_output +1097,2684670,"TERMINAL",0,0,"t",,terminal_output +1098,2685386,"TERMINAL",0,0,"",,terminal_output +1099,2685649,"TERMINAL",0,0,"g",,terminal_output +1100,2685703,"TERMINAL",0,0,"e",,terminal_output +1101,2685834,"TERMINAL",0,0,"n",,terminal_output +1102,2685932,"TERMINAL",0,0,"e",,terminal_output +1103,2686032,"TERMINAL",0,0,"r",,terminal_output +1104,2686218,"TERMINAL",0,0,"a",,terminal_output +1105,2686366,"TERMINAL",0,0,"t",,terminal_output +1106,2686658,"TERMINAL",0,0,"e",,terminal_output +1107,2686781,"TERMINAL",0,0," 
",,terminal_output +1108,2686933,"TERMINAL",0,0,"d",,terminal_output +1109,2687144,"TERMINAL",0,0,"at",,terminal_output +1110,2687287,"TERMINAL",0,0,"a",,terminal_output +1111,2687343,"TERMINAL",0,0,"s",,terminal_output +1112,2687572,"TERMINAL",0,0,"et",,terminal_output +1113,2687653,"TERMINAL",0,0," ",,terminal_output +1114,2687835,"TERMINAL",0,0,"s",,terminal_output +1115,2687989,"TERMINAL",0,0,"c",,terminal_output +1116,2688223,"TERMINAL",0,0,"r",,terminal_output +1117,2688277,"TERMINAL",0,0,"i",,terminal_output +1118,2688479,"TERMINAL",0,0,"p",,terminal_output +1119,2688558,"TERMINAL",0,0,"t",,terminal_output +1120,2688851,"TERMINAL",0,0,"""",,terminal_output +1121,2689241,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1122,2690101,"TERMINAL",0,0,"black....................................................................",,terminal_output +1123,2691307,"TERMINAL",0,0,"Failed\r\n- hook id: black\r\n- files were modified by this hook\r\n\r\nreformatted data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py\r\n\r\nAll done! ✨ 🍰 ✨\r\n1 file reformatted, 1 file left unchanged.\r\n\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1124,2692065,"TERMINAL",0,0,"git commit -am ""modified generate dataset script""",,terminal_output +1125,2692229,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1126,2692515,"TERMINAL",0,0,"black....................................................................",,terminal_output +1127,2692672,"TERMINAL",0,0,"Passed\r\n",,terminal_output +1128,2693059,"TERMINAL",0,0,"[vizdoom-dataset ce1591b] modified generate dataset script\r\n 2 files changed, 54 insertions(+), 36 deletions(-)\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1129,2693429,"TERMINAL",0,0,"g",,terminal_output +1130,2693531,"TERMINAL",0,0,"i",,terminal_output +1131,2693559,"TERMINAL",0,0,"t",,terminal_output +1132,2693616,"TERMINAL",0,0," ",,terminal_output +1133,2693767,"TERMINAL",0,0,"p",,terminal_output +1134,2693974,"TERMINAL",0,0,"u",,terminal_output +1135,2694436,"TERMINAL",0,0,"",,terminal_output +1136,2694601,"TERMINAL",0,0,"",,terminal_output +1137,2694756,"TERMINAL",0,0,"",,terminal_output +1138,2695646,"TERMINAL",0,0," ",,terminal_output +1139,2697313,"TERMINAL",0,0,"p",,terminal_output +1140,2697938,"TERMINAL",0,0,"us",,terminal_output +1141,2698083,"TERMINAL",0,0,"h",,terminal_output +1142,2699516,"TERMINAL",0,0,"",,terminal_output +1143,2700141,"TERMINAL",0,0,"c",,terminal_output +1144,2700954,"TERMINAL",0,0,"",,terminal_output +1145,2701192,"TERMINAL",0,0,"m",,terminal_output +1146,2701336,"TERMINAL",0,0,"e",,terminal_output +1147,2701429,"TERMINAL",0,0,"r",,terminal_output +1148,2701524,"TERMINAL",0,0,"g",,terminal_output +1149,2701610,"TERMINAL",0,0,"e",,terminal_output +1150,2701690,"TERMINAL",0,0," ",,terminal_output +1151,2703369,"TERMINAL",0,0,"a",,terminal_output +1152,2703564,"TERMINAL",0,0,"c",,terminal_output +1153,2703734,"TERMINAL",0,0,"t",,terminal_output +1154,2703840,"TERMINAL",0,0,"i",,terminal_output +1155,2703923,"TERMINAL",0,0,"o",,terminal_output +1156,2704187,"TERMINAL",0,0,"n",,terminal_output +1157,2704334,"TERMINAL",0,0,"-",,terminal_output +1158,2704880,"TERMINAL",0,0,"p",,terminal_output +1159,2705104,"TERMINAL",0,0,"r",,terminal_output +1160,2705260,"TERMINAL",0,0,"e",,terminal_output +1161,2705385,"TERMINAL",0,0,"p",,terminal_output +1162,2705542,"TERMINAL",0,0,"e",,terminal_output 
+1163,2705697,"TERMINAL",0,0,"n",,terminal_output +1164,2705833,"TERMINAL",0,0,"d",,terminal_output +1165,2705984,"TERMINAL",0,0,"-",,terminal_output +1166,2706246,"TERMINAL",0,0,"m",,terminal_output +1167,2706325,"TERMINAL",0,0,"a",,terminal_output +1168,2706431,"TERMINAL",0,0,"s",,terminal_output +1169,2706485,"TERMINAL",0,0,"k",,terminal_output +1170,2706679,"TERMINAL",0,0,"g",,terminal_output +1171,2706857,"TERMINAL",0,0,"it",,terminal_output +1172,2708814,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1173,2709225,"TERMINAL",0,0,"merge: action-prepend-maskgit - not something we can merge\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1174,2715867,"TERMINAL",0,0,"f",,terminal_output +1175,2715950,"TERMINAL",0,0,"i",,terminal_output +1176,2716101,"TERMINAL",0,0,"r",,terminal_output +1177,2716390,"TERMINAL",0,0,"",,terminal_output +1178,2716550,"TERMINAL",0,0,"",,terminal_output +1179,2716708,"TERMINAL",0,0,"",,terminal_output +1180,2716889,"TERMINAL",0,0,"g",,terminal_output +1181,2716986,"TERMINAL",0,0,"i",,terminal_output +1182,2717070,"TERMINAL",0,0,"t",,terminal_output +1183,2717124,"TERMINAL",0,0," ",,terminal_output +1184,2717360,"TERMINAL",0,0,"b",,terminal_output +1185,2717776,"TERMINAL",0,0,"r",,terminal_output +1186,2717966,"TERMINAL",0,0,"a",,terminal_output +1187,2718021,"TERMINAL",0,0,"n",,terminal_output +1188,2718174,"TERMINAL",0,0,"c",,terminal_output +1189,2718228,"TERMINAL",0,0,"h",,terminal_output +1190,2718470,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r ablation/full-precision-training\r\n ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n:",,terminal_output +1191,2720207,"TERMINAL",0,0,"\r/",,terminal_output +1192,2720878,"TERMINAL",0,0,"pp",,terminal_output +1193,2721028,"TERMINAL",0,0,"rr",,terminal_output +1194,2721177,"TERMINAL",0,0,"ee",,terminal_output +1195,2721238,"TERMINAL",0,0,"pp",,terminal_output +1196,2721385,"TERMINAL",0,0,"ee",,terminal_output +1197,2721553,"TERMINAL",0,0,"nn",,terminal_output +1198,2721638,"TERMINAL",0,0,"dd",,terminal_output +1199,2721719,"TERMINAL",0,0,"\r ablation/full-precision-training\r\n ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n ablation/full-precision-training\r\n 
ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n...skipping...\r\n prepend-action-maskgit\r\n preprocess_video\r\n refactor-full-frame-val-loss\r\n refactor-tmp\r\n remove-restore-branching\r\n revised-dataloader\r\n runner\r\n runner-grain\r\n sample-ali-branch\r\n sample-from-different-topologies\r\n sampling-script-add-metrics\r\n sampling-startframe-indexing-fix\r\n seeding-data-generation\r\n speedup-tfrecord-preprocessing\r\n train_lam_coinrun_ablation_wsd_3e-6_28747\r\n val-loss\r\n* vizdoom-dataset\r\n z-loss\r\n zloss-runs\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n(END)",,terminal_output +1200,2723099,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1201,2723669,"TERMINAL",0,0,"git branch",,terminal_output +1202,2723840,"TERMINAL",0,0,"merge action-prepend-maskgit",,terminal_output +1203,2724397,"TERMINAL",0,0,"",,terminal_output +1204,2724668,"TERMINAL",0,0,"",,terminal_output +1205,2725650,"TERMINAL",0,0,"prepend-maskgit",,terminal_output +1206,2726102,"TERMINAL",0,0,"",,terminal_output +1207,2726852,"TERMINAL",0,0,"-maskgit",,terminal_output +1208,2727079,"TERMINAL",0,0,"a-maskgit",,terminal_output +1209,2727158,"TERMINAL",0,0,"c-maskgit",,terminal_output +1210,2727387,"TERMINAL",0,0,"t-maskgit",,terminal_output +1211,2727462,"TERMINAL",0,0,"i-maskgit",,terminal_output +1212,2727543,"TERMINAL",0,0,"o-maskgit",,terminal_output +1213,2727705,"TERMINAL",0,0,"n-maskgit",,terminal_output +1214,2727938,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1215,2728394,"TERMINAL",0,0,"Auto-merging data/pyproject.toml\r\nCONFLICT (content): Merge conflict in data/pyproject.toml\r\nAuto-merging jasmine/genie.py\r\nCONFLICT (content): Merge conflict in jasmine/genie.py\r\nAuto-merging jasmine/models/dynamics.py\r\nCONFLICT (content): Merge conflict in jasmine/models/dynamics.py\r\nAuto-merging jasmine/train_dynamics.py\r\nAuto-merging jasmine/train_tokenizer.py\r\nAutomatic merge failed; fix conflicts and then commit the result.\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1216,2732159,"data/pyproject.toml",0,0,"[project]\nname = ""jasmine-data""\nversion = ""0.1.0""\nauthors = [\n {name = ""Alfred Nguyen"", email = ""alfrednguyen02@gmail.com""},\n {name = ""Mihir Mahajan"", email = ""mihir@pdoom.org""},\n {name = ""Franz Srambical"", email = ""franz@pdoom.org""},\n]\nrequires-python = ""==3.10.*""\ndependencies = [\n ""procgen>=0.10.7"",\n ""gym3>=0.3.3"",\n ""array-record>=0.7.2"",\n ""numpy"",\n ""hf-transfer==0.1.9"",\n ""huggingface-hub[cli]>=0.34.3"",\n ""ffmpeg-python==0.2.0"",\n ""pillow>=11.3.0"",\n ""tqdm>=4.67.1"",\n ""tyro>=0.8.5"",\n<<<<<<< HEAD\n ""vizdoom"",\n=======\n ""gymnasium[atari, accept-rom-license]==0.29.1"",\n ""minatar>=1.0.15"",\n ""torch>=2.8.0"",\n ""tensorboard>=2.20.0"",\n 
""opencv-python>=4.6.0.66,<5"",\n ""einops>=0.8.0"",\n>>>>>>> prepend-action-maskgit\n]\n\n[build-system]\nrequires = [""uv_build>=0.8.22,<0.9.0""]\nbuild-backend = ""uv_build""\n\n[tool.uv.build-backend]\nmodule-root = """"",plaintext,tab +1217,2740624,"data/pyproject.toml",499,230," ""vizdoom"",\n ""gymnasium[atari, accept-rom-license]==0.29.1"",\n ""minatar>=1.0.15"",\n ""torch>=2.8.0"",\n ""tensorboard>=2.20.0"",\n ""opencv-python>=4.6.0.66,<5"",\n ""einops>=0.8.0"",\n",plaintext,content +1218,2746655,"data/pyproject.toml",0,0,"",plaintext,tab +1219,2748864,"data/pyproject.toml",0,0,"",plaintext,tab +1220,2753105,"data/pyproject.toml",0,0,"",plaintext,tab +1221,2756260,"jasmine/genie.py",0,0,"from typing import Dict\n\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsCausal\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nnx.Module):\n """"""Genie model""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: int,\n lam_ffn_dim: int,\n latent_action_dim: int,\n num_actions: int,\n lam_patch_size: int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n use_gt_actions: bool,\n dyna_type: str,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n max_noise_level: float,\n noise_buckets: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim = lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_actions = num_actions\n self.lam_patch_size = lam_patch_size\n self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n self.use_gt_actions = use_gt_actions\n # --- Dynamics ---\n self.dyna_type = dyna_type\n self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.max_noise_level = max_noise_level\n self.noise_buckets = noise_buckets\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.dropout = dropout\n self.mask_limit = mask_limit\n self.decode = decode\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n if self.use_gt_actions:\n self.action_embed = nnx.Embed(\n self.num_actions, self.latent_action_dim, rngs=rngs\n )\n self.lam = None\n else:\n self.lam = LatentActionModel(\n 
in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.action_embed = None\n if self.dyna_type == ""maskgit"":\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n max_noise_level=self.max_noise_level,\n noise_buckets=self.noise_buckets,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n elif self.dyna_type == ""causal"":\n self.dynamics = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n max_noise_level=self.max_noise_level,\n noise_buckets=self.noise_buckets,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=decode,\n rngs=rngs,\n )\n else:\n raise ValueError(f""Invalid dynamics type: {self.dyna_type}"")\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> Dict[str, jax.Array]:\n videos_BTHWC = batch[""videos""]\n tokenizer_outputs = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_indices_BTN = tokenizer_outputs[""indices""]\n latent_actions_BTm11L = None\n action_embeddings_BTm11L = None\n if self.use_gt_actions:\n assert self.action_embed is not None\n action_indices_E = None\n action_embeddings_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n action_embeddings_BTm11L = action_embeddings_BT1L[:, :-1]\n else:\n assert self.lam is not None\n lam_outputs = self.lam.vq_encode(videos_BTHWC, training=False)\n z_q_BTm11L = lam_outputs[""z_q""]\n action_indices_E = lam_outputs[""indices""]\n latent_actions_BTm11L = jax.lax.cond(\n self.lam_co_train,\n lambda: z_q_BTm11L,\n lambda: jax.lax.stop_gradient(z_q_BTm11L),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(token_indices_BTN),\n latent_actions=(\n action_embeddings_BTm11L\n if self.use_gt_actions\n else latent_actions_BTm11L\n ),\n )\n outputs[""rng""] = batch[""rng""]\n dyna_logits_BTNV, dyna_mask = self.dynamics(outputs)\n outputs[""token_logits""] = dyna_logits_BTNV\n outputs[""mask""] = dyna_mask\n mle_indices_BTN = jnp.argmax(outputs[""token_logits""], axis=-1)\n H, W = batch[""videos""].shape[2:4]\n outputs[""recon""] = self.tokenizer.decode(mle_indices_BTN, (H, W))\n if action_indices_E is not None:\n outputs[""lam_indices""] = action_indices_E\n return outputs\n\n def sample(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n noise_level: float = 0.0,\n temperature: float = 1,\n sample_argmax: bool = False,\n maskgit_steps: int = 25,\n ) -> tuple[jax.Array, jax.Array]:\n assert (\n noise_level <= self.max_noise_level\n ), ""Noise level must not be greater than max_noise_level.""\n if self.dyna_type == ""maskgit"":\n return self.sample_maskgit(\n batch, seq_len, noise_level, maskgit_steps, temperature, sample_argmax\n 
)\n elif self.dyna_type == ""causal"":\n return self.sample_causal(\n batch, seq_len, noise_level, temperature, sample_argmax\n )\n else:\n raise ValueError(f""Dynamics model type unknown: {self.dyna_type}"")\n\n def sample_maskgit(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n noise_level: float,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n P: S * N\n """"""\n assert isinstance(self.dynamics, DynamicsMaskGIT)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n init_logits_BSNV = jnp.zeros(\n shape=(*token_idxs_BSN.shape, self.num_patch_latents)\n )\n noise_level = jnp.array(noise_level)\n if self.use_gt_actions:\n assert self.action_embed is not None\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n assert self.lam is not None\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n # --- Extract submodule state ---\n dynamics_state = nnx.state(self.dynamics)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def maskgit_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, mask_BSN, action_tokens_EL = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_maskgit = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n max_noise_level=self.max_noise_level,\n noise_buckets=self.noise_buckets,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_maskgit, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_maskgit.patch_embed(token_idxs_BSN)\n mask_token_111M = 
dynamics_maskgit.mask_token.value\n mask_expanded_BSN1 = mask_BSN[..., None]\n vid_embed_BSNM = jnp.where(\n mask_expanded_BSN1, mask_token_111M, vid_embed_BSNM\n )\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_maskgit.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n<<<<<<< HEAD\n\n rng, _rng_noise_augmentation = jax.random.split(rng)\n noise_level_B = jnp.tile(noise_level, B)\n _, noise_level_embed_BS1M = dynamics_maskgit.apply_noise_augmentation(\n vid_embed_BSNM, _rng_noise_augmentation, noise_level_B\n )\n\n vid_embed_BSNp2M = jnp.concatenate(\n [act_embed_BS1M, noise_level_embed_BS1M, vid_embed_BSNM], axis=2\n )\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNp2V = (\n dynamics_maskgit.transformer(vid_embed_BSNp2M) / step_temp\n )\n final_logits_BSNV = final_logits_BSNp2V[:, :, 2:]\n=======\n vid_embed_BSNp1M = jnp.concatenate([act_embed_BS1M, vid_embed_BSNM], axis=2)\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNp1V = (\n dynamics_maskgit.transformer(vid_embed_BSNp1M) / step_temp\n )\n final_logits_BSNV = final_logits_BSNp1V[:, :, 1:]\n>>>>>>> prepend-action-maskgit\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_BSN = jnp.argmax(final_logits_BSNV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_BSN = jax.random.categorical(_rng, final_logits_BSNV)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs_BSN = gather_fn(\n jax.nn.softmax(final_logits_BSNV), sampled_token_idxs_BSN\n )\n final_token_probs_BSN += ~mask_BSN\n # Update masked tokens and logits only\n token_idxs_BSN = jnp.where(mask_BSN, sampled_token_idxs_BSN, token_idxs_BSN)\n logits_BSNV = jnp.where(\n jnp.expand_dims(mask_BSN, -1), final_logits_BSNV, logits_BSNV\n )\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n final_token_probs_flat_BP = einops.rearrange(\n final_token_probs_BSN, ""b s n -> b (s n)""\n )\n idx_mask_P = (\n jnp.arange(final_token_probs_flat_BP.shape[-1])\n <= N - num_unmasked_tokens\n )\n sorted_idxs_BP = jnp.argsort(final_token_probs_flat_BP, axis=-1)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask_P))\n mask_flat_BP = einops.rearrange(mask_BSN, ""b s n -> b (s n)"")\n new_mask_flat_BP = mask_update_fn(mask_flat_BP, sorted_idxs_BP)\n new_mask_BSN = einops.rearrange(new_mask_flat_BP, ""b (s n) -> b s n"", n=N)\n\n new_carry = (\n rng,\n token_idxs_BSN,\n logits_BSNV,\n new_mask_BSN,\n action_tokens_EL,\n )\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current frame (i.e., t == step_t)\n mask_S = jnp.arange(seq_len) == step_t\n mask_BSN = jnp.broadcast_to(mask_S[None, :, None], (B, seq_len, N)).astype(\n bool\n )\n masked_token_idxs_BSN = current_token_idxs_BSN * ~mask_BSN\n masked_logits_BSNV = current_logits_BSNV * jnp.expand_dims(~mask_BSN, -1)\n\n # --- Initialize and 
run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs_BSN,\n masked_logits_BSNV,\n mask_BSN,\n action_tokens_EL,\n )\n final_carry_maskgit = maskgit_step_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs_BSN = final_carry_maskgit[1]\n updated_logits_BSNV = final_carry_maskgit[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, init_logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def sample_causal(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n noise_level: float,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n assert isinstance(self.dynamics, DynamicsCausal)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n logits_BSNV = jnp.zeros((*token_idxs_BSN.shape, self.num_patch_latents))\n dynamics_state = nnx.state(self.dynamics)\n noise_level = jnp.array(noise_level)\n\n if self.use_gt_actions:\n assert self.action_embed is not None\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n assert self.lam is not None\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def causal_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step_n: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_causal 
= DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n max_noise_level=self.max_noise_level,\n noise_buckets=self.noise_buckets,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_causal, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_causal.patch_embed(token_idxs_BSN)\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_causal.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n\n rng, _rng_noise_augmentation = jax.random.split(rng)\n noise_level_B = jnp.tile(noise_level, B)\n _, noise_level_embed_BS1M = dynamics_causal.apply_noise_augmentation(\n vid_embed_BSNM, _rng_noise_augmentation, noise_level_B\n )\n\n vid_embed_BSNp2M = jnp.concatenate(\n [act_embed_BS1M, noise_level_embed_BS1M, vid_embed_BSNM], axis=2\n )\n final_logits_BTNp2V = (\n dynamics_causal.transformer(vid_embed_BSNp2M, (step_t, step_n))\n / temperature\n )\n final_logits_BV = final_logits_BTNp2V[:, step_t, step_n + 1, :]\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_B = jnp.argmax(final_logits_BV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_B = jax.random.categorical(_rng, final_logits_BV)\n # Update next tokens only\n token_idxs_BSN = token_idxs_BSN.at[:, step_t, step_n].set(\n sampled_token_idxs_B\n )\n logits_BSNV = logits_BSNV.at[:, step_t, step_n].set(final_logits_BV)\n\n new_carry = (rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t)\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # --- Initialize and run causal loop ---\n init_carry_causal = (\n step_rng,\n current_token_idxs_BSN,\n current_logits_BSNV,\n action_tokens_EL,\n step_t,\n )\n final_carry_causal = causal_step_fn(init_carry_causal, jnp.arange(N))\n updated_token_idxs_BSN = final_carry_causal[1]\n updated_logits_BSNV = final_carry_causal[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def vq_encode(self, batch: Dict[str, jax.Array], training: bool) -> jax.Array:\n # --- Preprocess videos ---\n assert self.lam is not None\n video_BTHWC = batch[""videos""]\n lam_output: Dict[str, jax.Array] = self.lam.vq_encode(\n video_BTHWC, training=training\n )\n lam_indices_E = 
lam_output[""indices""]\n return lam_indices_E\n\n\n# FIXME (f.srambical): add conversion script for old checkpoints\ndef restore_genie_components(\n optimizer: nnx.ModelAndOptimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n) -> nnx.ModelAndOptimizer:\n """"""Restore pre-trained Genie components""""""\n rng_tokenizer, rng_lam = jax.random.split(rng)\n rngs_tokenizer = nnx.Rngs(rng_tokenizer)\n rngs_lam = nnx.Rngs(rng_lam)\n\n tx = optimizer.tx\n model = optimizer.model\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_tokenizer,\n )\n dummy_tokenizer_optimizer = nnx.ModelAndOptimizer(dummy_tokenizer, tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_tokenizer_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_lam,\n )\n dummy_lam_optimizer = nnx.ModelAndOptimizer(dummy_lam, tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n restored_lam_optimizer = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_lam_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_lam_optimizer.model, restored_lam_optimizer.model)\n model.lam = dummy_lam_optimizer.model\n # Remove the LAM decoder to save memory and avoid unnecessary computation.\n del model.lam.decoder\n lam_checkpoint_manager.close()\n\n # 
Reinitialize the optimizer states\n optimizer = nnx.ModelAndOptimizer(model, tx)\n return optimizer\n\n\ndef _create_abstract_sharded_pytree(\n pytree_template: nnx.GraphState, sharding_spec: jax.sharding.NamedSharding\n) -> jax.Array:\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab +1222,2758182,"jasmine/genie.py",0,0,"",python,tab +1223,2799653,"jasmine/genie.py",13045,45," vid_embed_BSNp1M = jnp.concatenate([act_embed_BS1M, vid_embed_BSNM], axis=2)\n",python,content +1224,2805550,"jasmine/genie.py",13267,89," final_logits_BSNp1V = (\n dynamics_maskgit.transformer(vid_embed_BSNp1M) / step_temp\n )\n final_logits_BSNV = final_logits_BSNp1V[:, :, 1:]\n",python,content +1225,2807994,"jasmine/genie.py",0,0,"",python,tab +1226,2809687,"TERMINAL",0,0,"\r(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1227,2812547,"jasmine/models/dynamics.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.nn import STTransformer, Transformer\n\n\nclass DynamicsMaskGIT(nnx.Module):\n """"""\n MaskGIT dynamics model\n\n Dimension keys:\n B: batch size\n T: sequence length\n N: number of patches per frame\n L: latent dimension\n V: vocabulary size (number of latents)\n """"""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n max_noise_level: float,\n noise_buckets: int,\n mask_limit: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.max_noise_level = max_noise_level\n self.noise_buckets = noise_buckets\n self.mask_limit = mask_limit\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.transformer = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.mask_token = nnx.Param(\n nnx.initializers.lecun_uniform()(rngs.params(), (1, 1, 1, self.model_dim))\n )\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.noise_level_embed = nnx.Embed(\n self.noise_buckets, self.model_dim, rngs=rngs\n )\n\n def apply_noise_augmentation(self, vid_embed_BTNM, rng, noise_level_B=None):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n if noise_level_B is None:\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl,\n shape=(B,),\n minval=0.0,\n maxval=self.max_noise_level,\n dtype=self.dtype,\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / 
self.max_noise_level\n ).astype(jnp.int32)\n\n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = (\n jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n + jnp.sqrt(noise_level_B111) * noise_BTNM\n )\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n\n B = vid_embed_BTNM.shape[0]\n rng, _rng_prob, *_rngs_mask = jax.random.split(batch[""rng""], B + 2)\n mask_prob = jax.random.uniform(_rng_prob, shape=(B,), minval=self.mask_limit)\n per_sample_shape = vid_embed_BTNM.shape[1:-1]\n mask = jax.vmap(\n lambda rng, prob: jax.random.bernoulli(rng, prob, per_sample_shape),\n in_axes=(0, 0),\n )(jnp.asarray(_rngs_mask), mask_prob)\n mask = mask.at[:, 0].set(False)\n vid_embed_BTNM = jnp.where(\n jnp.expand_dims(mask, -1), self.mask_token.value, vid_embed_BTNM\n )\n\n # --- Apply noise augmentation ---\n vid_embed_BTNM, noise_level_embed_BT1M = self.apply_noise_augmentation(\n vid_embed_BTNM, rng\n )\n\n # --- Predict transition ---\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n<<<<<<< HEAD\n vid_embed_BTNp2M = jnp.concatenate(\n [padded_act_embed_BT1M, noise_level_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp2V = self.transformer(vid_embed_BTNp2M)\n logits_BTNV = logits_BTNp2V[:, :, 2:]\n=======\n vid_embed_BTNp1M = jnp.concatenate(\n [padded_act_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp1V = self.transformer(vid_embed_BTNp1M)\n logits_BTNV = logits_BTNp1V[:, :, 1:]\n>>>>>>> prepend-action-maskgit\n return logits_BTNV, mask\n\n\nclass DynamicsCausal(nnx.Module):\n """"""Causal dynamics model""""""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n max_noise_level: float,\n noise_buckets: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.max_noise_level = max_noise_level\n self.noise_buckets = noise_buckets\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n\n self.transformer = Transformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n 
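# --- Editor's aside (illustrative sketch, not part of the recording): noise-level bucketing ---
# apply_noise_augmentation, recorded just above, maps a continuous noise level sigma in
# [0, max_noise_level) to one of `noise_buckets` embedding indices (clipped to stay in range)
# and mixes the clean embedding with Gaussian noise as sqrt(1 - sigma) * x + sqrt(sigma) * eps.
# The standalone function below only reproduces that arithmetic; `bucket_and_mix` is a
# hypothetical name.
import jax
import jax.numpy as jnp

def bucket_and_mix(x: jax.Array, sigma: float, max_noise_level: float, noise_buckets: int, rng: jax.Array):
    # Discretize the noise level into a bucket index for the embedding lookup.
    bucket = jnp.clip(
        jnp.floor(sigma * noise_buckets / max_noise_level).astype(jnp.int32),
        0,
        noise_buckets - 1,
    )
    # Variance-preserving mix of signal and Gaussian noise.
    eps = jax.random.normal(rng, x.shape, dtype=x.dtype)
    noisy = jnp.sqrt(1.0 - sigma) * x + jnp.sqrt(sigma) * eps
    return noisy, bucket

# Example: sigma=0.3 with max_noise_level=1.0 and noise_buckets=10 lands in bucket 3.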
dtype=self.dtype,\n rngs=rngs,\n )\n self.noise_level_embed = nnx.Embed(\n self.noise_buckets, self.model_dim, rngs=rngs\n )\n\n def apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl,\n shape=(B,),\n minval=0.0,\n maxval=self.max_noise_level,\n dtype=self.dtype,\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n\n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = (\n jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n + jnp.sqrt(noise_level_B111) * noise_BTNM\n )\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n vid_embed_BTNM, noise_level_embed_BT1M = self.apply_noise_augmentation(\n vid_embed_BTNM, batch[""rng""]\n )\n vid_embed_BTNp2M = jnp.concatenate(\n [padded_act_embed_BT1M, noise_level_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp2V = self.transformer(vid_embed_BTNp2M)\n logits_BTNV = logits_BTNp2V[:, :, 1:-1]\n return logits_BTNV, jnp.ones_like(video_tokens_BTN)\n",python,tab +1228,2814082,"jasmine/models/dynamics.py",0,0,"",python,tab +1229,2824621,"jasmine/models/dynamics.py",4906,106," vid_embed_BTNp1M = jnp.concatenate(\n [padded_act_embed_BT1M, vid_embed_BTNM], axis=2\n",python,content +1230,2825650,"jasmine/models/dynamics.py",5020,103," logits_BTNp1V = self.transformer(vid_embed_BTNp1M)\n logits_BTNV = logits_BTNp1V[:, :, 1:]\n",python,content +1231,2827282,"jasmine/models/dynamics.py",0,0,"",python,tab +1232,2833565,"TERMINAL",0,0,"g",,terminal_output +1233,2833653,"TERMINAL",0,0,"i",,terminal_output +1234,2833705,"TERMINAL",0,0,"t",,terminal_output +1235,2833756,"TERMINAL",0,0," ",,terminal_output +1236,2833958,"TERMINAL",0,0,"s",,terminal_output +1237,2834044,"TERMINAL",0,0,"t",,terminal_output +1238,2834122,"TERMINAL",0,0,"a",,terminal_output +1239,2834288,"TERMINAL",0,0,"t",,terminal_output +1240,2834410,"TERMINAL",0,0,"u",,terminal_output +1241,2834626,"TERMINAL",0,0,"s",,terminal_output +1242,2835026,"TERMINAL",0,0,"\r\n[?2004l\rOn branch vizdoom-dataset\r\nYour branch is ahead of 'origin/vizdoom-dataset' by 1 commit.\r\n (use ""git push"" to publish your local commits)\r\n\r\nAll conflicts fixed but you are still merging.\r\n (use ""git commit"" to conclude merge)\r\n\r\nChanges to be committed:\r\n\tnew file: data/jasmine_data/atari/atari_utils.py\r\n\tnew file: data/jasmine_data/atari/generate_atari_dataset.py\r\n\tnew file: data/jasmine_data/atari/visualize_array_record.py\r\n\tnew file: 
data/jasmine_data/minatar/generate_minatar_breakout_dataset.py\r\n\tmodified: data/pyproject.toml\r\n\tmodified: jasmine/genie.py\r\n\tmodified: jasmine/models/dynamics.py\r\n\tmodified: jasmine/train_dynamics.py\r\n\tmodified: jasmine/train_lam.py\r\n\tmodified: jasmine/train_tokenizer.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\t checklist.md\r\n\tali-old-branch.diff\r\n\tdata/_vizdoom.ini\r\n\tdata/jasmine_data/ViZDoomPPO/_vizdoom.ini\r\n\tdata/jasmine_data/ViZDoomPPO/load_model_generate_dataset_fast.py\r\n\tdata/jasmine_data/ViZDoomPPO/logs/tensorboard/\r\n\tdata/jasmine_data/_vizdoom/\r\n\tdata/uv.lock\r\n\tdataset_duplicates.ipynb\r\n\tdiff.diff\r\n\tdiff2.diff\r\n\tgifs/\r\n\tinput_pipeline/\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\tmessage.md\r\n\toverfit_dir.zip\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\ttest.py\r\n\tutils/\r\n\tuv.lock\r\n\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1243,2841052,"jasmine/genie.py",0,0,"",python,tab +1244,2864496,"jasmine/genie.py",12889,0,"",python,selection_mouse +1245,2864655,"jasmine/genie.py",12880,15,"act_embed_BSm1M",python,selection_mouse +1246,2867838,"jasmine/genie.py",13105,0,"",python,selection_mouse +1247,2867972,"jasmine/genie.py",13093,14,"act_embed_BS1M",python,selection_mouse +1248,2868495,"jasmine/genie.py",13110,0,"",python,selection_mouse +1249,2868641,"jasmine/genie.py",13109,14,"vid_embed_BSNM",python,selection_mouse +1250,2874587,"TERMINAL",0,0,"git status",,terminal_output +1251,2874776,"TERMINAL",0,0,"merge prepend-action-maskgit",,terminal_output +1252,2874936,"TERMINAL",0,0,"branch",,terminal_output +1253,2875285,"TERMINAL",0,0,"merge action-prepend-maskgit",,terminal_output +1254,2875827,"TERMINAL",0,0,"commit -am ""modified generate dataset script""",,terminal_output +1255,2877020,"TERMINAL",0,0,"diff jasmine/train_tokenizer.py",,terminal_output +1256,2877480,"TERMINAL",0,0,"status",,terminal_output +1257,2877944,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",,terminal_output +1258,2879226,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=05:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\r\n#SBATCH --job-name=train_tokenizer_default_single_gpu_60x80\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/val\r\n\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\nsrun python jasmine/train_tokenizer.py \\r\n --save_ckpt \\r\n --image_height=60 \\r\n --image_width=80 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=48 \\r\n 
--init_lr=0 \\r\n --log \\r\n --name=doom-tokenizer-default-60x80-$slurm_job_id \\r\n --tags tokenizer doom default 60x80 \\r\n --entity instant-uv \\r\n --project jafar \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val\r\n",,terminal_output +1259,2879357,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=310924\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1759750850\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1759754450\r\nSLURM_PMI2_SRUN_PORT=42403\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3546199\r\nSLURM_PTY_PORT=35939\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=31\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=103\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=40421\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3546199\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=40421\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +1260,2879483,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +1261,2889864,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +1262,2890653,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.22.0\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20251006_142528-kfiygybc\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run doom-tokenizer-default-60x80-3546199\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/kfiygybc\r\n",,terminal_output +1263,2892378,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['decoder', 'encoder', 'vq']\r\nParameter counts:\r\n{'decoder': 17228096, 'encoder': 17228832, 'vq': 32768, 'total': 34489696}\r\n",,terminal_output +1264,3083400,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=1] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=1] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1265,3210124,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=2] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=2] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1266,3332662,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=3] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=3] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1267,3453544,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=4] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=4] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1268,3454971,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/interactive/3546199/004000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/interactive/3546199/004000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/interactive/3546199/001000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/interactive/3546199/001000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/interactive/3546199/002000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/interactive/3546199/002000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/interactive/3546199/003000 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/interactive/3546199/003000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +1269,3468427,"jasmine/train_tokenizer.py",0,0,"import os\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.90"")\n\nfrom dataclasses import dataclass, field\nfrom typing import cast, Optional\n\nimport einops\nimport itertools\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n init_lr: float = 0.0\n max_lr: float = 3e-4\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 30_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 1000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[TokenizerVQVAE, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n return (\n TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n ),\n rng,\n )\n\n\ndef build_optimizer(model: TokenizerVQVAE, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(model, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> 
tuple[Mesh, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n return mesh, replicated_sharding, videos_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_checkpoint_if_needed(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int, nnx.ModelAndOptimizer, grain.DataLoaderIterator, grain.DataLoaderIterator\n]:\n step = 0\n if 
checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(restore_step, args=restore_args)\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = restore_step or 0\n print(f""Restored dataloader and model state from step {step}"")\n return step, optimizer, train_iterator, val_iterator\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n tokenizer, rng = build_model(args, rng)\n\n _, params, _ = nnx.split(tokenizer, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(tokenizer, args)\n del tokenizer\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding = build_mesh_and_sharding(num_devices)\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator = restore_checkpoint_if_needed(\n args, checkpoint_manager, optimizer, train_iterator, val_iterator\n )\n\n # --- Define loss and train step (close over args) ---\n def tokenizer_loss_fn(\n model: TokenizerVQVAE, inputs: dict, training: bool = False\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training=training)\n outputs[""recon""] 
= outputs[""recon""].astype(jnp.float32)\n mse = jnp.square(gt - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n gt_clipped = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_clipped, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_clipped, recon)).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(\n tokenizer: TokenizerVQVAE, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs, training=False)\n return loss, recon, metrics\n\n def calculate_validation_metrics(val_dataloader, tokenizer):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n batch = None\n recon = None\n for batch in val_dataloader:\n loss, recon, metrics = val_step(tokenizer, batch)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}""\n )\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = val_loss\n return val_metrics, batch, recon\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n 
val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +1270,3474462,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3546199.2 task 0: running\r\n",,terminal_output +1271,3474817,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3546199.2\r\nsrun: forcing job termination\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-3:\r\nTraceback (most recent call last):\r\nTraceback (most recent call last):\r\nTraceback (most recent call last):\r\nTraceback (most recent call last):\r\nTraceback (most recent call last):\r\nTraceback (most recent call last):\r\nTraceback (most recent call last):\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: 
disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\nTraceback (most recent call last):\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nKeyboardInterrupt\r\nKeyboardInterrupt\r\nKeyboardInterrupt\r\nKeyboardInterrupt\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, 
in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\nKeyboardInterrupt\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nKeyboardInterrupt\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\n File ""/usr/lib64/python3.12/multiprocessing/queues.py"", line 89, in put\r\n if not self._sem.acquire(block, timeout):\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nKeyboardInterrupt\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 314, in _bootstrap\r\n self.run()\r\n File ""/usr/lib64/python3.12/multiprocessing/process.py"", line 108, in run\r\n self._target(*self._args, **self._kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/grain_pool.py"", line 261, in _worker_loop\r\n if not multiprocessing_common.add_element_to_queue( # pytype: disable=wrong-arg-types\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/grain/_src/python/multiprocessing_common.py"", line 54, in add_element_to_queue\r\n elements_queue.put(element, timeout=_QUEUE_WAIT_TIMEOUT_SECONDS)\r\nslurmstepd: error: *** STEP 3546199.2 ON hkn0401 CANCELLED AT 2025-10-06T14:35:13 ***\r\n",,terminal_output +1272,3474975,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3546199.2\r\nsrun: job abort in progress\r\n",,terminal_output +1273,3475150,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1274,3475344,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ 
",,terminal_output +1275,3476494,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",,terminal_output +1276,3477307,"TERMINAL",0,0,"",,terminal_output +1277,3477791,"TERMINAL",0,0,"",,terminal_output +1278,3477820,"TERMINAL",0,0,"\r",,terminal_output +1279,3477977,"TERMINAL",0,0,"",,terminal_output +1280,3478032,"TERMINAL",0,0,"",,terminal_output +1281,3478082,"TERMINAL",0,0,"",,terminal_output +1282,3478920,"TERMINAL",0,0,"h slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",,terminal_output +1283,3479044,"TERMINAL",0,0," slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",,terminal_output +1284,3479541,"TERMINAL",0,0,"s slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",,terminal_output +1285,3479720,"TERMINAL",0,0,"b slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",,terminal_output +1286,3479771,"TERMINAL",0,0,"a slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",,terminal_output +1287,3479923,"TERMINAL",0,0,"t slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",,terminal_output +1288,3480002,"TERMINAL",0,0,"c slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",,terminal_output +1289,3480271,"TERMINAL",0,0,"h slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",,terminal_output +1290,3481709,"TERMINAL",0,0,"\r\n\r\r\n[?2004l\rSubmitted batch job 3546364\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1291,3485228,"TERMINAL",0,0,"bash",,terminal_focus +1292,3487648,"TERMINAL",0,0,"\r(jasmine) [tum_cte0515@hkn0401 jasmine]$ \r(jasmine) [tum_cte0515@hkn0401 jasmine]$ \r(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1293,3487793,"TERMINAL",0,0,"\r(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1294,3508555,"slurm/jobs/mihir/horeka/doom/train_tokenizer_default_1gpu.sh",0,0,"",shellscript,tab +1295,3510073,"slurm/jobs/mihir/horeka/doom/train_tokenizer_default_1node.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=05:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_default_single_node\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/val\n\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_tokenizer.py \\n --save_ckpt \\n --image_height=240 \\n --image_width=320 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=32 \\n --init_lr=0 \\n --log \\n --name=doom-tokenizer-default-1node-$slurm_job_id \\n --tags tokenizer doom default \\n --entity instant-uv \\n --project jafar \\n --data_dir $array_records_dir_train \\n 
--val_data_dir $array_records_dir_val",shellscript,tab +1296,3516691,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=05:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_default_single_node\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/val\n\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_tokenizer.py \\n --save_ckpt \\n --image_height=240 \\n --image_width=320 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=32 \\n --init_lr=0 \\n --log \\n --name=doom-tokenizer-default-1node-$slurm_job_id \\n --tags tokenizer doom default \\n --entity instant-uv \\n --project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val",shellscript,tab +1297,3521233,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",0,0,"",shellscript,tab +1298,3523443,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",84,0,"",shellscript,selection_mouse +1299,3524385,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",83,1,"",shellscript,content +1300,3524533,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",82,1,"",shellscript,content +1301,3525867,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",82,0,"4",shellscript,content +1302,3525868,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",83,0,"",shellscript,selection_keyboard +1303,3525940,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",83,0,"8",shellscript,content +1304,3525941,"slurm/jobs/mihir/horeka/doom/resolution80x60/train_tokenizer_default_1gpu.sh",84,0,"",shellscript,selection_keyboard +1305,3530513,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",0,0,"",shellscript,tab +1306,3531934,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",84,0,"",shellscript,selection_mouse +1307,3533077,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",83,1,"",shellscript,content +1308,3533195,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",82,1,"",shellscript,content +1309,3533471,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",82,0,"4",shellscript,content +1310,3533472,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",83,0,"",shellscript,selection_keyboard +1311,3533618,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",83,0,"8",shellscript,content 
+1312,3533619,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",84,0,"",shellscript,selection_keyboard +1313,3533923,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",83,0,"",shellscript,selection_command +1314,3534913,"slurm/jobs/mihir/horeka/doom/train_tokenizer_default_1node.sh",0,0,"",shellscript,tab +1315,3536263,"TERMINAL",0,0,"srun",,terminal_focus +1316,3540211,"TERMINAL",0,0,"",,terminal_output +1317,3540428,"TERMINAL",0,0,"c",,terminal_output +1318,3540527,"TERMINAL",0,0,"a",,terminal_output +1319,3540653,"TERMINAL",0,0,"n",,terminal_output +1320,3540704,"TERMINAL",0,0,"c",,terminal_output +1321,3540851,"TERMINAL",0,0,"e",,terminal_output +1322,3540907,"TERMINAL",0,0,"l",,terminal_output +1323,3541054,"TERMINAL",0,0," ",,terminal_output +1324,3541354,"TERMINAL",0,0,"3546364",,terminal_output +1325,3541617,"TERMINAL",0,0,"3546364\r\n[?2004l\rbash: cancel: command not found...\r\n",,terminal_output +1326,3542368,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1327,3543257,"TERMINAL",0,0,"cancel 3546364",,terminal_output +1328,3543461,"TERMINAL",0,0,"",,terminal_output +1329,3543669,"TERMINAL",0,0,"",,terminal_output +1330,3544013,"TERMINAL",0,0,"[1@s",,terminal_output +1331,3544152,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1332,3554748,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",0,0,"",shellscript,tab +1333,3557931,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1097,0,"",shellscript,selection_mouse +1334,3559103,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1096,1,"",shellscript,content +1335,3559237,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1095,1,"",shellscript,content +1336,3559580,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1094,1,"",shellscript,content +1337,3561043,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1094,0,"1",shellscript,content +1338,3561044,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1095,0,"",shellscript,selection_keyboard +1339,3562511,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1095,0,"2",shellscript,content +1340,3562513,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1096,0,"",shellscript,selection_keyboard +1341,3562542,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1096,0,"0",shellscript,content +1342,3562542,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1097,0,"",shellscript,selection_keyboard +1343,3562957,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1074,0,"",shellscript,selection_command +1344,3563385,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1092,0,"",shellscript,selection_command +1345,3563566,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1117,0,"",shellscript,selection_command +1346,3564508,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1120,1,"",shellscript,content 
+1347,3564660,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1119,1,"",shellscript,content +1348,3565039,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1118,1,"",shellscript,content +1349,3565601,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1118,0,"1",shellscript,content +1350,3565602,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1119,0,"",shellscript,selection_keyboard +1351,3566328,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1119,0,"6",shellscript,content +1352,3566328,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1120,0,"",shellscript,selection_keyboard +1353,3566385,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1120,0,"0",shellscript,content +1354,3566385,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1121,0,"",shellscript,selection_keyboard +1355,3567919,"TERMINAL",0,0,"bash",,terminal_focus +1356,3577703,"TERMINAL",0,0,"cd doom_episodes_10m_120x160_fixed/",,terminal_command +1357,3578785,"TERMINAL",0,0,"pwd",,terminal_command +1358,3581958,"TERMINAL",0,0,"ls",,terminal_command +1359,3585281,"TERMINAL",0,0,"ls train/",,terminal_command +1360,3585339,"TERMINAL",0,0,"]633;C",,terminal_output +1361,3585945,"TERMINAL",0,0,"data_0000.array_record data_0175.array_record data_0350.array_record data_0525.array_record\r\ndata_0001.array_record data_0176.array_record data_0351.array_record data_0526.array_record\r\ndata_0002.array_record data_0177.array_record data_0352.array_record data_0527.array_record\r\ndata_0003.array_record data_0178.array_record data_0353.array_record data_0528.array_record\r\ndata_0004.array_record data_0179.array_record data_0354.array_record data_0529.array_record\r\ndata_0005.array_record data_0180.array_record data_0355.array_record data_0530.array_record\r\ndata_0006.array_record data_0181.array_record data_0356.array_record data_0531.array_record\r\ndata_0007.array_record data_0182.array_record data_0357.array_record data_0532.array_record\r\ndata_0008.array_record data_0183.array_record data_0358.array_record data_0533.array_record\r\ndata_0009.array_record data_0184.array_record data_0359.array_record data_0534.array_record\r\ndata_0010.array_record data_0185.array_record data_0360.array_record data_0535.array_record\r\ndata_0011.array_record data_0186.array_record data_0361.array_record data_0536.array_record\r\ndata_0012.array_record data_0187.array_record data_0362.array_record data_0537.array_record\r\ndata_0013.array_record data_0188.array_record data_0363.array_record data_0538.array_record\r\ndata_0014.array_record data_0189.array_record data_0364.array_record data_0539.array_record\r\ndata_0015.array_record data_0190.array_record data_0365.array_record data_0540.array_record\r\ndata_0016.array_record data_0191.array_record data_0366.array_record data_0541.array_record\r\ndata_0017.array_record data_0192.array_record data_0367.array_record data_0542.array_record\r\ndata_0018.array_record data_0193.array_record data_0368.array_record data_0543.array_record\r\ndata_0019.array_record data_0194.array_record data_0369.array_record data_0544.array_record\r\ndata_0020.array_record data_0195.array_record data_0370.array_record data_0545.array_record\r\ndata_0021.array_record data_0196.array_record data_0371.array_record data_0546.array_record\r\ndata_0022.array_record 
data_0197.array_record data_0372.array_record data_0547.array_record\r\ndata_0023.array_record data_0198.array_record data_0373.array_record data_0548.array_record\r\ndata_0024.array_record data_0199.array_record data_0374.array_record data_0549.array_record\r\ndata_0025.array_record data_0200.array_record data_0375.array_record data_0550.array_record\r\ndata_0026.array_record data_0201.array_record data_0376.array_record data_0551.array_record\r\ndata_0027.array_record data_0202.array_record data_0377.array_record data_0552.array_record\r\ndata_0028.array_record data_0203.array_record data_0378.array_record data_0553.array_record\r\ndata_0029.array_record data_0204.array_record data_0379.array_record data_0554.array_record\r\ndata_0030.array_record data_0205.array_record data_0380.array_record data_0555.array_record\r\ndata_0031.array_record data_0206.array_record data_0381.array_record data_0556.array_record\r\ndata_0032.array_record data_0207.array_record data_0382.array_record data_0557.array_record\r\ndata_0033.array_record data_0208.array_record data_0383.array_record data_0558.array_record\r\ndata_0034.array_record data_0209.array_record data_0384.array_record data_0559.array_record\r\ndata_0035.array_record data_0210.array_record data_0385.array_record data_0560.array_record\r\ndata_0036.array_record data_0211.array_record data_0386.array_record data_0561.array_record\r\ndata_0037.array_record data_0212.array_record data_0387.array_record data_0562.array_record\r\ndata_0038.array_record data_0213.array_record data_0388.array_record data_0563.array_record\r\ndata_0039.array_record data_0214.array_record data_0389.array_record data_0564.array_record\r\ndata_0040.array_record data_0215.array_record data_0390.array_record data_0565.array_record\r\ndata_0041.array_record data_0216.array_record data_0391.array_record data_0566.array_record\r\ndata_0042.array_record data_0217.array_record data_0392.array_record data_0567.array_record\r\ndata_0043.array_record data_0218.array_record data_0393.array_record data_0568.array_record\r\ndata_0044.array_record data_0219.array_record data_0394.array_record data_0569.array_record\r\ndata_0045.array_record data_0220.array_record data_0395.array_record data_0570.array_record\r\ndata_0046.array_record data_0221.array_record data_0396.array_record data_0571.array_record\r\ndata_0047.array_record data_0222.array_record data_0397.array_record data_0572.array_record\r\ndata_0048.array_record data_0223.array_record data_0398.array_record data_0573.array_record\r\ndata_0049.array_record data_0224.array_record data_0399.array_record data_0574.array_record\r\ndata_0050.array_record data_0225.array_record data_0400.array_record data_0575.array_record\r\ndata_0051.array_record data_0226.array_record data_0401.array_record data_0576.array_record\r\ndata_0052.array_record data_0227.array_record data_0402.array_record data_0577.array_record\r\ndata_0053.array_record data_0228.array_record data_0403.array_record data_0578.array_record\r\ndata_0054.array_record data_0229.array_record data_0404.array_record data_0579.array_record\r\ndata_0055.array_record data_0230.array_record data_0405.array_record data_0580.array_record\r\ndata_0056.array_record data_0231.array_record data_0406.array_record data_0581.array_record\r\ndata_0057.array_record data_0232.array_record data_0407.array_record data_0582.array_record\r\ndata_0058.array_record data_0233.array_record data_0408.array_record data_0583.array_record\r\ndata_0059.array_record data_0234.array_record 
data_0409.array_record data_0584.array_record\r\ndata_0060.array_record data_0235.array_record data_0410.array_record data_0585.array_record\r\ndata_0061.array_record data_0236.array_record data_0411.array_record data_0586.array_record\r\ndata_0062.array_record data_0237.array_record data_0412.array_record data_0587.array_record\r\ndata_0063.array_record data_0238.array_record data_0413.array_record data_0588.array_record\r\ndata_0064.array_record data_0239.array_record data_0414.array_record data_0589.array_record\r\ndata_0065.array_record data_0240.array_record data_0415.array_record data_0590.array_record\r\ndata_0066.array_record data_0241.array_record data_0416.array_record data_0591.array_record\r\ndata_0067.array_record data_0242.array_record data_0417.array_record data_0592.array_record\r\ndata_0068.array_record data_0243.array_record data_0418.array_record data_0593.array_record\r\ndata_0069.array_record data_0244.array_record data_0419.array_record data_0594.array_record\r\ndata_0070.array_record data_0245.array_record data_0420.array_record data_0595.array_record\r\ndata_0071.array_record data_0246.array_record data_0421.array_record data_0596.array_record\r\ndata_0072.array_record data_0247.array_record data_0422.array_record data_0597.array_record\r\ndata_0073.array_record data_0248.array_record data_0423.array_record data_0598.array_record\r\ndata_0074.array_record data_0249.array_record data_0424.array_record data_0599.array_record\r\ndata_0075.array_record data_0250.array_record data_0425.array_record data_0600.array_record\r\ndata_0076.array_record data_0251.array_record data_0426.array_record data_0601.array_record\r\ndata_0077.array_record data_0252.array_record data_0427.array_record data_0602.array_record\r\ndata_0078.array_record data_0253.array_record data_0428.array_record data_0603.array_record\r\ndata_0079.array_record data_0254.array_record data_0429.array_record data_0604.array_record\r\ndata_0080.array_record data_0255.array_record data_0430.array_record data_0605.array_record\r\ndata_0081.array_record data_0256.array_record data_0431.array_record data_0606.array_record\r\ndata_0082.array_record data_0257.array_record data_0432.array_record data_0607.array_record\r\ndata_0083.array_record data_0258.array_record data_0433.array_record data_0608.array_record\r\ndata_0084.array_record data_0259.array_record data_0434.array_record data_0609.array_record\r\ndata_0085.array_record data_0260.array_record data_0435.array_record data_0610.array_record\r\ndata_0086.array_record data_0261.array_record data_0436.array_record data_0611.array_record\r\ndata_0087.array_record data_0262.array_record data_0437.array_record data_0612.array_record\r\ndata_0088.array_record data_0263.array_record data_0438.array_record data_0613.array_record\r\ndata_0089.array_record data_0264.array_record data_0439.array_record data_0614.array_record\r\ndata_0090.array_record data_0265.array_record data_0440.array_record data_0615.array_record\r\ndata_0091.array_record data_0266.array_record data_0441.array_record data_0616.array_record\r\ndata_0092.array_record data_0267.array_record data_0442.array_record data_0617.array_record\r\ndata_0093.array_record data_0268.array_record data_0443.array_record data_0618.array_record\r\ndata_0094.array_record data_0269.array_record data_0444.array_record data_0619.array_record\r\ndata_0095.array_record data_0270.array_record data_0445.array_record data_0620.array_record\r\ndata_0096.array_record data_0271.array_record data_0446.array_record 
data_0621.array_record\r\ndata_0097.array_record data_0272.array_record data_0447.array_record data_0622.array_record\r\ndata_0098.array_record data_0273.array_record data_0448.array_record data_0623.array_record\r\ndata_0099.array_record data_0274.array_record data_0449.array_record data_0624.array_record\r\ndata_0100.array_record data_0275.array_record data_0450.array_record data_0625.array_record\r\ndata_0101.array_record data_0276.array_record data_0451.array_record data_0626.array_record\r\ndata_0102.array_record data_0277.array_record data_0452.array_record data_0627.array_record\r\ndata_0103.array_record data_0278.array_record data_0453.array_record data_0628.array_record\r\ndata_0104.array_record data_0279.array_record data_0454.array_record data_0629.array_record\r\ndata_0105.array_record data_0280.array_record data_0455.array_record data_0630.array_record\r\ndata_0106.array_record data_0281.array_record data_0456.array_record data_0631.array_record\r\ndata_0107.array_record data_0282.array_record data_0457.array_record data_0632.array_record\r\ndata_0108.array_record data_0283.array_record data_0458.array_record data_0633.array_record\r\ndata_0109.array_record data_0284.array_record data_0459.array_record data_0634.array_record\r\ndata_0110.array_record data_0285.array_record data_0460.array_record data_0635.array_record\r\ndata_0111.array_record data_0286.array_record data_0461.array_record data_0636.array_record\r\ndata_0112.array_record data_0287.array_record data_0462.array_record data_0637.array_record\r\ndata_0113.array_record data_0288.array_record data_0463.array_record data_0638.array_record\r\ndata_0114.array_record data_0289.array_record data_0464.array_record data_0639.array_record\r\ndata_0115.array_record data_0290.array_record data_0465.array_record data_0640.array_record\r\ndata_0116.array_record data_0291.array_record data_0466.array_record data_0641.array_record\r\ndata_0117.array_record data_0292.array_record data_0467.array_record data_0642.array_record\r\ndata_0118.array_record data_0293.array_record data_0468.array_record data_0643.array_record\r\ndata_0119.array_record data_0294.array_record data_0469.array_record data_0644.array_record\r\ndata_0120.array_record data_0295.array_record data_0470.array_record data_0645.array_record\r\ndata_0121.array_record data_0296.array_record data_0471.array_record data_0646.array_record\r\ndata_0122.array_record data_0297.array_record data_0472.array_record data_0647.array_record\r\ndata_0123.array_record data_0298.array_record data_0473.array_record data_0648.array_record\r\ndata_0124.array_record data_0299.array_record data_0474.array_record data_0649.array_record\r\ndata_0125.array_record data_0300.array_record data_0475.array_record data_0650.array_record\r\ndata_0126.array_record data_0301.array_record data_0476.array_record data_0651.array_record\r\ndata_0127.array_record data_0302.array_record data_0477.array_record data_0652.array_record\r\ndata_0128.array_record data_0303.array_record data_0478.array_record data_0653.array_record\r\ndata_0129.array_record data_0304.array_record data_0479.array_record data_0654.array_record\r\ndata_0130.array_record data_0305.array_record data_0480.array_record data_0655.array_record\r\ndata_0131.array_record data_0306.array_record data_0481.array_record data_0656.array_record\r\ndata_0132.array_record data_0307.array_record data_0482.array_record data_0657.array_record\r\ndata_0133.array_record data_0308.array_record data_0483.array_record 
data_0658.array_record\r\ndata_0134.array_record data_0309.array_record data_0484.array_record data_0659.array_record\r\ndata_0135.array_record data_0310.array_record data_0485.array_record data_0660.array_record\r\ndata_0136.array_record data_0311.array_record data_0486.array_record data_0661.array_record\r\ndata_0137.array_record data_0312.array_record data_0487.array_record data_0662.array_record\r\ndata_0138.array_record data_0313.array_record data_0488.array_record data_0663.array_record\r\ndata_0139.array_record data_0314.array_record data_0489.array_record data_0664.array_record\r\ndata_0140.array_record data_0315.array_record data_0490.array_record data_0665.array_record\r\ndata_0141.array_record data_0316.array_record data_0491.array_record data_0666.array_record\r\ndata_0142.array_record data_0317.array_record data_0492.array_record data_0667.array_record\r\ndata_0143.array_record data_0318.array_record data_0493.array_record data_0668.array_record\r\ndata_0144.array_record data_0319.array_record data_0494.array_record data_0669.array_record\r\ndata_0145.array_record data_0320.array_record data_0495.array_record data_0670.array_record\r\ndata_0146.array_record data_0321.array_record data_0496.array_record data_0671.array_record\r\ndata_0147.array_record data_0322.array_record data_0497.array_record data_0672.array_record\r\ndata_0148.array_record data_0323.array_record data_0498.array_record data_0673.array_record\r\ndata_0149.array_record data_0324.array_record data_0499.array_record data_0674.array_record\r\ndata_0150.array_record data_0325.array_record data_0500.array_record data_0675.array_record\r\ndata_0151.array_record data_0326.array_record data_0501.array_record data_0676.array_record\r\ndata_0152.array_record data_0327.array_record data_0502.array_record data_0677.array_record\r\ndata_0153.array_record data_0328.array_record data_0503.array_record data_0678.array_record\r\ndata_0154.array_record data_0329.array_record data_0504.array_record data_0679.array_record\r\ndata_0155.array_record data_0330.array_record data_0505.array_record data_0680.array_record\r\ndata_0156.array_record data_0331.array_record data_0506.array_record data_0681.array_record\r\ndata_0157.array_record data_0332.array_record data_0507.array_record data_0682.array_record\r\ndata_0158.array_record data_0333.array_record data_0508.array_record data_0683.array_record\r\ndata_0159.array_record data_0334.array_record data_0509.array_record data_0684.array_record\r\ndata_0160.array_record data_0335.array_record data_0510.array_record data_0685.array_record\r\ndata_0161.array_record data_0336.array_record data_0511.array_record data_0686.array_record\r\ndata_0162.array_record data_0337.array_record data_0512.array_record data_0687.array_record\r\ndata_0163.array_record data_0338.array_record data_0513.array_record data_0688.array_record\r\ndata_0164.array_record data_0339.array_record data_0514.array_record data_0689.array_record\r\ndata_0165.array_record data_0340.array_record data_0515.array_record data_0690.array_record\r\ndata_0166.array_record data_0341.array_record data_0516.array_record data_0691.array_record\r\ndata_0167.array_record data_0342.array_record data_0517.array_record data_0692.array_record\r\ndata_0168.array_record data_0343.array_record data_0518.array_record data_0693.array_record\r\ndata_0169.array_record data_0344.array_record data_0519.array_record data_0694.array_record\r\ndata_0170.array_record data_0345.array_record data_0520.array_record 
data_0695.array_record\r\ndata_0171.array_record data_0346.array_record data_0521.array_record data_0696.array_record\r\ndata_0172.array_record data_0347.array_record data_0522.array_record data_0697.array_record\r\ndata_0173.array_record data_0348.array_record data_0523.array_record data_0698.array_record\r\ndata_0174.array_record data_0349.array_record data_0524.array_record data_0699.array_record\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed",,terminal_output +1362,3588722,"TERMINAL",0,0,"ls val/",,terminal_command +1363,3591911,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,0,"",shellscript,selection_mouse +1364,3591988,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,1,"/",shellscript,selection_mouse +1365,3592011,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,6,"/hkfs/",shellscript,selection_mouse +1366,3592020,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,10,"/hkfs/work",shellscript,selection_mouse +1367,3592041,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",571,25,"\narray_records_dir_train=",shellscript,selection_mouse +1368,3592070,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,21,"/hkfs/work/workspace/",shellscript,selection_mouse +1369,3592100,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,24,"/hkfs/work/workspace/scr",shellscript,selection_mouse +1370,3592130,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,27,"/hkfs/work/workspace/scratc",shellscript,selection_mouse +1371,3592130,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,31,"/hkfs/work/workspace/scratch/tu",shellscript,selection_mouse +1372,3592162,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,33,"/hkfs/work/workspace/scratch/tum_",shellscript,selection_mouse +1373,3592162,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,36,"/hkfs/work/workspace/scratch/tum_ind",shellscript,selection_mouse +1374,3592192,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,40,"/hkfs/work/workspace/scratch/tum_ind3695",shellscript,selection_mouse +1375,3592193,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,157,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_",shellscript,selection_mouse +1376,3592219,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,162,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_sh",shellscript,selection_mouse +1377,3592247,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,167,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse 
+1378,3592248,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,170,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/dat",shellscript,selection_mouse +1379,3592274,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,173,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_d",shellscript,selection_mouse +1380,3592275,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,198,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/val\n",shellscript,selection_mouse +1381,3592476,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,197,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/val",shellscript,selection_mouse +1382,3592561,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,88,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/train",shellscript,selection_mouse +1383,3592915,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,87,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/trai",shellscript,selection_mouse +1384,3592927,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,86,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/tra",shellscript,selection_mouse +1385,3592941,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,85,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/tr",shellscript,selection_mouse +1386,3593084,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,84,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/t",shellscript,selection_mouse +1387,3593261,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,83,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m/",shellscript,selection_mouse +1388,3593384,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,82,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m",shellscript,selection_mouse +1389,3594114,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,82,"",shellscript,content +1390,3594312,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed",shellscript,content +1391,3596381,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",804,0,"",shellscript,selection_mouse +1392,3596506,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",803,1,"m",shellscript,selection_mouse 
+1393,3596525,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",801,3,"_1m",shellscript,selection_mouse +1394,3596539,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",799,5,"es_1m",shellscript,selection_mouse +1395,3596571,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",798,6,"des_1m",shellscript,selection_mouse +1396,3596599,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",804,5,"/val\n",shellscript,selection_mouse +1397,3596724,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",739,65,"ace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m",shellscript,selection_mouse +1398,3596725,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",737,67,"space/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m",shellscript,selection_mouse +1399,3596736,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",733,71,"workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m",shellscript,selection_mouse +1400,3596756,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",731,73,"k/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m",shellscript,selection_mouse +1401,3596786,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",729,75,"ork/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m",shellscript,selection_mouse +1402,3596787,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",728,76,"work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m",shellscript,selection_mouse +1403,3596827,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",597,207,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m",shellscript,selection_mouse +1404,3596854,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",596,208,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m",shellscript,selection_mouse +1405,3596855,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",595,209,"=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m",shellscript,selection_mouse +1406,3596931,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",594,210,"n=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m",shellscript,selection_mouse +1407,3596943,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",593,211,"in=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m",shellscript,selection_mouse 
+1408,3597152,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",594,210,"n=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m",shellscript,selection_mouse +1409,3597513,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",722,82,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m",shellscript,selection_mouse +1410,3598254,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",722,82,"",shellscript,content +1411,3598429,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",722,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed",shellscript,content +1412,3602779,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",450,0,"",shellscript,selection_mouse +1413,3606112,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_default_single_gpu_60x80\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/val\n\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_tokenizer.py \\n --save_ckpt \\n --image_height=60 \\n --image_width=80 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --init_lr=0 \\n --log \\n --name=doom-tokenizer-default-60x80-$slurm_job_id \\n --tags tokenizer doom default 60x80 \\n --entity instant-uv \\n --project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val\n",shellscript,tab +1414,3612645,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",0,0,"",shellscript,tab +1415,3613668,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",452,0,"",shellscript,selection_mouse +1416,3614044,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",451,0,"",shellscript,selection_mouse +1417,3615705,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",0,0,"",shellscript,tab +1418,3616204,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",0,0,"",shellscript,tab +1419,3617181,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",339,0,"",shellscript,selection_mouse +1420,3618084,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",0,0,"",shellscript,tab 
+1421,3634053,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",0,0,"",shellscript,tab +1422,3634977,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",452,0,"",shellscript,selection_mouse +1423,3635608,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",451,0,"",shellscript,selection_mouse +1424,3638208,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",0,0,"",shellscript,tab +1425,3640262,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",0,0,"",shellscript,tab +1426,3641980,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",451,0,"_",shellscript,content +1427,3641981,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",452,0,"",shellscript,selection_keyboard +1428,3642874,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",452,0,"1",shellscript,content +1429,3642874,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",453,0,"",shellscript,selection_keyboard +1430,3644406,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",453,0,"6",shellscript,content +1431,3644408,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",454,0,"",shellscript,selection_keyboard +1432,3644483,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",454,0,"0",shellscript,content +1433,3644484,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",455,0,"",shellscript,selection_keyboard +1434,3644972,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",454,1,"",shellscript,content +1435,3645144,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",453,1,"",shellscript,content +1436,3645284,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",453,0,"2",shellscript,content +1437,3645285,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",454,0,"",shellscript,selection_keyboard +1438,3645369,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",454,0,"0",shellscript,content +1439,3645369,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",455,0,"",shellscript,selection_keyboard +1440,3646722,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",455,0,"x",shellscript,content +1441,3646723,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",456,0,"",shellscript,selection_keyboard +1442,3647064,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",456,0,"1",shellscript,content +1443,3647065,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",457,0,"",shellscript,selection_keyboard +1444,3647469,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",457,0,"6",shellscript,content +1445,3647470,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",458,0,"",shellscript,selection_keyboard +1446,3647533,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",458,0,"0",shellscript,content +1447,3647534,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",459,0,"",shellscript,selection_keyboard 
+1448,3648343,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",522,0,"",shellscript,selection_mouse +1449,3653429,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1213,0,"",shellscript,selection_mouse +1450,3654193,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1214,0,"",shellscript,selection_command +1451,3654398,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1213,1,"",shellscript,content +1452,3654513,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1212,1,"",shellscript,content +1453,3655739,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1212,0,"4",shellscript,content +1454,3655740,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1213,0,"",shellscript,selection_keyboard +1455,3655917,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1213,0,"8",shellscript,content +1456,3655918,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1214,0,"",shellscript,selection_keyboard +1457,3664416,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",0,0,"",shellscript,tab +1458,3669262,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",224,1,"6",shellscript,selection_command +1459,3669402,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",451,2,"60",shellscript,selection_command +1460,3669783,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",451,3,"60x",shellscript,selection_command +1461,3671248,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",451,4,"60x8",shellscript,selection_command +1462,3671300,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",451,5,"60x80",shellscript,selection_command +1463,3673872,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",685,5,"60x80",shellscript,selection_command +1464,3674565,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",809,5,"60x80",shellscript,selection_command +1465,3674868,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",1272,5,"60x80",shellscript,selection_command +1466,3675606,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",1328,5,"60x80",shellscript,selection_command +1467,3677247,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",0,0,"",shellscript,tab +1468,3679580,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",37,1,"1",shellscript,selection_command +1469,3679709,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",452,2,"12",shellscript,selection_command +1470,3679830,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",452,3,"120",shellscript,selection_command +1471,3680242,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",452,4,"120x",shellscript,selection_command +1472,3681032,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",452,5,"120x1",shellscript,selection_command +1473,3682500,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",452,6,"120x16",shellscript,selection_command 
+1474,3682556,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",452,7,"120x160",shellscript,selection_command +1475,3684386,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",688,7,"120x160",shellscript,selection_command +1476,3684789,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",814,7,"120x160",shellscript,selection_command +1477,3685423,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",452,7,"120x160",shellscript,selection_command +1478,3688187,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1234,0,"",shellscript,selection_mouse +1479,3688190,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1233,0,"",shellscript,selection_command +1480,3688517,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1212,0,"",shellscript,selection_mouse +1481,3689517,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1280,0,"",shellscript,selection_mouse +1482,3690152,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1273,0,"",shellscript,selection_mouse +1483,3690640,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1278,0,"",shellscript,selection_mouse +1484,3690979,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1279,0,"",shellscript,selection_mouse +1485,3692218,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",0,0,"",shellscript,tab +1486,3694399,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",0,0,"",shellscript,tab +1487,3695740,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1335,0,"",shellscript,selection_mouse +1488,3696181,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1281,0,"",shellscript,selection_mouse +1489,3696697,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1285,0,"",shellscript,selection_mouse +1490,3697082,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1286,0,"",shellscript,selection_mouse +1491,3701436,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1287,0,"",shellscript,selection_command +1492,3701595,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1286,0,"",shellscript,selection_command +1493,3701703,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1287,0,"",shellscript,selection_command +1494,3701822,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1286,0,"",shellscript,selection_command +1495,3701921,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1287,0,"",shellscript,selection_command +1496,3702033,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1286,0,"",shellscript,selection_command +1497,3702123,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1287,0,"",shellscript,selection_command +1498,3702222,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1286,0,"",shellscript,selection_command +1499,3702332,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1287,0,"",shellscript,selection_command 
+1500,3702442,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1286,0,"",shellscript,selection_command +1501,3702634,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1285,0,"",shellscript,selection_command +1502,3702827,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1286,0,"",shellscript,selection_command +1503,3703071,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1287,0,"",shellscript,selection_command +1504,3703409,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1286,0,"",shellscript,selection_command +1505,3703565,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1287,0,"",shellscript,selection_command +1506,3703672,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1286,0,"",shellscript,selection_command +1507,3704162,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1287,0,"",shellscript,selection_command +1508,3707280,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1337,0,"",shellscript,selection_command +1509,3707646,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1336,0,"",shellscript,selection_command +1510,3707887,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1337,0,"",shellscript,selection_command +1511,3710407,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1337,0,"1",shellscript,content +1512,3710409,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1338,0,"",shellscript,selection_keyboard +1513,3710412,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1338,0,"2",shellscript,content +1514,3710413,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1339,0,"",shellscript,selection_keyboard +1515,3710561,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1339,0,"0",shellscript,content +1516,3710562,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1340,0,"",shellscript,selection_keyboard +1517,3710803,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1340,0,"x",shellscript,content +1518,3710804,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1341,0,"",shellscript,selection_keyboard +1519,3711137,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1341,0,"1",shellscript,content +1520,3711137,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1342,0,"",shellscript,selection_keyboard +1521,3711371,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1342,0,"6",shellscript,content +1522,3711372,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1343,0,"",shellscript,selection_keyboard +1523,3711516,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1343,0,"0",shellscript,content +1524,3711516,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1344,0,"",shellscript,selection_keyboard +1525,3712400,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1344,0," ",shellscript,content 
+1526,3712401,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1345,0,"",shellscript,selection_keyboard +1527,3713515,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1287,0,"",shellscript,selection_mouse +1528,3714498,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1287,0,"1",shellscript,content +1529,3714499,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1288,0,"",shellscript,selection_keyboard +1530,3714611,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1288,0,"2",shellscript,content +1531,3714612,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1289,0,"",shellscript,selection_keyboard +1532,3714821,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1289,0,"0",shellscript,content +1533,3714822,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1290,0,"",shellscript,selection_keyboard +1534,3715374,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1290,0,"x",shellscript,content +1535,3715374,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1291,0,"",shellscript,selection_keyboard +1536,3715710,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1291,0,"1",shellscript,content +1537,3715711,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1292,0,"",shellscript,selection_keyboard +1538,3716017,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1292,0,"6",shellscript,content +1539,3716018,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1293,0,"",shellscript,selection_keyboard +1540,3716148,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1293,0,"0",shellscript,content +1541,3716149,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1294,0,"",shellscript,selection_keyboard +1542,3717892,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1294,0,"-",shellscript,content +1543,3717894,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1295,0,"",shellscript,selection_keyboard +1544,3719489,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1280,0,"",shellscript,selection_mouse +1545,3719641,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1280,1,"-",shellscript,selection_mouse +1546,3719667,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1280,2,"-1",shellscript,selection_mouse +1547,3719746,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1280,3,"-1n",shellscript,selection_mouse +1548,3719773,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1280,4,"-1no",shellscript,selection_mouse +1549,3719847,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1280,5,"-1nod",shellscript,selection_mouse +1550,3719921,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1280,6,"-1node",shellscript,selection_mouse +1551,3720026,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1280,7,"-1node-",shellscript,selection_mouse 
+1552,3720916,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1280,7,"",shellscript,content +1553,3721733,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1280,0,"-",shellscript,content +1554,3721734,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1281,0,"",shellscript,selection_keyboard +1555,3728005,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1246,0,"",shellscript,selection_mouse +1556,3729449,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1245,0,"",shellscript,selection_command +1557,3730189,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1217,30,"",shellscript,content +1558,3730206,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1221,0,"",shellscript,selection_command +1559,3732126,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",0,0,"",shellscript,tab +1560,3733898,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",1220,0,"",shellscript,selection_mouse +1561,3734947,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",1186,40,"",shellscript,content +1562,3734956,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",1190,0,"",shellscript,selection_command +1563,3736723,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",1192,0,"batch_size=48 \\n --init_lr=0 \\n --",shellscript,content +1564,3736741,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",1220,0,"",shellscript,selection_command +1565,3737418,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",1208,30,"",shellscript,content +1566,3737429,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",1212,0,"",shellscript,selection_command +1567,3745387,"jasmine/train_tokenizer.py",0,0,"",python,tab +1568,3755612,"TERMINAL",0,0,"srun",,terminal_focus +1569,3756942,"TERMINAL",0,0,"q",,terminal_output +1570,3757062,"TERMINAL",0,0,"u",,terminal_output +1571,3757151,"TERMINAL",0,0,"e",,terminal_output +1572,3757230,"TERMINAL",0,0,"u",,terminal_output +1573,3757283,"TERMINAL",0,0,"e",,terminal_output +1574,3757434,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1575,3757563,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn0401.localdomain: Mon Oct 6 14:39:56 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3546292 accelerat preproce tum_cte0 PD\t0:00\t 1 (Priority)3543728 accelerat preproce tum_cte0 R 19:02:26\t 1 hkn04253543729 accelerat preproce tum_cte0 R 19:02:26\t 1 hkn06323543730 accelerat preproce tum_cte0 R 19:02:26\t 1 hkn06323546199 dev_accel interact tum_cte0 R59:06\t 1 hkn0401",,terminal_output +1576,3758556,"TERMINAL",0,0,"77777",,terminal_output +1577,3759675,"TERMINAL",0,0,"88888",,terminal_output +1578,3760671,"TERMINAL",0,0,"99999",,terminal_output +1579,3761692,"TERMINAL",0,0,"40:0030303010",,terminal_output +1580,3762652,"TERMINAL",0,0,"11111",,terminal_output +1581,3763666,"TERMINAL",0,0,"22222",,terminal_output +1582,3764110,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1583,3768071,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",0,0,"",shellscript,tab 
+1584,3768616,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",0,0,"",shellscript,tab +1585,3778018,"TERMINAL",0,0,"s",,terminal_output +1586,3778105,"TERMINAL",0,0,"b",,terminal_output +1587,3778224,"TERMINAL",0,0,"a",,terminal_output +1588,3778322,"TERMINAL",0,0,"t",,terminal_output +1589,3778447,"TERMINAL",0,0,"c",,terminal_output +1590,3778502,"TERMINAL",0,0,"h",,terminal_output +1591,3778623,"TERMINAL",0,0," ",,terminal_output +1592,3778932,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",,terminal_output +1593,3783068,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh\r\n[?2004l\rSubmitted batch job 3546529\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1594,3784441,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",0,0,"",shellscript,tab +1595,3788060,"TERMINAL",0,0,"s",,terminal_output +1596,3788114,"TERMINAL",0,0,"b",,terminal_output +1597,3788236,"TERMINAL",0,0,"a",,terminal_output +1598,3788345,"TERMINAL",0,0,"t",,terminal_output +1599,3788472,"TERMINAL",0,0,"c",,terminal_output +1600,3788540,"TERMINAL",0,0,"h",,terminal_output +1601,3788663,"TERMINAL",0,0," ",,terminal_output +1602,3788951,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",,terminal_output +1603,3789635,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh\r\n[?2004l\rSubmitted batch job 3546530\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1604,3791030,"slurm/jobs/mihir/horeka/doom/train_tokenizer_default_1node.sh",0,0,"",shellscript,tab +1605,3798067,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",0,0,"",shellscript,tab +1606,3800817,"slurm/jobs/mihir/horeka/doom/train_tokenizer_default_1node.sh",0,0,"",shellscript,tab +1607,3803090,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",0,0,"",shellscript,tab +1608,3804144,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",0,0,"",shellscript,tab +1609,3807506,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",575,0,"",shellscript,selection_mouse +1610,3807533,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",574,0,"",shellscript,selection_command +1611,3809683,"TERMINAL",0,0,"i",,terminal_output +1612,3809801,"TERMINAL",0,0,"d",,terminal_output +1613,3809880,"TERMINAL",0,0,"l",,terminal_output +1614,3810129,"TERMINAL",0,0,"i",,terminal_output +1615,3810283,"TERMINAL",0,0,"ng",,terminal_output +1616,3810490,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn0401.localdomain: Mon Oct 6 14:40:49 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 19 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated:\t 1 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 6 nodes idle\rPartition accelerated-h200:\t 2 nodes idle",,terminal_output +1617,3811486,"TERMINAL",0,0,"50",,terminal_output +1618,3812504,"TERMINAL",0,0,"10",,terminal_output +1619,3813522,"TERMINAL",0,0,"2",,terminal_output +1620,3814597,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output 
+1621,3814761,"TERMINAL",0,0,"q",,terminal_output +1622,3814884,"TERMINAL",0,0,"u",,terminal_output +1623,3814942,"TERMINAL",0,0,"e",,terminal_output +1624,3815023,"TERMINAL",0,0,"u",,terminal_output +1625,3815104,"TERMINAL",0,0,"e",,terminal_output +1626,3815264,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0401.localdomain: Mon Oct 6 14:40:54 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3546292 accelerat preproce tum_cte0 PD\t0:00\t 1 (Priority)3546530 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3546529 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3543728 accelerat preproce tum_cte0 R 19:03:24\t 1 hkn04253543729 accelerat preproce tum_cte0 R 19:03:24\t 1 hkn06323543730 accelerat preproce tum_cte0 R 19:03:24\t 1 hkn06323546199 dev_accel interact tum_cte0 R 1:00:04\t 1 hkn0401",,terminal_output +1627,3816266,"TERMINAL",0,0,"55555",,terminal_output +1628,3817301,"TERMINAL",0,0,"66666",,terminal_output +1629,3818302,"TERMINAL",0,0,"77777",,terminal_output +1630,3819350,"TERMINAL",0,0,"88888",,terminal_output +1631,3820370,"TERMINAL",0,0,"99999",,terminal_output +1632,3821094,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +1633,3824427,"TERMINAL",0,0,"[?2004l\r\r\nexit\r\nsalloc: Relinquishing job allocation 3546199\r\nsalloc: Job 3546199 has exceeded its time limit and its allocation has been revoked.\r\n",,terminal_output +1634,3824455,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1635,3831914,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +1636,3831974,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3546536\r\nsalloc: job 3546536 queued and waiting for resources\r\n",,terminal_output +1637,3833408,"TERMINAL",0,0,"^Csalloc: Job allocation 3546536 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1638,3834722,"TERMINAL",0,0,"queue",,terminal_command +1639,3834783,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Mon Oct 6 14:41:13 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3546292 accelerat preproce tum_cte0 PD\t0:00\t 1 (Priority)3546530 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3546529 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3543728 accelerat preproce tum_cte0 R 19:03:43\t 1 hkn04253543729 accelerat preproce tum_cte0 R 19:03:43\t 1 hkn06323543730 accelerat preproce tum_cte0 R 19:03:43\t 1 hkn06323546199 dev_accel interact tum_cte0 CG 1:00:13\t 1 hkn0401",,terminal_output +1640,3835828,"TERMINAL",0,0,"\r4444",,terminal_output +1641,3836520,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1642,3838253,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +1643,3838308,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3546537\r\nsalloc: job 3546537 queued and waiting for resources\r\n",,terminal_output +1644,3840010,"TERMINAL",0,0,"^Csalloc: Job allocation 3546537 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1645,3841469,"TERMINAL",0,0,"idling",,terminal_command +1646,3841528,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Oct 6 14:41:20 2025Partition dev_cpuonly: 11 nodes 
idle\rPartition cpuonly: 19 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 6 nodes idle\rPartition accelerated-h200:\t 2 nodes idle",,terminal_output +1647,3842593,"TERMINAL",0,0,"1",,terminal_output +1648,3843604,"TERMINAL",0,0,"2",,terminal_output +1649,3844742,"TERMINAL",0,0,"3",,terminal_output +1650,3845767,"TERMINAL",0,0,"4",,terminal_output +1651,3846719,"TERMINAL",0,0,"51",,terminal_output +1652,3847757,"TERMINAL",0,0,"6",,terminal_output +1653,3848151,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1654,3850394,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +1655,3850477,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3546538\r\nsalloc: job 3546538 queued and waiting for resources\r\n",,terminal_output +1656,3853623,"TERMINAL",0,0,"bash",,terminal_focus +1657,3854741,"TERMINAL",0,0,"queue",,terminal_command +1658,3854824,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Mon Oct 6 14:41:33 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3546292 accelerat preproce tum_cte0 PD\t0:00\t 1 (Priority)3546530 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3546529 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3543728 accelerat preproce tum_cte0 R 19:04:03\t 1 hkn04253543729 accelerat preproce tum_cte0 R 19:04:03\t 1 hkn06323543730 accelerat preproce tum_cte0 R 19:04:03\t 1 hkn06323546538 dev_accel interact tum_cte0 PD\t0:00\t 1 (Priority)",,terminal_output +1659,3855906,"TERMINAL",0,0,"4444",,terminal_output +1660,3856910,"TERMINAL",0,0,"5555",,terminal_output +1661,3857947,"TERMINAL",0,0,"6777",,terminal_output +1662,3857972,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed",,terminal_output +1663,3861511,"TERMINAL",0,0,"idling",,terminal_command +1664,3861570,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Oct 6 14:41:40 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 19 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 1 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 6 nodes idle\rPartition accelerated-h200:\t 2 nodes idle",,terminal_output +1665,3862687,"TERMINAL",0,0,"1",,terminal_output +1666,3863676,"TERMINAL",0,0,"2",,terminal_output +1667,3864725,"TERMINAL",0,0,"3",,terminal_output +1668,3865540,"TERMINAL",0,0,"salloc",,terminal_focus +1669,3865720,"TERMINAL",0,0,"4",,terminal_output +1670,3866263,"TERMINAL",0,0,"^Csalloc: Job allocation 3546538 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1671,3866749,"TERMINAL",0,0,"5",,terminal_output +1672,3867788,"TERMINAL",0,0,"6",,terminal_output +1673,3868876,"TERMINAL",0,0,"7",,terminal_output +1674,3869861,"TERMINAL",0,0,"8",,terminal_output +1675,3870939,"TERMINAL",0,0,"935",,terminal_output +1676,3873359,"TERMINAL",0,0,"500",,terminal_output +1677,3874357,"TERMINAL",0,0,"3",,terminal_output +1678,3874591,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=accelerated-h200 --nodes=1--gres=gpu:1 --cpus-per-task=8",,terminal_command +1679,3874642,"TERMINAL",0,0,"]633;Csalloc: Pending 
job allocation 3546540\r\nsalloc: job 3546540 queued and waiting for resources\r\n",,terminal_output +1680,3875391,"TERMINAL",0,0,"4",,terminal_output +1681,3876304,"TERMINAL",0,0,"^Csalloc: Job allocation 3546540 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1682,3876465,"TERMINAL",0,0,"5",,terminal_output +1683,3877513,"TERMINAL",0,0,"6",,terminal_output +1684,3877747,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +1685,3877835,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3546541\r\nsalloc: job 3546541 queued and waiting for resources\r\n",,terminal_output +1686,3878508,"TERMINAL",0,0,"7",,terminal_output +1687,3879560,"TERMINAL",0,0,"8",,terminal_output +1688,3880687,"TERMINAL",0,0,"9",,terminal_output +1689,3881710,"TERMINAL",0,0,"2:00",,terminal_output +1690,3882692,"TERMINAL",0,0,"1",,terminal_output +1691,3883696,"TERMINAL",0,0,"2",,terminal_output +1692,3884780,"TERMINAL",0,0,"3",,terminal_output +1693,3885777,"TERMINAL",0,0,"4",,terminal_output +1694,3886820,"TERMINAL",0,0,"5",,terminal_output +1695,3887866,"TERMINAL",0,0,"6",,terminal_output +1696,3888898,"TERMINAL",0,0,"7",,terminal_output +1697,3889941,"TERMINAL",0,0,"8",,terminal_output +1698,3890976,"TERMINAL",0,0,"10",,terminal_output +1699,3892018,"TERMINAL",0,0,"1",,terminal_output +1700,3893058,"TERMINAL",0,0,"2",,terminal_output +1701,3894101,"TERMINAL",0,0,"3",,terminal_output +1702,3895134,"TERMINAL",0,0,"4",,terminal_output +1703,3895288,"TERMINAL",0,0,"^Csalloc: Job allocation 3546541 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1704,3896170,"TERMINAL",0,0,"5",,terminal_output +1705,3897273,"TERMINAL",0,0,"62",,terminal_output +1706,3898247,"TERMINAL",0,0,"7",,terminal_output +1707,3899319,"TERMINAL",0,0,"8",,terminal_output +1708,3900327,"TERMINAL",0,0,"9",,terminal_output +1709,3901397,"TERMINAL",0,0,"20",,terminal_output +1710,3902495,"TERMINAL",0,0,"1",,terminal_output +1711,3903444,"TERMINAL",0,0,"2",,terminal_output +1712,3904625,"TERMINAL",0,0,"3",,terminal_output +1713,3905572,"TERMINAL",0,0,"4",,terminal_output +1714,3906555,"TERMINAL",0,0,"5",,terminal_output +1715,3907620,"TERMINAL",0,0,"6",,terminal_output +1716,3908633,"TERMINAL",0,0,"71",,terminal_output +1717,3909764,"TERMINAL",0,0,"8",,terminal_output +1718,3910710,"TERMINAL",0,0,"9",,terminal_output +1719,3911748,"TERMINAL",0,0,"30",,terminal_output +1720,3912789,"TERMINAL",0,0,"1",,terminal_output +1721,3913825,"TERMINAL",0,0,"2",,terminal_output +1722,3914865,"TERMINAL",0,0,"3",,terminal_output +1723,3915909,"TERMINAL",0,0,"4",,terminal_output +1724,3916998,"TERMINAL",0,0,"5",,terminal_output +1725,3918061,"TERMINAL",0,0,"7",,terminal_output +1726,3919059,"TERMINAL",0,0,"8",,terminal_output +1727,3920127,"TERMINAL",0,0,"9",,terminal_output +1728,3921152,"TERMINAL",0,0,"40",,terminal_output +1729,3922152,"TERMINAL",0,0,"1",,terminal_output +1730,3923165,"TERMINAL",0,0,"2",,terminal_output +1731,3924203,"TERMINAL",0,0,"3",,terminal_output +1732,3925244,"TERMINAL",0,0,"4",,terminal_output +1733,3926305,"TERMINAL",0,0,"5",,terminal_output +1734,3927381,"TERMINAL",0,0,"6",,terminal_output +1735,3928353,"TERMINAL",0,0,"7",,terminal_output +1736,3929386,"TERMINAL",0,0,"8",,terminal_output +1737,3930425,"TERMINAL",0,0,"9",,terminal_output +1738,3931455,"TERMINAL",0,0,"50",,terminal_output 
+1739,3932599,"TERMINAL",0,0,"10",,terminal_output +1740,3933527,"TERMINAL",0,0,"2",,terminal_output +1741,3934671,"TERMINAL",0,0,"3",,terminal_output +1742,3935697,"TERMINAL",0,0,"4",,terminal_output +1743,3936723,"TERMINAL",0,0,"5",,terminal_output +1744,3937702,"TERMINAL",0,0,"6",,terminal_output +1745,3938711,"TERMINAL",0,0,"7",,terminal_output +1746,3939750,"TERMINAL",0,0,"8",,terminal_output +1747,3940795,"TERMINAL",0,0,"9",,terminal_output +1748,3941828,"TERMINAL",0,0,"3:00",,terminal_output +1749,3942869,"TERMINAL",0,0,"1",,terminal_output +1750,3943905,"TERMINAL",0,0,"2",,terminal_output +1751,3944991,"TERMINAL",0,0,"3",,terminal_output +1752,3945975,"TERMINAL",0,0,"5",,terminal_output +1753,3947016,"TERMINAL",0,0,"6",,terminal_output +1754,3948062,"TERMINAL",0,0,"7",,terminal_output +1755,3949090,"TERMINAL",0,0,"8",,terminal_output +1756,3950122,"TERMINAL",0,0,"9",,terminal_output +1757,3951340,"TERMINAL",0,0,"10",,terminal_output +1758,3952196,"TERMINAL",0,0,"1",,terminal_output +1759,3953246,"TERMINAL",0,0,"2",,terminal_output +1760,3954279,"TERMINAL",0,0,"3",,terminal_output +1761,3955312,"TERMINAL",0,0,"4",,terminal_output +1762,3956356,"TERMINAL",0,0,"5",,terminal_output +1763,3956683,"TERMINAL",0,0,"queue",,terminal_command +1764,3956738,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Mon Oct 6 14:43:15 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3546292 accelerat preproce tum_cte0 PD\t0:00\t 1 (Priority)3546529 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3546530 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3543728 accelerat preproce tum_cte0 R 19:05:45\t 1 hkn04253543729 accelerat preproce tum_cte0 R 19:05:45\t 1 hkn06323543730 accelerat preproce tum_cte0 R 19:05:45\t 1 hkn0632",,terminal_output +1765,3957393,"TERMINAL",0,0,"6",,terminal_output +1766,3957777,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1767,3958444,"TERMINAL",0,0,"7",,terminal_output +1768,3959207,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +1769,3959260,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3546542\r\nsalloc: job 3546542 queued and waiting for resources\r\n",,terminal_output +1770,3959469,"TERMINAL",0,0,"8",,terminal_output +1771,3960437,"TERMINAL",0,0,"^Csalloc: Job allocation 3546542 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1772,3960525,"TERMINAL",0,0,"9",,terminal_output +1773,3961584,"TERMINAL",0,0,"20",,terminal_output +1774,3962323,"TERMINAL",0,0,"queue",,terminal_command +1775,3962401,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Mon Oct 6 14:43:21 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3546292 accelerat preproce tum_cte0 PD\t0:00\t 1 (Priority)3546529 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3546530 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3543728 accelerat preproce tum_cte0 R 19:05:51\t 1 hkn04253543729 accelerat preproce tum_cte0 R 19:05:51\t 1 hkn06323543730 accelerat preproce tum_cte0 R 19:05:51\t 1 hkn0632",,terminal_output +1776,3962578,"TERMINAL",0,0,"1",,terminal_output +1777,3963446,"TERMINAL",0,0,"2222",,terminal_output +1778,3963616,"TERMINAL",0,0,"2",,terminal_output +1779,3964550,"TERMINAL",0,0,"3333",,terminal_output +1780,3964676,"TERMINAL",0,0,"3",,terminal_output +1781,3965579,"TERMINAL",0,0,"4444",,terminal_output 
+1782,3965727,"TERMINAL",0,0,"4",,terminal_output +1783,3966581,"TERMINAL",0,0,"5555",,terminal_output +1784,3966723,"TERMINAL",0,0,"571",,terminal_output +1785,3967741,"TERMINAL",0,0,"6666",,terminal_output +1786,3967770,"TERMINAL",0,0,"61",,terminal_output +1787,3968668,"TERMINAL",0,0,"7777",,terminal_output +1788,3968810,"TERMINAL",0,0,"7",,terminal_output +1789,3969773,"TERMINAL",0,0,"8888",,terminal_output +1790,3969843,"TERMINAL",0,0,"8",,terminal_output +1791,3970752,"TERMINAL",0,0,"9999",,terminal_output +1792,3970878,"TERMINAL",0,0,"9",,terminal_output +1793,3971820,"TERMINAL",0,0,"306:006:006:00",,terminal_output +1794,3971941,"TERMINAL",0,0,"30",,terminal_output +1795,3972845,"TERMINAL",0,0,"1111",,terminal_output +1796,3972968,"TERMINAL",0,0,"1",,terminal_output +1797,3973911,"TERMINAL",0,0,"2222",,terminal_output +1798,3973985,"TERMINAL",0,0,"3",,terminal_output +1799,3974975,"TERMINAL",0,0,"3444",,terminal_output +1800,3975054,"TERMINAL",0,0,"4",,terminal_output +1801,3975984,"TERMINAL",0,0,"5555",,terminal_output +1802,3976056,"TERMINAL",0,0,"5",,terminal_output +1803,3977024,"TERMINAL",0,0,"6666",,terminal_output +1804,3977099,"TERMINAL",0,0,"6",,terminal_output +1805,3978171,"TERMINAL",0,0,"7777",,terminal_output +1806,3978172,"TERMINAL",0,0,"7",,terminal_output +1807,3979195,"TERMINAL",0,0,"8888",,terminal_output +1808,3979196,"TERMINAL",0,0,"8",,terminal_output +1809,3980158,"TERMINAL",0,0,"9999",,terminal_output +1810,3980233,"TERMINAL",0,0,"9",,terminal_output +1811,3981210,"TERMINAL",0,0,"40101010",,terminal_output +1812,3981236,"TERMINAL",0,0,"40",,terminal_output +1813,3981984,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",0,0,"",shellscript,tab +1814,3982255,"TERMINAL",0,0,"1111",,terminal_output +1815,3982273,"TERMINAL",0,0,"1",,terminal_output +1816,3983316,"TERMINAL",0,0,"2222",,terminal_output +1817,3983344,"TERMINAL",0,0,"2",,terminal_output +1818,3984061,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",84,0,"",shellscript,selection_mouse +1819,3984346,"TERMINAL",0,0,"3333",,terminal_output +1820,3984370,"TERMINAL",0,0,"3",,terminal_output +1821,3985166,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",83,1,"",shellscript,content +1822,3985381,"TERMINAL",0,0,"4444",,terminal_output +1823,3985444,"TERMINAL",0,0,"4",,terminal_output +1824,3985490,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",82,1,"",shellscript,content +1825,3985700,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",82,0,"0",shellscript,content +1826,3985700,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",83,0,"",shellscript,selection_keyboard +1827,3986438,"TERMINAL",0,0,"5",,terminal_output +1828,3986438,"TERMINAL",0,0,"5555",,terminal_output +1829,3986676,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",83,0,"8",shellscript,content +1830,3986677,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",84,0,"",shellscript,selection_keyboard +1831,3987472,"TERMINAL",0,0,"6",,terminal_output +1832,3987497,"TERMINAL",0,0,"6666",,terminal_output +1833,3988499,"TERMINAL",0,0,"7",,terminal_output +1834,3988524,"TERMINAL",0,0,"7777",,terminal_output +1835,3989537,"TERMINAL",0,0,"8",,terminal_output +1836,3989592,"TERMINAL",0,0,"8888",,terminal_output +1837,3990661,"TERMINAL",0,0,"9",,terminal_output +1838,3990662,"TERMINAL",0,0,"9999",,terminal_output 
+1839,3991603,"TERMINAL",0,0,"50",,terminal_output +1840,3991697,"TERMINAL",0,0,"50202020",,terminal_output +1841,3992655,"TERMINAL",0,0,"1",,terminal_output +1842,3992737,"TERMINAL",0,0,"1111",,terminal_output +1843,3993381,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1844,3993673,"TERMINAL",0,0,"210",,terminal_output +1845,3995877,"TERMINAL",0,0,"3 6",,terminal_output +1846,3996917,"TERMINAL",0,0,"5",,terminal_output +1847,3997951,"TERMINAL",0,0,"6",,terminal_output +1848,3998848,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu.sh",,terminal_command +1849,3998849,"TERMINAL",0,0,"]633;CSubmitted batch job 3546543\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1850,3999009,"TERMINAL",0,0,"8",,terminal_output +1851,4000025,"TERMINAL",0,0,"9",,terminal_output +1852,4000758,"TERMINAL",0,0,"queue",,terminal_command +1853,4000811,"TERMINAL",0,0,"]633;C",,terminal_output +1854,4000920,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Mon Oct 6 14:43:59 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3546292 accelerat preproce tum_cte0 PD\t0:00\t 1 (Priority)3546529 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3546530 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3546543 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3543728 accelerat preproce tum_cte0 R 19:06:29\t 1 hkn04253543729 accelerat preproce tum_cte0 R 19:06:29\t 1 hkn06323543730 accelerat preproce tum_cte0 R 19:06:29\t 1 hkn0632",,terminal_output +1855,4001058,"TERMINAL",0,0,"4:00",,terminal_output +1856,4001900,"TERMINAL",0,0,"4:00303030",,terminal_output +1857,4002099,"TERMINAL",0,0,"1",,terminal_output +1858,4003057,"TERMINAL",0,0,"1222",,terminal_output +1859,4003136,"TERMINAL",0,0,"2",,terminal_output +1860,4004080,"TERMINAL",0,0,"3333",,terminal_output +1861,4004203,"TERMINAL",0,0,"3",,terminal_output +1862,4004845,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1863,4005202,"TERMINAL",0,0,"4",,terminal_output +1864,4005758,"TERMINAL",0,0,"fqueue",,terminal_command +1865,4005809,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue -o ""%.10i %.16P %.30j %.8u %.8T %.... 
hkn1991.localdomain: Mon Oct 6 14:44:04 2025JOBIDPARTITIONNAME USER STATE\t TIME TIME_LIMI NODES NODELIST(REASON)3546292\tacceleratedpreprocess_doom_10m tum_cte0 PENDING\t 0:00 1-00:00:00\r 1 (Priority)3546529\taccelerated train_tokenizer_default_single tum_cte0 PENDING\t 0:00 2-00:00:00\r 1 (Priority)3546530\taccelerated train_tokenizer_default_single tum_cte0 PENDING\t 0:00 2-00:00:00\r 1 (Priority)3546543\taccelerated train_tokenizer_default_single tum_cte0 PENDING\t 0:00 8:00:00\r1 (Priority)3543728\taccelerated preprocess_doom_50m_120x160 tum_cte0 RUNNING 19:06:34 1-18:00:00\r 1 hkn04253543729\taccelerated\t preprocess_doom_50m_60x80 tum_cte0 RUNNING 19:06:34 1-18:00:00\r 1 hkn06323543730\tacceleratedpreprocess_doom_50m tum_cte0 RUNNING 19:06:34 1-18:00:00\r 1 hkn0632",,terminal_output +1866,4006336,"TERMINAL",0,0,"5",,terminal_output +1867,4006856,"TERMINAL",0,0,"5555",,terminal_output +1868,4007336,"TERMINAL",0,0,"6",,terminal_output +1869,4007929,"TERMINAL",0,0,"6666",,terminal_output +1870,4008324,"TERMINAL",0,0,"7",,terminal_output +1871,4008992,"TERMINAL",0,0,"7777",,terminal_output +1872,4009402,"TERMINAL",0,0,"8",,terminal_output +1873,4009942,"TERMINAL",0,0,"8999",,terminal_output +1874,4010424,"TERMINAL",0,0,"9",,terminal_output +1875,4010975,"TERMINAL",0,0,"10404040",,terminal_output +1876,4011449,"TERMINAL",0,0,"10",,terminal_output +1877,4012007,"TERMINAL",0,0,"1111",,terminal_output +1878,4012474,"TERMINAL",0,0,"1",,terminal_output +1879,4013037,"TERMINAL",0,0,"2222",,terminal_output +1880,4013494,"TERMINAL",0,0,"2",,terminal_output +1881,4014068,"TERMINAL",0,0,"3333",,terminal_output +1882,4014623,"TERMINAL",0,0,"3",,terminal_output +1883,4015095,"TERMINAL",0,0,"4444",,terminal_output +1884,4015577,"TERMINAL",0,0,"4",,terminal_output +1885,4016132,"TERMINAL",0,0,"5555",,terminal_output +1886,4016598,"TERMINAL",0,0,"5",,terminal_output +1887,4017167,"TERMINAL",0,0,"6666",,terminal_output +1888,4017633,"TERMINAL",0,0,"6",,terminal_output +1889,4018208,"TERMINAL",0,0,"7777",,terminal_output +1890,4018668,"TERMINAL",0,0,"7",,terminal_output +1891,4019233,"TERMINAL",0,0,"8888",,terminal_output +1892,4019743,"TERMINAL",0,0,"8",,terminal_output +1893,4020255,"TERMINAL",0,0,"9999",,terminal_output +1894,4020745,"TERMINAL",0,0,"9",,terminal_output +1895,4021382,"TERMINAL",0,0,"20505050",,terminal_output +1896,4021793,"TERMINAL",0,0,"20",,terminal_output +1897,4022406,"TERMINAL",0,0,"1111",,terminal_output +1898,4022817,"TERMINAL",0,0,"1",,terminal_output +1899,4023431,"TERMINAL",0,0,"2222",,terminal_output +1900,4023849,"TERMINAL",0,0,"2",,terminal_output +1901,4024454,"TERMINAL",0,0,"3333",,terminal_output +1902,4024887,"TERMINAL",0,0,"3",,terminal_output +1903,4025481,"TERMINAL",0,0,"4444",,terminal_output +1904,4025923,"TERMINAL",0,0,"4",,terminal_output +1905,4026439,"TERMINAL",0,0,"5555",,terminal_output +1906,4027018,"TERMINAL",0,0,"5",,terminal_output +1907,4027531,"TERMINAL",0,0,"6666",,terminal_output +1908,4028041,"TERMINAL",0,0,"71",,terminal_output +1909,4028499,"TERMINAL",0,0,"7777",,terminal_output +1910,4029067,"TERMINAL",0,0,"8",,terminal_output +1911,4029578,"TERMINAL",0,0,"8888",,terminal_output +1912,4030088,"TERMINAL",0,0,"9",,terminal_output +1913,4030603,"TERMINAL",0,0,"9999",,terminal_output +1914,4031115,"TERMINAL",0,0,"30",,terminal_output +1915,4031628,"TERMINAL",0,0,"307:007:007:00",,terminal_output +1916,4032236,"TERMINAL",0,0,"1",,terminal_output +1917,4032623,"TERMINAL",0,0,"1111",,terminal_output +1918,4033216,"TERMINAL",0,0,"2",,terminal_output 
+1919,4033245,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1920,4034286,"TERMINAL",0,0,"3",,terminal_output +1921,4035248,"TERMINAL",0,0,"4",,terminal_output +1922,4036286,"TERMINAL",0,0,"57",,terminal_output +1923,4037359,"TERMINAL",0,0,"6",,terminal_output +1924,4038367,"TERMINAL",0,0,"7",,terminal_output +1925,4039508,"TERMINAL",0,0,"8",,terminal_output +1926,4040534,"TERMINAL",0,0,"9",,terminal_output +1927,4041497,"TERMINAL",0,0,"40",,terminal_output +1928,4042582,"TERMINAL",0,0,"1",,terminal_output +1929,4043548,"TERMINAL",0,0,"2",,terminal_output +1930,4044629,"TERMINAL",0,0,"3",,terminal_output +1931,4045622,"TERMINAL",0,0,"4",,terminal_output +1932,4046660,"TERMINAL",0,0,"5",,terminal_output +1933,4047800,"TERMINAL",0,0,"6",,terminal_output +1934,4048825,"TERMINAL",0,0,"7",,terminal_output +1935,4049780,"TERMINAL",0,0,"8",,terminal_output +1936,4050821,"TERMINAL",0,0,"9",,terminal_output +1937,4051860,"TERMINAL",0,0,"50",,terminal_output +1938,4052900,"TERMINAL",0,0,"10",,terminal_output +1939,4053937,"TERMINAL",0,0,"2",,terminal_output +1940,4055072,"TERMINAL",0,0,"4",,terminal_output +1941,4056011,"TERMINAL",0,0,"5",,terminal_output +1942,4057045,"TERMINAL",0,0,"6",,terminal_output +1943,4058143,"TERMINAL",0,0,"7",,terminal_output +1944,4059112,"TERMINAL",0,0,"8",,terminal_output +1945,4060150,"TERMINAL",0,0,"9",,terminal_output +1946,4061218,"TERMINAL",0,0,"5:00",,terminal_output +1947,4062239,"TERMINAL",0,0,"1",,terminal_output +1948,4063413,"TERMINAL",0,0,"2",,terminal_output +1949,4064392,"TERMINAL",0,0,"3",,terminal_output +1950,4065345,"TERMINAL",0,0,"4",,terminal_output +1951,4066440,"TERMINAL",0,0,"5",,terminal_output +1952,4067463,"TERMINAL",0,0,"6",,terminal_output +1953,4068457,"TERMINAL",0,0,"7",,terminal_output +1954,4069517,"TERMINAL",0,0,"8",,terminal_output +1955,4070528,"TERMINAL",0,0,"9",,terminal_output +1956,4071562,"TERMINAL",0,0,"10",,terminal_output +1957,4072685,"TERMINAL",0,0,"1",,terminal_output +1958,4073635,"TERMINAL",0,0,"2",,terminal_output +1959,4074670,"TERMINAL",0,0,"3",,terminal_output +1960,4075759,"TERMINAL",0,0,"4",,terminal_output +1961,4076782,"TERMINAL",0,0,"5",,terminal_output +1962,4077815,"TERMINAL",0,0,"6",,terminal_output +1963,4078831,"TERMINAL",0,0,"7",,terminal_output +1964,4079862,"TERMINAL",0,0,"8",,terminal_output +1965,4080900,"TERMINAL",0,0,"9",,terminal_output +1966,4081937,"TERMINAL",0,0,"20",,terminal_output +1967,4082974,"TERMINAL",0,0,"2",,terminal_output +1968,4084016,"TERMINAL",0,0,"3",,terminal_output +1969,4085076,"TERMINAL",0,0,"4",,terminal_output +1970,4086102,"TERMINAL",0,0,"5",,terminal_output +1971,4087226,"TERMINAL",0,0,"61",,terminal_output +1972,4088252,"TERMINAL",0,0,"7",,terminal_output +1973,4089274,"TERMINAL",0,0,"8",,terminal_output +1974,4090252,"TERMINAL",0,0,"911",,terminal_output +1975,4091323,"TERMINAL",0,0,"30",,terminal_output +1976,4092300,"TERMINAL",0,0,"1",,terminal_output +1977,4093342,"TERMINAL",0,0,"2",,terminal_output +1978,4094393,"TERMINAL",0,0,"3",,terminal_output +1979,4095416,"TERMINAL",0,0,"4",,terminal_output +1980,4096484,"TERMINAL",0,0,"5",,terminal_output +1981,4097570,"TERMINAL",0,0,"6",,terminal_output +1982,4098533,"TERMINAL",0,0,"7",,terminal_output +1983,4099559,"TERMINAL",0,0,"8",,terminal_output +1984,4100641,"TERMINAL",0,0,"9",,terminal_output +1985,4101665,"TERMINAL",0,0,"40",,terminal_output +1986,4102688,"TERMINAL",0,0,"1",,terminal_output +1987,4103715,"TERMINAL",0,0,"2",,terminal_output 
+1988,4104808,"TERMINAL",0,0,"3",,terminal_output +1989,4105795,"TERMINAL",0,0,"4",,terminal_output +1990,4106827,"TERMINAL",0,0,"5",,terminal_output +1991,4107867,"TERMINAL",0,0,"6",,terminal_output +1992,4108900,"TERMINAL",0,0,"7",,terminal_output +1993,4109961,"TERMINAL",0,0,"8",,terminal_output +1994,4110984,"TERMINAL",0,0,"50",,terminal_output +1995,4112113,"TERMINAL",0,0,"1",,terminal_output +1996,4113047,"TERMINAL",0,0,"20",,terminal_output +1997,4114160,"TERMINAL",0,0,"3",,terminal_output +1998,4115118,"TERMINAL",0,0,"4",,terminal_output +1999,4117846,"TERMINAL",0,0,"5",,terminal_output +2000,4118813,"TERMINAL",0,0,"7",,terminal_output +2001,4119851,"TERMINAL",0,0,"8",,terminal_output +2002,4120884,"TERMINAL",0,0,"9",,terminal_output +2003,4121919,"TERMINAL",0,0,"6:00",,terminal_output +2004,4122957,"TERMINAL",0,0,"1",,terminal_output +2005,4124089,"TERMINAL",0,0,"35",,terminal_output +2006,4125089,"TERMINAL",0,0,"4",,terminal_output +2007,4126133,"TERMINAL",0,0,"5",,terminal_output +2008,4127162,"TERMINAL",0,0,"6",,terminal_output +2009,4128185,"TERMINAL",0,0,"7",,terminal_output +2010,4129167,"TERMINAL",0,0,"8",,terminal_output +2011,4130207,"TERMINAL",0,0,"9",,terminal_output +2012,4131258,"TERMINAL",0,0,"10",,terminal_output +2013,4132283,"TERMINAL",0,0,"1",,terminal_output +2014,4133315,"TERMINAL",0,0,"2",,terminal_output +2015,4134433,"TERMINAL",0,0,"3",,terminal_output +2016,4135457,"TERMINAL",0,0,"4",,terminal_output +2017,4136419,"TERMINAL",0,0,"5",,terminal_output +2018,4137507,"TERMINAL",0,0,"6",,terminal_output +2019,4138501,"TERMINAL",0,0,"76",,terminal_output +2020,4139657,"TERMINAL",0,0,"8",,terminal_output +2021,4140587,"TERMINAL",0,0,"9",,terminal_output +2022,4141703,"TERMINAL",0,0,"20",,terminal_output +2023,4142666,"TERMINAL",0,0,"1",,terminal_output +2024,4143702,"TERMINAL",0,0,"2",,terminal_output +2025,4144805,"TERMINAL",0,0,"3",,terminal_output +2026,4145804,"TERMINAL",0,0,"4",,terminal_output +2027,4146826,"TERMINAL",0,0,"5",,terminal_output +2028,4147862,"TERMINAL",0,0,"61",,terminal_output +2029,4148903,"TERMINAL",0,0,"7",,terminal_output +2030,4149936,"TERMINAL",0,0,"8",,terminal_output +2031,4151025,"TERMINAL",0,0,"30",,terminal_output +2032,4152014,"TERMINAL",0,0,"1",,terminal_output +2033,4153072,"TERMINAL",0,0,"2",,terminal_output +2034,4154094,"TERMINAL",0,0,"3",,terminal_output +2035,4155117,"TERMINAL",0,0,"4",,terminal_output +2036,4156248,"TERMINAL",0,0,"5",,terminal_output +2037,4157271,"TERMINAL",0,0,"6",,terminal_output +2038,4158294,"TERMINAL",0,0,"7",,terminal_output +2039,4159315,"TERMINAL",0,0,"8",,terminal_output +2040,4160315,"TERMINAL",0,0,"9",,terminal_output +2041,4161331,"TERMINAL",0,0,"40",,terminal_output +2042,4162397,"TERMINAL",0,0,"1",,terminal_output +2043,4163412,"TERMINAL",0,0,"2",,terminal_output +2044,4164435,"TERMINAL",0,0,"3",,terminal_output +2045,4165471,"TERMINAL",0,0,"4",,terminal_output +2046,4166509,"TERMINAL",0,0,"5",,terminal_output +2047,4167543,"TERMINAL",0,0,"6",,terminal_output +2048,4168575,"TERMINAL",0,0,"7",,terminal_output +2049,4169622,"TERMINAL",0,0,"8",,terminal_output +2050,4170680,"TERMINAL",0,0,"9",,terminal_output +2051,4171682,"TERMINAL",0,0,"50",,terminal_output +2052,4172731,"TERMINAL",0,0,"1",,terminal_output +2053,4173859,"TERMINAL",0,0,"20",,terminal_output +2054,4174967,"TERMINAL",0,0,"3",,terminal_output +2055,4175838,"TERMINAL",0,0,"4",,terminal_output +2056,4176871,"TERMINAL",0,0,"5",,terminal_output +2057,4177907,"TERMINAL",0,0,"6",,terminal_output 
+2058,4178941,"TERMINAL",0,0,"7",,terminal_output +2059,4180001,"TERMINAL",0,0,"9",,terminal_output +2060,4181026,"TERMINAL",0,0,"7:00",,terminal_output +2061,4182153,"TERMINAL",0,0,"1",,terminal_output +2062,4183181,"TERMINAL",0,0,"2",,terminal_output +2063,4184202,"TERMINAL",0,0,"3",,terminal_output +2064,4185160,"TERMINAL",0,0,"4",,terminal_output +2065,4186250,"TERMINAL",0,0,"5",,terminal_output +2066,4187274,"TERMINAL",0,0,"6",,terminal_output +2067,4188297,"TERMINAL",0,0,"7",,terminal_output +2068,4189322,"TERMINAL",0,0,"8",,terminal_output +2069,4190341,"TERMINAL",0,0,"9",,terminal_output +2070,4191472,"TERMINAL",0,0,"10",,terminal_output +2071,4192417,"TERMINAL",0,0,"1",,terminal_output +2072,4193450,"TERMINAL",0,0,"2",,terminal_output +2073,4194545,"TERMINAL",0,0,"3",,terminal_output +2074,4195568,"TERMINAL",0,0,"4",,terminal_output +2075,4196595,"TERMINAL",0,0,"5",,terminal_output +2076,4197613,"TERMINAL",0,0,"61",,terminal_output +2077,4198643,"TERMINAL",0,0,"7",,terminal_output +2078,4199764,"TERMINAL",0,0,"8",,terminal_output +2079,4200788,"TERMINAL",0,0,"9",,terminal_output +2080,4201812,"TERMINAL",0,0,"20",,terminal_output +2081,4202846,"TERMINAL",0,0,"1",,terminal_output +2082,4203860,"TERMINAL",0,0,"2",,terminal_output +2083,4204885,"TERMINAL",0,0,"3",,terminal_output +2084,4206113,"TERMINAL",0,0,"4",,terminal_output +2085,4207138,"TERMINAL",0,0,"6",,terminal_output +2086,4208162,"TERMINAL",0,0,"71",,terminal_output +2087,4209186,"TERMINAL",0,0,"8",,terminal_output +2088,4210212,"TERMINAL",0,0,"9",,terminal_output +2089,4211336,"TERMINAL",0,0,"3032",,terminal_output +2090,4212292,"TERMINAL",0,0,"1",,terminal_output +2091,4213384,"TERMINAL",0,0,"2",,terminal_output +2092,4214407,"TERMINAL",0,0,"3",,terminal_output +2093,4215430,"TERMINAL",0,0,"4",,terminal_output +2094,4216455,"TERMINAL",0,0,"5",,terminal_output +2095,4217581,"TERMINAL",0,0,"6",,terminal_output +2096,4218521,"TERMINAL",0,0,"7",,terminal_output +2097,4219560,"TERMINAL",0,0,"8",,terminal_output +2098,4220698,"TERMINAL",0,0,"9",,terminal_output +2099,4221636,"TERMINAL",0,0,"40",,terminal_output +2100,4222701,"TERMINAL",0,0,"1",,terminal_output +2101,4223726,"TERMINAL",0,0,"2",,terminal_output +2102,4224800,"TERMINAL",0,0,"3",,terminal_output +2103,4225802,"TERMINAL",0,0,"4",,terminal_output +2104,4226840,"TERMINAL",0,0,"5",,terminal_output +2105,4227874,"TERMINAL",0,0,"6",,terminal_output +2106,4228913,"TERMINAL",0,0,"7",,terminal_output +2107,4229951,"TERMINAL",0,0,"8",,terminal_output +2108,4231035,"TERMINAL",0,0,"50",,terminal_output +2109,4232027,"TERMINAL",0,0,"1",,terminal_output +2110,4233066,"TERMINAL",0,0,"20",,terminal_output +2111,4234170,"TERMINAL",0,0,"3",,terminal_output +2112,4235141,"TERMINAL",0,0,"4",,terminal_output +2113,4236219,"TERMINAL",0,0,"5",,terminal_output +2114,4237244,"TERMINAL",0,0,"6",,terminal_output +2115,4239355,"TERMINAL",0,0,"716",,terminal_output +2116,4240389,"TERMINAL",0,0,"9",,terminal_output +2117,4241428,"TERMINAL",0,0,"8:00",,terminal_output +2118,4242565,"TERMINAL",0,0,"1",,terminal_output +2119,4243504,"TERMINAL",0,0,"2",,terminal_output +2120,4244613,"TERMINAL",0,0,"3",,terminal_output +2121,4245619,"TERMINAL",0,0,"4",,terminal_output +2122,4246660,"TERMINAL",0,0,"5",,terminal_output +2123,4247687,"TERMINAL",0,0,"6",,terminal_output +2124,4248716,"TERMINAL",0,0,"7",,terminal_output +2125,4249720,"TERMINAL",0,0,"8",,terminal_output +2126,4250755,"TERMINAL",0,0,"9",,terminal_output +2127,4251904,"TERMINAL",0,0,"10",,terminal_output 
+2128,4252835,"TERMINAL",0,0,"1",,terminal_output +2129,4253870,"TERMINAL",0,0,"2",,terminal_output +2130,4254906,"TERMINAL",0,0,"3",,terminal_output +2131,4255943,"TERMINAL",0,0,"4",,terminal_output +2132,4256976,"TERMINAL",0,0,"6",,terminal_output +2133,4258033,"TERMINAL",0,0,"7",,terminal_output +2134,4259158,"TERMINAL",0,0,"8",,terminal_output +2135,4260088,"TERMINAL",0,0,"9",,terminal_output +2136,4261207,"TERMINAL",0,0,"20",,terminal_output +2137,4262231,"TERMINAL",0,0,"1",,terminal_output +2138,4263251,"TERMINAL",0,0,"2",,terminal_output +2139,4264277,"TERMINAL",0,0,"3",,terminal_output +2140,4265300,"TERMINAL",0,0,"4",,terminal_output +2141,4266326,"TERMINAL",0,0,"5",,terminal_output +2142,4267455,"TERMINAL",0,0,"632",,terminal_output +2143,4268396,"TERMINAL",0,0,"71",,terminal_output +2144,4269439,"TERMINAL",0,0,"8",,terminal_output +2145,4270525,"TERMINAL",0,0,"9",,terminal_output +2146,4271504,"TERMINAL",0,0,"30",,terminal_output +2147,4272572,"TERMINAL",0,0,"1",,terminal_output +2148,4273575,"TERMINAL",0,0,"2",,terminal_output +2149,4274619,"TERMINAL",0,0,"3",,terminal_output +2150,4275744,"TERMINAL",0,0,"4",,terminal_output +2151,4276814,"TERMINAL",0,0,"5",,terminal_output +2152,4277747,"TERMINAL",0,0,"6",,terminal_output +2153,4278821,"TERMINAL",0,0,"7",,terminal_output +2154,4279842,"TERMINAL",0,0,"8",,terminal_output +2155,4280866,"TERMINAL",0,0,"9",,terminal_output +2156,4281866,"TERMINAL",0,0,"40",,terminal_output +2157,4282903,"TERMINAL",0,0,"1",,terminal_output +2158,4283938,"TERMINAL",0,0,"2",,terminal_output +2159,4284976,"TERMINAL",0,0,"4",,terminal_output +2160,4286013,"TERMINAL",0,0,"5",,terminal_output +2161,4287110,"TERMINAL",0,0,"6",,terminal_output +2162,4288239,"TERMINAL",0,0,"7",,terminal_output +2163,4289123,"TERMINAL",0,0,"8",,terminal_output +2164,4290158,"TERMINAL",0,0,"9",,terminal_output +2165,4291200,"TERMINAL",0,0,"50",,terminal_output +2166,4292231,"TERMINAL",0,0,"10",,terminal_output +2167,4293273,"TERMINAL",0,0,"2",,terminal_output +2168,4294314,"TERMINAL",0,0,"3",,terminal_output +2169,4295358,"TERMINAL",0,0,"4",,terminal_output +2170,4296435,"TERMINAL",0,0,"5",,terminal_output +2171,4297455,"TERMINAL",0,0,"6",,terminal_output +2172,4298470,"TERMINAL",0,0,"7",,terminal_output +2173,4299555,"TERMINAL",0,0,"8",,terminal_output +2174,4300559,"TERMINAL",0,0,"9",,terminal_output +2175,4301586,"TERMINAL",0,0,"9:00",,terminal_output +2176,4302623,"TERMINAL",0,0,"1",,terminal_output +2177,4303658,"TERMINAL",0,0,"2",,terminal_output +2178,4304699,"TERMINAL",0,0,"3",,terminal_output +2179,4305739,"TERMINAL",0,0,"4",,terminal_output +2180,4306782,"TERMINAL",0,0,"5",,terminal_output +2181,4307904,"TERMINAL",0,0,"6",,terminal_output +2182,4308841,"TERMINAL",0,0,"7",,terminal_output +2183,4309879,"TERMINAL",0,0,"8",,terminal_output +2184,4310917,"TERMINAL",0,0,"9",,terminal_output +2185,4311961,"TERMINAL",0,0,"10",,terminal_output +2186,4312992,"TERMINAL",0,0,"2",,terminal_output +2187,4314059,"TERMINAL",0,0,"3",,terminal_output +2188,4315095,"TERMINAL",0,0,"4",,terminal_output +2189,4316194,"TERMINAL",0,0,"5",,terminal_output +2190,4317134,"TERMINAL",0,0,"6",,terminal_output +2191,4318243,"TERMINAL",0,0,"7",,terminal_output +2192,4319262,"TERMINAL",0,0,"8",,terminal_output +2193,4320292,"TERMINAL",0,0,"948",,terminal_output +2194,4321315,"TERMINAL",0,0,"20",,terminal_output +2195,4322339,"TERMINAL",0,0,"1",,terminal_output +2196,4323363,"TERMINAL",0,0,"2",,terminal_output +2197,4324391,"TERMINAL",0,0,"3",,terminal_output 
+2198,4325511,"TERMINAL",0,0,"4",,terminal_output +2199,4326482,"TERMINAL",0,0,"5",,terminal_output +2200,4327510,"TERMINAL",0,0,"61",,terminal_output +2201,4328547,"TERMINAL",0,0,"7",,terminal_output +2202,4329606,"TERMINAL",0,0,"880",,terminal_output +2203,4330632,"TERMINAL",0,0,"9",,terminal_output +2204,4331761,"TERMINAL",0,0,"30",,terminal_output +2205,4332784,"TERMINAL",0,0,"1",,terminal_output +2206,4333733,"TERMINAL",0,0,"2",,terminal_output +2207,4334857,"TERMINAL",0,0,"396",,terminal_output +2208,4335857,"TERMINAL",0,0,"4112",,terminal_output +2209,4336850,"TERMINAL",0,0,"5",,terminal_output +2210,4337905,"TERMINAL",0,0,"6",,terminal_output +2211,4338923,"TERMINAL",0,0,"7",,terminal_output +2212,4339960,"TERMINAL",0,0,"8",,terminal_output +2213,4341093,"TERMINAL",0,0,"40",,terminal_output +2214,4342034,"TERMINAL",0,0,"1",,terminal_output +2215,4343067,"TERMINAL",0,0,"2",,terminal_output +2216,4344102,"TERMINAL",0,0,"3",,terminal_output +2217,4345138,"TERMINAL",0,0,"4",,terminal_output +2218,4346201,"TERMINAL",0,0,"5",,terminal_output +2219,4347327,"TERMINAL",0,0,"6",,terminal_output +2220,4348252,"TERMINAL",0,0,"7",,terminal_output +2221,4349373,"TERMINAL",0,0,"8",,terminal_output +2222,4350321,"TERMINAL",0,0,"9",,terminal_output +2223,4351361,"TERMINAL",0,0,"50",,terminal_output +2224,4352394,"TERMINAL",0,0,"1",,terminal_output +2225,4353443,"TERMINAL",0,0,"20",,terminal_output +2226,4354491,"TERMINAL",0,0,"3",,terminal_output +2227,4355617,"TERMINAL",0,0,"4",,terminal_output +2228,4356549,"TERMINAL",0,0,"5",,terminal_output +2229,4357665,"TERMINAL",0,0,"6",,terminal_output +2230,4358629,"TERMINAL",0,0,"7",,terminal_output +2231,4360942,"TERMINAL",0,0,"8 6",,terminal_output +2232,4361872,"TERMINAL",0,0,"50:00",,terminal_output +2233,4362917,"TERMINAL",0,0,"1",,terminal_output +2234,4363959,"TERMINAL",0,0,"2",,terminal_output +2235,4364996,"TERMINAL",0,0,"4",,terminal_output +2236,4366042,"TERMINAL",0,0,"5",,terminal_output +2237,4367077,"TERMINAL",0,0,"6",,terminal_output +2238,4368215,"TERMINAL",0,0,"7",,terminal_output +2239,4369147,"TERMINAL",0,0,"8",,terminal_output +2240,4370187,"TERMINAL",0,0,"9",,terminal_output +2241,4371219,"TERMINAL",0,0,"10",,terminal_output +2242,4372311,"TERMINAL",0,0,"1",,terminal_output +2243,4373332,"TERMINAL",0,0,"2",,terminal_output +2244,4374326,"TERMINAL",0,0,"3",,terminal_output +2245,4375360,"TERMINAL",0,0,"4",,terminal_output +2246,4376402,"TERMINAL",0,0,"5",,terminal_output +2247,4377531,"TERMINAL",0,0,"6",,terminal_output +2248,4378471,"TERMINAL",0,0,"7",,terminal_output +2249,4379510,"TERMINAL",0,0,"8",,terminal_output +2250,4380545,"TERMINAL",0,0,"9",,terminal_output +2251,4381630,"TERMINAL",0,0,"20",,terminal_output +2252,4382660,"TERMINAL",0,0,"1",,terminal_output +2253,4383654,"TERMINAL",0,0,"2",,terminal_output +2254,4384697,"TERMINAL",0,0,"3",,terminal_output +2255,4385746,"TERMINAL",0,0,"4",,terminal_output +2256,4386781,"TERMINAL",0,0,"5",,terminal_output +2257,4387897,"TERMINAL",0,0,"6",,terminal_output +2258,4388931,"TERMINAL",0,0,"71",,terminal_output +2259,4389967,"TERMINAL",0,0,"9",,terminal_output +2260,4391003,"TERMINAL",0,0,"30",,terminal_output +2261,4392037,"TERMINAL",0,0,"1",,terminal_output +2262,4393097,"TERMINAL",0,0,"2",,terminal_output +2263,4394122,"TERMINAL",0,0,"3",,terminal_output +2264,4395145,"TERMINAL",0,0,"4",,terminal_output +2265,4396272,"TERMINAL",0,0,"5",,terminal_output +2266,4397294,"TERMINAL",0,0,"6",,terminal_output +2267,4398318,"TERMINAL",0,0,"7",,terminal_output 
+2268,4399341,"TERMINAL",0,0,"8",,terminal_output +2269,4400335,"TERMINAL",0,0,"9",,terminal_output +2270,4401392,"TERMINAL",0,0,"40",,terminal_output +2271,4402418,"TERMINAL",0,0,"1",,terminal_output +2272,4403447,"TERMINAL",0,0,"2",,terminal_output +2273,4404566,"TERMINAL",0,0,"3",,terminal_output +2274,4405523,"TERMINAL",0,0,"4",,terminal_output +2275,4406615,"TERMINAL",0,0,"5",,terminal_output +2276,4407640,"TERMINAL",0,0,"6",,terminal_output +2277,4408665,"TERMINAL",0,0,"7",,terminal_output +2278,4409693,"TERMINAL",0,0,"8",,terminal_output +2279,4410809,"TERMINAL",0,0,"9",,terminal_output +2280,4411864,"TERMINAL",0,0,"50",,terminal_output +2281,4412867,"TERMINAL",0,0,"10",,terminal_output +2282,4413895,"TERMINAL",0,0,"2",,terminal_output +2283,4414888,"TERMINAL",0,0,"3",,terminal_output +2284,4415936,"TERMINAL",0,0,"4",,terminal_output +2285,4416952,"TERMINAL",0,0,"5",,terminal_output +2286,4417985,"TERMINAL",0,0,"7",,terminal_output +2287,4419025,"TERMINAL",0,0,"8",,terminal_output +2288,4420064,"TERMINAL",0,0,"9",,terminal_output +2289,4421101,"TERMINAL",0,0,"1:00",,terminal_output +2290,4422176,"TERMINAL",0,0,"1",,terminal_output +2291,4423202,"TERMINAL",0,0,"2",,terminal_output +2292,4424206,"TERMINAL",0,0,"3",,terminal_output +2293,4425249,"TERMINAL",0,0,"4",,terminal_output +2294,4426380,"TERMINAL",0,0,"5",,terminal_output +2295,4427330,"TERMINAL",0,0,"6",,terminal_output +2296,4428428,"TERMINAL",0,0,"7",,terminal_output +2297,4429448,"TERMINAL",0,0,"8",,terminal_output +2298,4430441,"TERMINAL",0,0,"9",,terminal_output +2299,4431497,"TERMINAL",0,0,"10",,terminal_output +2300,4432515,"TERMINAL",0,0,"1",,terminal_output +2301,4433550,"TERMINAL",0,0,"2",,terminal_output +2302,4434585,"TERMINAL",0,0,"3",,terminal_output +2303,4435696,"TERMINAL",0,0,"4",,terminal_output +2304,4436658,"TERMINAL",0,0,"5",,terminal_output +2305,4437696,"TERMINAL",0,0,"6",,terminal_output +2306,4438734,"TERMINAL",0,0,"7",,terminal_output +2307,4439794,"TERMINAL",0,0,"8",,terminal_output +2308,4440814,"TERMINAL",0,0,"9",,terminal_output +2309,4441941,"TERMINAL",0,0,"20",,terminal_output +2310,4442959,"TERMINAL",0,0,"1",,terminal_output +2311,4443915,"TERMINAL",0,0,"2",,terminal_output +2312,4444953,"TERMINAL",0,0,"3",,terminal_output +2313,4445993,"TERMINAL",0,0,"5",,terminal_output +2314,4447034,"TERMINAL",0,0,"61",,terminal_output +2315,4448066,"TERMINAL",0,0,"7",,terminal_output +2316,4449103,"TERMINAL",0,0,"8",,terminal_output +2317,4450141,"TERMINAL",0,0,"9",,terminal_output +2318,4451259,"TERMINAL",0,0,"30",,terminal_output +2319,4452287,"TERMINAL",0,0,"1",,terminal_output +2320,4453309,"TERMINAL",0,0,"2",,terminal_output +2321,4454330,"TERMINAL",0,0,"3",,terminal_output +2322,4455321,"TERMINAL",0,0,"4",,terminal_output +2323,4456378,"TERMINAL",0,0,"5",,terminal_output +2324,4457510,"TERMINAL",0,0,"6",,terminal_output +2325,4458436,"TERMINAL",0,0,"7",,terminal_output +2326,4459557,"TERMINAL",0,0,"8",,terminal_output +2327,4460507,"TERMINAL",0,0,"9",,terminal_output +2328,4461544,"TERMINAL",0,0,"40",,terminal_output +2329,4462630,"TERMINAL",0,0,"1",,terminal_output +2330,4463621,"TERMINAL",0,0,"2",,terminal_output +2331,4464664,"TERMINAL",0,0,"3",,terminal_output +2332,4465702,"TERMINAL",0,0,"4",,terminal_output +2333,4466729,"TERMINAL",0,0,"5",,terminal_output +2334,4467777,"TERMINAL",0,0,"6",,terminal_output +2335,4468900,"TERMINAL",0,0,"7",,terminal_output +2336,4469901,"TERMINAL",0,0,"8",,terminal_output +2337,4470924,"TERMINAL",0,0,"9",,terminal_output 
+2338,4471943,"TERMINAL",0,0,"50",,terminal_output +2339,4472968,"TERMINAL",0,0,"20",,terminal_output +2340,4473995,"TERMINAL",0,0,"3",,terminal_output +2341,4475080,"TERMINAL",0,0,"42",,terminal_output +2342,4476068,"TERMINAL",0,0,"5",,terminal_output +2343,4477172,"TERMINAL",0,0,"6",,terminal_output +2344,4478141,"TERMINAL",0,0,"7",,terminal_output +2345,4479213,"TERMINAL",0,0,"8",,terminal_output +2346,4480239,"TERMINAL",0,0,"9",,terminal_output +2347,4482058,"TERMINAL",0,0,"2:00",,terminal_output +2348,4483105,"TERMINAL",0,0,"2",,terminal_output +2349,4484231,"TERMINAL",0,0,"3",,terminal_output +2350,4485177,"TERMINAL",0,0,"4",,terminal_output +2351,4486279,"TERMINAL",0,0,"5",,terminal_output +2352,4487306,"TERMINAL",0,0,"6",,terminal_output +2353,4488289,"TERMINAL",0,0,"7",,terminal_output +2354,4489329,"TERMINAL",0,0,"8",,terminal_output +2355,4490362,"TERMINAL",0,0,"9",,terminal_output +2356,4491502,"TERMINAL",0,0,"10",,terminal_output +2357,4492532,"TERMINAL",0,0,"1",,terminal_output +2358,4493468,"TERMINAL",0,0,"2",,terminal_output +2359,4494574,"TERMINAL",0,0,"3",,terminal_output +2360,4495609,"TERMINAL",0,0,"4",,terminal_output +2361,4496627,"TERMINAL",0,0,"5",,terminal_output +2362,4497622,"TERMINAL",0,0,"6",,terminal_output +2363,4498653,"TERMINAL",0,0,"7",,terminal_output +2364,4499796,"TERMINAL",0,0,"8",,terminal_output +2365,4500782,"TERMINAL",0,0,"9",,terminal_output +2366,4501767,"TERMINAL",0,0,"20",,terminal_output +2367,4502893,"TERMINAL",0,0,"1",,terminal_output +2368,4503899,"TERMINAL",0,0,"2",,terminal_output +2369,4504929,"TERMINAL",0,0,"3",,terminal_output +2370,4505914,"TERMINAL",0,0,"4",,terminal_output +2371,4506963,"TERMINAL",0,0,"51",,terminal_output +2372,4507988,"TERMINAL",0,0,"7",,terminal_output +2373,4509043,"TERMINAL",0,0,"8",,terminal_output +2374,4510064,"TERMINAL",0,0,"9",,terminal_output +2375,4511102,"TERMINAL",0,0,"30",,terminal_output +2376,4512143,"TERMINAL",0,0,"1",,terminal_output +2377,4513177,"TERMINAL",0,0,"2",,terminal_output +2378,4514237,"TERMINAL",0,0,"3",,terminal_output +2379,4515268,"TERMINAL",0,0,"4",,terminal_output +2380,4516286,"TERMINAL",0,0,"5",,terminal_output +2381,4517327,"TERMINAL",0,0,"6",,terminal_output +2382,4518357,"TERMINAL",0,0,"7",,terminal_output +2383,4519459,"TERMINAL",0,0,"8",,terminal_output +2384,4520484,"TERMINAL",0,0,"9",,terminal_output +2385,4521508,"TERMINAL",0,0,"40",,terminal_output +2386,4522509,"TERMINAL",0,0,"1",,terminal_output +2387,4523547,"TERMINAL",0,0,"2",,terminal_output +2388,4524582,"TERMINAL",0,0,"3",,terminal_output +2389,4525708,"TERMINAL",0,0,"4",,terminal_output +2390,4526655,"TERMINAL",0,0,"5",,terminal_output +2391,4527757,"TERMINAL",0,0,"6",,terminal_output +2392,4528729,"TERMINAL",0,0,"7",,terminal_output +2393,4529801,"TERMINAL",0,0,"8",,terminal_output +2394,4530824,"TERMINAL",0,0,"9",,terminal_output +2395,4531838,"TERMINAL",0,0,"50",,terminal_output +2396,4532978,"TERMINAL",0,0,"10",,terminal_output +2397,4534005,"TERMINAL",0,0,"2",,terminal_output +2398,4534951,"TERMINAL",0,0,"3",,terminal_output +2399,4535986,"TERMINAL",0,0,"5",,terminal_output +2400,4537023,"TERMINAL",0,0,"6",,terminal_output +2401,4538062,"TERMINAL",0,0,"7",,terminal_output +2402,4539099,"TERMINAL",0,0,"8",,terminal_output +2403,4540140,"TERMINAL",0,0,"9",,terminal_output +2404,4541274,"TERMINAL",0,0,"3:00",,terminal_output +2405,4542296,"TERMINAL",0,0,"1",,terminal_output +2406,4543320,"TERMINAL",0,0,"2",,terminal_output +2407,4544343,"TERMINAL",0,0,"3",,terminal_output 
+2408,4545319,"TERMINAL",0,0,"4",,terminal_output +2409,4546391,"TERMINAL",0,0,"5",,terminal_output +2410,4547415,"TERMINAL",0,0,"6",,terminal_output +2411,4548430,"TERMINAL",0,0,"7",,terminal_output +2412,4549571,"TERMINAL",0,0,"8",,terminal_output +2413,4550590,"TERMINAL",0,0,"9",,terminal_output +2414,4551536,"TERMINAL",0,0,"10",,terminal_output +2415,4552572,"TERMINAL",0,0,"1",,terminal_output +2416,4553671,"TERMINAL",0,0,"2",,terminal_output +2417,4554684,"TERMINAL",0,0,"3",,terminal_output +2418,4555684,"TERMINAL",0,0,"4",,terminal_output +2419,4556719,"TERMINAL",0,0,"5",,terminal_output +2420,4557859,"TERMINAL",0,0,"6",,terminal_output +2421,4558796,"TERMINAL",0,0,"7",,terminal_output +2422,4559932,"TERMINAL",0,0,"8",,terminal_output +2423,4560932,"TERMINAL",0,0,"9",,terminal_output +2424,4561955,"TERMINAL",0,0,"20",,terminal_output +2425,4562983,"TERMINAL",0,0,"1",,terminal_output +2426,4564005,"TERMINAL",0,0,"3",,terminal_output +2427,4565017,"TERMINAL",0,0,"4",,terminal_output +2428,4566046,"TERMINAL",0,0,"5",,terminal_output +2429,4567084,"TERMINAL",0,0,"61",,terminal_output +2430,4568121,"TERMINAL",0,0,"7",,terminal_output +2431,4569159,"TERMINAL",0,0,"8",,terminal_output +2432,4570201,"TERMINAL",0,0,"9",,terminal_output +2433,4571278,"TERMINAL",0,0,"30",,terminal_output +2434,4572299,"TERMINAL",0,0,"1",,terminal_output +2435,4573429,"TERMINAL",0,0,"2",,terminal_output +2436,4574447,"TERMINAL",0,0,"3",,terminal_output +2437,4575385,"TERMINAL",0,0,"4",,terminal_output +2438,4576499,"TERMINAL",0,0,"5",,terminal_output +2439,4577522,"TERMINAL",0,0,"6",,terminal_output +2440,4578563,"TERMINAL",0,0,"7",,terminal_output +2441,4579571,"TERMINAL",0,0,"8",,terminal_output +2442,4580596,"TERMINAL",0,0,"9",,terminal_output +2443,4581606,"TERMINAL",0,0,"40",,terminal_output +2444,4582746,"TERMINAL",0,0,"1",,terminal_output +2445,4583676,"TERMINAL",0,0,"2",,terminal_output +2446,4584771,"TERMINAL",0,0,"3",,terminal_output +2447,4585815,"TERMINAL",0,0,"4",,terminal_output +2448,4586784,"TERMINAL",0,0,"5",,terminal_output +2449,4587863,"TERMINAL",0,0,"6",,terminal_output +2450,4588860,"TERMINAL",0,0,"7",,terminal_output +2451,4589920,"TERMINAL",0,0,"8",,terminal_output +2452,4591030,"TERMINAL",0,0,"9",,terminal_output +2453,4591976,"TERMINAL",0,0,"510",,terminal_output +2454,4593008,"TERMINAL",0,0,"2",,terminal_output +2455,4594043,"TERMINAL",0,0,"3",,terminal_output +2456,4595102,"TERMINAL",0,0,"4",,terminal_output +2457,4596113,"TERMINAL",0,0,"5",,terminal_output +2458,4597146,"TERMINAL",0,0,"6",,terminal_output +2459,4598206,"TERMINAL",0,0,"7",,terminal_output +2460,4599229,"TERMINAL",0,0,"8",,terminal_output +2461,4600260,"TERMINAL",0,0,"9",,terminal_output +2462,4601383,"TERMINAL",0,0,"4:00",,terminal_output +2463,4603326,"TERMINAL",0,0,"1",,terminal_output +2464,4604277,"TERMINAL",0,0,"3",,terminal_output +2465,4605314,"TERMINAL",0,0,"4",,terminal_output +2466,4606396,"TERMINAL",0,0,"5",,terminal_output +2467,4607428,"TERMINAL",0,0,"6",,terminal_output +2468,4608428,"TERMINAL",0,0,"7",,terminal_output +2469,4609468,"TERMINAL",0,0,"8",,terminal_output +2470,4610598,"TERMINAL",0,0,"9",,terminal_output +2471,4611559,"TERMINAL",0,0,"10",,terminal_output +2472,4612644,"TERMINAL",0,0,"1",,terminal_output +2473,4613606,"TERMINAL",0,0,"2",,terminal_output +2474,4614691,"TERMINAL",0,0,"3",,terminal_output +2475,4615714,"TERMINAL",0,0,"4",,terminal_output +2476,4616715,"TERMINAL",0,0,"5",,terminal_output +2477,4617768,"TERMINAL",0,0,"6",,terminal_output 
+2478,4618890,"TERMINAL",0,0,"7",,terminal_output +2479,4619939,"TERMINAL",0,0,"8",,terminal_output +2480,4620918,"TERMINAL",0,0,"9",,terminal_output +2481,4621964,"TERMINAL",0,0,"20",,terminal_output +2482,4622983,"TERMINAL",0,0,"1",,terminal_output +2483,4624009,"TERMINAL",0,0,"3",,terminal_output +2484,4625035,"TERMINAL",0,0,"4",,terminal_output +2485,4626044,"TERMINAL",0,0,"5",,terminal_output +2486,4627084,"TERMINAL",0,0,"61",,terminal_output +2487,4628123,"TERMINAL",0,0,"7",,terminal_output +2488,4629167,"TERMINAL",0,0,"8",,terminal_output +2489,4630203,"TERMINAL",0,0,"9",,terminal_output +2490,4631282,"TERMINAL",0,0,"30",,terminal_output +2491,4632306,"TERMINAL",0,0,"1",,terminal_output +2492,4633329,"TERMINAL",0,0,"2",,terminal_output +2493,4634453,"TERMINAL",0,0,"3",,terminal_output +2494,4635398,"TERMINAL",0,0,"4",,terminal_output +2495,4636451,"TERMINAL",0,0,"5",,terminal_output +2496,4637530,"TERMINAL",0,0,"6",,terminal_output +2497,4638511,"TERMINAL",0,0,"7",,terminal_output +2498,4639577,"TERMINAL",0,0,"8",,terminal_output +2499,4640596,"TERMINAL",0,0,"9",,terminal_output +2500,4641728,"TERMINAL",0,0,"40",,terminal_output +2501,4642750,"TERMINAL",0,0,"1",,terminal_output +2502,4643706,"TERMINAL",0,0,"2",,terminal_output +2503,4644772,"TERMINAL",0,0,"3",,terminal_output +2504,4645823,"TERMINAL",0,0,"4",,terminal_output +2505,4646814,"TERMINAL",0,0,"5",,terminal_output +2506,4647869,"TERMINAL",0,0,"6",,terminal_output +2507,4648997,"TERMINAL",0,0,"7",,terminal_output +2508,4650024,"TERMINAL",0,0,"8",,terminal_output +2509,4651043,"TERMINAL",0,0,"503",,terminal_output +2510,4652011,"TERMINAL",0,0,"1",,terminal_output +2511,4653044,"TERMINAL",0,0,"20",,terminal_output +2512,4654080,"TERMINAL",0,0,"3",,terminal_output +2513,4655122,"TERMINAL",0,0,"4",,terminal_output +2514,4656267,"TERMINAL",0,0,"5",,terminal_output +2515,4657292,"TERMINAL",0,0,"6",,terminal_output +2516,4658316,"TERMINAL",0,0,"7",,terminal_output +2517,4659339,"TERMINAL",0,0,"8",,terminal_output +2518,4660309,"TERMINAL",0,0,"91",,terminal_output +2519,4661390,"TERMINAL",0,0,"5:00",,terminal_output +2520,4662388,"TERMINAL",0,0,"1",,terminal_output +2521,4663429,"TERMINAL",0,0,"2",,terminal_output +2522,4664561,"TERMINAL",0,0,"3",,terminal_output +2523,4665586,"TERMINAL",0,0,"4",,terminal_output +2524,4666558,"TERMINAL",0,0,"5",,terminal_output +2525,4667635,"TERMINAL",0,0,"6",,terminal_output +2526,4668657,"TERMINAL",0,0,"7",,terminal_output +2527,4669681,"TERMINAL",0,0,"8",,terminal_output +2528,4670706,"TERMINAL",0,0,"9",,terminal_output +2529,4671713,"TERMINAL",0,0,"10",,terminal_output +2530,4672748,"TERMINAL",0,0,"1",,terminal_output +2531,4673788,"TERMINAL",0,0,"2",,terminal_output +2532,4674824,"TERMINAL",0,0,"3",,terminal_output +2533,4675926,"TERMINAL",0,0,"4",,terminal_output +2534,4676900,"TERMINAL",0,0,"5",,terminal_output +2535,4677980,"TERMINAL",0,0,"6",,terminal_output +2536,4679002,"TERMINAL",0,0,"8",,terminal_output +2537,4680015,"TERMINAL",0,0,"9",,terminal_output +2538,4681056,"TERMINAL",0,0,"20",,terminal_output +2539,4682089,"TERMINAL",0,0,"1",,terminal_output +2540,4683126,"TERMINAL",0,0,"2",,terminal_output +2541,4684207,"TERMINAL",0,0,"3",,terminal_output +2542,4685200,"TERMINAL",0,0,"4",,terminal_output +2543,4686234,"TERMINAL",0,0,"5",,terminal_output +2544,4687295,"TERMINAL",0,0,"6",,terminal_output +2545,4688318,"TERMINAL",0,0,"72",,terminal_output +2546,4689346,"TERMINAL",0,0,"8",,terminal_output +2547,4690376,"TERMINAL",0,0,"9",,terminal_output 
+2548,4691493,"TERMINAL",0,0,"30",,terminal_output +2549,4692464,"TERMINAL",0,0,"1",,terminal_output +2550,4693498,"TERMINAL",0,0,"2",,terminal_output +2551,4694532,"TERMINAL",0,0,"3",,terminal_output +2552,4695568,"TERMINAL",0,0,"4",,terminal_output +2553,4696717,"TERMINAL",0,0,"5",,terminal_output +2554,4697646,"TERMINAL",0,0,"6",,terminal_output +2555,4698680,"TERMINAL",0,0,"7",,terminal_output +2556,4699785,"TERMINAL",0,0,"8",,terminal_output +2557,4700812,"TERMINAL",0,0,"9",,terminal_output +2558,4701783,"TERMINAL",0,0,"40",,terminal_output +2559,4702856,"TERMINAL",0,0,"1",,terminal_output +2560,4703884,"TERMINAL",0,0,"2",,terminal_output +2561,4704886,"TERMINAL",0,0,"3",,terminal_output +2562,4705925,"TERMINAL",0,0,"4",,terminal_output +2563,4706995,"TERMINAL",0,0,"6",,terminal_output +2564,4708082,"TERMINAL",0,0,"7",,terminal_output +2565,4709045,"TERMINAL",0,0,"8",,terminal_output +2566,4710085,"TERMINAL",0,0,"9",,terminal_output +2567,4711139,"TERMINAL",0,0,"50",,terminal_output +2568,4712158,"TERMINAL",0,0,"10",,terminal_output +2569,4713200,"TERMINAL",0,0,"2",,terminal_output +2570,4714329,"TERMINAL",0,0,"3",,terminal_output +2571,4715294,"TERMINAL",0,0,"4",,terminal_output +2572,4716378,"TERMINAL",0,0,"5",,terminal_output +2573,4717400,"TERMINAL",0,0,"6",,terminal_output +2574,4718384,"TERMINAL",0,0,"7",,terminal_output +2575,4719451,"TERMINAL",0,0,"8",,terminal_output +2576,4720471,"TERMINAL",0,0,"9",,terminal_output +2577,4721498,"TERMINAL",0,0,"6:00",,terminal_output +2578,4722623,"TERMINAL",0,0,"1",,terminal_output +2579,4724364,"TERMINAL",0,0,"2",,terminal_output +2580,4725365,"TERMINAL",0,0,"4",,terminal_output +2581,4726514,"TERMINAL",0,0,"5",,terminal_output +2582,4727544,"TERMINAL",0,0,"6",,terminal_output +2583,4728483,"TERMINAL",0,0,"7",,terminal_output +2584,4729520,"TERMINAL",0,0,"8",,terminal_output +2585,4730608,"TERMINAL",0,0,"9",,terminal_output +2586,4731595,"TERMINAL",0,0,"10",,terminal_output +2587,4732626,"TERMINAL",0,0,"1",,terminal_output +2588,4733664,"TERMINAL",0,0,"2",,terminal_output +2589,4734700,"TERMINAL",0,0,"3",,terminal_output +2590,4735833,"TERMINAL",0,0,"4",,terminal_output +2591,4736781,"TERMINAL",0,0,"5",,terminal_output +2592,4737885,"TERMINAL",0,0,"6",,terminal_output +2593,4738851,"TERMINAL",0,0,"7",,terminal_output +2594,4739927,"TERMINAL",0,0,"8",,terminal_output +2595,4740952,"TERMINAL",0,0,"9",,terminal_output +2596,4741978,"TERMINAL",0,0,"20",,terminal_output +2597,4742990,"TERMINAL",0,0,"2",,terminal_output +2598,4744024,"TERMINAL",0,0,"3",,terminal_output +2599,4745093,"TERMINAL",0,0,"4",,terminal_output +2600,4746104,"TERMINAL",0,0,"5",,terminal_output +2601,4747140,"TERMINAL",0,0,"61",,terminal_output +2602,4748182,"TERMINAL",0,0,"7",,terminal_output +2603,4749214,"TERMINAL",0,0,"8",,terminal_output +2604,4750276,"TERMINAL",0,0,"9",,terminal_output +2605,4751296,"TERMINAL",0,0,"30",,terminal_output +2606,4752422,"TERMINAL",0,0,"1",,terminal_output +2607,4753431,"TERMINAL",0,0,"2",,terminal_output +2608,4754467,"TERMINAL",0,0,"3",,terminal_output +2609,4755490,"TERMINAL",0,0,"4",,terminal_output +2610,4756519,"TERMINAL",0,0,"5",,terminal_output +2611,4757546,"TERMINAL",0,0,"6",,terminal_output +2612,4758558,"TERMINAL",0,0,"7",,terminal_output +2613,4759592,"TERMINAL",0,0,"8",,terminal_output +2614,4760634,"TERMINAL",0,0,"9",,terminal_output +2615,4761739,"TERMINAL",0,0,"40",,terminal_output +2616,4762738,"TERMINAL",0,0,"1",,terminal_output +2617,4763780,"TERMINAL",0,0,"2",,terminal_output 
+2618,4764780,"TERMINAL",0,0,"3",,terminal_output +2619,4765837,"TERMINAL",0,0,"4",,terminal_output +2620,4766849,"TERMINAL",0,0,"5",,terminal_output +2621,4768015,"TERMINAL",0,0,"6",,terminal_output +2622,4768960,"TERMINAL",0,0,"7",,terminal_output +2623,4770076,"TERMINAL",0,0,"9",,terminal_output +2624,4771060,"TERMINAL",0,0,"50",,terminal_output +2625,4772089,"TERMINAL",0,0,"10",,terminal_output +2626,4773073,"TERMINAL",0,0,"2",,terminal_output +2627,4774104,"TERMINAL",0,0,"3",,terminal_output +2628,4775143,"TERMINAL",0,0,"4",,terminal_output +2629,4776177,"TERMINAL",0,0,"5",,terminal_output +2630,4777208,"TERMINAL",0,0,"6",,terminal_output +2631,4778333,"TERMINAL",0,0,"7",,terminal_output +2632,4779289,"TERMINAL",0,0,"8",,terminal_output +2633,4780319,"TERMINAL",0,0,"9",,terminal_output +2634,4781360,"TERMINAL",0,0,"7:00",,terminal_output +2635,4782390,"TERMINAL",0,0,"1",,terminal_output +2636,4783423,"TERMINAL",0,0,"2",,terminal_output +2637,4784473,"TERMINAL",0,0,"3",,terminal_output +2638,4785492,"TERMINAL",0,0,"4",,terminal_output +2639,4786531,"TERMINAL",0,0,"5",,terminal_output +2640,4787563,"TERMINAL",0,0,"6",,terminal_output +2641,4788606,"TERMINAL",0,0,"7",,terminal_output +2642,4789637,"TERMINAL",0,0,"8",,terminal_output +2643,4790718,"TERMINAL",0,0,"9",,terminal_output +2644,4791743,"TERMINAL",0,0,"10",,terminal_output +2645,4792749,"TERMINAL",0,0,"1",,terminal_output +2646,4793783,"TERMINAL",0,0,"2",,terminal_output +2647,4794916,"TERMINAL",0,0,"3",,terminal_output +2648,4795943,"TERMINAL",0,0,"4",,terminal_output +2649,4796892,"TERMINAL",0,0,"5",,terminal_output +2650,4797927,"TERMINAL",0,0,"6",,terminal_output +2651,4798965,"TERMINAL",0,0,"8",,terminal_output +2652,4800004,"TERMINAL",0,0,"9",,terminal_output +2653,4801061,"TERMINAL",0,0,"20",,terminal_output +2654,4802087,"TERMINAL",0,0,"1",,terminal_output +2655,4803108,"TERMINAL",0,0,"2",,terminal_output +2656,4804144,"TERMINAL",0,0,"3",,terminal_output +2657,4805182,"TERMINAL",0,0,"4",,terminal_output +2658,4806214,"TERMINAL",0,0,"5",,terminal_output +2659,4807252,"TERMINAL",0,0,"61",,terminal_output +2660,4808332,"TERMINAL",0,0,"7",,terminal_output +2661,4809356,"TERMINAL",0,0,"8",,terminal_output +2662,4810363,"TERMINAL",0,0,"9",,terminal_output +2663,4811510,"TERMINAL",0,0,"30",,terminal_output +2664,4812535,"TERMINAL",0,0,"1",,terminal_output +2665,4813472,"TERMINAL",0,0,"2",,terminal_output +2666,4814507,"TERMINAL",0,0,"3",,terminal_output +2667,4815547,"TERMINAL",0,0,"4",,terminal_output +2668,4816584,"TERMINAL",0,0,"5",,terminal_output +2669,4817651,"TERMINAL",0,0,"6",,terminal_output +2670,4818657,"TERMINAL",0,0,"7",,terminal_output +2671,4819689,"TERMINAL",0,0,"8",,terminal_output +2672,4820728,"TERMINAL",0,0,"9",,terminal_output +2673,4821849,"TERMINAL",0,0,"40",,terminal_output +2674,4822874,"TERMINAL",0,0,"1",,terminal_output +2675,4823898,"TERMINAL",0,0,"2",,terminal_output +2676,4824869,"TERMINAL",0,0,"3",,terminal_output +2677,4825945,"TERMINAL",0,0,"4",,terminal_output +2678,4826968,"TERMINAL",0,0,"5",,terminal_output +2679,4827982,"TERMINAL",0,0,"7",,terminal_output +2680,4829028,"TERMINAL",0,0,"8",,terminal_output +2681,4830083,"TERMINAL",0,0,"9",,terminal_output +2682,4831091,"TERMINAL",0,0,"50",,terminal_output +2683,4832131,"TERMINAL",0,0,"1",,terminal_output +2684,4833167,"TERMINAL",0,0,"20",,terminal_output +2685,4834202,"TERMINAL",0,0,"3",,terminal_output +2686,4835264,"TERMINAL",0,0,"4",,terminal_output +2687,4836388,"TERMINAL",0,0,"5",,terminal_output 
+2688,4837415,"TERMINAL",0,0,"6",,terminal_output +2689,4838349,"TERMINAL",0,0,"7",,terminal_output +2690,4839383,"TERMINAL",0,0,"8",,terminal_output +2691,4840490,"TERMINAL",0,0,"9",,terminal_output +2692,4841514,"TERMINAL",0,0,"8:00",,terminal_output +2693,4842494,"TERMINAL",0,0,"1",,terminal_output +2694,4843533,"TERMINAL",0,0,"2",,terminal_output +2695,4845506,"TERMINAL",0,0,"3",,terminal_output +2696,4846534,"TERMINAL",0,0,"5",,terminal_output +2697,4847551,"TERMINAL",0,0,"6",,terminal_output +2698,4848516,"TERMINAL",0,0,"7",,terminal_output +2699,4849599,"TERMINAL",0,0,"8",,terminal_output +2700,4850584,"TERMINAL",0,0,"9",,terminal_output +2701,4851648,"TERMINAL",0,0,"10",,terminal_output +2702,4852655,"TERMINAL",0,0,"1",,terminal_output +2703,4853693,"TERMINAL",0,0,"2",,terminal_output +2704,4854822,"TERMINAL",0,0,"3",,terminal_output +2705,4855846,"TERMINAL",0,0,"4",,terminal_output +2706,4856799,"TERMINAL",0,0,"5",,terminal_output +2707,4857904,"TERMINAL",0,0,"6",,terminal_output +2708,4858917,"TERMINAL",0,0,"7",,terminal_output +2709,4859904,"TERMINAL",0,0,"8",,terminal_output +2710,4860966,"TERMINAL",0,0,"9",,terminal_output +2711,4861991,"TERMINAL",0,0,"21",,terminal_output +2712,4863116,"TERMINAL",0,0,"2",,terminal_output +2713,4864142,"TERMINAL",0,0,"3",,terminal_output +2714,4865087,"TERMINAL",0,0,"4",,terminal_output +2715,4866125,"TERMINAL",0,0,"5",,terminal_output +2716,4867159,"TERMINAL",0,0,"6",,terminal_output +2717,4868192,"TERMINAL",0,0,"71",,terminal_output +2718,4869262,"TERMINAL",0,0,"8",,terminal_output +2719,4870278,"TERMINAL",0,0,"9",,terminal_output +2720,4871307,"TERMINAL",0,0,"30",,terminal_output +2721,4872435,"TERMINAL",0,0,"1",,terminal_output +2722,4873429,"TERMINAL",0,0,"2",,terminal_output +2723,4874485,"TERMINAL",0,0,"3",,terminal_output +2724,4875509,"TERMINAL",0,0,"4",,terminal_output +2725,4876486,"TERMINAL",0,0,"5",,terminal_output +2726,4877554,"TERMINAL",0,0,"6",,terminal_output +2727,4878562,"TERMINAL",0,0,"7",,terminal_output +2728,4879918,"TERMINAL",0,0,"8",,terminal_output +2729,4880766,"TERMINAL",0,0,"9",,terminal_output +2730,4881757,"TERMINAL",0,0,"40",,terminal_output +2731,4882708,"TERMINAL",0,0,"1",,terminal_output +2732,4883801,"TERMINAL",0,0,"2",,terminal_output +2733,4884778,"TERMINAL",0,0,"3",,terminal_output +2734,4885848,"TERMINAL",0,0,"4",,terminal_output +2735,4886848,"TERMINAL",0,0,"5",,terminal_output +2736,4887901,"TERMINAL",0,0,"6",,terminal_output +2737,4888927,"TERMINAL",0,0,"7",,terminal_output +2738,4889972,"TERMINAL",0,0,"9",,terminal_output +2739,4891069,"TERMINAL",0,0,"50",,terminal_output +2740,4892095,"TERMINAL",0,0,"10",,terminal_output +2741,4893120,"TERMINAL",0,0,"2",,terminal_output +2742,4894129,"TERMINAL",0,0,"3",,terminal_output +2743,4895163,"TERMINAL",0,0,"4",,terminal_output +2744,4896293,"TERMINAL",0,0,"5",,terminal_output +2745,4897317,"TERMINAL",0,0,"6",,terminal_output +2746,4898288,"TERMINAL",0,0,"7",,terminal_output +2747,4899367,"TERMINAL",0,0,"8",,terminal_output +2748,4900368,"TERMINAL",0,0,"9",,terminal_output +2749,4901416,"TERMINAL",0,0,"9:00",,terminal_output +2750,4902452,"TERMINAL",0,0,"1",,terminal_output +2751,4903497,"TERMINAL",0,0,"2",,terminal_output +2752,4904591,"TERMINAL",0,0,"3",,terminal_output +2753,4905614,"TERMINAL",0,0,"4",,terminal_output +2754,4906637,"TERMINAL",0,0,"5",,terminal_output +2755,4907661,"TERMINAL",0,0,"6",,terminal_output +2756,4908789,"TERMINAL",0,0,"7",,terminal_output +2757,4909814,"TERMINAL",0,0,"8",,terminal_output 
+2758,4910837,"TERMINAL",0,0,"9",,terminal_output +2759,4911859,"TERMINAL",0,0,"10",,terminal_output +2760,4912884,"TERMINAL",0,0,"1",,terminal_output +2761,4913909,"TERMINAL",0,0,"2",,terminal_output +2762,4915059,"TERMINAL",0,0,"3",,terminal_output +2763,4915967,"TERMINAL",0,0,"5",,terminal_output +2764,4917003,"TERMINAL",0,0,"6",,terminal_output +2765,4918103,"TERMINAL",0,0,"7",,terminal_output +2766,4919082,"TERMINAL",0,0,"8",,terminal_output +2767,4920120,"TERMINAL",0,0,"9",,terminal_output +2768,4921160,"TERMINAL",0,0,"20",,terminal_output +2769,4922198,"TERMINAL",0,0,"1",,terminal_output +2770,4923243,"TERMINAL",0,0,"2",,terminal_output +2771,4924280,"TERMINAL",0,0,"3",,terminal_output +2772,4925319,"TERMINAL",0,0,"4",,terminal_output +2773,4926399,"TERMINAL",0,0,"5",,terminal_output +2774,4927389,"TERMINAL",0,0,"61",,terminal_output +2775,4928427,"TERMINAL",0,0,"7",,terminal_output +2776,4929465,"TERMINAL",0,0,"8",,terminal_output +2777,4930597,"TERMINAL",0,0,"9",,terminal_output +2778,4931560,"TERMINAL",0,0,"30",,terminal_output +2779,4932572,"TERMINAL",0,0,"1",,terminal_output +2780,4933607,"TERMINAL",0,0,"2",,terminal_output +2781,4934646,"TERMINAL",0,0,"3",,terminal_output +2782,4935718,"TERMINAL",0,0,"4",,terminal_output +2783,4936718,"TERMINAL",0,0,"5",,terminal_output +2784,4937755,"TERMINAL",0,0,"6",,terminal_output +2785,4938889,"TERMINAL",0,0,"7",,terminal_output +2786,4939847,"TERMINAL",0,0,"8",,terminal_output +2787,4940938,"TERMINAL",0,0,"9",,terminal_output +2788,4941964,"TERMINAL",0,0,"40",,terminal_output +2789,4942988,"TERMINAL",0,0,"1",,terminal_output +2790,4944011,"TERMINAL",0,0,"3",,terminal_output +2791,4945046,"TERMINAL",0,0,"4",,terminal_output +2792,4946167,"TERMINAL",0,0,"5",,terminal_output +2793,4947114,"TERMINAL",0,0,"6",,terminal_output +2794,4948150,"TERMINAL",0,0,"7",,terminal_output +2795,4949189,"TERMINAL",0,0,"8",,terminal_output +2796,4950262,"TERMINAL",0,0,"9",,terminal_output +2797,4951263,"TERMINAL",0,0,"50",,terminal_output +2798,4952412,"TERMINAL",0,0,"120",,terminal_output +2799,4953431,"TERMINAL",0,0,"2",,terminal_output +2800,4954451,"TERMINAL",0,0,"3",,terminal_output +2801,4955420,"TERMINAL",0,0,"4",,terminal_output +2802,4956508,"TERMINAL",0,0,"5",,terminal_output +2803,4957531,"TERMINAL",0,0,"6",,terminal_output +2804,4958536,"TERMINAL",0,0,"7",,terminal_output +2805,4959578,"TERMINAL",0,0,"8",,terminal_output +2806,4960618,"TERMINAL",0,0,"9",,terminal_output +2807,4961731,"TERMINAL",0,0,"5:00:00",,terminal_output +2808,4962752,"TERMINAL",0,0,"1",,terminal_output +2809,4963724,"TERMINAL",0,0,"2",,terminal_output +2810,4964778,"TERMINAL",0,0,"3",,terminal_output +2811,4966514,"TERMINAL",0,0,"4",,terminal_output +2812,4967873,"TERMINAL",0,0,"6",,terminal_output +2813,4968860,"TERMINAL",0,0,"7",,terminal_output +2814,4969896,"TERMINAL",0,0,"8",,terminal_output +2815,4970946,"TERMINAL",0,0,"9",,terminal_output +2816,4971974,"TERMINAL",0,0,"11",,terminal_output +2817,4973096,"TERMINAL",0,0,"2",,terminal_output +2818,4974082,"TERMINAL",0,0,"3",,terminal_output +2819,4975089,"TERMINAL",0,0,"4",,terminal_output +2820,4976114,"TERMINAL",0,0,"5",,terminal_output +2821,4977149,"TERMINAL",0,0,"6",,terminal_output +2822,4978191,"TERMINAL",0,0,"7",,terminal_output +2823,4979227,"TERMINAL",0,0,"8",,terminal_output +2824,4980284,"TERMINAL",0,0,"9",,terminal_output +2825,4981297,"TERMINAL",0,0,"20",,terminal_output +2826,4982421,"TERMINAL",0,0,"1",,terminal_output +2827,4983429,"TERMINAL",0,0,"2",,terminal_output 
+2828,4984461,"TERMINAL",0,0,"3",,terminal_output +2829,4985483,"TERMINAL",0,0,"4",,terminal_output +2830,4986510,"TERMINAL",0,0,"5",,terminal_output +2831,4987538,"TERMINAL",0,0,"61",,terminal_output +2832,4988559,"TERMINAL",0,0,"7",,terminal_output +2833,4989684,"TERMINAL",0,0,"8",,terminal_output +2834,4990687,"TERMINAL",0,0,"9",,terminal_output +2835,4991668,"TERMINAL",0,0,"30",,terminal_output +2836,4992706,"TERMINAL",0,0,"1",,terminal_output +2837,4993778,"TERMINAL",0,0,"2",,terminal_output +2838,4994801,"TERMINAL",0,0,"3",,terminal_output +2839,4995826,"TERMINAL",0,0,"4",,terminal_output +2840,4996852,"TERMINAL",0,0,"5",,terminal_output +2841,4997895,"TERMINAL",0,0,"6",,terminal_output +2842,4998929,"TERMINAL",0,0,"7",,terminal_output +2843,5000037,"TERMINAL",0,0,"9",,terminal_output +2844,5001001,"TERMINAL",0,0,"40",,terminal_output +2845,5002075,"TERMINAL",0,0,"1",,terminal_output +2846,5003113,"TERMINAL",0,0,"2",,terminal_output +2847,5004120,"TERMINAL",0,0,"3",,terminal_output +2848,5005166,"TERMINAL",0,0,"4",,terminal_output +2849,5006198,"TERMINAL",0,0,"5",,terminal_output +2850,5007234,"TERMINAL",0,0,"6",,terminal_output +2851,5008268,"TERMINAL",0,0,"7",,terminal_output +2852,5009344,"TERMINAL",0,0,"8",,terminal_output +2853,5010351,"TERMINAL",0,0,"9",,terminal_output +2854,5011392,"TERMINAL",0,0,"50",,terminal_output +2855,5012520,"TERMINAL",0,0,"1",,terminal_output +2856,5013463,"TERMINAL",0,0,"210",,terminal_output +2857,5014567,"TERMINAL",0,0,"3",,terminal_output +2858,5015594,"TERMINAL",0,0,"4",,terminal_output +2859,5016576,"TERMINAL",0,0,"5",,terminal_output +2860,5017654,"TERMINAL",0,0,"6",,terminal_output +2861,5018665,"TERMINAL",0,0,"7",,terminal_output +2862,5019790,"TERMINAL",0,0,"8",,terminal_output +2863,5020782,"TERMINAL",0,0,"9",,terminal_output +2864,5021839,"TERMINAL",0,0,"1:00",,terminal_output +2865,5022862,"TERMINAL",0,0,"1",,terminal_output +2866,5023887,"TERMINAL",0,0,"2",,terminal_output +2867,5024888,"TERMINAL",0,0,"3",,terminal_output +2868,5025933,"TERMINAL",0,0,"4",,terminal_output +2869,5027059,"TERMINAL",0,0,"6",,terminal_output +2870,5028087,"TERMINAL",0,0,"7",,terminal_output +2871,5029109,"TERMINAL",0,0,"8",,terminal_output +2872,5030120,"TERMINAL",0,0,"9",,terminal_output +2873,5031155,"TERMINAL",0,0,"10",,terminal_output +2874,5032179,"TERMINAL",0,0,"1",,terminal_output +2875,5033213,"TERMINAL",0,0,"2",,terminal_output +2876,5034246,"TERMINAL",0,0,"3",,terminal_output +2877,5035294,"TERMINAL",0,0,"4",,terminal_output +2878,5036323,"TERMINAL",0,0,"5",,terminal_output +2879,5037364,"TERMINAL",0,0,"6",,terminal_output +2880,5038427,"TERMINAL",0,0,"7",,terminal_output +2881,5039450,"TERMINAL",0,0,"8",,terminal_output +2882,5040579,"TERMINAL",0,0,"9",,terminal_output +2883,5041540,"TERMINAL",0,0,"20",,terminal_output +2884,5042625,"TERMINAL",0,0,"1",,terminal_output +2885,5043648,"TERMINAL",0,0,"2",,terminal_output +2886,5044678,"TERMINAL",0,0,"3",,terminal_output +2887,5045683,"TERMINAL",0,0,"4",,terminal_output +2888,5046780,"TERMINAL",0,0,"5",,terminal_output +2889,5047845,"TERMINAL",0,0,"6",,terminal_output +2890,5048872,"TERMINAL",0,0,"71",,terminal_output +2891,5049895,"TERMINAL",0,0,"8",,terminal_output +2892,5050919,"TERMINAL",0,0,"9",,terminal_output +2893,5051942,"TERMINAL",0,0,"30",,terminal_output +2894,5052968,"TERMINAL",0,0,"1",,terminal_output +2895,5054120,"TERMINAL",0,0,"3",,terminal_output +2896,5055132,"TERMINAL",0,0,"4",,terminal_output +2897,5056072,"TERMINAL",0,0,"5",,terminal_output 
+2898,5057166,"TERMINAL",0,0,"6",,terminal_output +2899,5058190,"TERMINAL",0,0,"7",,terminal_output +2900,5059217,"TERMINAL",0,0,"8",,terminal_output +2901,5060247,"TERMINAL",0,0,"9",,terminal_output +2902,5061267,"TERMINAL",0,0,"40",,terminal_output +2903,5062303,"TERMINAL",0,0,"1",,terminal_output +2904,5063344,"TERMINAL",0,0,"2",,terminal_output +2905,5064382,"TERMINAL",0,0,"3",,terminal_output +2906,5065454,"TERMINAL",0,0,"4",,terminal_output +2907,5066462,"TERMINAL",0,0,"5",,terminal_output +2908,5067495,"TERMINAL",0,0,"6",,terminal_output +2909,5068532,"TERMINAL",0,0,"7",,terminal_output +2910,5069635,"TERMINAL",0,0,"8",,terminal_output +2911,5070607,"TERMINAL",0,0,"9",,terminal_output +2912,5071644,"TERMINAL",0,0,"50",,terminal_output +2913,5072683,"TERMINAL",0,0,"10",,terminal_output +2914,5073718,"TERMINAL",0,0,"2",,terminal_output +2915,5074785,"TERMINAL",0,0,"3",,terminal_output +2916,5075803,"TERMINAL",0,0,"4",,terminal_output +2917,5076832,"TERMINAL",0,0,"5",,terminal_output +2918,5077954,"TERMINAL",0,0,"6",,terminal_output +2919,5078910,"TERMINAL",0,0,"7",,terminal_output +2920,5080001,"TERMINAL",0,0,"8",,terminal_output +2921,5081041,"TERMINAL",0,0,"2:00",,terminal_output +2922,5082051,"TERMINAL",0,0,"1",,terminal_output +2923,5083073,"TERMINAL",0,0,"2",,terminal_output +2924,5084123,"TERMINAL",0,0,"3",,terminal_output +2925,5085145,"TERMINAL",0,0,"4",,terminal_output +2926,5086190,"TERMINAL",0,0,"5",,terminal_output +2927,5087576,"TERMINAL",0,0,"6",,terminal_output +2928,5088609,"TERMINAL",0,0,"7",,terminal_output +2929,5089728,"TERMINAL",0,0,"8",,terminal_output +2930,5090753,"TERMINAL",0,0,"9",,terminal_output +2931,5091782,"TERMINAL",0,0,"10",,terminal_output +2932,5092771,"TERMINAL",0,0,"1",,terminal_output +2933,5093825,"TERMINAL",0,0,"2",,terminal_output +2934,5094952,"TERMINAL",0,0,"3",,terminal_output +2935,5095976,"TERMINAL",0,0,"4",,terminal_output +2936,5096924,"TERMINAL",0,0,"5",,terminal_output +2937,5098023,"TERMINAL",0,0,"6",,terminal_output +2938,5099047,"TERMINAL",0,0,"8",,terminal_output +2939,5100078,"TERMINAL",0,0,"9",,terminal_output +2940,5101095,"TERMINAL",0,0,"20",,terminal_output +2941,5102226,"TERMINAL",0,0,"1",,terminal_output +2942,5103249,"TERMINAL",0,0,"2",,terminal_output +2943,5104205,"TERMINAL",0,0,"3",,terminal_output +2944,5105237,"TERMINAL",0,0,"4",,terminal_output +2945,5106273,"TERMINAL",0,0,"5",,terminal_output +2946,5107312,"TERMINAL",0,0,"61",,terminal_output +2947,5108355,"TERMINAL",0,0,"7",,terminal_output +2948,5109493,"TERMINAL",0,0,"8",,terminal_output +2949,5110515,"TERMINAL",0,0,"9",,terminal_output +2950,5111484,"TERMINAL",0,0,"30",,terminal_output +2951,5112565,"TERMINAL",0,0,"1",,terminal_output +2952,5113554,"TERMINAL",0,0,"2",,terminal_output +2953,5114611,"TERMINAL",0,0,"3",,terminal_output +2954,5115741,"TERMINAL",0,0,"4",,terminal_output +2955,5116761,"TERMINAL",0,0,"5",,terminal_output +2956,5117792,"TERMINAL",0,0,"6",,terminal_output +2957,5118754,"TERMINAL",0,0,"7",,terminal_output +2958,5119793,"TERMINAL",0,0,"8",,terminal_output +2959,5120861,"TERMINAL",0,0,"9",,terminal_output +2960,5121881,"TERMINAL",0,0,"40",,terminal_output +2961,5122908,"TERMINAL",0,0,"1",,terminal_output +2962,5124033,"TERMINAL",0,0,"2",,terminal_output +2963,5124979,"TERMINAL",0,0,"4",,terminal_output +2964,5126087,"TERMINAL",0,0,"5",,terminal_output +2965,5127107,"TERMINAL",0,0,"6",,terminal_output +2966,5128166,"TERMINAL",0,0,"7",,terminal_output +2967,5129183,"TERMINAL",0,0,"81",,terminal_output 
+2968,5130171,"TERMINAL",0,0,"9",,terminal_output +2969,5131212,"TERMINAL",0,0,"50",,terminal_output +2970,5132250,"TERMINAL",0,0,"1",,terminal_output +2971,5133287,"TERMINAL",0,0,"20",,terminal_output +2972,5134321,"TERMINAL",0,0,"3",,terminal_output +2973,5135361,"TERMINAL",0,0,"4",,terminal_output +2974,5136395,"TERMINAL",0,0,"5",,terminal_output +2975,5137450,"TERMINAL",0,0,"6",,terminal_output +2976,5138484,"TERMINAL",0,0,"7",,terminal_output +2977,5139598,"TERMINAL",0,0,"8",,terminal_output +2978,5140626,"TERMINAL",0,0,"9",,terminal_output +2979,5141650,"TERMINAL",0,0,"3:00",,terminal_output +2980,5142673,"TERMINAL",0,0,"1",,terminal_output +2981,5143672,"TERMINAL",0,0,"2",,terminal_output +2982,5144720,"TERMINAL",0,0,"3",,terminal_output +2983,5145846,"TERMINAL",0,0,"4",,terminal_output +2984,5146785,"TERMINAL",0,0,"5",,terminal_output +2985,5147893,"TERMINAL",0,0,"6",,terminal_output +2986,5148916,"TERMINAL",0,0,"7",,terminal_output +2987,5149942,"TERMINAL",0,0,"8",,terminal_output +2988,5150964,"TERMINAL",0,0,"9",,terminal_output +2989,5151982,"TERMINAL",0,0,"11",,terminal_output +2990,5153116,"TERMINAL",0,0,"2",,terminal_output +2991,5154140,"TERMINAL",0,0,"3",,terminal_output +2992,5155091,"TERMINAL",0,0,"4",,terminal_output +2993,5156186,"TERMINAL",0,0,"5",,terminal_output +2994,5157209,"TERMINAL",0,0,"6",,terminal_output +2995,5158237,"TERMINAL",0,0,"7",,terminal_output +2996,5159260,"TERMINAL",0,0,"8",,terminal_output +2997,5160297,"TERMINAL",0,0,"9",,terminal_output +2998,5161328,"TERMINAL",0,0,"20",,terminal_output +2999,5162365,"TERMINAL",0,0,"1",,terminal_output +3000,5163430,"TERMINAL",0,0,"2",,terminal_output +3001,5164464,"TERMINAL",0,0,"3",,terminal_output +3002,5165504,"TERMINAL",0,0,"4",,terminal_output +3003,5166578,"TERMINAL",0,0,"5",,terminal_output +3004,5167574,"TERMINAL",0,0,"6",,terminal_output +3005,5168622,"TERMINAL",0,0,"71",,terminal_output +3006,5169706,"TERMINAL",0,0,"8",,terminal_output +3007,5170728,"TERMINAL",0,0,"9",,terminal_output +3008,5171751,"TERMINAL",0,0,"30",,terminal_output +3009,5172878,"TERMINAL",0,0,"1",,terminal_output +3010,5173906,"TERMINAL",0,0,"2",,terminal_output +3011,5174929,"TERMINAL",0,0,"3",,terminal_output +3012,5175950,"TERMINAL",0,0,"4",,terminal_output +3013,5176973,"TERMINAL",0,0,"5",,terminal_output +3014,5177999,"TERMINAL",0,0,"7",,terminal_output +3015,5179025,"TERMINAL",0,0,"8",,terminal_output +3016,5180092,"TERMINAL",0,0,"9",,terminal_output +3017,5181281,"TERMINAL",0,0,"40",,terminal_output +3018,5182199,"TERMINAL",0,0,"1",,terminal_output +3019,5183221,"TERMINAL",0,0,"2",,terminal_output +3020,5184245,"TERMINAL",0,0,"3",,terminal_output +3021,5185237,"TERMINAL",0,0,"4",,terminal_output +3022,5186295,"TERMINAL",0,0,"5",,terminal_output +3023,5187309,"TERMINAL",0,0,"6",,terminal_output +3024,5188341,"TERMINAL",0,0,"7",,terminal_output +3025,5189467,"TERMINAL",0,0,"8",,terminal_output +3026,5190490,"TERMINAL",0,0,"9",,terminal_output +3027,5191521,"TERMINAL",0,0,"50",,terminal_output +3028,5192541,"TERMINAL",0,0,"1",,terminal_output +3029,5193547,"TERMINAL",0,0,"20",,terminal_output +3030,5194690,"TERMINAL",0,0,"3",,terminal_output +3031,5195719,"TERMINAL",0,0,"4",,terminal_output +3032,5196663,"TERMINAL",0,0,"5",,terminal_output +3033,5197761,"TERMINAL",0,0,"6",,terminal_output +3034,5198741,"TERMINAL",0,0,"7",,terminal_output +3035,5199811,"TERMINAL",0,0,"8",,terminal_output +3036,5200835,"TERMINAL",0,0,"9",,terminal_output +3037,5201863,"TERMINAL",0,0,"4:00",,terminal_output 
+3038,5202987,"TERMINAL",0,0,"1",,terminal_output +3039,5204009,"TERMINAL",0,0,"2",,terminal_output +3040,5205076,"TERMINAL",0,0,"4",,terminal_output +3041,5206057,"TERMINAL",0,0,"5",,terminal_output +3042,5207079,"TERMINAL",0,0,"6",,terminal_output +3043,5208821,"TERMINAL",0,0,"70",,terminal_output +3044,5209847,"TERMINAL",0,0,"8",,terminal_output +3045,5210868,"TERMINAL",0,0,"9",,terminal_output +3046,5211952,"TERMINAL",0,0,"10",,terminal_output +3047,5213018,"TERMINAL",0,0,"1",,terminal_output +3048,5214044,"TERMINAL",0,0,"3",,terminal_output +3049,5215067,"TERMINAL",0,0,"4",,terminal_output +3050,5216040,"TERMINAL",0,0,"5",,terminal_output +3051,5217115,"TERMINAL",0,0,"6",,terminal_output +3052,5218139,"TERMINAL",0,0,"7",,terminal_output +3053,5219266,"TERMINAL",0,0,"8",,terminal_output +3054,5220200,"TERMINAL",0,0,"9",,terminal_output +3055,5221232,"TERMINAL",0,0,"20",,terminal_output +3056,5222271,"TERMINAL",0,0,"1",,terminal_output +3057,5223311,"TERMINAL",0,0,"2",,terminal_output +3058,5224351,"TERMINAL",0,0,"3",,terminal_output +3059,5225394,"TERMINAL",0,0,"4",,terminal_output +3060,5226543,"TERMINAL",0,0,"5",,terminal_output +3061,5227466,"TERMINAL",0,0,"6",,terminal_output +3062,5228504,"TERMINAL",0,0,"71",,terminal_output +3063,5229545,"TERMINAL",0,0,"8",,terminal_output +3064,5230581,"TERMINAL",0,0,"9",,terminal_output +3065,5231622,"TERMINAL",0,0,"30",,terminal_output +3066,5232788,"TERMINAL",0,0,"1",,terminal_output +3067,5233811,"TERMINAL",0,0,"2",,terminal_output +3068,5234828,"TERMINAL",0,0,"3",,terminal_output +3069,5235857,"TERMINAL",0,0,"4",,terminal_output +3070,5236829,"TERMINAL",0,0,"5",,terminal_output +3071,5237862,"TERMINAL",0,0,"6",,terminal_output +3072,5238927,"TERMINAL",0,0,"7",,terminal_output +3073,5239936,"TERMINAL",0,0,"8",,terminal_output +3074,5241081,"TERMINAL",0,0,"40",,terminal_output +3075,5242103,"TERMINAL",0,0,"1",,terminal_output +3076,5243047,"TERMINAL",0,0,"2",,terminal_output +3077,5244151,"TERMINAL",0,0,"3",,terminal_output +3078,5245125,"TERMINAL",0,0,"4",,terminal_output +3079,5246200,"TERMINAL",0,0,"5",,terminal_output +3080,5247229,"TERMINAL",0,0,"6",,terminal_output +3081,5248253,"TERMINAL",0,0,"7",,terminal_output +3082,5249282,"TERMINAL",0,0,"8",,terminal_output +3083,5250318,"TERMINAL",0,0,"9",,terminal_output +3084,5251360,"TERMINAL",0,0,"50",,terminal_output +3085,5252404,"TERMINAL",0,0,"10",,terminal_output +3086,5253446,"TERMINAL",0,0,"2",,terminal_output +3087,5254594,"TERMINAL",0,0,"3",,terminal_output +3088,5255530,"TERMINAL",0,0,"4",,terminal_output +3089,5256583,"TERMINAL",0,0,"5",,terminal_output +3090,5257666,"TERMINAL",0,0,"6",,terminal_output +3091,5258639,"TERMINAL",0,0,"7",,terminal_output +3092,5259716,"TERMINAL",0,0,"8",,terminal_output +3093,5260740,"TERMINAL",0,0,"9",,terminal_output +3094,5261770,"TERMINAL",0,0,"5:00",,terminal_output +3095,5262888,"TERMINAL",0,0,"1",,terminal_output +3096,5263918,"TERMINAL",0,0,"2",,terminal_output +3097,5264861,"TERMINAL",0,0,"3",,terminal_output +3098,5265954,"TERMINAL",0,0,"4",,terminal_output +3099,5266986,"TERMINAL",0,0,"5",,terminal_output +3100,5268012,"TERMINAL",0,0,"7",,terminal_output +3101,5269036,"TERMINAL",0,0,"8",,terminal_output +3102,5270090,"TERMINAL",0,0,"9",,terminal_output +3103,5271096,"TERMINAL",0,0,"10",,terminal_output +3104,5272232,"TERMINAL",0,0,"1",,terminal_output +3105,5273233,"TERMINAL",0,0,"2",,terminal_output +3106,5274255,"TERMINAL",0,0,"3",,terminal_output +3107,5275249,"TERMINAL",0,0,"4",,terminal_output 
+3108,5276304,"TERMINAL",0,0,"5",,terminal_output +3109,5277332,"TERMINAL",0,0,"6",,terminal_output +3110,5278373,"TERMINAL",0,0,"7",,terminal_output +3111,5279412,"TERMINAL",0,0,"8",,terminal_output +3112,5280501,"TERMINAL",0,0,"9",,terminal_output +3113,5281527,"TERMINAL",0,0,"20",,terminal_output +3114,5282551,"TERMINAL",0,0,"1",,terminal_output +3115,5283568,"TERMINAL",0,0,"2",,terminal_output +3116,5284701,"TERMINAL",0,0,"3",,terminal_output +3117,5285725,"TERMINAL",0,0,"4",,terminal_output +3118,5286749,"TERMINAL",0,0,"5",,terminal_output +3119,5287735,"TERMINAL",0,0,"61",,terminal_output +3120,5288799,"TERMINAL",0,0,"7",,terminal_output +3121,5289807,"TERMINAL",0,0,"8",,terminal_output +3122,5290948,"TERMINAL",0,0,"9",,terminal_output +3123,5291983,"TERMINAL",0,0,"30",,terminal_output +3124,5293001,"TERMINAL",0,0,"1",,terminal_output +3125,5294022,"TERMINAL",0,0,"2",,terminal_output +3126,5295076,"TERMINAL",0,0,"4",,terminal_output +3127,5296046,"TERMINAL",0,0,"5",,terminal_output +3128,5297192,"TERMINAL",0,0,"6",,terminal_output +3129,5298245,"TERMINAL",0,0,"722",,terminal_output +3130,5299241,"TERMINAL",0,0,"8",,terminal_output +3131,5300309,"TERMINAL",0,0,"90",,terminal_output +3132,5301322,"TERMINAL",0,0,"40",,terminal_output +3133,5302367,"TERMINAL",0,0,"1",,terminal_output +3134,5303408,"TERMINAL",0,0,"2",,terminal_output +3135,5304568,"TERMINAL",0,0,"3",,terminal_output +3136,5305492,"TERMINAL",0,0,"4",,terminal_output +3137,5306529,"TERMINAL",0,0,"5",,terminal_output +3138,5307571,"TERMINAL",0,0,"6",,terminal_output +3139,5308666,"TERMINAL",0,0,"7",,terminal_output +3140,5309652,"TERMINAL",0,0,"8",,terminal_output +3141,5310711,"TERMINAL",0,0,"9",,terminal_output +3142,5311836,"TERMINAL",0,0,"50",,terminal_output +3143,5312860,"TERMINAL",0,0,"11",,terminal_output +3144,5313884,"TERMINAL",0,0,"20",,terminal_output +3145,5314852,"TERMINAL",0,0,"3",,terminal_output +3146,5315934,"TERMINAL",0,0,"4",,terminal_output +3147,5316957,"TERMINAL",0,0,"5",,terminal_output +3148,5317980,"TERMINAL",0,0,"7",,terminal_output +3149,5319118,"TERMINAL",0,0,"838",,terminal_output +3150,5320094,"TERMINAL",0,0,"9",,terminal_output +3151,5321158,"TERMINAL",0,0,"6:00",,terminal_output +3152,5322178,"TERMINAL",0,0,"186",,terminal_output +3153,5323173,"TERMINAL",0,0,"2",,terminal_output +3154,5324256,"TERMINAL",0,0,"3",,terminal_output +3155,5325252,"TERMINAL",0,0,"4",,terminal_output +3156,5326294,"TERMINAL",0,0,"5",,terminal_output +3157,5327337,"TERMINAL",0,0,"6",,terminal_output +3158,5328375,"TERMINAL",0,0,"70",,terminal_output +3159,5329962,"TERMINAL",0,0,"870",,terminal_output +3160,5330985,"TERMINAL",0,0,"9",,terminal_output +3161,5331965,"TERMINAL",0,0,"10",,terminal_output +3162,5332985,"TERMINAL",0,0,"2",,terminal_output +3163,5334057,"TERMINAL",0,0,"3",,terminal_output +3164,5335083,"TERMINAL",0,0,"4",,terminal_output +3165,5336115,"TERMINAL",0,0,"586",,terminal_output +3166,5337157,"TERMINAL",0,0,"6",,terminal_output +3167,5338258,"TERMINAL",0,0,"7",,terminal_output +3168,5339282,"TERMINAL",0,0,"8",,terminal_output +3169,5340306,"TERMINAL",0,0,"9",,terminal_output +3170,5341331,"TERMINAL",0,0,"20",,terminal_output +3171,5342359,"TERMINAL",0,0,"1",,terminal_output +3172,5343401,"TERMINAL",0,0,"2",,terminal_output +3173,5344443,"TERMINAL",0,0,"3",,terminal_output +3174,5345478,"TERMINAL",0,0,"4",,terminal_output +3175,5346518,"TERMINAL",0,0,"5",,terminal_output +3176,5347574,"TERMINAL",0,0,"6",,terminal_output +3177,5348598,"TERMINAL",0,0,"71",,terminal_output 
+3178,5349724,"TERMINAL",0,0,"8",,terminal_output +3179,5350748,"TERMINAL",0,0,"9",,terminal_output +3180,5351774,"TERMINAL",0,0,"30",,terminal_output +3181,5352804,"TERMINAL",0,0,"1",,terminal_output +3182,5353823,"TERMINAL",0,0,"2",,terminal_output +3183,5354948,"TERMINAL",0,0,"3",,terminal_output +3184,5355973,"TERMINAL",0,0,"41",,terminal_output +3185,5356996,"TERMINAL",0,0,"5",,terminal_output +3186,5358023,"TERMINAL",0,0,"7",,terminal_output +3187,5359050,"TERMINAL",0,0,"8",,terminal_output +3188,5360066,"TERMINAL",0,0,"9",,terminal_output +3189,5361111,"TERMINAL",0,0,"40",,terminal_output +3190,5362216,"TERMINAL",0,0,"1",,terminal_output +3191,5363266,"TERMINAL",0,0,"2",,terminal_output +3192,5364198,"TERMINAL",0,0,"3",,terminal_output +3193,5365296,"TERMINAL",0,0,"4",,terminal_output +3194,5366270,"TERMINAL",0,0,"5",,terminal_output +3195,5367315,"TERMINAL",0,0,"6",,terminal_output +3196,5368368,"TERMINAL",0,0,"7",,terminal_output +3197,5369388,"TERMINAL",0,0,"8",,terminal_output +3198,5370428,"TERMINAL",0,0,"9",,terminal_output +3199,5371482,"TERMINAL",0,0,"50",,terminal_output +3200,5372502,"TERMINAL",0,0,"1",,terminal_output +3201,5373538,"TERMINAL",0,0,"20",,terminal_output +3202,5374609,"TERMINAL",0,0,"3",,terminal_output +3203,5375630,"TERMINAL",0,0,"4",,terminal_output +3204,5376757,"TERMINAL",0,0,"5",,terminal_output +3205,5377782,"TERMINAL",0,0,"6",,terminal_output +3206,5378805,"TERMINAL",0,0,"7",,terminal_output +3207,5379828,"TERMINAL",0,0,"8",,terminal_output +3208,5380855,"TERMINAL",0,0,"9",,terminal_output +3209,5381842,"TERMINAL",0,0,"7:00",,terminal_output +3210,5382878,"TERMINAL",0,0,"1",,terminal_output +3211,5384030,"TERMINAL",0,0,"2",,terminal_output +3212,5385080,"TERMINAL",0,0,"4",,terminal_output +3213,5386078,"TERMINAL",0,0,"5",,terminal_output +3214,5387103,"TERMINAL",0,0,"6",,terminal_output +3215,5388126,"TERMINAL",0,0,"7",,terminal_output +3216,5389152,"TERMINAL",0,0,"8",,terminal_output +3217,5390169,"TERMINAL",0,0,"9",,terminal_output +3218,5391299,"TERMINAL",0,0,"10",,terminal_output +3219,5392236,"TERMINAL",0,0,"1",,terminal_output +3220,5393351,"TERMINAL",0,0,"2",,terminal_output +3221,5394314,"TERMINAL",0,0,"3",,terminal_output +3222,5395351,"TERMINAL",0,0,"4",,terminal_output +3223,5396411,"TERMINAL",0,0,"5",,terminal_output +3224,5397423,"TERMINAL",0,0,"6",,terminal_output +3225,5398455,"TERMINAL",0,0,"7",,terminal_output +3226,5399490,"TERMINAL",0,0,"8",,terminal_output +3227,5400621,"TERMINAL",0,0,"9",,terminal_output +3228,5401584,"TERMINAL",0,0,"20",,terminal_output +3229,5402619,"TERMINAL",0,0,"1",,terminal_output +3230,5403656,"TERMINAL",0,0,"2",,terminal_output +3231,5404715,"TERMINAL",0,0,"3",,terminal_output +3232,5405740,"TERMINAL",0,0,"4",,terminal_output +3233,5406787,"TERMINAL",0,0,"5",,terminal_output +3234,5407807,"TERMINAL",0,0,"61",,terminal_output +3235,5408916,"TERMINAL",0,0,"7",,terminal_output +3236,5409936,"TERMINAL",0,0,"8",,terminal_output +3237,5410962,"TERMINAL",0,0,"9",,terminal_output +3238,5411984,"TERMINAL",0,0,"30",,terminal_output +3239,5413009,"TERMINAL",0,0,"2",,terminal_output +3240,5414139,"TERMINAL",0,0,"3",,terminal_output +3241,5415092,"TERMINAL",0,0,"4",,terminal_output +3242,5416183,"TERMINAL",0,0,"5",,terminal_output +3243,5417207,"TERMINAL",0,0,"6",,terminal_output +3244,5418230,"TERMINAL",0,0,"7",,terminal_output +3245,5419283,"TERMINAL",0,0,"8",,terminal_output +3246,5420303,"TERMINAL",0,0,"9",,terminal_output +3247,5421304,"TERMINAL",0,0,"40",,terminal_output 
+3248,5422334,"TERMINAL",0,0,"1",,terminal_output +3249,5423372,"TERMINAL",0,0,"2",,terminal_output +3250,5424406,"TERMINAL",0,0,"3",,terminal_output +3251,5425500,"TERMINAL",0,0,"4",,terminal_output +3252,5426478,"TERMINAL",0,0,"5",,terminal_output +3253,5427512,"TERMINAL",0,0,"6",,terminal_output +3254,5428545,"TERMINAL",0,0,"7",,terminal_output +3255,5429631,"TERMINAL",0,0,"8",,terminal_output +3256,5430627,"TERMINAL",0,0,"9",,terminal_output +3257,5431743,"TERMINAL",0,0,"50",,terminal_output +3258,5432771,"TERMINAL",0,0,"10",,terminal_output +3259,5433735,"TERMINAL",0,0,"2",,terminal_output +3260,5434818,"TERMINAL",0,0,"3",,terminal_output +3261,5435845,"TERMINAL",0,0,"4",,terminal_output +3262,5436852,"TERMINAL",0,0,"5",,terminal_output +3263,5438004,"TERMINAL",0,0,"6",,terminal_output +3264,5439019,"TERMINAL",0,0,"7",,terminal_output +3265,5440072,"TERMINAL",0,0,"9",,terminal_output +3266,5441077,"TERMINAL",0,0,"8:00",,terminal_output +3267,5442092,"TERMINAL",0,0,"1",,terminal_output +3268,5443115,"TERMINAL",0,0,"2",,terminal_output +3269,5444144,"TERMINAL",0,0,"3",,terminal_output +3270,5445149,"TERMINAL",0,0,"4",,terminal_output +3271,5446187,"TERMINAL",0,0,"5",,terminal_output +3272,5447225,"TERMINAL",0,0,"6",,terminal_output +3273,5448260,"TERMINAL",0,0,"7",,terminal_output +3274,5449360,"TERMINAL",0,0,"8",,terminal_output +3275,5451332,"TERMINAL",0,0,"9 ",,terminal_output +3276,5452338,"TERMINAL",0,0,"11",,terminal_output +3277,5453374,"TERMINAL",0,0,"21",,terminal_output +3278,5454408,"TERMINAL",0,0,"3",,terminal_output +3279,5455506,"TERMINAL",0,0,"4",,terminal_output +3280,5456477,"TERMINAL",0,0,"5",,terminal_output +3281,5457518,"TERMINAL",0,0,"6",,terminal_output +3282,5458551,"TERMINAL",0,0,"7",,terminal_output +3283,5459600,"TERMINAL",0,0,"8",,terminal_output +3284,5460625,"TERMINAL",0,0,"9",,terminal_output +3285,5461705,"TERMINAL",0,0,"20",,terminal_output +3286,5462776,"TERMINAL",0,0,"1",,terminal_output +3287,5463719,"TERMINAL",0,0,"2",,terminal_output +3288,5464825,"TERMINAL",0,0,"3",,terminal_output +3289,5465848,"TERMINAL",0,0,"4",,terminal_output +3290,5466822,"TERMINAL",0,0,"51",,terminal_output +3291,5467859,"TERMINAL",0,0,"6",,terminal_output +3292,5468918,"TERMINAL",0,0,"7",,terminal_output +3293,5469942,"TERMINAL",0,0,"8",,terminal_output +3294,5471068,"TERMINAL",0,0,"30",,terminal_output +3295,5472092,"TERMINAL",0,0,"1",,terminal_output +3296,5473033,"TERMINAL",0,0,"2",,terminal_output +3297,5474088,"TERMINAL",0,0,"3",,terminal_output +3298,5475104,"TERMINAL",0,0,"4",,terminal_output +3299,5476188,"TERMINAL",0,0,"5",,terminal_output +3300,5477212,"TERMINAL",0,0,"6",,terminal_output +3301,5478236,"TERMINAL",0,0,"7",,terminal_output +3302,5479363,"TERMINAL",0,0,"822",,terminal_output +3303,5480328,"TERMINAL",0,0,"954",,terminal_output +3304,5481419,"TERMINAL",0,0,"40",,terminal_output +3305,5482474,"TERMINAL",0,0,"1",,terminal_output +3306,5483414,"TERMINAL",0,0,"2",,terminal_output +3307,5484461,"TERMINAL",0,0,"3",,terminal_output +3308,5485506,"TERMINAL",0,0,"4",,terminal_output +3309,5486526,"TERMINAL",0,0,"5",,terminal_output +3310,5487564,"TERMINAL",0,0,"6",,terminal_output +3311,5488606,"TERMINAL",0,0,"7",,terminal_output +3312,5489646,"TERMINAL",0,0,"8",,terminal_output +3313,5490687,"TERMINAL",0,0,"9",,terminal_output +3314,5491752,"TERMINAL",0,0,"50",,terminal_output +3315,5492781,"TERMINAL",0,0,"10",,terminal_output +3316,5493906,"TERMINAL",0,0,"2",,terminal_output +3317,5494929,"TERMINAL",0,0,"3",,terminal_output 
+3318,5495952,"TERMINAL",0,0,"4",,terminal_output +3319,5496978,"TERMINAL",0,0,"5",,terminal_output +3320,5498001,"TERMINAL",0,0,"6",,terminal_output +3321,5499023,"TERMINAL",0,0,"8",,terminal_output +3322,5500075,"TERMINAL",0,0,"9",,terminal_output +3323,5501074,"TERMINAL",0,0,"9:00",,terminal_output +3324,5502202,"TERMINAL",0,0,"1",,terminal_output +3325,5503141,"TERMINAL",0,0,"2",,terminal_output +3326,5504247,"TERMINAL",0,0,"3",,terminal_output +3327,5505220,"TERMINAL",0,0,"4",,terminal_output +3328,5506255,"TERMINAL",0,0,"5",,terminal_output +3329,5507319,"TERMINAL",0,0,"6",,terminal_output +3330,5508346,"TERMINAL",0,0,"7",,terminal_output +3331,5509372,"TERMINAL",0,0,"8",,terminal_output +3332,5510412,"TERMINAL",0,0,"9",,terminal_output +3333,5511488,"TERMINAL",0,0,"10",,terminal_output +3334,5512487,"TERMINAL",0,0,"1",,terminal_output +3335,5513534,"TERMINAL",0,0,"2",,terminal_output +3336,5514587,"TERMINAL",0,0,"3",,terminal_output +3337,5515616,"TERMINAL",0,0,"4",,terminal_output +3338,5516633,"TERMINAL",0,0,"5",,terminal_output +3339,5517770,"TERMINAL",0,0,"6",,terminal_output +3340,5518789,"TERMINAL",0,0,"7",,terminal_output +3341,5519813,"TERMINAL",0,0,"8",,terminal_output +3342,5520837,"TERMINAL",0,0,"9",,terminal_output +3343,5521858,"TERMINAL",0,0,"20",,terminal_output +3344,5522884,"TERMINAL",0,0,"1",,terminal_output +3345,5524009,"TERMINAL",0,0,"2",,terminal_output +3346,5525073,"TERMINAL",0,0,"3",,terminal_output +3347,5525976,"TERMINAL",0,0,"5",,terminal_output +3348,5527081,"TERMINAL",0,0,"61",,terminal_output +3349,5528112,"TERMINAL",0,0,"7",,terminal_output +3350,5529132,"TERMINAL",0,0,"8",,terminal_output +3351,5530128,"TERMINAL",0,0,"9",,terminal_output +3352,5531185,"TERMINAL",0,0,"30",,terminal_output +3353,5532329,"TERMINAL",0,0,"1",,terminal_output +3354,5533328,"TERMINAL",0,0,"2",,terminal_output +3355,5534352,"TERMINAL",0,0,"3",,terminal_output +3356,5535336,"TERMINAL",0,0,"4",,terminal_output +3357,5536402,"TERMINAL",0,0,"5",,terminal_output +3358,5537426,"TERMINAL",0,0,"6",,terminal_output +3359,5538427,"TERMINAL",0,0,"7",,terminal_output +3360,5539464,"TERMINAL",0,0,"8",,terminal_output +3361,5540603,"TERMINAL",0,0,"9",,terminal_output +3362,5541549,"TERMINAL",0,0,"40",,terminal_output +3363,5542586,"TERMINAL",0,0,"1",,terminal_output +3364,5543622,"TERMINAL",0,0,"2",,terminal_output +3365,5544694,"TERMINAL",0,0,"3",,terminal_output +3366,5545720,"TERMINAL",0,0,"4",,terminal_output +3367,5546744,"TERMINAL",0,0,"5",,terminal_output +3368,5547797,"TERMINAL",0,0,"6",,terminal_output +3369,5548810,"TERMINAL",0,0,"7",,terminal_output +3370,5549918,"TERMINAL",0,0,"8",,terminal_output +3371,5550943,"TERMINAL",0,0,"9",,terminal_output +3372,5551976,"TERMINAL",0,0,"50",,terminal_output +3373,5552992,"TERMINAL",0,0,"2 90",,terminal_output +3374,5554004,"TERMINAL",0,0,"3",,terminal_output +3375,5555090,"TERMINAL",0,0,"4",,terminal_output +3376,5556074,"TERMINAL",0,0,"5",,terminal_output +3377,5557188,"TERMINAL",0,0,"6",,terminal_output +3378,5558350,"TERMINAL",0,0,"7",,terminal_output +3379,5559237,"TERMINAL",0,0,"8",,terminal_output +3380,5560215,"TERMINAL",0,0,"9",,terminal_output +3381,5561252,"TERMINAL",0,0,"10:00",,terminal_output +3382,5562336,"TERMINAL",0,0,"1",,terminal_output +3383,5563365,"TERMINAL",0,0,"2",,terminal_output +3384,5564360,"TERMINAL",0,0,"3",,terminal_output +3385,5565393,"TERMINAL",0,0,"4",,terminal_output +3386,5566428,"TERMINAL",0,0,"5",,terminal_output +3387,5567466,"TERMINAL",0,0,"6",,terminal_output 
+3388,5568507,"TERMINAL",0,0,"7",,terminal_output +3389,5569543,"TERMINAL",0,0,"8",,terminal_output +3390,5570609,"TERMINAL",0,0,"9",,terminal_output +3391,5572636,"TERMINAL",0,0,"10 60",,terminal_output +3392,5573675,"TERMINAL",0,0,"2",,terminal_output +3393,5574801,"TERMINAL",0,0,"3",,terminal_output +3394,5575826,"TERMINAL",0,0,"4",,terminal_output +3395,5576772,"TERMINAL",0,0,"5",,terminal_output +3396,5577851,"TERMINAL",0,0,"6",,terminal_output +3397,5578897,"TERMINAL",0,0,"7",,terminal_output +3398,5579920,"TERMINAL",0,0,"810",,terminal_output +3399,5580947,"TERMINAL",0,0,"9",,terminal_output +3400,5581970,"TERMINAL",0,0,"20",,terminal_output +3401,5583097,"TERMINAL",0,0,"2",,terminal_output +3402,5584123,"TERMINAL",0,0,"3",,terminal_output +3403,5585092,"TERMINAL",0,0,"4",,terminal_output +3404,5586166,"TERMINAL",0,0,"5",,terminal_output +3405,5587194,"TERMINAL",0,0,"6",,terminal_output +3406,5588216,"TERMINAL",0,0,"7 9",,terminal_output +3407,5589239,"TERMINAL",0,0,"8",,terminal_output +3408,5590270,"TERMINAL",0,0,"9",,terminal_output +3409,5591288,"TERMINAL",0,0,"30",,terminal_output +3410,5592416,"TERMINAL",0,0,"1",,terminal_output +3411,5593349,"TERMINAL",0,0,"2",,terminal_output +3412,5594464,"TERMINAL",0,0,"3",,terminal_output +3413,5595423,"TERMINAL",0,0,"4",,terminal_output +3414,5596459,"TERMINAL",0,0,"5",,terminal_output +3415,5597494,"TERMINAL",0,0,"6",,terminal_output +3416,5598532,"TERMINAL",0,0,"7",,terminal_output +3417,5599585,"TERMINAL",0,0,"8",,terminal_output +3418,5600813,"TERMINAL",0,0,"924",,terminal_output +3419,5601835,"TERMINAL",0,0,"40",,terminal_output +3420,5602861,"TERMINAL",0,0,"1",,terminal_output +3421,5603986,"TERMINAL",0,0,"2",,terminal_output +3422,5605011,"TERMINAL",0,0,"3",,terminal_output +3423,5606037,"TERMINAL",0,0,"4",,terminal_output +3424,5607060,"TERMINAL",0,0,"62",,terminal_output +3425,5608022,"TERMINAL",0,0,"74",,terminal_output +3426,5609106,"TERMINAL",0,0,"8",,terminal_output +3427,5610090,"TERMINAL",0,0,"9",,terminal_output +3428,5611126,"TERMINAL",0,0,"50",,terminal_output +3429,5612177,"TERMINAL",0,0,"15",,terminal_output +3430,5613305,"TERMINAL",0,0,"2",,terminal_output +3431,5614352,"TERMINAL",0,0,"36",,terminal_output +3432,5615400,"TERMINAL",0,0,"4",,terminal_output +3433,5616435,"TERMINAL",0,0,"510",,terminal_output +3434,5617471,"TERMINAL",0,0,"6",,terminal_output +3435,5618505,"TERMINAL",0,0,"7",,terminal_output +3436,5619543,"TERMINAL",0,0,"8",,terminal_output +3437,5620679,"TERMINAL",0,0,"9",,terminal_output +3438,5621700,"TERMINAL",0,0,"1:002",,terminal_output +3439,5622723,"TERMINAL",0,0,"1",,terminal_output +3440,5623681,"TERMINAL",0,0,"2",,terminal_output +3441,5624772,"TERMINAL",0,0,"3",,terminal_output +3442,5625795,"TERMINAL",0,0,"4",,terminal_output +3443,5626793,"TERMINAL",0,0,"5",,terminal_output +3444,5627844,"TERMINAL",0,0,"6",,terminal_output +3445,5628972,"TERMINAL",0,0,"71",,terminal_output +3446,5629996,"TERMINAL",0,0,"8",,terminal_output +3447,5630936,"TERMINAL",0,0,"9",,terminal_output +3448,5631971,"TERMINAL",0,0,"11",,terminal_output +3449,5633009,"TERMINAL",0,0,"2",,terminal_output +3450,5634097,"TERMINAL",0,0,"31",,terminal_output +3451,5635090,"TERMINAL",0,0,"4",,terminal_output +3452,5636114,"TERMINAL",0,0,"57",,terminal_output +3453,5637151,"TERMINAL",0,0,"6",,terminal_output +3454,5638288,"TERMINAL",0,0,"73",,terminal_output +3455,5639315,"TERMINAL",0,0,"8",,terminal_output +3456,5640368,"TERMINAL",0,0,"9",,terminal_output +3457,5641389,"TERMINAL",0,0,"20",,terminal_output 
+3458,5642386,"TERMINAL",0,0,"1",,terminal_output +3459,5643412,"TERMINAL",0,0,"2",,terminal_output +3460,5644437,"TERMINAL",0,0,"3",,terminal_output +3461,5645442,"TERMINAL",0,0,"4",,terminal_output +3462,5646482,"TERMINAL",0,0,"5",,terminal_output +3463,5647517,"TERMINAL",0,0,"6",,terminal_output +3464,5648551,"TERMINAL",0,0,"7",,terminal_output +3465,5649587,"TERMINAL",0,0,"8",,terminal_output +3466,5650681,"TERMINAL",0,0,"9",,terminal_output +3467,5651706,"TERMINAL",0,0,"30",,terminal_output +3468,5652734,"TERMINAL",0,0,"1",,terminal_output +3469,5653754,"TERMINAL",0,0,"2",,terminal_output +3470,5654775,"TERMINAL",0,0,"3",,terminal_output +3471,5655903,"TERMINAL",0,0,"4",,terminal_output +3472,5656837,"TERMINAL",0,0,"5",,terminal_output +3473,5657950,"TERMINAL",0,0,"6",,terminal_output +3474,5658909,"TERMINAL",0,0,"7",,terminal_output +3475,5659998,"TERMINAL",0,0,"8",,terminal_output +3476,5661023,"TERMINAL",0,0,"40",,terminal_output +3477,5662046,"TERMINAL",0,0,"1",,terminal_output +3478,5663069,"TERMINAL",0,0,"2",,terminal_output +3479,5664094,"TERMINAL",0,0,"3",,terminal_output +3480,5665131,"TERMINAL",0,0,"4",,terminal_output +3481,5666169,"TERMINAL",0,0,"5",,terminal_output +3482,5667270,"TERMINAL",0,0,"6",,terminal_output +3483,5668291,"TERMINAL",0,0,"7",,terminal_output +3484,5669317,"TERMINAL",0,0,"843",,terminal_output +3485,5670339,"TERMINAL",0,0,"9",,terminal_output +3486,5671388,"TERMINAL",0,0,"50",,terminal_output +3487,5672492,"TERMINAL",0,0,"1",,terminal_output +3488,5673426,"TERMINAL",0,0,"20",,terminal_output +3489,5674476,"TERMINAL",0,0,"3",,terminal_output +3490,5675500,"TERMINAL",0,0,"4",,terminal_output +3491,5676536,"TERMINAL",0,0,"5",,terminal_output +3492,5677574,"TERMINAL",0,0,"6",,terminal_output +3493,5678611,"TERMINAL",0,0,"7",,terminal_output +3494,5679658,"TERMINAL",0,0,"8",,terminal_output +3495,5680786,"TERMINAL",0,0,"9",,terminal_output +3496,5681811,"TERMINAL",0,0,"2:00",,terminal_output +3497,5682833,"TERMINAL",0,0,"1",,terminal_output +3498,5683857,"TERMINAL",0,0,"2",,terminal_output +3499,5684880,"TERMINAL",0,0,"3",,terminal_output +3500,5685908,"TERMINAL",0,0,"4",,terminal_output +3501,5686905,"TERMINAL",0,0,"5",,terminal_output +3502,5687943,"TERMINAL",0,0,"6",,terminal_output +3503,5688980,"TERMINAL",0,0,"8",,terminal_output +3504,5690092,"TERMINAL",0,0,"9",,terminal_output +3505,5691129,"TERMINAL",0,0,"10",,terminal_output +3506,5692154,"TERMINAL",0,0,"1",,terminal_output +3507,5693789,"TERMINAL",0,0,"2 60",,terminal_output +3508,5694920,"TERMINAL",0,0,"3",,terminal_output +3509,5695941,"TERMINAL",0,0,"4",,terminal_output +3510,5696901,"TERMINAL",0,0,"5",,terminal_output +3511,5697933,"TERMINAL",0,0,"6",,terminal_output +3512,5699012,"TERMINAL",0,0,"8",,terminal_output +3513,5700077,"TERMINAL",0,0,"9",,terminal_output +3514,5701063,"TERMINAL",0,0,"20",,terminal_output +3515,5702187,"TERMINAL",0,0,"1",,terminal_output +3516,5703211,"TERMINAL",0,0,"2",,terminal_output +3517,5704151,"TERMINAL",0,0,"3",,terminal_output +3518,5705189,"TERMINAL",0,0,"4",,terminal_output +3519,5706282,"TERMINAL",0,0,"5",,terminal_output +3520,5707308,"TERMINAL",0,0,"6",,terminal_output +3521,5708331,"TERMINAL",0,0,"7",,terminal_output +3522,5709356,"TERMINAL",0,0,"8",,terminal_output +3523,5710405,"TERMINAL",0,0,"9",,terminal_output +3524,5711508,"TERMINAL",0,0,"30",,terminal_output +3525,5712442,"TERMINAL",0,0,"1",,terminal_output +3526,5713475,"TERMINAL",0,0,"2",,terminal_output +3527,5714511,"TERMINAL",0,0,"3",,terminal_output 
+3528,5715545,"TERMINAL",0,0,"4",,terminal_output +3529,5716585,"TERMINAL",0,0,"5",,terminal_output +3530,5717651,"TERMINAL",0,0,"6",,terminal_output +3531,5718657,"TERMINAL",0,0,"7",,terminal_output +3532,5719699,"TERMINAL",0,0,"8",,terminal_output +3533,5720823,"TERMINAL",0,0,"9",,terminal_output +3534,5721848,"TERMINAL",0,0,"4022",,terminal_output +3535,5722873,"TERMINAL",0,0,"1",,terminal_output +3536,5723837,"TERMINAL",0,0,"2",,terminal_output +3537,5724874,"TERMINAL",0,0,"3",,terminal_output +3538,5725947,"TERMINAL",0,0,"4",,terminal_output +3539,5726968,"TERMINAL",0,0,"5",,terminal_output +3540,5727979,"TERMINAL",0,0,"7",,terminal_output +3541,5729128,"TERMINAL",0,0,"8",,terminal_output +3542,5730090,"TERMINAL",0,0,"9",,terminal_output +3543,5731108,"TERMINAL",0,0,"50",,terminal_output +3544,5732197,"TERMINAL",0,0,"1",,terminal_output +3545,5733219,"TERMINAL",0,0,"25",,terminal_output +3546,5734238,"TERMINAL",0,0,"36",,terminal_output +3547,5735264,"TERMINAL",0,0,"47",,terminal_output +3548,5736289,"TERMINAL",0,0,"5",,terminal_output +3549,5737415,"TERMINAL",0,0,"68",,terminal_output +3550,5738431,"TERMINAL",0,0,"7",,terminal_output +3551,5739465,"TERMINAL",0,0,"8",,terminal_output +3552,5740423,"TERMINAL",0,0,"9",,terminal_output +3553,5741514,"TERMINAL",0,0,"3:00",,terminal_output +3554,5742499,"TERMINAL",0,0,"1",,terminal_output +3555,5743543,"TERMINAL",0,0,"29",,terminal_output +3556,5744572,"TERMINAL",0,0,"3",,terminal_output +3557,5745609,"TERMINAL",0,0,"4",,terminal_output +3558,5746644,"TERMINAL",0,0,"53",,terminal_output +3559,5747754,"TERMINAL",0,0,"6",,terminal_output +3560,5748716,"TERMINAL",0,0,"7",,terminal_output +3561,5749803,"TERMINAL",0,0,"8",,terminal_output +3562,5750827,"TERMINAL",0,0,"9",,terminal_output +3563,5751852,"TERMINAL",0,0,"10",,terminal_output +3564,5752862,"TERMINAL",0,0,"143",,terminal_output +3565,5753899,"TERMINAL",0,0,"2",,terminal_output +3566,5755026,"TERMINAL",0,0,"3",,terminal_output +3567,5755976,"TERMINAL",0,0,"5",,terminal_output +3568,5757015,"TERMINAL",0,0,"6",,terminal_output +3569,5758098,"TERMINAL",0,0,"7",,terminal_output +3570,5759125,"TERMINAL",0,0,"8",,terminal_output +3571,5760123,"TERMINAL",0,0,"9",,terminal_output +3572,5761173,"TERMINAL",0,0,"20",,terminal_output +3573,5762298,"TERMINAL",0,0,"1",,terminal_output +3574,5763267,"TERMINAL",0,0,"2",,terminal_output +3575,5764302,"TERMINAL",0,0,"3",,terminal_output +3576,5765368,"TERMINAL",0,0,"4",,terminal_output +3577,5766399,"TERMINAL",0,0,"5",,terminal_output +3578,5767440,"TERMINAL",0,0,"6",,terminal_output +3579,5768432,"TERMINAL",0,0,"7",,terminal_output +3580,5769465,"TERMINAL",0,0,"8",,terminal_output +3581,5770486,"TERMINAL",0,0,"9",,terminal_output +3582,5771525,"TERMINAL",0,0,"30",,terminal_output +3583,5772563,"TERMINAL",0,0,"1",,terminal_output +3584,5773596,"TERMINAL",0,0,"2",,terminal_output +3585,5774688,"TERMINAL",0,0,"3",,terminal_output +3586,5775711,"TERMINAL",0,0,"4",,terminal_output +3587,5776736,"TERMINAL",0,0,"5",,terminal_output +3588,5777758,"TERMINAL",0,0,"6",,terminal_output +3589,5778783,"TERMINAL",0,0,"7",,terminal_output +3590,5779909,"TERMINAL",0,0,"8",,terminal_output +3591,5780932,"TERMINAL",0,0,"9",,terminal_output +3592,5781959,"TERMINAL",0,0,"40",,terminal_output +3593,5782981,"TERMINAL",0,0,"1",,terminal_output +3594,5784039,"TERMINAL",0,0,"3",,terminal_output +3595,5785075,"TERMINAL",0,0,"4",,terminal_output +3596,5786054,"TERMINAL",0,0,"5",,terminal_output +3597,5787182,"TERMINAL",0,0,"6",,terminal_output 
+3598,5788119,"TERMINAL",0,0,"7",,terminal_output +3599,5789228,"TERMINAL",0,0,"8",,terminal_output +3600,5790198,"TERMINAL",0,0,"9",,terminal_output +3601,5791274,"TERMINAL",0,0,"50",,terminal_output +3602,5792296,"TERMINAL",0,0,"1",,terminal_output +3603,5793311,"TERMINAL",0,0,"2",,terminal_output +3604,5794455,"TERMINAL",0,0,"3",,terminal_output +3605,5795392,"TERMINAL",0,0,"4",,terminal_output +3606,5796523,"TERMINAL",0,0,"5",,terminal_output +3607,5797528,"TERMINAL",0,0,"6",,terminal_output +3608,5798506,"TERMINAL",0,0,"7",,terminal_output +3609,5799547,"TERMINAL",0,0,"8",,terminal_output +3610,5800595,"TERMINAL",0,0,"9",,terminal_output +3611,5801623,"TERMINAL",0,0,"4:00",,terminal_output +3612,5802662,"TERMINAL",0,0,"1",,terminal_output +3613,5803771,"TERMINAL",0,0,"2",,terminal_output +3614,5804793,"TERMINAL",0,0,"3",,terminal_output +3615,5805815,"TERMINAL",0,0,"4",,terminal_output +3616,5806820,"TERMINAL",0,0,"5",,terminal_output +3617,5807865,"TERMINAL",0,0,"6",,terminal_output +3618,5808992,"TERMINAL",0,0,"7",,terminal_output +3619,5810015,"TERMINAL",0,0,"8",,terminal_output +3620,5811041,"TERMINAL",0,0,"10",,terminal_output +3621,5812009,"TERMINAL",0,0,"1",,terminal_output +3622,5813092,"TERMINAL",0,0,"2",,terminal_output +3623,5814929,"TERMINAL",0,0,"3 6",,terminal_output +3624,5815957,"TERMINAL",0,0,"4",,terminal_output +3625,5817003,"TERMINAL",0,0,"6",,terminal_output +3626,5818105,"TERMINAL",0,0,"7",,terminal_output +3627,5819133,"TERMINAL",0,0,"8",,terminal_output +3628,5820116,"TERMINAL",0,0,"9",,terminal_output +3629,5821177,"TERMINAL",0,0,"20",,terminal_output +3630,5822203,"TERMINAL",0,0,"1",,terminal_output +3631,5823330,"TERMINAL",0,0,"2",,terminal_output +3632,5824355,"TERMINAL",0,0,"3",,terminal_output +3633,5825378,"TERMINAL",0,0,"4",,terminal_output +3634,5826401,"TERMINAL",0,0,"5",,terminal_output +3635,5827454,"TERMINAL",0,0,"6",,terminal_output +3636,5828464,"TERMINAL",0,0,"7",,terminal_output +3637,5829487,"TERMINAL",0,0,"8",,terminal_output +3638,5830513,"TERMINAL",0,0,"9",,terminal_output +3639,5831548,"TERMINAL",0,0,"30",,terminal_output +3640,5832588,"TERMINAL",0,0,"1",,terminal_output +3641,5833626,"TERMINAL",0,0,"2",,terminal_output +3642,5834695,"TERMINAL",0,0,"3",,terminal_output +3643,5835722,"TERMINAL",0,0,"4",,terminal_output +3644,5836740,"TERMINAL",0,0,"5",,terminal_output +3645,5837868,"TERMINAL",0,0,"6",,terminal_output +3646,5838891,"TERMINAL",0,0,"7",,terminal_output +3647,5839922,"TERMINAL",0,0,"8",,terminal_output +3648,5840901,"TERMINAL",0,0,"9",,terminal_output +3649,5841964,"TERMINAL",0,0,"40",,terminal_output +3650,5842987,"TERMINAL",0,0,"2",,terminal_output +3651,5844116,"TERMINAL",0,0,"3",,terminal_output +3652,5845060,"TERMINAL",0,0,"4",,terminal_output +3653,5846168,"TERMINAL",0,0,"5",,terminal_output +3654,5847188,"TERMINAL",0,0,"6",,terminal_output +3655,5848176,"TERMINAL",0,0,"7",,terminal_output +3656,5849243,"TERMINAL",0,0,"8",,terminal_output +3657,5850360,"TERMINAL",0,0,"9",,terminal_output +3658,5851292,"TERMINAL",0,0,"50",,terminal_output +3659,5852408,"TERMINAL",0,0,"1",,terminal_output +3660,5853371,"TERMINAL",0,0,"2",,terminal_output +3661,5854486,"TERMINAL",0,0,"310",,terminal_output +3662,5855471,"TERMINAL",0,0,"41",,terminal_output +3663,5856481,"TERMINAL",0,0,"52",,terminal_output +3664,5857529,"TERMINAL",0,0,"6",,terminal_output +3665,5858551,"TERMINAL",0,0,"73",,terminal_output +3666,5859586,"TERMINAL",0,0,"84",,terminal_output +3667,5860627,"TERMINAL",0,0,"9",,terminal_output 
+3668,5861662,"TERMINAL",0,0,"5:00",,terminal_output +3669,5862710,"TERMINAL",0,0,"1",,terminal_output +3670,5863781,"TERMINAL",0,0,"2",,terminal_output +3671,5864803,"TERMINAL",0,0,"323",,terminal_output +3672,5865927,"TERMINAL",0,0,"4",,terminal_output +3673,5866855,"TERMINAL",0,0,"5",,terminal_output +3674,5867974,"TERMINAL",0,0,"65",,terminal_output +3675,5868999,"TERMINAL",0,0,"7",,terminal_output +3676,5870021,"TERMINAL",0,0,"9",,terminal_output +3677,5871045,"TERMINAL",0,0,"107",,terminal_output +3678,5872034,"TERMINAL",0,0,"135",,terminal_output +3679,5873096,"TERMINAL",0,0,"2",,terminal_output +3680,5874120,"TERMINAL",0,0,"3",,terminal_output +3681,5875147,"TERMINAL",0,0,"4",,terminal_output +3682,5876268,"TERMINAL",0,0,"59",,terminal_output +3683,5877227,"TERMINAL",0,0,"6",,terminal_output +3684,5878265,"TERMINAL",0,0,"7",,terminal_output +3685,5879341,"TERMINAL",0,0,"843",,terminal_output +3686,5880364,"TERMINAL",0,0,"9",,terminal_output +3687,5881389,"TERMINAL",0,0,"20",,terminal_output +3688,5882515,"TERMINAL",0,0,"1",,terminal_output +3689,5883469,"TERMINAL",0,0,"2",,terminal_output +3690,5884565,"TERMINAL",0,0,"3",,terminal_output +3691,5885519,"TERMINAL",0,0,"4",,terminal_output +3692,5886554,"TERMINAL",0,0,"5",,terminal_output +3693,5887591,"TERMINAL",0,0,"6",,terminal_output +3694,5888625,"TERMINAL",0,0,"7",,terminal_output +3695,5889660,"TERMINAL",0,0,"8",,terminal_output +3696,5890696,"TERMINAL",0,0,"9",,terminal_output +3697,5891844,"TERMINAL",0,0,"30",,terminal_output +3698,5892857,"TERMINAL",0,0,"1",,terminal_output +3699,5893881,"TERMINAL",0,0,"2",,terminal_output +3700,5894912,"TERMINAL",0,0,"3",,terminal_output +3701,5895931,"TERMINAL",0,0,"4",,terminal_output +3702,5896954,"TERMINAL",0,0,"5",,terminal_output +3703,5897980,"TERMINAL",0,0,"6",,terminal_output +3704,5899003,"TERMINAL",0,0,"8",,terminal_output +3705,5900089,"TERMINAL",0,0,"9",,terminal_output +3706,5901153,"TERMINAL",0,0,"40",,terminal_output +3707,5902176,"TERMINAL",0,0,"1",,terminal_output +3708,5903141,"TERMINAL",0,0,"2",,terminal_output +3709,5904226,"TERMINAL",0,0,"3",,terminal_output +3710,5905211,"TERMINAL",0,0,"4",,terminal_output +3711,5906274,"TERMINAL",0,0,"5",,terminal_output +3712,5907298,"TERMINAL",0,0,"6",,terminal_output +3713,5908318,"TERMINAL",0,0,"7",,terminal_output +3714,5909482,"TERMINAL",0,0,"8",,terminal_output +3715,5910384,"TERMINAL",0,0,"9",,terminal_output +3716,5911425,"TERMINAL",0,0,"50",,terminal_output +3717,5912519,"TERMINAL",0,0,"1",,terminal_output +3718,5913499,"TERMINAL",0,0,"2",,terminal_output +3719,5914568,"TERMINAL",0,0,"3",,terminal_output +3720,5915566,"TERMINAL",0,0,"4",,terminal_output +3721,5916617,"TERMINAL",0,0,"5",,terminal_output +3722,5917655,"TERMINAL",0,0,"6",,terminal_output +3723,5918766,"TERMINAL",0,0,"7",,terminal_output +3724,5919732,"TERMINAL",0,0,"8",,terminal_output +3725,5920769,"TERMINAL",0,0,"9",,terminal_output +3726,5921835,"TERMINAL",0,0,"6:00",,terminal_output +3727,5922861,"TERMINAL",0,0,"1",,terminal_output +3728,5923987,"TERMINAL",0,0,"2",,terminal_output +3729,5925012,"TERMINAL",0,0,"3",,terminal_output +3730,5926033,"TERMINAL",0,0,"4",,terminal_output +3731,5927059,"TERMINAL",0,0,"6",,terminal_output +3732,5928084,"TERMINAL",0,0,"7",,terminal_output +3733,5929107,"TERMINAL",0,0,"8",,terminal_output +3734,5930124,"TERMINAL",0,0,"9",,terminal_output +3735,5931257,"TERMINAL",0,0,"10",,terminal_output +3736,5932208,"TERMINAL",0,0,"1",,terminal_output +3737,5933261,"TERMINAL",0,0,"2",,terminal_output 
+3738,5934330,"TERMINAL",0,0,"3",,terminal_output +3739,5936108,"TERMINAL",0,0,"4 6",,terminal_output +3740,5937195,"TERMINAL",0,0,"6",,terminal_output +3741,5938226,"TERMINAL",0,0,"7",,terminal_output +3742,5939244,"TERMINAL",0,0,"8",,terminal_output +3743,5940371,"TERMINAL",0,0,"9",,terminal_output +3744,5941312,"TERMINAL",0,0,"20",,terminal_output +3745,5942422,"TERMINAL",0,0,"1",,terminal_output +3746,5943428,"TERMINAL",0,0,"2",,terminal_output +3747,5944492,"TERMINAL",0,0,"3",,terminal_output +3748,5945500,"TERMINAL",0,0,"4",,terminal_output +3749,5946514,"TERMINAL",0,0,"5",,terminal_output +3750,5947558,"TERMINAL",0,0,"6",,terminal_output +3751,5948602,"TERMINAL",0,0,"7",,terminal_output +3752,5949644,"TERMINAL",0,0,"8",,terminal_output +3753,5950715,"TERMINAL",0,0,"9",,terminal_output +3754,5951740,"TERMINAL",0,0,"30",,terminal_output +3755,5952761,"TERMINAL",0,0,"1",,terminal_output +3756,5953888,"TERMINAL",0,0,"2",,terminal_output +3757,5954912,"TERMINAL",0,0,"3",,terminal_output +3758,5955937,"TERMINAL",0,0,"4",,terminal_output +3759,5956960,"TERMINAL",0,0,"5",,terminal_output +3760,5957984,"TERMINAL",0,0,"7",,terminal_output +3761,5959004,"TERMINAL",0,0,"8",,terminal_output +3762,5960073,"TERMINAL",0,0,"9",,terminal_output +3763,5961111,"TERMINAL",0,0,"40",,terminal_output +3764,5962182,"TERMINAL",0,0,"1",,terminal_output +3765,5963206,"TERMINAL",0,0,"2",,terminal_output +3766,5964213,"TERMINAL",0,0,"3",,terminal_output +3767,5965256,"TERMINAL",0,0,"4",,terminal_output +3768,5966287,"TERMINAL",0,0,"5",,terminal_output +3769,5967332,"TERMINAL",0,0,"6",,terminal_output +3770,5968427,"TERMINAL",0,0,"7",,terminal_output +3771,5969412,"TERMINAL",0,0,"8",,terminal_output +3772,5970454,"TERMINAL",0,0,"9",,terminal_output +3773,5971501,"TERMINAL",0,0,"50",,terminal_output +3774,5972526,"TERMINAL",0,0,"1",,terminal_output +3775,5973572,"TERMINAL",0,0,"2",,terminal_output +3776,5974609,"TERMINAL",0,0,"3",,terminal_output +3777,5975646,"TERMINAL",0,0,"47",,terminal_output +3778,5976684,"TERMINAL",0,0,"510",,terminal_output +3779,5977749,"TERMINAL",0,0,"62",,terminal_output +3780,5978775,"TERMINAL",0,0,"7",,terminal_output +3781,5979898,"TERMINAL",0,0,"8",,terminal_output +3782,5980923,"TERMINAL",0,0,"9",,terminal_output +3783,5981946,"TERMINAL",0,0,"7:003",,terminal_output +3784,5982912,"TERMINAL",0,0,"1",,terminal_output +3785,5983997,"TERMINAL",0,0,"2",,terminal_output +3786,5985018,"TERMINAL",0,0,"44",,terminal_output +3787,5986044,"TERMINAL",0,0,"55",,terminal_output +3788,5987170,"TERMINAL",0,0,"66",,terminal_output +3789,5988174,"TERMINAL",0,0,"724",,terminal_output +3790,5989220,"TERMINAL",0,0,"8",,terminal_output +3791,5990208,"TERMINAL",0,0,"9",,terminal_output +3792,5991264,"TERMINAL",0,0,"10",,terminal_output +3793,5992279,"TERMINAL",0,0,"1",,terminal_output +3794,5993414,"TERMINAL",0,0,"25",,terminal_output +3795,5994362,"TERMINAL",0,0,"3",,terminal_output +3796,5995404,"TERMINAL",0,0,"4",,terminal_output +3797,5996465,"TERMINAL",0,0,"5",,terminal_output +3798,5997536,"TERMINAL",0,0,"6",,terminal_output +3799,5998535,"TERMINAL",0,0,"7",,terminal_output +3800,5999558,"TERMINAL",0,0,"8",,terminal_output +3801,6000595,"TERMINAL",0,0,"9",,terminal_output +3802,6001638,"TERMINAL",0,0,"20",,terminal_output +3803,6002672,"TERMINAL",0,0,"1",,terminal_output +3804,6003709,"TERMINAL",0,0,"2",,terminal_output +3805,6004744,"TERMINAL",0,0,"36",,terminal_output +3806,6005804,"TERMINAL",0,0,"48",,terminal_output +3807,6006827,"TERMINAL",0,0,"5",,terminal_output 
+3808,6007861,"TERMINAL",0,0,"6",,terminal_output +3809,6008980,"TERMINAL",0,0,"7",,terminal_output +3810,6010005,"TERMINAL",0,0,"8",,terminal_output +3811,6011027,"TERMINAL",0,0,"30",,terminal_output +3812,6012054,"TERMINAL",0,0,"1",,terminal_output +3813,6013076,"TERMINAL",0,0,"2",,terminal_output +3814,6014202,"TERMINAL",0,0,"3",,terminal_output +3815,6015146,"TERMINAL",0,0,"4",,terminal_output +3816,6016253,"TERMINAL",0,0,"5",,terminal_output +3817,6017276,"TERMINAL",0,0,"6",,terminal_output +3818,6018299,"TERMINAL",0,0,"7",,terminal_output +3819,6019324,"TERMINAL",0,0,"8",,terminal_output +3820,6020389,"TERMINAL",0,0,"9",,terminal_output +3821,6021475,"TERMINAL",0,0,"40",,terminal_output +3822,6022522,"TERMINAL",0,0,"1",,terminal_output +3823,6023474,"TERMINAL",0,0,"2",,terminal_output +3824,6024572,"TERMINAL",0,0,"3",,terminal_output +3825,6025573,"TERMINAL",0,0,"49",,terminal_output +3826,6026600,"TERMINAL",0,0,"5",,terminal_output +3827,6027641,"TERMINAL",0,0,"6",,terminal_output +3828,6028743,"TERMINAL",0,0,"7",,terminal_output +3829,6029714,"TERMINAL",0,0,"8",,terminal_output +3830,6030791,"TERMINAL",0,0,"9",,terminal_output +3831,6031816,"TERMINAL",0,0,"50",,terminal_output +3832,6032837,"TERMINAL",0,0,"1",,terminal_output +3833,6033965,"TERMINAL",0,0,"2",,terminal_output +3834,6034991,"TERMINAL",0,0,"3",,terminal_output +3835,6036015,"TERMINAL",0,0,"4",,terminal_output +3836,6037038,"TERMINAL",0,0,"6",,terminal_output +3837,6038061,"TERMINAL",0,0,"7",,terminal_output +3838,6039089,"TERMINAL",0,0,"8",,terminal_output +3839,6040109,"TERMINAL",0,0,"9",,terminal_output +3840,6041156,"TERMINAL",0,0,"8:00",,terminal_output +3841,6042260,"TERMINAL",0,0,"1",,terminal_output +3842,6043225,"TERMINAL",0,0,"2",,terminal_output +3843,6044266,"TERMINAL",0,0,"3",,terminal_output +3844,6045333,"TERMINAL",0,0,"4",,terminal_output +3845,6046358,"TERMINAL",0,0,"5",,terminal_output +3846,6047483,"TERMINAL",0,0,"6",,terminal_output +3847,6048427,"TERMINAL",0,0,"7",,terminal_output +3848,6049559,"TERMINAL",0,0,"8",,terminal_output +3849,6050554,"TERMINAL",0,0,"9",,terminal_output +3850,6051562,"TERMINAL",0,0,"10",,terminal_output +3851,6052605,"TERMINAL",0,0,"1",,terminal_output +3852,6053619,"TERMINAL",0,0,"2",,terminal_output +3853,6054663,"TERMINAL",0,0,"3",,terminal_output +3854,6055699,"TERMINAL",0,0,"4",,terminal_output +3855,6056781,"TERMINAL",0,0,"51",,terminal_output +3856,6057929,"TERMINAL",0,0,"6 6",,terminal_output +3857,6058956,"TERMINAL",0,0,"7",,terminal_output +3858,6060080,"TERMINAL",0,0,"9",,terminal_output +3859,6061102,"TERMINAL",0,0,"20",,terminal_output +3860,6062128,"TERMINAL",0,0,"1",,terminal_output +3861,6063094,"TERMINAL",0,0,"2",,terminal_output +3862,6064176,"TERMINAL",0,0,"3",,terminal_output +3863,6065166,"TERMINAL",0,0,"48",,terminal_output +3864,6066222,"TERMINAL",0,0,"5",,terminal_output +3865,6067350,"TERMINAL",0,0,"6",,terminal_output +3866,6068290,"TERMINAL",0,0,"7",,terminal_output +3867,6069331,"TERMINAL",0,0,"8",,terminal_output +3868,6070371,"TERMINAL",0,0,"9",,terminal_output +3869,6071444,"TERMINAL",0,0,"30",,terminal_output +3870,6072468,"TERMINAL",0,0,"1",,terminal_output +3871,6073495,"TERMINAL",0,0,"2",,terminal_output +3872,6074620,"TERMINAL",0,0,"3",,terminal_output +3873,6075643,"TERMINAL",0,0,"4",,terminal_output +3874,6076618,"TERMINAL",0,0,"5",,terminal_output +3875,6077659,"TERMINAL",0,0,"6",,terminal_output +3876,6078699,"TERMINAL",0,0,"7",,terminal_output +3877,6079738,"TERMINAL",0,0,"8",,terminal_output 
+3878,6080778,"TERMINAL",0,0,"9",,terminal_output +3879,6081815,"TERMINAL",0,0,"40",,terminal_output +3880,6082918,"TERMINAL",0,0,"1",,terminal_output +3881,6083893,"TERMINAL",0,0,"2",,terminal_output +3882,6084951,"TERMINAL",0,0,"3",,terminal_output +3883,6085998,"TERMINAL",0,0,"5",,terminal_output +3884,6087111,"TERMINAL",0,0,"6",,terminal_output +3885,6088045,"TERMINAL",0,0,"7",,terminal_output +3886,6089162,"TERMINAL",0,0,"8",,terminal_output +3887,6090125,"TERMINAL",0,0,"9",,terminal_output +3888,6091171,"TERMINAL",0,0,"50",,terminal_output +3889,6092234,"TERMINAL",0,0,"1",,terminal_output +3890,6093254,"TERMINAL",0,0,"2",,terminal_output +3891,6094383,"TERMINAL",0,0,"3",,terminal_output +3892,6095345,"TERMINAL",0,0,"4",,terminal_output +3893,6096431,"TERMINAL",0,0,"5",,terminal_output +3894,6097458,"TERMINAL",0,0,"611",,terminal_output +3895,6098461,"TERMINAL",0,0,"7",,terminal_output +3896,6099503,"TERMINAL",0,0,"82",,terminal_output +3897,6100632,"TERMINAL",0,0,"9",,terminal_output +3898,6101578,"TERMINAL",0,0,"9:00",,terminal_output +3899,6102622,"TERMINAL",0,0,"14",,terminal_output +3900,6103660,"TERMINAL",0,0,"25",,terminal_output +3901,6104702,"TERMINAL",0,0,"3",,terminal_output +3902,6105740,"TERMINAL",0,0,"4",,terminal_output +3903,6106781,"TERMINAL",0,0,"5",,terminal_output +3904,6107902,"TERMINAL",0,0,"6",,terminal_output +3905,6108923,"TERMINAL",0,0,"7",,terminal_output +3906,6109946,"TERMINAL",0,0,"8",,terminal_output +3907,6110974,"TERMINAL",0,0,"96",,terminal_output +3908,6111979,"TERMINAL",0,0,"118",,terminal_output +3909,6113122,"TERMINAL",0,0,"2",,terminal_output +3910,6114146,"TERMINAL",0,0,"3",,terminal_output +3911,6115107,"TERMINAL",0,0,"4",,terminal_output +3912,6116195,"TERMINAL",0,0,"5",,terminal_output +3913,6117217,"TERMINAL",0,0,"6",,terminal_output +3914,6118242,"TERMINAL",0,0,"79",,terminal_output +3915,6119368,"TERMINAL",0,0,"8",,terminal_output +3916,6120392,"TERMINAL",0,0,"9",,terminal_output +3917,6121419,"TERMINAL",0,0,"20",,terminal_output +3918,6122442,"TERMINAL",0,0,"1",,terminal_output +3919,6123430,"TERMINAL",0,0,"2",,terminal_output +3920,6124487,"TERMINAL",0,0,"3",,terminal_output +3921,6125615,"TERMINAL",0,0,"4",,terminal_output +3922,6126573,"TERMINAL",0,0,"5",,terminal_output +3923,6127665,"TERMINAL",0,0,"6",,terminal_output +3924,6128631,"TERMINAL",0,0,"7",,terminal_output +3925,6129670,"TERMINAL",0,0,"8",,terminal_output +3926,6130712,"TERMINAL",0,0,"9",,terminal_output +3927,6131752,"TERMINAL",0,0,"30",,terminal_output +3928,6132886,"TERMINAL",0,0,"1",,terminal_output +3929,6133910,"TERMINAL",0,0,"25",,terminal_output +3930,6134936,"TERMINAL",0,0,"3",,terminal_output +3931,6135924,"TERMINAL",0,0,"4",,terminal_output +3932,6136983,"TERMINAL",0,0,"523",,terminal_output +3933,6138106,"TERMINAL",0,0,"7",,terminal_output +3934,6139133,"TERMINAL",0,0,"8",,terminal_output +3935,6140089,"TERMINAL",0,0,"9",,terminal_output +3936,6141114,"TERMINAL",0,0,"40",,terminal_output +3937,6142156,"TERMINAL",0,0,"1",,terminal_output +3938,6143191,"TERMINAL",0,0,"2",,terminal_output +3939,6144250,"TERMINAL",0,0,"37",,terminal_output +3940,6145279,"TERMINAL",0,0,"4",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-28f4aa5c-0534-40eb-ae05-51501d68e4871752860706222-2025_07_18-19.45.48.539/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-28f4aa5c-0534-40eb-ae05-51501d68e4871752860706222-2025_07_18-19.45.48.539/source.csv new file mode 100644 index 
0000000000000000000000000000000000000000..2842a488a0c7c2a4586c832160de33c1a534c653 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-28f4aa5c-0534-40eb-ae05-51501d68e4871752860706222-2025_07_18-19.45.48.539/source.csv @@ -0,0 +1,90 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,667,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"7:45:48 PM [info] Activating crowd-code\n7:45:48 PM [info] Recording started\n7:45:48 PM [info] Initializing git provider using file system watchers...\n7:45:48 PM [info] Git repository found\n7:45:48 PM [info] Git provider initialized successfully\n7:45:48 PM [info] Initial git state: [object Object]\n",Log,tab +3,228069,"TERMINAL",0,0,"queue",,terminal_command +4,228163,"TERMINAL",0,0,"]633;E;2025-07-18 19:49:36 queue;9dd50732-d6d7-4d7e-a22c-51ae92e646cb]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Fri Jul 18 19:49:36 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3357893 accelerat interact tum_cte0 R 3:08:29\t 2 hkn[0436,0708]3357894 accelerat interact tum_cte0 R 3:45:12\t 1 hkn0715\t\t",,terminal_output +5,229211,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +6,231917,"TERMINAL",0,0,"idling",,terminal_command +7,232013,"TERMINAL",0,0,"]633;E;2025-07-18 19:49:40 idling;9dd50732-d6d7-4d7e-a22c-51ae92e646cb]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Fri Jul 18 19:49:40 2025Partition dev_cpuonly: 12 nodes idle\rPartition cpuonly:\t 7 nodes idle\rPartition dev_accelerated:\t 3 nodes idle\rPartition accelerated:\t 1 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 8 nodes idle",,terminal_output +8,233034,"TERMINAL",0,0,"1\t",,terminal_output +9,233586,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +10,237086,"TERMINAL",0,0,"queue",,terminal_command +11,237134,"TERMINAL",0,0,"]633;E;2025-07-18 19:49:45 queue;9dd50732-d6d7-4d7e-a22c-51ae92e646cb]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Fri Jul 18 19:49:45 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3357893 accelerat interact tum_cte0 R 3:08:38\t 2 hkn[0436,0708]3357894 accelerat interact tum_cte0 R 3:45:21\t 1 hkn0715\t\t",,terminal_output +12,237968,"TERMINAL",0,0,"692",,terminal_output +13,238857,"TERMINAL",0,0,"7403",,terminal_output +14,239544,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +15,241735,"TERMINAL",0,0,"scancel 3357893",,terminal_command +16,241795,"TERMINAL",0,0,"]633;E;2025-07-18 19:49:50 scancel 3357893;9dd50732-d6d7-4d7e-a22c-51ae92e646cb]633;C",,terminal_output +17,241927,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +18,243399,"TERMINAL",0,0,"scancel 3357894",,terminal_command +19,243459,"TERMINAL",0,0,"]633;E;2025-07-18 19:49:51 scancel 3357894;9dd50732-d6d7-4d7e-a22c-51ae92e646cb]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +20,245208,"TERMINAL",0,0,"idling",,terminal_command +21,245252,"TERMINAL",0,0,"]633;E;2025-07-18 19:49:53 idling;9dd50732-d6d7-4d7e-a22c-51ae92e646cb]633;C",,terminal_output +22,245311,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Fri Jul 18 19:49:53 2025Partition dev_cpuonly: 
12 nodes idle\rPartition cpuonly:\t 7 nodes idle\rPartition dev_accelerated:\t 3 nodes idle\rPartition accelerated:\t 3 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 8 nodes idle",,terminal_output +23,246324,"TERMINAL",0,0,"4\t",,terminal_output +24,246706,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +25,247830,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport einops\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spacial_bert=True,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n\n\nclass DynamicsAutoregressive(nn.Module):\n """"""Autoregressive (causal) dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spacial_bert=False,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (1, 0), (1, 0), (0, 0)))\n logits = self.dynamics(vid_embed_padded)[:, 1:, 1:]\n mask = jnp.ones(vid_embed.shape[:-1])\n next_tokens = jnp.argmax(logits, axis=-1)\n print(next_tokens.shape)\n jax.debug.breakpoint()\n return dict(token_logits=logits, mask=mask)",python,tab +26,247835,"models/dynamics.py",2889,0,"",python,selection_mouse +27,247936,"models/dynamics.py",2887,9,"act_embed",python,selection_mouse +28,248563,"models/dynamics.py",2826,0,"",python,selection_mouse +29,251754,"models/dynamics.py",2995,0,"",python,selection_mouse 
+30,253317,"models/dynamics.py",2989,0,"",python,selection_mouse +31,254853,"models/dynamics.py",2988,0,"",python,selection_mouse +32,303300,"models/lam.py",0,0,"from typing import Dict, Any\n\nimport jax.numpy as jnp\nimport flax.linen as nn\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass LatentActionModel(nn.Module):\n """"""Latent Action ST-ViVit VQ-VAE""""""\n\n in_dim: int\n model_dim: int\n latent_dim: int\n num_latents: int\n patch_size: int\n num_blocks: int\n num_heads: int\n dropout: float\n codebook_dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.patch_token_dim = self.in_dim * self.patch_size**2\n self.encoder = STTransformer(\n self.model_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )\n self.action_in = self.param(\n ""action_in"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.patch_token_dim),\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n )\n self.patch_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.decoder = STTransformer(\n self.model_dim,\n self.patch_token_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Encode + VQ ---\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n video_action_patches = self.action_up(outputs[""z_q""]) + self.patch_up(\n outputs[""patches""][:, :-1]\n )\n del outputs[""patches""]\n\n # --- Decode ---\n video_recon = self.decoder(video_action_patches)\n video_recon = video_recon.astype(jnp.float32)\n video_recon = nn.sigmoid(video_recon)\n video_recon = video_recon.astype(self.dtype)\n outputs[""recon""] = unpatchify(video_recon, self.patch_size, H, W)\n return outputs\n\n def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess videos ---\n B, T = videos.shape[:2]\n patches = patchify(videos, self.patch_size)\n action_pad = jnp.broadcast_to(self.action_in, (B, T, 1, self.patch_token_dim))\n # FIXME mihir do this the other way around\n padded_patches = jnp.concatenate((action_pad, patches), axis=2)\n\n # --- Encode ---\n z = self.encoder(padded_patches) # (B, T, N, E)\n # Get latent action for all future frames\n z = z[:, 1:, 0] # (B, T-1, E)\n\n # --- Vector quantize ---\n z = z.reshape(B * (T - 1), self.latent_dim)\n z_q, z, emb, indices = self.vq(z, training)\n z_q = z_q.reshape(B, T - 1, 1, self.latent_dim)\n return dict(patches=patches, z_q=z_q, z=z, emb=emb, indices=indices)\n",python,tab +33,304984,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport os\nimport grain\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: 
int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n lam_co_train: bool\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n use_maskgit: bool\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n if self.use_maskgit:\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ) \n else:\n self.dynamics = DynamicsAutoregressive(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""])\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n\n def sample_causal(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ):\n """"""\n Autoregressively samples up to `seq_len` future frames using the causal transformer backend.\n\n - Input frames are tokenized once.\n - Future frames are generated one at a time, each conditioned on all previous frames.\n - All frames are detokenized in a single pass at the end.\n\n Args:\n batch: Dict with at least ""videos"" (B, T, H, W, C)\n seq_len: total number of frames to generate (including context)\n temperature: sampling temperature\n sample_argmax: if True, use argmax instead of sampling\n\n Returns:\n Generated video frames (B, seq_len, H, W, C)\n """"""\n # --- Encode context frames ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n\n # jax.debug.print(""token_idxs shape: {}"", token_idxs.shape)\n # --- Prepare initial token sequence ---\n # Pad with zeros for 
future frames\n pad_shape = (B, seq_len - T, N)\n token_idxs_full = jnp.concatenate(\n [token_idxs, jnp.zeros(pad_shape, dtype=token_idxs.dtype)], axis=1\n ) # (B, seq_len, N)\n\n # --- Prepare latent actions ---\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) # (B, S-1, )\n # --- Autoregressive generation loop ---\n rng = batch[""rng""]\n for t in range(T, seq_len):\n for n in range(32):\n dyna_inputs = {\n ""video_tokens"": token_idxs_full,\n ""latent_actions"": action_tokens\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n # jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # (B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng)\n next_token = jax.random.categorical(\n step_rng, next_token_logits / temperature, axis=-1\n ) # (B, 1)\n\n # Insert the generated tokens into the sequence\n token_idxs_full = token_idxs_full.at[:, t, n].set(next_token)\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n token_idxs_full, video_hw=batch[""videos""].shape[2:4]\n )\n return final_frames\n\n\n @nn.compact\n def sample_maskgit(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) \n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = 
current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: jax.sharding.NamedSharding,\n grain_iterator: grain.DataLoaderIterator,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\n handler_registry.add('dataloader_state', 
grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler)\n \n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )[""model_state""]\n restored_tokenizer_params = restored_tokenizer.params[""params""]\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n lam_init_params = dummy_lam.init(_rng, inputs)\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n abstract_sharded_lam_state = _create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n restored_lam = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )[""model_state""]\n restored_lam_params = restored_lam.params[""params""]\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately dicarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n train_state.params[""params""][""lam""].update(restored_lam_params)\n lam_checkpoint_manager.close()\n\n return train_state\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and 
hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)",python,tab +34,311383,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n warmup_steps: int = 5000\n lr_schedule : str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n logits = outputs[""token_logits""]\n targets = outputs[""video_tokens""]\n\n # if not args.use_maskgit:\n # logits = outputs[""token_logits""][:, :, :-1]\n # targets = outputs[""video_tokens""][:, :, 1:]\n # mask = outputs[""mask""][:, :, 1:] \n\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n logits, targets\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = logits.argmax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(logits)\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, 
*outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=logits.max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n use_maskgit=args.use_maskgit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": 
""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(args.lr_schedule, \n args.init_lr, \n args.max_lr, \n args.decay_end, \n args.num_steps, \n args.warmup_steps, \n args.wsd_decay_steps)\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4, mu_dtype=args.dtype)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n # for videos in dataloader:\n videos = 
np.load(""overfit_dir/corner_8repl.npy"")\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n while True:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) #/ 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +35,312861,"train_dynamics.py",2979,0,"",python,selection_mouse +36,328600,"models/tokenizer.py",0,0,"from typing import Dict, Any, Tuple\n\nimport flax.linen as nn\nimport jax.numpy as jnp\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass TokenizerVQVAE(nn.Module):\n """"""ST-ViVit VQ-VAE""""""\n\n in_dim: int\n model_dim: int\n latent_dim: int\n num_latents: int\n patch_size: int\n num_blocks: int\n num_heads: int\n dropout: float\n codebook_dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.encoder = STTransformer(\n self.model_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n )\n self.out_dim = self.in_dim * self.patch_size**2\n self.decoder = STTransformer(\n self.model_dim,\n self.out_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n recon = self.decoder(outputs[""z_q""]) # (B, T, H_down * W_down, C)\n recon = recon.astype(jnp.float32)\n recon = nn.sigmoid(recon)\n recon = recon.astype(self.dtype)\n outputs[""recon""] = unpatchify(recon, self.patch_size, H, W)\n return outputs\n\n def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess + encode ---\n B, T = videos.shape[:2]\n x = patchify(videos, self.patch_size)\n N = x.shape[2]\n x = self.encoder(x) # (B, T, N, E)\n\n # --- Vector quantize ---\n x = x.reshape(B * T * N, self.latent_dim)\n z_q, z, emb, indices = self.vq(x, training)\n z_q = z_q.reshape(B, T, N, self.latent_dim)\n indices = 
indices.reshape(B, T, N)\n return dict(z_q=z_q, z=z, emb=emb, indices=indices)\n\n def decode(self, indices: Any, video_hw: Tuple[int, int]):\n z = self.vq.codebook[indices]\n recon = self.decoder(z)\n recon = recon.astype(jnp.float32)\n recon = nn.sigmoid(recon)\n recon = recon.astype(self.dtype)\n return unpatchify(recon, self.patch_size, *video_hw)\n",python,tab +37,329565,"models/dynamics.py",0,0,"",python,tab +38,330894,"models/dynamics.py",3065,0,"",python,selection_mouse +39,331494,"models/dynamics.py",3070,0,"",python,selection_mouse +40,332511,"models/dynamics.py",3069,0,"",python,selection_command +41,335761,"models/dynamics.py",3070,0,"",python,selection_command +42,335998,"models/dynamics.py",3069,1,"",python,content +43,336111,"models/dynamics.py",3068,1,"",python,content +44,337261,"models/dynamics.py",3068,0,":",python,content +45,337262,"models/dynamics.py",3069,0,"",python,selection_keyboard +46,337444,"models/dynamics.py",3069,0,"-",python,content +47,337444,"models/dynamics.py",3070,0,"",python,selection_keyboard +48,337711,"models/dynamics.py",3070,0,"1",python,content +49,337712,"models/dynamics.py",3071,0,"",python,selection_keyboard +50,339339,"models/dynamics.py",3066,0,"",python,selection_command +51,339494,"models/dynamics.py",3065,1,"",python,content +52,339644,"models/dynamics.py",3064,1,"",python,content +53,340683,"models/dynamics.py",3064,0,":",python,content +54,340684,"models/dynamics.py",3065,0,"",python,selection_keyboard +55,340980,"models/dynamics.py",3065,0,"-",python,content +56,340981,"models/dynamics.py",3066,0,"",python,selection_keyboard +57,341127,"models/dynamics.py",3066,0,"1",python,content +58,341128,"models/dynamics.py",3067,0,"",python,selection_keyboard +59,341777,"models/dynamics.py",3066,0,"",python,selection_command +60,343332,"TERMINAL",0,0,"bash",,terminal_focus +61,343797,"TERMINAL",0,0,"bash",,terminal_focus +62,352015,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=2\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --job-name=train_dyn_yolorun_new_arch\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --num_steps=1001 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=8 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-new-arch-speedrun-$slurm_job_id \\n --tags dynamics \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n ",shellscript,tab 
+63,357312,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1295,0,"",shellscript,selection_mouse +64,360013,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1295,0,"-",shellscript,content +65,360015,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1296,0,"",shellscript,selection_keyboard +66,360277,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1296,0,"m",shellscript,content +67,360278,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1297,0,"",shellscript,selection_keyboard +68,360361,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1297,0,"e",shellscript,content +69,360362,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1298,0,"",shellscript,selection_keyboard +70,360527,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1298,0,"w",shellscript,content +71,360528,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1299,0,"",shellscript,selection_keyboard +72,360727,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1299,0,"-",shellscript,content +73,360728,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1300,0,"",shellscript,selection_keyboard +74,361210,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1299,1,"",shellscript,content +75,361360,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1298,1,"",shellscript,content +76,361479,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1297,1,"",shellscript,content +77,361594,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1296,1,"",shellscript,content +78,361794,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1296,0,"n",shellscript,content +79,361795,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1297,0,"",shellscript,selection_keyboard +80,361877,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1297,0,"e",shellscript,content +81,361878,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1298,0,"",shellscript,selection_keyboard +82,361978,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1298,0,"w",shellscript,content +83,361979,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1299,0,"",shellscript,selection_keyboard +84,365481,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",0,0,"",shellscript,tab +85,370577,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",,terminal_command +86,370660,"TERMINAL",0,0,"]633;E;2025-07-18 19:51:59 sbatch slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch;9dd50732-d6d7-4d7e-a22c-51ae92e646cb]633;CSubmitted batch job 3358457\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +87,373210,"TERMINAL",0,0,"queue",,terminal_command +88,373260,"TERMINAL",0,0,"]633;E;2025-07-18 19:52:01 queue;9dd50732-d6d7-4d7e-a22c-51ae92e646cb]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Fri Jul 18 19:52:01 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3358457 accelerat train_dy tum_cte0 PD\t0:00\t 2 (Priority)\t\t",,terminal_output +89,374277,"TERMINAL",0,0,"2\t",,terminal_output 
+90,375261,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-3aab53c4-8c45-4083-87ad-e991570a4f5b1752851966968-2025_07_18-17.20.32.773/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-3aab53c4-8c45-4083-87ad-e991570a4f5b1752851966968-2025_07_18-17.20.32.773/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..75c954ce2b6c4ff3cb1ca0ec642460b6cf9a815b --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-3aab53c4-8c45-4083-87ad-e991570a4f5b1752851966968-2025_07_18-17.20.32.773/source.csv @@ -0,0 +1,1233 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,428,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"5:20:32 PM [info] Activating crowd-code\n5:20:32 PM [info] Recording started\n5:20:32 PM [info] Initializing git provider using file system watchers...\n5:20:32 PM [info] Git repository found\n5:20:32 PM [info] Git provider initialized successfully\n5:20:32 PM [info] Initial git state: [object Object]\n",Log,tab +3,3378,"TERMINAL",0,0,"/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_command +4,3444,"TERMINAL",0,0,"]633;E;2025-07-18 17:20:36 /bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt;93a385ce-8875-4adc-83d7-2d4c5cc04c5e]633;C",,terminal_output +5,3545,"TERMINAL",0,0,"]0;tum_cte0515@hkn1990:/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash]633;D;0",,terminal_output +6,5283,"TERMINAL",0,0,"watch",,terminal_focus +7,5676,"TERMINAL",0,0,"bash",,terminal_focus +8,12194,"TERMINAL",0,0,"salloc",,terminal_focus +9,22164,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",0,0,"from dataclasses import dataclass\nfrom typing import Optional\nimport time\nimport os\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nfrom flax.training.train_state import TrainState\nimport grain\nimport orbax.checkpoint as ocp\nimport optax\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n checkpoint_step: Optional[int] = None\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 
32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_co_train: bool = True\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=args.lam_co_train,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n use_maskgit=False,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\n\ndummy_train_state = TrainState.create(\n apply_fn=genie.apply,\n params=params,\n tx=optax.adamw(\n optax.warmup_cosine_decay_schedule(\n 0, 0, 1, 2 # dummy values\n )\n ), \n)\nhandler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\nhandler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\ncheckpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=ocp.CheckpointManagerOptions(step_format_fixed_length=6),\n handler_registry=handler_registry\n)\nabstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, dummy_train_state\n)\n\nrestored = checkpoint_manager.restore(\n args.checkpoint_step or checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n ),\n)\nrestored_train_state = restored[""model_state""]\nparams = restored_train_state.params\n\n\ndef _sampling_wrapper(module, batch):\n # return module.sample_maskgit(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n return module.sample_causal(batch, args.seq_len, args.temperature, args.sample_argmax)\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return generated_vid\n\ndef _get_dataloader_iterator():\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = 
grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n# --- Get video + latent actions ---\n# grain_iterator = _get_dataloader_iterator()\n# video_batch = next(grain_iterator)\nvideo_batch = np.load(""overfit_dir/single_sample_corner.npy"")\n\nvideo_batch = video_batch.astype(args.dtype) #/ 255.0\n# Get latent actions for all videos in the batch\nbatch = dict(videos=video_batch[:,:args.seq_len])\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(video_batch.shape[0], args.seq_len - 1, 1)\n\n# --- Sample + evaluate video ---\nprint(""autoreg sampling..."")\nvid = _autoreg_sample(rng, video_batch, action_batch)\nprint(""autoreg sampling done. calculating ssim and saving video"")\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\ntrue_videos = (video_batch * 255).astype(np.uint8)\npred_videos = (vid * 255).astype(np.uint8)\nvideo_comparison = np.zeros((2, *vid.shape), dtype=np.uint8)\nvideo_comparison[0] = true_videos[:, :args.seq_len]\nvideo_comparison[1] = pred_videos\nframes = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n# --- Save video --- \nimgs = [Image.fromarray(img) for img in frames]\n# Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\nfor t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(action_batch.shape[0]):\n action = action_batch[row, t, 0]\n y_offset = row * video_batch.shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +10,25114,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n warmup_steps: int = 5000\n lr_schedule : str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n 
lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n use_maskgit: bool = False\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n logits = outputs[""token_logits""]\n targets = outputs[""video_tokens""]\n\n # if not args.use_maskgit:\n # logits = outputs[""token_logits""][:, :, :-1]\n # targets = outputs[""video_tokens""][:, :, 1:]\n # mask = outputs[""mask""][:, :, 1:] \n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n logits, targets\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = logits.argmax(-1) == targets\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(logits)\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=logits.max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // 
num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n use_maskgit=args.use_maskgit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(args.lr_schedule, \n args.init_lr, \n args.max_lr, \n args.decay_end, \n args.num_steps, \n args.warmup_steps, \n args.wsd_decay_steps)\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4, mu_dtype=args.dtype)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n 
options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n # for videos in dataloader:\n videos = np.load(""overfit_dir/corner_8repl.npy"")\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n while True:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) #/ 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +11,30546,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport einops\n\nfrom utils.nn import 
STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spacial_bert=True,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n\n\nclass DynamicsAutoregressive(nn.Module):\n """"""Autoregressive (causal) dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n spacial_bert=False,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions""])\n jax.debug.breakpoint()\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n jax.debug.breakpoint()\n vid_embed_padded = jnp.pad(vid_embed, ((0, 0), (0, 0), (1, 0), (0, 0)))\n jax.debug.breakpoint()\n logits = self.dynamics(vid_embed_padded)[:, :, 1:]\n mask = jnp.ones(vid_embed.shape[:-1])\n return dict(token_logits=logits, mask=mask)",python,tab +12,44064,"train_dynamics.py",0,0,"",python,tab +13,45794,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\ndynamics_ckpt_dir=$1\necho $dynamics_ckpt_dir\n\nenv | grep SLURM\n\npython sample.py \\n --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --seq_len=2 \\n --data_dir 
$array_records_dir\n\n",shellscript,tab +14,47311,"TERMINAL",0,0,"bash",,terminal_focus +15,51644,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=2\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --job-name=train_dyn_yolorun_new_arch\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-new-arch-run-$slurm_job_id \\n --tags dynamics \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n ",shellscript,tab +16,52868,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1011,0,"",shellscript,selection_mouse +17,52875,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1010,0,"",shellscript,selection_command +18,55061,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=2\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --job-name=train_dyn_yolorun_new_arch\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --num_steps=1000 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=8 \\n --init_lr=1e-3 \\n --max_lr=1e-3 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=100 \\n --name=dynamics-new-arch-speedrun-$slurm_job_id \\n --tags dynamics \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n ",shellscript,tab 
+19,56659,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1031,0,"",shellscript,selection_mouse +20,57518,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1031,1,"1",shellscript,content +21,68883,"train_dynamics.py",0,0,"",python,tab +22,80897,"TERMINAL",0,0,"salloc",,terminal_focus +23,87711,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"",shellscript,tab +24,90442,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1325,0,"",shellscript,selection_mouse +25,555889,"models/dynamics.py",0,0,"",python,tab +26,557843,"models/dynamics.py",3094,0,"",python,selection_mouse +27,558733,"models/dynamics.py",3125,0,"",python,selection_command +28,558892,"models/dynamics.py",3184,0,"",python,selection_command +29,559335,"models/dynamics.py",3209,0,"\n jax.debug.breakpoint()",python,content +30,559341,"models/dynamics.py",3218,0,"",python,selection_command +31,753400,"train_dynamics.py",0,0,"",python,tab +32,754042,"models/dynamics.py",0,0,"",python,tab +33,755218,"models/dynamics.py",3200,0,"",python,selection_mouse +34,755784,"models/dynamics.py",3277,0,"",python,selection_mouse +35,756541,"models/dynamics.py",3240,0,"",python,selection_mouse +36,756562,"models/dynamics.py",3239,0,"",python,selection_command +37,759664,"train_dynamics.py",0,0,"",python,tab +38,767929,"train_dynamics.py",3055,0,"",python,selection_mouse +39,768080,"train_dynamics.py",3055,6,"logits",python,selection_mouse +40,769438,"train_dynamics.py",3200,0,"",python,selection_mouse +41,769587,"train_dynamics.py",3197,4,"mask",python,selection_mouse +42,770142,"train_dynamics.py",3288,0,"",python,selection_mouse +43,770284,"train_dynamics.py",3287,7,"reshape",python,selection_mouse +44,770824,"train_dynamics.py",3244,0,"",python,selection_mouse +45,770955,"train_dynamics.py",3242,6,"logits",python,selection_mouse +46,772192,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",0,0,"",python,tab +47,776553,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsAutoregressive\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport os\nimport grain\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n lam_co_train: bool\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n use_maskgit: bool\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n 
patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n if self.use_maskgit:\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ) \n else:\n self.dynamics = DynamicsAutoregressive(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""])\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n\n def sample_causal(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ):\n """"""\n Autoregressively samples up to `seq_len` future frames using the causal transformer backend.\n\n - Input frames are tokenized once.\n - Future frames are generated one at a time, each conditioned on all previous frames.\n - All frames are detokenized in a single pass at the end.\n\n Args:\n batch: Dict with at least ""videos"" (B, T, H, W, C)\n seq_len: total number of frames to generate (including context)\n temperature: sampling temperature\n sample_argmax: if True, use argmax instead of sampling\n\n Returns:\n Generated video frames (B, seq_len, H, W, C)\n """"""\n # --- Encode context frames ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n\n # jax.debug.print(""token_idxs shape: {}"", token_idxs.shape)\n # --- Prepare initial token sequence ---\n # Pad with zeros for future frames\n pad_shape = (B, seq_len - T, N)\n token_idxs_full = jnp.concatenate(\n [token_idxs, jnp.zeros(pad_shape, dtype=token_idxs.dtype)], axis=1\n ) # (B, seq_len, N)\n\n # --- Prepare latent actions ---\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) # (B, S-1, )\n # --- Autoregressive generation loop ---\n rng = batch[""rng""]\n for t in range(T, seq_len):\n for n in range(32):\n dyna_inputs = {\n ""video_tokens"": token_idxs_full,\n ""latent_actions"": action_tokens\n }\n # jax.debug.print(""token_idxs_full 0: {}"", token_idxs_full[0,:,0])\n dyna_outputs = self.dynamics(dyna_inputs, training=False)\n # # dyna_outputs[""token_logits""]: (B, t, N, vocab_size)\n # # We want the logits for the last time step (frame t-1 predicting t)\n jax.debug.breakpoint()\n next_token_logits = dyna_outputs[""token_logits""][:, t, n, :].astype(jnp.float32) # 
(B, 1, vocab_size)\n\n # Sample or argmax for each patch\n if sample_argmax:\n next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)\n else:\n rng, step_rng = jax.random.split(rng)\n next_token = jax.random.categorical(\n step_rng, next_token_logits / temperature, axis=-1\n ) # (B, 1)\n\n # Insert the generated tokens into the sequence\n token_idxs_full = token_idxs_full.at[:, t, n].set(next_token)\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n token_idxs_full, video_hw=batch[""videos""].shape[2:4]\n )\n return final_frames\n\n\n @nn.compact\n def sample_maskgit(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""]) \n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess 
videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: jax.sharding.NamedSharding,\n grain_iterator: grain.DataLoaderIterator,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\n handler_registry.add('dataloader_state', grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler)\n \n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, 
params=tokenizer_init_params, tx=dummy_tx\n )\n abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )[""model_state""]\n restored_tokenizer_params = restored_tokenizer.params[""params""]\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n )\n lam_init_params = dummy_lam.init(_rng, inputs)\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n abstract_sharded_lam_state = _create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n restored_lam = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )[""model_state""]\n restored_lam_params = restored_lam.params[""params""]\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately dicarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n train_state.params[""params""][""lam""].update(restored_lam_params)\n lam_checkpoint_manager.close()\n\n return train_state\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)",python,tab +48,780791,"genie.py",6215,0,"",python,selection_mouse +49,780937,"genie.py",6211,10,"next_token",python,selection_mouse +50,781099,"genie.py",6211,11,"next_token ",python,selection_mouse +51,781121,"genie.py",6211,16,"next_token = jnp",python,selection_mouse +52,781142,"genie.py",6211,17,"next_token = jnp.",python,selection_mouse +53,781170,"genie.py",6211,23,"next_token = jnp.argmax",python,selection_mouse +54,781171,"genie.py",6190,31,"\n next_token",python,selection_mouse +55,781500,"genie.py",6211,49,"next_token = jnp.argmax(next_token_logits, axis=-",python,selection_mouse +56,781671,"genie.py",6211,50,"next_token = 
jnp.argmax(next_token_logits, axis=-1",python,selection_mouse +57,781799,"genie.py",6211,51,"next_token = jnp.argmax(next_token_logits, axis=-1)",python,selection_mouse +58,781842,"genie.py",6211,52,"next_token = jnp.argmax(next_token_logits, axis=-1) ",python,selection_mouse +59,781985,"genie.py",6211,53,"next_token = jnp.argmax(next_token_logits, axis=-1) ",python,selection_mouse +60,782024,"genie.py",6211,54,"next_token = jnp.argmax(next_token_logits, axis=-1) #",python,selection_mouse +61,782073,"genie.py",6211,56,"next_token = jnp.argmax(next_token_logits, axis=-1) # (",python,selection_mouse +62,782073,"genie.py",6211,57,"next_token = jnp.argmax(next_token_logits, axis=-1) # (B",python,selection_mouse +63,782105,"genie.py",6211,58,"next_token = jnp.argmax(next_token_logits, axis=-1) # (B,",python,selection_mouse +64,782106,"genie.py",6211,60,"next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1",python,selection_mouse +65,782125,"genie.py",6211,61,"next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)",python,selection_mouse +66,786867,"models/dynamics.py",0,0,"",python,tab +67,788209,"models/dynamics.py",3209,0,"",python,selection_mouse +68,788210,"models/dynamics.py",3208,0,"",python,selection_command +69,788997,"models/dynamics.py",3209,0,"\n ",python,content +70,790029,"models/dynamics.py",3218,0,"next_token = jnp.argmax(next_token_logits, axis=-1) # (B, 1)",python,content +71,792101,"models/dynamics.py",3278,1,"",python,content +72,792255,"models/dynamics.py",3277,1,"",python,content +73,792574,"models/dynamics.py",3275,2,"",python,content +74,792696,"models/dynamics.py",3274,1,"",python,content +75,792938,"models/dynamics.py",3273,1,"",python,content +76,793292,"models/dynamics.py",3271,2,"",python,content +77,793681,"models/dynamics.py",3270,1,"",python,content +78,793849,"models/dynamics.py",3269,1,"",python,content +79,795842,"models/dynamics.py",3242,17,"",python,content +80,796301,"models/dynamics.py",3242,0,"l",python,content +81,796303,"models/dynamics.py",3243,0,"",python,selection_keyboard +82,796495,"models/dynamics.py",3243,0,"o",python,content +83,796496,"models/dynamics.py",3244,0,"",python,selection_keyboard +84,796655,"models/dynamics.py",3244,0,"g",python,content +85,796657,"models/dynamics.py",3245,0,"",python,selection_keyboard +86,796742,"models/dynamics.py",3245,0,"i",python,content +87,796743,"models/dynamics.py",3246,0,"",python,selection_keyboard +88,797015,"models/dynamics.py",3246,0,"t",python,content +89,797016,"models/dynamics.py",3247,0,"",python,selection_keyboard +90,797172,"models/dynamics.py",3247,0,"s",python,content +91,797173,"models/dynamics.py",3248,0,"",python,selection_keyboard +92,797646,"models/dynamics.py",3247,0,"",python,selection_command +93,798891,"models/dynamics.py",3243,0,"",python,selection_mouse +94,799207,"models/dynamics.py",3242,6,"logits",python,selection_mouse +95,800158,"models/dynamics.py",3243,0,"",python,selection_mouse +96,800183,"models/dynamics.py",3242,6,"logits",python,selection_mouse +97,801097,"models/dynamics.py",3243,0,"",python,selection_mouse +98,801098,"models/dynamics.py",3242,6,"logits",python,selection_mouse +99,802122,"models/dynamics.py",3226,0,"",python,selection_mouse +100,802657,"models/dynamics.py",3235,0,"",python,selection_mouse +101,803582,"models/dynamics.py",3258,0,"",python,selection_mouse +102,803583,"models/dynamics.py",3257,0,"",python,selection_command +103,804650,"models/dynamics.py",3258,0,"\n ",python,content +104,804868,"models/dynamics.py",3267,0,"p",python,content 
+105,804869,"models/dynamics.py",3268,0,"",python,selection_keyboard +106,805011,"models/dynamics.py",3268,0,"r",python,content +107,805012,"models/dynamics.py",3269,0,"",python,selection_keyboard +108,805245,"models/dynamics.py",3269,0,"n",python,content +109,805246,"models/dynamics.py",3270,0,"",python,selection_keyboard +110,805485,"models/dynamics.py",3269,1,"",python,content +111,805656,"models/dynamics.py",3268,1,"",python,content +112,805767,"models/dynamics.py",3267,1,"",python,content +113,806425,"models/dynamics.py",3267,0,"p",python,content +114,806426,"models/dynamics.py",3268,0,"",python,selection_keyboard +115,806519,"models/dynamics.py",3268,0,"r",python,content +116,806520,"models/dynamics.py",3269,0,"",python,selection_keyboard +117,806618,"models/dynamics.py",3269,0,"i",python,content +118,806619,"models/dynamics.py",3270,0,"",python,selection_keyboard +119,806744,"models/dynamics.py",3270,0,"n",python,content +120,806745,"models/dynamics.py",3271,0,"",python,selection_keyboard +121,806751,"models/dynamics.py",3271,0,"t",python,content +122,806752,"models/dynamics.py",3272,0,"",python,selection_keyboard +123,807336,"models/dynamics.py",3272,0,"()",python,content +124,807337,"models/dynamics.py",3273,0,"",python,selection_keyboard +125,807572,"models/dynamics.py",3273,0,"n",python,content +126,807574,"models/dynamics.py",3274,0,"",python,selection_keyboard +127,807665,"models/dynamics.py",3274,0,"e",python,content +128,807666,"models/dynamics.py",3275,0,"",python,selection_keyboard +129,807891,"models/dynamics.py",3275,0,"x",python,content +130,807892,"models/dynamics.py",3276,0,"",python,selection_keyboard +131,807933,"models/dynamics.py",3276,0,"t",python,content +132,807934,"models/dynamics.py",3277,0,"",python,selection_keyboard +133,808310,"models/dynamics.py",3277,0,"_",python,content +134,808311,"models/dynamics.py",3278,0,"",python,selection_keyboard +135,808542,"models/dynamics.py",3278,0,"t",python,content +136,808543,"models/dynamics.py",3279,0,"",python,selection_keyboard +137,808621,"models/dynamics.py",3279,0,"o",python,content +138,808622,"models/dynamics.py",3280,0,"",python,selection_keyboard +139,808720,"models/dynamics.py",3280,0,"k",python,content +140,808721,"models/dynamics.py",3281,0,"",python,selection_keyboard +141,808813,"models/dynamics.py",3281,0,"e",python,content +142,808814,"models/dynamics.py",3282,0,"",python,selection_keyboard +143,809049,"models/dynamics.py",3282,0,".",python,content +144,809050,"models/dynamics.py",3283,0,"",python,selection_keyboard +145,809406,"models/dynamics.py",3282,1,"",python,content +146,809568,"models/dynamics.py",3282,0,"n",python,content +147,809568,"models/dynamics.py",3283,0,"",python,selection_keyboard +148,809815,"models/dynamics.py",3283,0,".",python,content +149,809816,"models/dynamics.py",3284,0,"",python,selection_keyboard +150,809938,"models/dynamics.py",3284,0,"s",python,content +151,809939,"models/dynamics.py",3285,0,"",python,selection_keyboard +152,810347,"models/dynamics.py",3284,1,"",python,content +153,811400,"models/dynamics.py",3284,0,"s",python,content +154,811401,"models/dynamics.py",3285,0,"",python,selection_keyboard +155,811533,"models/dynamics.py",3285,0,"h",python,content +156,811535,"models/dynamics.py",3286,0,"",python,selection_keyboard +157,811670,"models/dynamics.py",3286,0,"a",python,content +158,811671,"models/dynamics.py",3287,0,"",python,selection_keyboard +159,811715,"models/dynamics.py",3287,0,"p",python,content 
+160,811716,"models/dynamics.py",3288,0,"",python,selection_keyboard +161,811901,"models/dynamics.py",3288,0,"e",python,content +162,811902,"models/dynamics.py",3289,0,"",python,selection_keyboard +163,812492,"models/dynamics.py",3288,0,"",python,selection_command +164,821189,"TERMINAL",0,0,"watch",,terminal_focus +165,825179,"TERMINAL",0,0,"salloc",,terminal_focus +166,832996,"models/dynamics.py",0,0,"",python,tab +167,834356,"models/dynamics.py",3239,0,"",python,selection_command +168,834658,"models/dynamics.py",3193,0,"",python,selection_command +169,834885,"models/dynamics.py",3134,0,"",python,selection_command +170,835001,"models/dynamics.py",3103,0,"",python,selection_command +171,835378,"models/dynamics.py",3074,31,"",python,content +172,835394,"models/dynamics.py",3082,0,"",python,selection_command +173,835618,"models/dynamics.py",3002,0,"",python,selection_command +174,835777,"models/dynamics.py",2971,0,"",python,selection_command +175,836101,"models/dynamics.py",2963,31,"",python,content +176,836106,"models/dynamics.py",2971,0,"",python,selection_command +177,836179,"models/dynamics.py",2897,0,"",python,selection_command +178,836298,"models/dynamics.py",2866,0,"",python,selection_command +179,836631,"models/dynamics.py",2858,31,"",python,content +180,836639,"models/dynamics.py",2866,0,"",python,selection_command +181,836798,"models/dynamics.py",2940,0,"",python,selection_command +182,836964,"models/dynamics.py",3020,0,"",python,selection_command +183,837099,"models/dynamics.py",3079,0,"",python,selection_command +184,837238,"models/dynamics.py",3125,0,"",python,selection_command +185,837370,"models/dynamics.py",3174,0,"",python,selection_command +186,837503,"models/dynamics.py",3206,0,"",python,selection_command +187,837720,"models/dynamics.py",3174,0,"",python,selection_command +188,837918,"models/dynamics.py",3125,0,"",python,selection_command +189,838674,"models/dynamics.py",3174,0,"",python,selection_command +190,838838,"models/dynamics.py",3206,0,"",python,selection_command +191,839220,"models/dynamics.py",3207,0,"",python,selection_command +192,998146,"models/dynamics.py",0,0,"",python,tab +193,1000957,"models/dynamics.py",2776,0,"",python,selection_mouse +194,1001190,"models/dynamics.py",2775,5,"batch",python,selection_mouse +195,1001325,"models/dynamics.py",2775,6,"batch[",python,selection_mouse +196,1001373,"models/dynamics.py",2775,19,"batch[""video_tokens",python,selection_mouse +197,1001573,"models/dynamics.py",2775,20,"batch[""video_tokens""",python,selection_mouse +198,1001756,"models/dynamics.py",2775,21,"batch[""video_tokens""]",python,selection_mouse +199,1001789,"models/dynamics.py",2775,22,"batch[""video_tokens""])",python,selection_mouse +200,1002040,"models/dynamics.py",2775,21,"batch[""video_tokens""]",python,selection_mouse +201,1128326,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"",shellscript,tab +202,1133223,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1212,0,"",shellscript,selection_mouse +203,1133725,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1066,0,"",shellscript,selection_mouse +204,1133773,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1065,0,"",shellscript,selection_command +205,1134241,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1086,0,"",shellscript,selection_mouse +206,1134259,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1085,0,"",shellscript,selection_command 
+207,1134756,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1118,0,"",shellscript,selection_mouse +208,1134806,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1117,0,"",shellscript,selection_command +209,1135277,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1167,0,"",shellscript,selection_mouse +210,1135289,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1166,0,"",shellscript,selection_command +211,1135759,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1118,0,"",shellscript,selection_mouse +212,1135773,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1117,0,"",shellscript,selection_command +213,1136290,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1167,0,"",shellscript,selection_mouse +214,1136291,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1166,0,"",shellscript,selection_command +215,1136790,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1118,0,"",shellscript,selection_mouse +216,1136791,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1117,0,"",shellscript,selection_command +217,1137323,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1130,0,"",shellscript,selection_mouse +218,1137324,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1129,0,"",shellscript,selection_command +219,1138173,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",993,0,"",shellscript,selection_mouse +220,1138174,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",992,0,"",shellscript,selection_command +221,1139273,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",971,0,"",shellscript,selection_mouse +222,1139423,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",967,6,"python",shellscript,selection_mouse +223,1139926,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1003,0,"",shellscript,selection_mouse +224,1140041,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1000,9,"save_ckpt",shellscript,selection_mouse +225,1140907,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1000,0,"",shellscript,selection_mouse +226,1143390,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",978,0,"",shellscript,selection_mouse +227,1144176,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",993,0,"\n",shellscript,content +228,1144690,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",994,0,"-",shellscript,content +229,1144691,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",995,0,"",shellscript,selection_keyboard +230,1145123,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",994,1,"",shellscript,content +231,1145277,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",994,0," ",shellscript,content +232,1145639,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",998,0,"-",shellscript,content +233,1145640,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",999,0,"",shellscript,selection_keyboard +234,1145808,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",999,0,"-",shellscript,content +235,1145809,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1000,0,"",shellscript,selection_keyboard +236,1148727,"models/dynamics.py",0,0,"",python,tab +237,1149293,"train_dynamics.py",0,0,"",python,tab +238,1153490,"train_dynamics.py",849,0,"",python,selection_mouse 
+239,1153625,"train_dynamics.py",840,12,"restore_ckpt",python,selection_mouse +240,1156061,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"",shellscript,tab +241,1157460,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1000,0,"restore_ckpt",shellscript,content +242,1158025,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1012,0," ",shellscript,content +243,1158026,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1013,0,"",shellscript,selection_keyboard +244,1158356,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1013,0,"\",shellscript,content +245,1158357,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1014,0,"",shellscript,selection_keyboard +246,1158708,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1013,0,"",shellscript,selection_command +247,1162639,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1344,0,"",shellscript,selection_mouse +248,1162790,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1336,18,"tokenizer_ckpt_dir",shellscript,selection_mouse +249,1163539,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1339,0,"",shellscript,selection_mouse +250,1164106,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1226,0,"",shellscript,selection_mouse +251,1164241,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1223,12,"slurm_job_id",shellscript,selection_mouse +252,1164873,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1032,0,"",shellscript,selection_mouse +253,1164890,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1031,0,"",shellscript,selection_command +254,1166673,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",794,0,"",shellscript,selection_mouse +255,1166674,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",793,0,"",shellscript,selection_command +256,1166807,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",780,14,"CHECKPOINT_DIR",shellscript,selection_mouse +257,1166823,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",781,13,"HECKPOINT_DIR",shellscript,selection_command +258,1167391,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",767,0,"",shellscript,selection_mouse +259,1167524,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",757,12,"slurm_job_id",shellscript,selection_mouse +260,1167757,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",747,22,"job_name/$slurm_job_id",shellscript,selection_mouse +261,1167806,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",745,24,"/$job_name/$slurm_job_id",shellscript,selection_mouse +262,1167825,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",734,35,"checkpoints/$job_name/$slurm_job_id",shellscript,selection_mouse +263,1167923,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",757,37,"slurm_job_id\nmkdir -p $CHECKPOINT_DIR",shellscript,selection_mouse +264,1168159,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",727,42,"ws_dir/checkpoints/$job_name/$slurm_job_id",shellscript,selection_mouse +265,1168723,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",727,0,"",shellscript,selection_mouse +266,1168723,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",727,6,"ws_dir",shellscript,selection_mouse 
+267,1168940,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",727,18,"ws_dir/checkpoints",shellscript,selection_mouse +268,1169023,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",727,19,"ws_dir/checkpoints/",shellscript,selection_mouse +269,1169072,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",727,28,"ws_dir/checkpoints/$job_name",shellscript,selection_mouse +270,1169140,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",727,29,"ws_dir/checkpoints/$job_name/",shellscript,selection_mouse +271,1169174,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",727,30,"ws_dir/checkpoints/$job_name/$",shellscript,selection_mouse +272,1169175,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",727,42,"ws_dir/checkpoints/$job_name/$slurm_job_id",shellscript,selection_mouse +273,1169739,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",766,0,"",shellscript,selection_mouse +274,1169740,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",757,12,"slurm_job_id",shellscript,selection_mouse +275,1169940,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",756,13,"$slurm_job_id",shellscript,selection_mouse +276,1169991,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",747,22,"job_name/$slurm_job_id",shellscript,selection_mouse +277,1170040,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",745,24,"/$job_name/$slurm_job_id",shellscript,selection_mouse +278,1170040,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",734,35,"checkpoints/$job_name/$slurm_job_id",shellscript,selection_mouse +279,1170173,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",733,36,"/checkpoints/$job_name/$slurm_job_id",shellscript,selection_mouse +280,1170175,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",727,42,"ws_dir/checkpoints/$job_name/$slurm_job_id",shellscript,selection_mouse +281,1170406,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",726,43,"$ws_dir/checkpoints/$job_name/$slurm_job_id",shellscript,selection_mouse +282,1170456,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",725,44,"=$ws_dir/checkpoints/$job_name/$slurm_job_id",shellscript,selection_mouse +283,1170873,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",726,43,"$ws_dir/checkpoints/$job_name/$slurm_job_id",shellscript,selection_mouse +284,1177273,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",726,0,"",shellscript,selection_mouse +285,1177690,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",728,0,"",shellscript,selection_mouse +286,1178406,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",769,0,"\n",shellscript,content +287,1179240,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",770,0,"C",shellscript,content +288,1179241,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",771,0,"",shellscript,selection_keyboard +289,1179343,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",771,0,"H",shellscript,content +290,1179343,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",772,0,"",shellscript,selection_keyboard +291,1179794,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",770,2,"CHECKPOINT_DIR",shellscript,content +292,1180523,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",784,0,"=",shellscript,content 
+293,1180524,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",785,0,"",shellscript,selection_keyboard +294,1192007,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",785,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared//checkpoints/interactive/3353884",shellscript,content +295,1193156,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",769,0,"",shellscript,selection_mouse +296,1193642,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",873,0,"",shellscript,selection_mouse +297,1202106,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",899,0,"",shellscript,selection_mouse +298,1202740,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",829,0,"",shellscript,selection_mouse +299,1202875,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",826,14,"jafa_ws_shared",shellscript,selection_mouse +300,1203008,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",770,104,"CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared//checkpoints/interactive/3353884\n",shellscript,selection_mouse +301,1205479,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",874,0,"",shellscript,selection_command +302,1205965,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",770,0,"",shellscript,selection_command +303,1206335,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",770,104,"",shellscript,content +304,1206538,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",795,0,"",shellscript,selection_command +305,1206724,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",796,0,"",shellscript,selection_command +306,1206874,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",943,0,"",shellscript,selection_command +307,1207033,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",944,0,"",shellscript,selection_command +308,1207163,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",961,0,"",shellscript,selection_command +309,1207296,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",962,0,"",shellscript,selection_command +310,1207455,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",994,0,"",shellscript,selection_command +311,1207875,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",994,21,"",shellscript,content +312,1207893,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",998,0,"",shellscript,selection_command +313,1209308,"TERMINAL",0,0,"bash",,terminal_focus +314,1210041,"TERMINAL",0,0,"salloc",,terminal_focus +315,1419783,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"",shellscript,tab +316,1422709,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",994,18,"",shellscript,content +317,1422723,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",998,0,"",shellscript,selection_command +318,1423873,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",1000,0,"save_ckpt \\n --",shellscript,content +319,1426089,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",710,0,"",shellscript,selection_mouse +320,1427526,"train_dynamics.py",0,0,"",python,tab +321,1548207,"train_dynamics.py",987,0,"",python,selection_mouse +322,1548223,"train_dynamics.py",986,0,"",python,selection_command +323,1570726,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"",shellscript,tab +324,1572274,"TERMINAL",0,0,"bash",,terminal_focus 
+325,1573156,"TERMINAL",0,0,"salloc",,terminal_focus +326,1575909,"train_dynamics.py",0,0,"",python,tab +327,1576876,"models/dynamics.py",0,0,"",python,tab +328,1579044,"models/dynamics.py",3134,0,"",python,selection_mouse +329,1579790,"models/dynamics.py",3135,0,"",python,selection_command +330,1579990,"models/dynamics.py",3135,0,"s",python,content +331,1579991,"models/dynamics.py",3136,0,"",python,selection_keyboard +332,1580441,"models/dynamics.py",3135,0,"",python,selection_command +333,1580589,"models/dynamics.py",3185,0,"",python,selection_command +334,1580799,"models/dynamics.py",3186,0,"",python,selection_command +335,1580956,"models/dynamics.py",3187,0,"",python,selection_command +336,1581106,"models/dynamics.py",3188,0,"",python,selection_command +337,1581189,"models/dynamics.py",3189,0,"",python,selection_command +338,1581339,"models/dynamics.py",3190,0,"",python,selection_command +339,1581457,"models/dynamics.py",3191,0,"",python,selection_command +340,1581773,"models/dynamics.py",3191,0,"s",python,content +341,1581774,"models/dynamics.py",3192,0,"",python,selection_keyboard +342,1581972,"models/dynamics.py",3191,0,"",python,selection_command +343,1585341,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"",shellscript,tab +344,1590324,"models/dynamics.py",0,0,"",python,tab +345,1592156,"models/dynamics.py",2777,0,"",python,selection_mouse +346,1592289,"models/dynamics.py",2775,5,"batch",python,selection_mouse +347,1592489,"models/dynamics.py",2775,7,"batch[""",python,selection_mouse +348,1592506,"models/dynamics.py",2775,19,"batch[""video_tokens",python,selection_mouse +349,1592922,"models/dynamics.py",2775,20,"batch[""video_tokens""",python,selection_mouse +350,1593456,"models/dynamics.py",2775,21,"batch[""video_tokens""]",python,selection_mouse +351,1737223,"TERMINAL",0,0,"bash",,terminal_focus +352,1738356,"TERMINAL",0,0,"salloc",,terminal_focus +353,1739092,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"",shellscript,tab +354,1740773,"TERMINAL",0,0,"bash",,terminal_focus +355,1741075,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"",shellscript,tab +356,1743659,"models/dynamics.py",0,0,"",python,tab +357,1745406,"models/dynamics.py",2989,0,"",python,selection_mouse +358,1761239,"models/dynamics.py",2776,0,"",python,selection_mouse +359,1761373,"models/dynamics.py",2775,5,"batch",python,selection_mouse +360,1762173,"models/dynamics.py",2778,0,"",python,selection_mouse +361,1762989,"models/dynamics.py",2678,0,"",python,selection_mouse +362,1763739,"models/dynamics.py",2664,0,"",python,selection_mouse +363,1763905,"models/dynamics.py",2658,8,"__call__",python,selection_mouse +364,2035389,"models/dynamics.py",2756,0,"",python,selection_mouse +365,2036089,"models/dynamics.py",3037,0,"",python,selection_mouse +366,2040455,"models/dynamics.py",2980,0,"",python,selection_mouse +367,2040889,"models/dynamics.py",3116,0,"",python,selection_mouse +368,2040908,"models/dynamics.py",3115,0,"",python,selection_command +369,2041663,"models/dynamics.py",3161,0,"",python,selection_command +370,2041994,"models/dynamics.py",3198,0,"",python,selection_command +371,2042173,"models/dynamics.py",3229,0,"",python,selection_command +372,2042325,"models/dynamics.py",3275,0,"",python,selection_command +373,2050373,"models/dynamics.py",3229,0,"",python,selection_command +374,2050571,"models/dynamics.py",3198,0,"",python,selection_command +375,2050815,"models/dynamics.py",3161,0,"",python,selection_command 
+376,2051572,"models/dynamics.py",3198,0,"",python,selection_command +377,2051780,"models/dynamics.py",3229,0,"",python,selection_command +378,2052105,"models/dynamics.py",3275,0,"",python,selection_command +379,2052355,"models/dynamics.py",3229,0,"",python,selection_command +380,2052523,"models/dynamics.py",3198,0,"",python,selection_command +381,2052692,"models/dynamics.py",3161,0,"",python,selection_command +382,2052832,"models/dynamics.py",3198,0,"",python,selection_command +383,2052955,"models/dynamics.py",3161,0,"",python,selection_command +384,2053069,"models/dynamics.py",3198,0,"",python,selection_command +385,2053234,"models/dynamics.py",3229,0,"",python,selection_command +386,2053407,"models/dynamics.py",3275,0,"",python,selection_command +387,2053539,"models/dynamics.py",3229,0,"",python,selection_command +388,2053603,"models/dynamics.py",3275,0,"",python,selection_command +389,2053726,"models/dynamics.py",3229,0,"",python,selection_command +390,2053809,"models/dynamics.py",3275,0,"",python,selection_command +391,2053911,"models/dynamics.py",3229,0,"",python,selection_command +392,2054135,"models/dynamics.py",3198,0,"",python,selection_command +393,2054355,"models/dynamics.py",3229,0,"",python,selection_command +394,2054707,"models/dynamics.py",3228,0,"",python,selection_command +395,2054858,"models/dynamics.py",3227,0,"",python,selection_command +396,2055080,"models/dynamics.py",3194,0,"",python,selection_command +397,2055442,"models/dynamics.py",3227,0,"",python,selection_command +398,2055662,"models/dynamics.py",3258,0,"",python,selection_command +399,2055767,"models/dynamics.py",3227,0,"",python,selection_command +400,2056012,"models/dynamics.py",3194,0,"",python,selection_command +401,2056285,"models/dynamics.py",3193,0,"",python,selection_command +402,2056428,"models/dynamics.py",3192,0,"",python,selection_command +403,2056644,"models/dynamics.py",3225,0,"",python,selection_command +404,2056822,"models/dynamics.py",3192,0,"",python,selection_command +405,2057039,"models/dynamics.py",3142,0,"",python,selection_command +406,2057121,"models/dynamics.py",3192,0,"",python,selection_command +407,2057272,"models/dynamics.py",3142,0,"",python,selection_command +408,2057373,"models/dynamics.py",3192,0,"",python,selection_command +409,2057773,"models/dynamics.py",3225,0,"",python,selection_command +410,2058005,"models/dynamics.py",3192,0,"",python,selection_command +411,2058155,"models/dynamics.py",3225,0,"",python,selection_command +412,2058306,"models/dynamics.py",3256,0,"",python,selection_command +413,2058472,"models/dynamics.py",3225,0,"",python,selection_command +414,2058556,"models/dynamics.py",3256,0,"",python,selection_command +415,2058772,"models/dynamics.py",3225,0,"",python,selection_command +416,2058938,"models/dynamics.py",3192,0,"",python,selection_command +417,2059039,"models/dynamics.py",3225,0,"",python,selection_command +418,2059238,"models/dynamics.py",3192,0,"",python,selection_command +419,2059324,"models/dynamics.py",3225,0,"",python,selection_command +420,2059455,"models/dynamics.py",3192,0,"",python,selection_command +421,2059555,"models/dynamics.py",3225,0,"",python,selection_command +422,2059638,"models/dynamics.py",3192,0,"",python,selection_command +423,2059822,"models/dynamics.py",3142,0,"",python,selection_command +424,2059888,"models/dynamics.py",3192,0,"",python,selection_command +425,2060039,"models/dynamics.py",3142,0,"",python,selection_command +426,2060088,"models/dynamics.py",3192,0,"",python,selection_command 
+427,2060272,"models/dynamics.py",3225,0,"",python,selection_command +428,2060406,"models/dynamics.py",3192,0,"",python,selection_command +429,2060655,"models/dynamics.py",3225,0,"",python,selection_command +430,2060822,"models/dynamics.py",3192,0,"",python,selection_command +431,2060938,"models/dynamics.py",3225,0,"",python,selection_command +432,2061107,"models/dynamics.py",3192,0,"",python,selection_command +433,2061174,"models/dynamics.py",3225,0,"",python,selection_command +434,2061307,"models/dynamics.py",3192,0,"",python,selection_command +435,2061355,"models/dynamics.py",3225,0,"",python,selection_command +436,2061489,"models/dynamics.py",3192,0,"",python,selection_command +437,2061555,"models/dynamics.py",3225,0,"",python,selection_command +438,2061688,"models/dynamics.py",3192,0,"",python,selection_command +439,2061772,"models/dynamics.py",3225,0,"",python,selection_command +440,2061890,"models/dynamics.py",3192,0,"",python,selection_command +441,2061988,"models/dynamics.py",3225,0,"",python,selection_command +442,2062105,"models/dynamics.py",3192,0,"",python,selection_command +443,2062189,"models/dynamics.py",3225,0,"",python,selection_command +444,2062339,"models/dynamics.py",3192,0,"",python,selection_command +445,2062341,"models/dynamics.py",3225,0,"",python,selection_command +446,2062505,"models/dynamics.py",3192,0,"",python,selection_command +447,2062556,"models/dynamics.py",3225,0,"",python,selection_command +448,2062705,"models/dynamics.py",3192,0,"",python,selection_command +449,2062772,"models/dynamics.py",3225,0,"",python,selection_command +450,2062906,"models/dynamics.py",3192,0,"",python,selection_command +451,2062989,"models/dynamics.py",3225,0,"",python,selection_command +452,2063139,"models/dynamics.py",3192,0,"",python,selection_command +453,2063239,"models/dynamics.py",3225,0,"",python,selection_command +454,2063405,"models/dynamics.py",3256,0,"",python,selection_command +455,2063555,"models/dynamics.py",3225,0,"",python,selection_command +456,2063872,"models/dynamics.py",3192,0,"",python,selection_command +457,2064072,"models/dynamics.py",3225,0,"",python,selection_command +458,2064207,"models/dynamics.py",3192,0,"",python,selection_command +459,2064439,"models/dynamics.py",3142,0,"",python,selection_command +460,2064505,"models/dynamics.py",3192,0,"",python,selection_command +461,2064672,"models/dynamics.py",3142,0,"",python,selection_command +462,2064772,"models/dynamics.py",3192,0,"",python,selection_command +463,2064955,"models/dynamics.py",3225,0,"",python,selection_command +464,2065205,"models/dynamics.py",3192,0,"",python,selection_command +465,2065390,"models/dynamics.py",3225,0,"",python,selection_command +466,2065538,"models/dynamics.py",3192,0,"",python,selection_command +467,2065672,"models/dynamics.py",3225,0,"",python,selection_command +468,2065806,"models/dynamics.py",3192,0,"",python,selection_command +469,2065905,"models/dynamics.py",3225,0,"",python,selection_command +470,2066040,"models/dynamics.py",3192,0,"",python,selection_command +471,2066155,"models/dynamics.py",3225,0,"",python,selection_command +472,2066272,"models/dynamics.py",3192,0,"",python,selection_command +473,2066339,"models/dynamics.py",3225,0,"",python,selection_command +474,2066505,"models/dynamics.py",3192,0,"",python,selection_command +475,2066590,"models/dynamics.py",3225,0,"",python,selection_command +476,2066772,"models/dynamics.py",3192,0,"",python,selection_command +477,2066822,"models/dynamics.py",3225,0,"",python,selection_command 
+478,2067005,"models/dynamics.py",3192,0,"",python,selection_command +479,2067056,"models/dynamics.py",3225,0,"",python,selection_command +480,2067655,"models/dynamics.py",3192,0,"",python,selection_command +481,2067805,"models/dynamics.py",3225,0,"",python,selection_command +482,2067938,"models/dynamics.py",3192,0,"",python,selection_command +483,2068072,"models/dynamics.py",3225,0,"",python,selection_command +484,2068291,"models/dynamics.py",3192,0,"",python,selection_command +485,2068422,"models/dynamics.py",3225,0,"",python,selection_command +486,2068639,"models/dynamics.py",3192,0,"",python,selection_command +487,2068739,"models/dynamics.py",3225,0,"",python,selection_command +488,2068922,"models/dynamics.py",3192,0,"",python,selection_command +489,2069007,"models/dynamics.py",3225,0,"",python,selection_command +490,2084825,"TERMINAL",0,0,"salloc",,terminal_focus +491,2152144,"models/dynamics.py",0,0,"",python,tab +492,2152962,"models/dynamics.py",0,0,"",python,tab +493,2186125,"train_dynamics.py",0,0,"",python,tab +494,2190324,"train_dynamics.py",2663,0,"",python,selection_mouse +495,2190488,"train_dynamics.py",2662,2,"[""",python,selection_mouse +496,2193388,"train_dynamics.py",2782,0,"",python,selection_mouse +497,2193538,"train_dynamics.py",2776,7,"outputs",python,selection_mouse +498,2194305,"train_dynamics.py",2783,0,"",python,selection_mouse +499,2194306,"train_dynamics.py",2776,7,"outputs",python,selection_mouse +500,2194505,"train_dynamics.py",2776,8,"outputs[",python,selection_mouse +501,2194506,"train_dynamics.py",2776,9,"outputs[""",python,selection_mouse +502,2194555,"train_dynamics.py",2776,21,"outputs[""video_tokens",python,selection_mouse +503,2194672,"train_dynamics.py",2776,22,"outputs[""video_tokens""",python,selection_mouse +504,2194722,"train_dynamics.py",2776,23,"outputs[""video_tokens""]",python,selection_mouse +505,2195122,"train_dynamics.py",2799,0,"",python,selection_mouse +506,2195122,"train_dynamics.py",2798,0,"",python,selection_command +507,2195621,"train_dynamics.py",2799,0,"",python,selection_mouse +508,2195623,"train_dynamics.py",2798,0,"",python,selection_command +509,2195790,"train_dynamics.py",2798,1,"]",python,selection_mouse +510,2195791,"train_dynamics.py",2799,0,"",python,selection_command +511,2195838,"train_dynamics.py",2798,1,"]",python,selection_mouse +512,2195839,"train_dynamics.py",2797,2,"""]",python,selection_mouse +513,2195888,"train_dynamics.py",2799,1,"\n",python,selection_mouse +514,2196305,"train_dynamics.py",2785,14,"video_tokens""]",python,selection_mouse +515,2196372,"train_dynamics.py",2784,15,"""video_tokens""]",python,selection_mouse +516,2196422,"train_dynamics.py",2783,16,"[""video_tokens""]",python,selection_mouse +517,2196471,"train_dynamics.py",2782,17,"s[""video_tokens""]",python,selection_mouse +518,2196522,"train_dynamics.py",2781,18,"ts[""video_tokens""]",python,selection_mouse +519,2196588,"train_dynamics.py",2780,19,"uts[""video_tokens""]",python,selection_mouse +520,2196622,"train_dynamics.py",2779,20,"puts[""video_tokens""]",python,selection_mouse +521,2196722,"train_dynamics.py",2778,21,"tputs[""video_tokens""]",python,selection_mouse +522,2196772,"train_dynamics.py",2777,22,"utputs[""video_tokens""]",python,selection_mouse +523,2197223,"train_dynamics.py",2777,0,"",python,selection_mouse +524,2197224,"train_dynamics.py",2776,7,"outputs",python,selection_mouse +525,2197455,"train_dynamics.py",2776,8,"outputs[",python,selection_mouse 
+526,2197472,"train_dynamics.py",2776,21,"outputs[""video_tokens",python,selection_mouse +527,2197605,"train_dynamics.py",2776,22,"outputs[""video_tokens""",python,selection_mouse +528,2197607,"train_dynamics.py",2776,23,"outputs[""video_tokens""]",python,selection_mouse +529,2199005,"train_dynamics.py",2454,0,"",python,selection_mouse +530,2215026,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",0,0,"",python,tab +531,2216058,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"",shellscript,tab +532,2453890,"models/dynamics.py",0,0,"",python,tab +533,2453892,"models/dynamics.py",2772,0,"",python,selection_mouse +534,2454422,"models/dynamics.py",3264,0,"",python,selection_mouse +535,2454888,"models/dynamics.py",3199,0,"",python,selection_mouse +536,2454890,"models/dynamics.py",3198,0,"",python,selection_command +537,2455906,"models/dynamics.py",3199,0,"\n ",python,content +538,2456655,"models/dynamics.py",3208,0,"u",python,content +539,2456656,"models/dynamics.py",3209,0,"",python,selection_keyboard +540,2456774,"models/dynamics.py",3209,0,"n",python,content +541,2456775,"models/dynamics.py",3210,0,"",python,selection_keyboard +542,2457106,"models/dynamics.py",3210,0,"i",python,content +543,2457107,"models/dynamics.py",3211,0,"",python,selection_keyboard +544,2457242,"models/dynamics.py",3211,0,"q",python,content +545,2457244,"models/dynamics.py",3212,0,"",python,selection_keyboard +546,2457406,"models/dynamics.py",3212,0,"u",python,content +547,2457407,"models/dynamics.py",3213,0,"",python,selection_keyboard +548,2457505,"models/dynamics.py",3213,0,"e",python,content +549,2457506,"models/dynamics.py",3214,0,"",python,selection_keyboard +550,2458021,"models/dynamics.py",3214,0," ",python,content +551,2458022,"models/dynamics.py",3215,0,"",python,selection_keyboard +552,2458238,"models/dynamics.py",3215,0,"=",python,content +553,2458240,"models/dynamics.py",3216,0,"",python,selection_keyboard +554,2458956,"models/dynamics.py",3216,0," ",python,content +555,2458957,"models/dynamics.py",3217,0,"",python,selection_keyboard +556,2459871,"models/dynamics.py",3217,0,"np.unique(batch[""video_tokens""])",python,content +557,2461321,"models/dynamics.py",3217,0,"",python,selection_mouse +558,2461922,"models/dynamics.py",3217,0,"j",python,content +559,2461923,"models/dynamics.py",3218,0,"",python,selection_keyboard +560,2462655,"models/dynamics.py",3217,0,"",python,selection_command +561,2463490,"models/dynamics.py",3199,0,"",python,selection_mouse +562,2463490,"models/dynamics.py",3198,0,"",python,selection_command +563,2464088,"models/dynamics.py",3281,0,"",python,selection_mouse +564,2464089,"models/dynamics.py",3280,0,"",python,selection_command +565,2486923,"models/dynamics.py",0,0,"",python,tab +566,2486924,"models/dynamics.py",2768,0,"",python,selection_mouse +567,2489404,"models/dynamics.py",2779,0,"",python,selection_mouse +568,2489973,"models/dynamics.py",2775,5,"batch",python,selection_mouse +569,2490755,"models/dynamics.py",2779,0,"",python,selection_mouse +570,2490756,"models/dynamics.py",2775,5,"batch",python,selection_mouse +571,2491557,"models/dynamics.py",2776,0,"",python,selection_mouse +572,2491557,"models/dynamics.py",2775,5,"batch",python,selection_mouse +573,2492240,"models/dynamics.py",2776,0,"",python,selection_mouse +574,2492772,"models/dynamics.py",2775,5,"batch",python,selection_mouse +575,2493789,"models/dynamics.py",2779,0,"",python,selection_mouse +576,2493972,"models/dynamics.py",2775,5,"batch",python,selection_mouse 
+577,2494905,"models/dynamics.py",2777,0,"",python,selection_mouse +578,2495655,"models/dynamics.py",2775,5,"batch",python,selection_mouse +579,2496721,"models/dynamics.py",2777,0,"",python,selection_mouse +580,2497438,"models/dynamics.py",2775,5,"batch",python,selection_mouse +581,2498324,"models/dynamics.py",2777,0,"",python,selection_mouse +582,2499738,"models/dynamics.py",2775,5,"batch",python,selection_mouse +583,2500922,"models/dynamics.py",2391,0,"",python,selection_mouse +584,2500922,"models/dynamics.py",2390,0,"",python,selection_command +585,2501586,"models/dynamics.py",2420,0,"",python,selection_mouse +586,2501722,"models/dynamics.py",2417,5,"False",python,selection_mouse +587,2502405,"models/dynamics.py",2411,0,"",python,selection_mouse +588,2502541,"models/dynamics.py",2404,12,"spacial_bert",python,selection_mouse +589,2506188,"models/dynamics.py",2191,0,"",python,selection_mouse +590,2506592,"utils/nn.py",0,0,"import math\nfrom typing import Dict, Tuple\n\nfrom flax import linen as nn\nimport jax\nimport jax.numpy as jnp\n\n\nclass PositionalEncoding(nn.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n d_model: int # Hidden dimensionality of the input.\n max_len: int = 5000 # Maximum length of a sequence to expect.\n\n def setup(self):\n # Create matrix of [SeqLen, HiddenDim] representing the positional encoding for max_len inputs\n self.pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def __call__(self, x):\n x = x + self.pe[: x.shape[2]]\n return x\n\n# class STBlock2(nn.Module):\n # dim: int\n # num_heads: int\n # dropout: float\n # param_dtype: jnp.dtype\n # dtype: jnp.dtype\n\n # @nn.remat\n # @nn.compact\n # def __call__(self, x: jax.Array) -> jax.Array:\n # # --- Spatial attention ---\n # z = PositionalEncoding(self.dim)(x)\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # causal_mask = jnp.tri(z.shape[-2])\n # z = nn.MultiHeadAttention(\n # num_heads=self.num_heads,\n # qkv_features=self.dim,\n # dropout_rate=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z, mask=causal_mask)\n # x = x + z\n\n # # --- Temporal attention ---\n # x = x.swapaxes(1, 2)\n # z = PositionalEncoding(self.dim)(x)\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # causal_mask = jnp.tri(z.shape[-2])\n # z = nn.MultiHeadAttention(\n # num_heads=self.num_heads,\n # qkv_features=self.dim,\n # dropout_rate=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z, mask=causal_mask)\n # x = x + z\n # x = x.swapaxes(1, 2)\n\n # # --- Feedforward ---\n # z = nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n # # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
Usually, FFN hidden dimension is 4x model_dim\n # z = nn.Dense(\n # self.dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(z)\n # z = nn.gelu(z)\n # x = x + z\n\n # return x\n\n# class CausalTransformer(nn.Module):\n # model_dim: int\n # out_dim: int\n # num_blocks: int\n # num_heads: int\n # dropout: float\n # param_dtype: jnp.dtype\n # dtype: jnp.dtype\n\n # @nn.compact\n # def __call__(self, x: jax.Array) -> jax.Array:\n # # Input projection and normalization\n # x = nn.Sequential(\n # [\n # nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # nn.Dense(self.model_dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # nn.LayerNorm(\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # ),\n # ]\n # )(x)\n # # Causal transformer blocks\n # for _ in range(self.num_blocks):\n # x = STBlock2(\n # dim=self.model_dim,\n # num_heads=self.num_heads,\n # dropout=self.dropout,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n\n # # Output projection\n # x = nn.Dense(\n # self.out_dim,\n # param_dtype=self.param_dtype,\n # dtype=self.dtype,\n # )(x)\n # return x # (B, T, E)\n\n\nclass STBlock(nn.Module):\n dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n spacial_bert: bool = True\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n spacial_mask = None if self.spacial_bert else jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=spacial_mask)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n # FIXME (f.srambical): Here, the attention hidden dimension is the same as the FFN's. 
Usually, FFN hidden dimension is 4x model_dim\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nn.Module):\n model_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n spacial_bert: bool = True\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = STBlock(\n dim=self.model_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n spacial_bert=self.spacial_bert,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\ndef normalize(x):\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nn.Module):\n latent_dim: int\n num_latents: int\n dropout: float\n\n def setup(self):\n self.codebook = normalize(\n self.param(\n ""codebook"",\n nn.initializers.lecun_uniform(),\n (self.num_latents, self.latent_dim),\n )\n )\n self.drop = nn.Dropout(self.dropout, deterministic=False)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n codebook = normalize(self.codebook)\n distance = -jnp.matmul(x, codebook.T)\n if training:\n dropout_key = self.make_rng(""dropout"")\n distance = self.drop(distance, rng=dropout_key)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array):\n return self.codebook[indices]\n",python,tab +591,2508910,"models/dynamics.py",0,0,"",python,tab +592,2517959,"models/tokenizer.py",0,0,"from typing import Dict, Any, Tuple\n\nimport flax.linen as nn\nimport jax.numpy as jnp\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass TokenizerVQVAE(nn.Module):\n """"""ST-ViVit VQ-VAE""""""\n\n in_dim: int\n model_dim: int\n latent_dim: int\n num_latents: int\n patch_size: int\n num_blocks: int\n num_heads: int\n dropout: float\n codebook_dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n\n def setup(self):\n self.encoder = STTransformer(\n self.model_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n )\n self.out_dim = self.in_dim * self.patch_size**2\n self.decoder = STTransformer(\n self.model_dim,\n self.out_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n recon = self.decoder(outputs[""z_q""]) # (B, T, H_down * W_down, C)\n recon = recon.astype(jnp.float32)\n recon = nn.sigmoid(recon)\n recon = recon.astype(self.dtype)\n outputs[""recon""] = unpatchify(recon, 
self.patch_size, H, W)\n return outputs\n\n def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess + encode ---\n B, T = videos.shape[:2]\n x = patchify(videos, self.patch_size)\n N = x.shape[2]\n x = self.encoder(x) # (B, T, N, E)\n\n # --- Vector quantize ---\n x = x.reshape(B * T * N, self.latent_dim)\n z_q, z, emb, indices = self.vq(x, training)\n z_q = z_q.reshape(B, T, N, self.latent_dim)\n indices = indices.reshape(B, T, N)\n return dict(z_q=z_q, z=z, emb=emb, indices=indices)\n\n def decode(self, indices: Any, video_hw: Tuple[int, int]):\n z = self.vq.codebook[indices]\n recon = self.decoder(z)\n recon = recon.astype(jnp.float32)\n recon = nn.sigmoid(recon)\n recon = recon.astype(self.dtype)\n return unpatchify(recon, self.patch_size, *video_hw)\n",python,tab +593,2522138,"models/tokenizer.py",533,0,"",python,selection_mouse +594,2522525,"utils/nn.py",0,0,"",python,tab +595,2523974,"utils/nn.py",6216,0,"",python,selection_mouse +596,2524121,"utils/nn.py",6209,12,"spacial_bert",python,selection_mouse +597,2525108,"utils/nn.py",6233,0,"",python,selection_mouse +598,2525255,"utils/nn.py",6230,4,"True",python,selection_mouse +599,2571659,"utils/nn.py",0,0,"",python,tab +600,2571660,"utils/nn.py",4677,0,"",python,selection_mouse +601,2571755,"utils/nn.py",4676,4,"None",python,selection_mouse +602,2572371,"utils/nn.py",4690,0,"",python,selection_mouse +603,2572523,"utils/nn.py",4689,12,"spacial_bert",python,selection_mouse +604,2573124,"utils/nn.py",4686,0,"",python,selection_mouse +605,2573254,"utils/nn.py",4684,4,"self",python,selection_mouse +606,2573522,"utils/nn.py",4684,17,"self.spacial_bert",python,selection_mouse +607,2628225,"models/tokenizer.py",0,0,"",python,tab +608,2657909,"models/dynamics.py",0,0,"",python,tab +609,2660222,"TERMINAL",0,0,"bash",,terminal_focus +610,2660957,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"",shellscript,tab +611,2662525,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",0,0,"",python,tab +612,2670026,"train_dynamics.py",0,0,"",python,tab +613,2670962,"train_dynamics.py",0,0,"",python,tab +614,2674158,"TERMINAL",0,0,"salloc",,terminal_focus +615,2677223,"train_dynamics.py",0,0,"",python,tab +616,2677224,"train_dynamics.py",2406,0,"",python,selection_mouse +617,2677338,"train_dynamics.py",2404,6,"videos",python,selection_mouse +618,2677904,"train_dynamics.py",2416,0,"",python,selection_mouse +619,2678023,"train_dynamics.py",2415,6,"inputs",python,selection_mouse +620,2678588,"train_dynamics.py",2433,0,"",python,selection_mouse +621,2678788,"train_dynamics.py",2432,6,"astype",python,selection_mouse +622,2682273,"train_dynamics.py",2744,0,"",python,selection_mouse +623,2682440,"train_dynamics.py",2738,7,"outputs",python,selection_mouse +624,2682971,"train_dynamics.py",2780,0,"",python,selection_mouse +625,2683106,"train_dynamics.py",2776,7,"outputs",python,selection_mouse +626,2683322,"train_dynamics.py",2776,21,"outputs[""video_tokens",python,selection_mouse +627,2683471,"train_dynamics.py",2776,22,"outputs[""video_tokens""",python,selection_mouse +628,2683472,"train_dynamics.py",2776,23,"outputs[""video_tokens""]",python,selection_mouse +629,2686707,"train_dynamics.py",2799,0,"",python,selection_mouse +630,2686707,"train_dynamics.py",2798,0,"",python,selection_command +631,2686871,"train_dynamics.py",2799,0,"",python,selection_mouse +632,2686873,"train_dynamics.py",2798,0,"",python,selection_command 
+633,2687088,"train_dynamics.py",2798,1,"]",python,selection_mouse +634,2687088,"train_dynamics.py",2799,0,"",python,selection_command +635,2687104,"train_dynamics.py",2798,1,"]",python,selection_mouse +636,2687138,"train_dynamics.py",2785,14,"video_tokens""]",python,selection_mouse +637,2687238,"train_dynamics.py",2783,16,"[""video_tokens""]",python,selection_mouse +638,2687271,"train_dynamics.py",2776,23,"outputs[""video_tokens""]",python,selection_mouse +639,2687988,"train_dynamics.py",2777,0,"",python,selection_mouse +640,2687989,"train_dynamics.py",2776,7,"outputs",python,selection_mouse +641,2688238,"train_dynamics.py",2776,8,"outputs[",python,selection_mouse +642,2688254,"train_dynamics.py",2776,21,"outputs[""video_tokens",python,selection_mouse +643,2688355,"train_dynamics.py",2776,22,"outputs[""video_tokens""",python,selection_mouse +644,2688371,"train_dynamics.py",2776,23,"outputs[""video_tokens""]",python,selection_mouse +645,2688954,"train_dynamics.py",2799,0,"",python,selection_mouse +646,2688955,"train_dynamics.py",2798,0,"",python,selection_command +647,2689104,"train_dynamics.py",2799,0,"",python,selection_mouse +648,2689106,"train_dynamics.py",2798,0,"",python,selection_command +649,2689288,"train_dynamics.py",2798,1,"]",python,selection_mouse +650,2689289,"train_dynamics.py",2799,0,"",python,selection_command +651,2689337,"train_dynamics.py",2797,2,"""]",python,selection_mouse +652,2689355,"train_dynamics.py",2785,14,"video_tokens""]",python,selection_mouse +653,2689471,"train_dynamics.py",2784,15,"""video_tokens""]",python,selection_mouse +654,2689488,"train_dynamics.py",2783,16,"[""video_tokens""]",python,selection_mouse +655,2689504,"train_dynamics.py",2776,23,"outputs[""video_tokens""]",python,selection_mouse +656,2690338,"train_dynamics.py",2781,0,"",python,selection_mouse +657,2690339,"train_dynamics.py",2776,7,"outputs",python,selection_mouse +658,2690655,"train_dynamics.py",2776,9,"outputs[""",python,selection_mouse +659,2690656,"train_dynamics.py",2776,21,"outputs[""video_tokens",python,selection_mouse +660,2690956,"train_dynamics.py",2776,22,"outputs[""video_tokens""",python,selection_mouse +661,2690990,"train_dynamics.py",2776,23,"outputs[""video_tokens""]",python,selection_mouse +662,2691488,"train_dynamics.py",2799,0,"",python,selection_mouse +663,2691489,"train_dynamics.py",2798,0,"",python,selection_command +664,2691638,"train_dynamics.py",2799,0,"",python,selection_mouse +665,2691639,"train_dynamics.py",2798,0,"",python,selection_command +666,2691838,"train_dynamics.py",2798,1,"]",python,selection_mouse +667,2691839,"train_dynamics.py",2798,0,"",python,selection_mouse +668,2691840,"train_dynamics.py",2799,0,"",python,selection_command +669,2691841,"train_dynamics.py",2797,2,"""]",python,selection_mouse +670,2691854,"train_dynamics.py",2785,14,"video_tokens""]",python,selection_mouse +671,2691939,"train_dynamics.py",2783,16,"[""video_tokens""]",python,selection_mouse +672,2691988,"train_dynamics.py",2776,23,"outputs[""video_tokens""]",python,selection_mouse +673,2692604,"train_dynamics.py",2778,0,"",python,selection_mouse +674,2692605,"train_dynamics.py",2776,7,"outputs",python,selection_mouse +675,2692838,"train_dynamics.py",2776,9,"outputs[""",python,selection_mouse +676,2692871,"train_dynamics.py",2776,21,"outputs[""video_tokens",python,selection_mouse +677,2692888,"train_dynamics.py",2776,24,"outputs[""video_tokens""]\n",python,selection_mouse +678,2693439,"train_dynamics.py",2776,23,"outputs[""video_tokens""]",python,selection_mouse 
+679,2693888,"train_dynamics.py",2799,0,"",python,selection_mouse +680,2693889,"train_dynamics.py",2798,0,"",python,selection_command +681,2694039,"train_dynamics.py",2799,0,"",python,selection_mouse +682,2694041,"train_dynamics.py",2798,0,"",python,selection_command +683,2694221,"train_dynamics.py",2798,1,"]",python,selection_mouse +684,2694222,"train_dynamics.py",2799,0,"",python,selection_command +685,2694254,"train_dynamics.py",2797,2,"""]",python,selection_mouse +686,2694271,"train_dynamics.py",2785,14,"video_tokens""]",python,selection_mouse +687,2694373,"train_dynamics.py",2799,1,"\n",python,selection_mouse +688,2694938,"train_dynamics.py",2776,23,"outputs[""video_tokens""]",python,selection_mouse +689,2695721,"train_dynamics.py",2778,0,"",python,selection_mouse +690,2695722,"train_dynamics.py",2776,7,"outputs",python,selection_mouse +691,2695954,"train_dynamics.py",2776,24,"outputs[""video_tokens""]\n",python,selection_mouse +692,2697005,"train_dynamics.py",2800,0,"",python,selection_mouse +693,2697271,"train_dynamics.py",2799,1,"\n",python,selection_mouse +694,2697305,"train_dynamics.py",2798,2,"]\n",python,selection_mouse +695,2697338,"train_dynamics.py",2797,3,"""]\n",python,selection_mouse +696,2697840,"train_dynamics.py",2797,0,"",python,selection_mouse +697,2697841,"train_dynamics.py",2785,12,"video_tokens",python,selection_mouse +698,2698004,"train_dynamics.py",2762,38," targets = outputs[""video_tokens""]\n",python,selection_mouse +699,2698305,"train_dynamics.py",2762,39," targets = outputs[""video_tokens""]\n\n",python,selection_mouse +700,2698704,"train_dynamics.py",2800,0,"",python,selection_mouse +701,2699408,"train_dynamics.py",2780,0,"",python,selection_mouse +702,2699572,"train_dynamics.py",2776,7,"outputs",python,selection_mouse +703,2699771,"train_dynamics.py",2776,9,"outputs[""",python,selection_mouse +704,2699774,"train_dynamics.py",2776,21,"outputs[""video_tokens",python,selection_mouse +705,2699871,"train_dynamics.py",2776,22,"outputs[""video_tokens""",python,selection_mouse +706,2699904,"train_dynamics.py",2776,23,"outputs[""video_tokens""]",python,selection_mouse +707,2720673,"train_dynamics.py",3076,0,"",python,selection_mouse +708,2720688,"train_dynamics.py",3075,0,"",python,selection_command +709,2727027,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"",shellscript,tab +710,2743841,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",0,0,"",shellscript,tab +711,2751073,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch",0,0,"",shellscript,tab +712,2755209,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",0,0,"",shellscript,tab +713,2813260,"models/dynamics.py",0,0,"",python,tab +714,2817587,"models/dynamics.py",3180,0,"",python,selection_mouse +715,2818204,"models/dynamics.py",3178,0,"",python,selection_mouse +716,2848740,"models/dynamics.py",0,0,"",python,tab +717,2848741,"models/dynamics.py",2777,0,"",python,selection_mouse +718,2848871,"models/dynamics.py",2775,5,"batch",python,selection_mouse +719,2848987,"models/dynamics.py",2775,6,"batch[",python,selection_mouse +720,2849021,"models/dynamics.py",2775,19,"batch[""video_tokens",python,selection_mouse +721,2849222,"models/dynamics.py",2775,20,"batch[""video_tokens""",python,selection_mouse +722,2849255,"models/dynamics.py",2775,21,"batch[""video_tokens""]",python,selection_mouse +723,2874940,"models/dynamics.py",0,0,"",python,tab +724,2874940,"models/dynamics.py",3128,0,"",python,selection_mouse 
+725,2875055,"models/dynamics.py",3125,11,"next_tokens",python,selection_mouse +726,2895338,"models/dynamics.py",0,0,"",python,tab +727,2895339,"models/dynamics.py",2648,0,"",python,selection_mouse +728,2895354,"models/dynamics.py",2647,0,"",python,selection_command +729,2902473,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",0,0,"",shellscript,tab +730,2905521,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",822,0,"",shellscript,selection_mouse +731,2905688,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",821,4,"work",shellscript,selection_mouse +732,2905837,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",796,147,"tokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\n",shellscript,selection_mouse +733,2907888,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",971,0,"",shellscript,selection_mouse +734,2908788,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",993,0,"\n",shellscript,content +735,2909621,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",994,0," ",shellscript,content +736,2910554,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",998,0,"-",shellscript,content +737,2910555,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",999,0,"",shellscript,selection_keyboard +738,2910689,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",999,0,"-",shellscript,content +739,2910690,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1000,0,"",shellscript,selection_keyboard +740,2914227,"train_dynamics.py",0,0,"",python,tab +741,2922571,"train_dynamics.py",845,0,"",python,selection_mouse +742,2922704,"train_dynamics.py",840,12,"restore_ckpt",python,selection_mouse +743,2925307,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",0,0,"",shellscript,tab +744,2925904,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1000,0,"restore_ckpt",shellscript,content +745,2926555,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1012,0," ",shellscript,content +746,2926556,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1013,0,"",shellscript,selection_keyboard +747,2926854,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1013,0,"\",shellscript,content +748,2926855,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1014,0,"",shellscript,selection_keyboard +749,2927324,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1013,0,"",shellscript,selection_command +750,2929321,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1125,0,"",shellscript,selection_mouse +751,2930906,"train_dynamics.py",0,0,"",python,tab +752,2930907,"train_dynamics.py",1216,0,"",python,selection_mouse +753,2951925,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"",shellscript,tab +754,2953623,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",0,0,"",shellscript,tab +755,2956721,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",740,0,"",shellscript,selection_mouse +756,2958606,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",769,0,"\n",shellscript,content 
+757,2959221,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"C",shellscript,content +758,2959222,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",771,0,"",shellscript,selection_keyboard +759,2959271,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",771,0,"H",shellscript,content +760,2959272,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",772,0,"",shellscript,selection_keyboard +761,2959704,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,2,"CHECKPOINT_DIR",shellscript,content +762,2960388,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",784,0,"=",shellscript,content +763,2960389,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",785,0,"",shellscript,selection_keyboard +764,2961388,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",785,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared//checkpoints/interactive/3353884",shellscript,content +765,2962056,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",872,0,"",shellscript,selection_command +766,3048609,"models/dynamics.py",0,0,"",python,tab +767,3050387,"models/dynamics.py",2778,0,"",python,selection_mouse +768,3050555,"models/dynamics.py",2775,5,"batch",python,selection_mouse +769,3082485,"models/dynamics.py",0,0,"",python,tab +770,3082486,"models/dynamics.py",3183,0,"",python,selection_mouse +771,3082589,"models/dynamics.py",3181,11,"next_tokens",python,selection_mouse +772,3165491,"models/dynamics.py",0,0,"",python,tab +773,3165492,"models/dynamics.py",2777,0,"",python,selection_mouse +774,3165687,"models/dynamics.py",2775,5,"batch",python,selection_mouse +775,3165753,"models/dynamics.py",2775,19,"batch[""video_tokens",python,selection_mouse +776,3166470,"models/dynamics.py",2796,0,"",python,selection_mouse +777,3166670,"models/dynamics.py",2795,1,"]",python,selection_mouse +778,3166687,"models/dynamics.py",2793,3,"s""]",python,selection_mouse +779,3166687,"models/dynamics.py",2791,5,"ens""]",python,selection_mouse +780,3166688,"models/dynamics.py",2790,6,"kens""]",python,selection_mouse +781,3166720,"models/dynamics.py",2788,8,"tokens""]",python,selection_mouse +782,3166755,"models/dynamics.py",2787,9,"_tokens""]",python,selection_mouse +783,3166755,"models/dynamics.py",2785,11,"eo_tokens""]",python,selection_mouse +784,3166803,"models/dynamics.py",2784,12,"deo_tokens""]",python,selection_mouse +785,3166853,"models/dynamics.py",2783,13,"ideo_tokens""]",python,selection_mouse +786,3166855,"models/dynamics.py",2782,14,"video_tokens""]",python,selection_mouse +787,3166855,"models/dynamics.py",2781,15,"""video_tokens""]",python,selection_mouse +788,3166855,"models/dynamics.py",2780,16,"[""video_tokens""]",python,selection_mouse +789,3166903,"models/dynamics.py",2779,17,"h[""video_tokens""]",python,selection_mouse +790,3166954,"models/dynamics.py",2778,18,"ch[""video_tokens""]",python,selection_mouse +791,3167022,"models/dynamics.py",2777,19,"tch[""video_tokens""]",python,selection_mouse +792,3167320,"models/dynamics.py",2776,20,"atch[""video_tokens""]",python,selection_mouse +793,3350021,"models/dynamics.py",0,0,"",python,tab +794,3350155,"models/dynamics.py",2787,0,"",python,selection_mouse +795,3350156,"models/dynamics.py",2782,12,"video_tokens",python,selection_mouse +796,3350853,"models/dynamics.py",2775,0,"",python,selection_mouse 
+797,3350988,"models/dynamics.py",2775,5,"batch",python,selection_mouse +798,3351239,"models/dynamics.py",2775,7,"batch[""",python,selection_mouse +799,3351286,"models/dynamics.py",2775,19,"batch[""video_tokens",python,selection_mouse +800,3352237,"models/dynamics.py",2775,20,"batch[""video_tokens""",python,selection_mouse +801,3352719,"models/dynamics.py",2775,21,"batch[""video_tokens""]",python,selection_mouse +802,3358324,"genie.py",0,0,"",python,tab +803,3366353,"genie.py",2941,0,"",python,selection_mouse +804,3367886,"genie.py",2870,0,"",python,selection_mouse +805,3369437,"genie.py",2927,0,"",python,selection_mouse +806,3370287,"genie.py",2838,0,"",python,selection_mouse +807,3388556,"genie.py",2899,0,"\n ",python,content +808,3389303,"genie.py",2904,4,"",python,content +809,3389755,"genie.py",2900,4,"",python,content +810,3390171,"genie.py",2899,1,"",python,content +811,3390454,"genie.py",2898,0,"",python,selection_command +812,3390753,"genie.py",2984,0,"",python,selection_command +813,3391303,"genie.py",2985,0,"\n ",python,content +814,3394187,"genie.py",2994,0,"j",python,content +815,3394188,"genie.py",2995,0,"",python,selection_keyboard +816,3394353,"genie.py",2995,0,"a",python,content +817,3394354,"genie.py",2996,0,"",python,selection_keyboard +818,3395269,"genie.py",2994,2,"jax",python,content +819,3395570,"genie.py",2997,0,".",python,content +820,3395572,"genie.py",2998,0,"",python,selection_keyboard +821,3395987,"genie.py",2998,0,"d",python,content +822,3395989,"genie.py",2999,0,"",python,selection_keyboard +823,3396136,"genie.py",2999,0,"e",python,content +824,3396138,"genie.py",3000,0,"",python,selection_keyboard +825,3396326,"genie.py",3000,0,"b",python,content +826,3396326,"genie.py",3001,0,"",python,selection_keyboard +827,3396421,"genie.py",3001,0,"u",python,content +828,3396423,"genie.py",3002,0,"",python,selection_keyboard +829,3396553,"genie.py",3002,0,"g",python,content +830,3396554,"genie.py",3003,0,"",python,selection_keyboard +831,3396687,"genie.py",3003,0,".",python,content +832,3396688,"genie.py",3004,0,"",python,selection_keyboard +833,3397072,"genie.py",3004,0,"b",python,content +834,3397073,"genie.py",3005,0,"",python,selection_keyboard +835,3397188,"genie.py",3005,0,"r",python,content +836,3397189,"genie.py",3006,0,"",python,selection_keyboard +837,3397353,"genie.py",3006,0,"e",python,content +838,3397354,"genie.py",3007,0,"",python,selection_keyboard +839,3397521,"genie.py",3007,0,"a",python,content +840,3397522,"genie.py",3008,0,"",python,selection_keyboard +841,3397598,"genie.py",3008,0,"k",python,content +842,3397599,"genie.py",3009,0,"",python,selection_keyboard +843,3397837,"genie.py",3009,0,"p",python,content +844,3397838,"genie.py",3010,0,"",python,selection_keyboard +845,3397986,"genie.py",3010,0,"o",python,content +846,3397987,"genie.py",3011,0,"",python,selection_keyboard +847,3398222,"genie.py",3011,0,"i",python,content +848,3398223,"genie.py",3012,0,"",python,selection_keyboard +849,3398272,"genie.py",3012,0,"n",python,content +850,3398273,"genie.py",3013,0,"",python,selection_keyboard +851,3398336,"genie.py",3013,0,"t",python,content +852,3398337,"genie.py",3014,0,"",python,selection_keyboard +853,3399020,"genie.py",3014,0,"()",python,content +854,3399021,"genie.py",3015,0,"",python,selection_keyboard +855,3399086,"genie.py",3015,1,")",python,content +856,3399087,"genie.py",3016,0,"",python,selection_keyboard +857,3401738,"genie.py",3015,0,"",python,selection_command +858,3402589,"genie.py",2986,31,"",python,content 
+859,3402619,"genie.py",2994,0,"",python,selection_command +860,3403957,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",0,0,"",shellscript,tab +861,3566549,"models/dynamics.py",0,0,"",python,tab +862,3566550,"models/dynamics.py",3224,0,"",python,selection_mouse +863,3568654,"models/dynamics.py",3200,51,"",python,content +864,3568687,"models/dynamics.py",3208,0,"",python,selection_command +865,3569320,"models/dynamics.py",3175,0,"",python,selection_command +866,3571903,"models/dynamics.py",3178,0,"",python,selection_mouse +867,3572570,"models/dynamics.py",3212,0,"",python,selection_mouse +868,3580973,"models/dynamics.py",3179,0,"",python,selection_command +869,3581786,"models/dynamics.py",3212,0,"",python,selection_command +870,3681624,"models/dynamics.py",0,0,"",python,tab +871,3681626,"models/dynamics.py",3157,0,"",python,selection_mouse +872,3917754,"models/dynamics.py",0,0,"",python,tab +873,3917756,"models/dynamics.py",2774,0,"",python,selection_mouse +874,3917872,"models/dynamics.py",2763,11,"patch_embed",python,selection_mouse +875,3918419,"models/dynamics.py",2776,0,"",python,selection_mouse +876,3918523,"models/dynamics.py",2775,5,"batch",python,selection_mouse +877,3918738,"models/dynamics.py",2775,7,"batch[""",python,selection_mouse +878,3918753,"models/dynamics.py",2775,19,"batch[""video_tokens",python,selection_mouse +879,3918995,"models/dynamics.py",2775,20,"batch[""video_tokens""",python,selection_mouse +880,3919337,"models/dynamics.py",2775,21,"batch[""video_tokens""]",python,selection_mouse +881,4563755,"models/dynamics.py",0,0,"",python,tab +882,4563757,"models/dynamics.py",2988,0,"",python,selection_mouse +883,4565336,"models/dynamics.py",2988,1,"1",python,content +884,4566985,"models/dynamics.py",3113,0,"",python,selection_mouse +885,4568536,"models/dynamics.py",3070,0,"",python,selection_mouse +886,4568538,"models/dynamics.py",3069,0,"",python,selection_command +887,4569654,"models/dynamics.py",3064,0,"",python,selection_mouse +888,4571386,"models/dynamics.py",3064,0,"1",python,content +889,4571387,"models/dynamics.py",3065,0,"",python,selection_keyboard +890,4587371,"models/dynamics.py",0,0,"",python,tab +891,4587372,"models/dynamics.py",2674,0,"",python,selection_mouse +892,4587809,"models/dynamics.py",2673,0,"",python,selection_command +893,4589403,"models/dynamics.py",3213,0,"",python,selection_command +894,4589935,"models/dynamics.py",3209,0,"",python,selection_command +895,4590603,"models/dynamics.py",3209,0,"#",python,content +896,4590604,"models/dynamics.py",3210,0,"",python,selection_keyboard +897,4590684,"models/dynamics.py",3210,0," ",python,content +898,4590685,"models/dynamics.py",3211,0,"",python,selection_keyboard +899,4590923,"models/dynamics.py",3210,0,"",python,selection_command +900,4591082,"models/dynamics.py",3177,0,"",python,selection_command +901,4591900,"models/dynamics.py",3176,0,"",python,selection_command +902,4592566,"models/dynamics.py",3176,0,"#",python,content +903,4592567,"models/dynamics.py",3177,0,"",python,selection_keyboard +904,4592602,"models/dynamics.py",3177,0," ",python,content +905,4592603,"models/dynamics.py",3178,0,"",python,selection_keyboard +906,4592853,"models/dynamics.py",3177,0,"",python,selection_command +907,4593235,"models/dynamics.py",3127,0,"",python,selection_command +908,4593352,"models/dynamics.py",3081,0,"",python,selection_command +909,4593469,"models/dynamics.py",3021,0,"",python,selection_command +910,4593606,"models/dynamics.py",2941,0,"",python,selection_command 
+911,4593735,"models/dynamics.py",2867,0,"",python,selection_command +912,4594286,"models/dynamics.py",2941,0,"",python,selection_command +913,4594999,"models/dynamics.py",2940,0,"",python,selection_command +914,4598049,"models/dynamics.py",2866,0,"",python,selection_command +915,4598205,"models/dynamics.py",2806,0,"",python,selection_command +916,4598366,"models/dynamics.py",2866,0,"",python,selection_command +917,4598582,"models/dynamics.py",2806,0,"",python,selection_command +918,4598735,"models/dynamics.py",2866,0,"",python,selection_command +919,4598876,"models/dynamics.py",2806,0,"",python,selection_command +920,4598963,"models/dynamics.py",2866,0,"",python,selection_command +921,4599150,"models/dynamics.py",2940,0,"",python,selection_command +922,4599388,"models/dynamics.py",3020,0,"",python,selection_command +923,4599703,"models/dynamics.py",3080,0,"",python,selection_command +924,4601226,"models/dynamics.py",3126,0,"",python,selection_command +925,4601386,"models/dynamics.py",3176,0,"",python,selection_command +926,4601832,"models/dynamics.py",3126,0,"",python,selection_command +927,4602788,"models/dynamics.py",3126,0,"#",python,content +928,4602789,"models/dynamics.py",3127,0,"",python,selection_keyboard +929,4603053,"models/dynamics.py",3127,0," ",python,content +930,4603054,"models/dynamics.py",3128,0,"",python,selection_keyboard +931,4603568,"models/dynamics.py",3127,0,"",python,selection_command +932,4606039,"genie.py",0,0,"",python,tab +933,4606540,"genie.py",3231,0,"",python,selection_mouse +934,4606540,"genie.py",3230,0,"",python,selection_command +935,4610719,"genie.py",4866,0,"",python,selection_command +936,4611355,"genie.py",5654,0,"",python,selection_command +937,4611837,"genie.py",5968,0,"",python,selection_command +938,4612998,"genie.py",5964,0,"",python,selection_command +939,4613803,"genie.py",5964,0,"#",python,content +940,4613804,"genie.py",5965,0,"",python,selection_keyboard +941,4613835,"genie.py",5965,0," ",python,content +942,4613837,"genie.py",5966,0,"",python,selection_keyboard +943,4614214,"genie.py",5965,0,"",python,selection_command +944,4614417,"genie.py",5970,0,"",python,selection_command +945,4615179,"genie.py",4866,0,"",python,selection_command +946,4615382,"genie.py",5654,0,"",python,selection_command +947,4615642,"genie.py",5970,0,"",python,selection_command +948,4615884,"genie.py",4866,0,"",python,selection_command +949,4616082,"genie.py",5654,0,"",python,selection_command +950,4618759,"train_dynamics.py",0,0,"",python,tab +951,4619791,"train_dynamics.py",6737,0,"",python,selection_mouse +952,4621537,"train_dynamics.py",7210,0,"",python,selection_command +953,4627557,"train_dynamics.py",2973,0,"",python,selection_mouse +954,4628509,"train_dynamics.py",2983,0,"\n ",python,content +955,4628953,"train_dynamics.py",2984,8,"",python,content +956,4653858,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",0,0,"",shellscript,tab +957,4655719,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1118,0,"",shellscript,selection_mouse +958,4655720,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1117,0,"",shellscript,selection_command +959,4656786,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1098,21,"",shellscript,content +960,4656802,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1102,0,"",shellscript,selection_command 
+961,4659353,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1070,0,"",shellscript,selection_command +962,4659536,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1065,0,"",shellscript,selection_command +963,4659702,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1052,0,"",shellscript,selection_command +964,4659835,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",1047,0,"",shellscript,selection_command +965,4659972,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",904,0,"",shellscript,selection_command +966,4660124,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",899,0,"",shellscript,selection_command +967,4660272,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",878,0,"",shellscript,selection_command +968,4660390,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",774,0,"",shellscript,selection_command +969,4660836,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,104,"",shellscript,content +970,4661552,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",795,0,"",shellscript,selection_command +971,4661785,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +972,4661952,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +973,4662253,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +974,4662368,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +975,4662468,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +976,4662568,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +977,4662618,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +978,4662752,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +979,4662837,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +980,4662971,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +981,4663052,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +982,4663156,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +983,4663319,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +984,4663451,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +985,4663551,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +986,4663702,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +987,4663754,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command 
+988,4663969,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +989,4664019,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +990,4664173,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +991,4664302,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +992,4664485,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +993,4664573,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +994,4664769,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +995,4664857,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +996,4665019,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +997,4665057,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +998,4665251,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +999,4665335,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +1000,4665522,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +1001,4665585,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +1002,4665788,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +1003,4665868,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +1004,4666052,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +1005,4666120,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +1006,4666307,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +1007,4666385,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +1008,4666568,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +1009,4666622,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +1010,4666819,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +1011,4666890,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +1012,4667135,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +1013,4667185,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +1014,4667419,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",795,0,"",shellscript,selection_command 
+1015,4667723,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",770,0,"",shellscript,selection_command +1016,4668120,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",711,0,"",shellscript,selection_command +1017,4668335,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",712,0,"",shellscript,selection_command +1018,4668835,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",713,0,"",shellscript,selection_command +1019,4668856,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",714,0,"",shellscript,selection_command +1020,4668885,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",715,0,"",shellscript,selection_command +1021,4668902,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",716,0,"",shellscript,selection_command +1022,4668953,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",717,0,"",shellscript,selection_command +1023,4669002,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",718,0,"",shellscript,selection_command +1024,4669002,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",719,0,"",shellscript,selection_command +1025,4669056,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",720,0,"",shellscript,selection_command +1026,4669057,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",721,0,"",shellscript,selection_command +1027,4669101,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",722,0,"",shellscript,selection_command +1028,4669151,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",723,0,"",shellscript,selection_command +1029,4669152,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",724,0,"",shellscript,selection_command +1030,4669202,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",725,0,"",shellscript,selection_command +1031,4669252,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",726,0,"",shellscript,selection_command +1032,4669252,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",727,0,"",shellscript,selection_command +1033,4669336,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",728,0,"",shellscript,selection_command +1034,4669336,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",729,0,"",shellscript,selection_command +1035,4669356,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",730,0,"",shellscript,selection_command +1036,4669357,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",731,0,"",shellscript,selection_command +1037,4669403,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",732,0,"",shellscript,selection_command +1038,4669452,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",733,0,"",shellscript,selection_command +1039,4669453,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",734,0,"",shellscript,selection_command +1040,4669507,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",735,0,"",shellscript,selection_command +1041,4669552,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",736,0,"",shellscript,selection_command 
+1042,4669552,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",737,0,"",shellscript,selection_command +1043,4669602,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",738,0,"",shellscript,selection_command +1044,4669602,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",739,0,"",shellscript,selection_command +1045,4669652,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",740,0,"",shellscript,selection_command +1046,4669702,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",741,0,"",shellscript,selection_command +1047,4669702,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",742,0,"",shellscript,selection_command +1048,4669752,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",743,0,"",shellscript,selection_command +1049,4669753,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",744,0,"",shellscript,selection_command +1050,4669802,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",745,0,"",shellscript,selection_command +1051,4669851,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",746,0,"",shellscript,selection_command +1052,4670340,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",745,0,"",shellscript,selection_command +1053,4671689,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",745,0,"/",shellscript,content +1054,4671690,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",746,0,"",shellscript,selection_keyboard +1055,4671818,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",746,0,"d",shellscript,content +1056,4671819,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",747,0,"",shellscript,selection_keyboard +1057,4672240,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",747,0,"e",shellscript,content +1058,4672240,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",748,0,"",shellscript,selection_keyboard +1059,4672336,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",748,0,"b",shellscript,content +1060,4672337,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",749,0,"",shellscript,selection_keyboard +1061,4672435,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",749,0,"u",shellscript,content +1062,4672435,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",750,0,"",shellscript,selection_keyboard +1063,4672969,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",750,0,"g",shellscript,content +1064,4672970,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",751,0,"",shellscript,selection_keyboard +1065,4673385,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",751,0,"s",shellscript,content +1066,4673386,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",752,0,"",shellscript,selection_keyboard +1067,4673835,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",751,1,"",shellscript,content +1068,4674422,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",751,0,"-",shellscript,content +1069,4674424,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",752,0,"",shellscript,selection_keyboard 
+1070,4675041,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",752,0,"s",shellscript,content +1071,4675042,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",753,0,"",shellscript,selection_keyboard +1072,4675252,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",753,0,"e",shellscript,content +1073,4675253,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",754,0,"",shellscript,selection_keyboard +1074,4675437,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",754,0,"s",shellscript,content +1075,4675438,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",755,0,"",shellscript,selection_keyboard +1076,4675968,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",754,1,"",shellscript,content +1077,4676103,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",752,2,"",shellscript,content +1078,4676286,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",751,1,"",shellscript,content +1079,4676869,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",746,5,"",shellscript,content +1080,4677252,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",745,1,"",shellscript,content +1081,4677552,"slurm/jobs/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch",744,0,"",shellscript,selection_command +1082,5163057,"train_dynamics.py",0,0,"",python,tab +1083,5163058,"train_dynamics.py",3488,0,"",python,selection_mouse +1084,5163518,"train_dynamics.py",3438,0,"",python,selection_mouse +1085,5163668,"train_dynamics.py",3434,5,"recon",python,selection_mouse +1086,5171418,"train_dynamics.py",3489,0,"",python,selection_mouse +1087,5172035,"train_dynamics.py",3363,0,"",python,selection_mouse +1088,5172952,"train_dynamics.py",3336,0,"",python,selection_mouse +1089,5173068,"train_dynamics.py",3333,5,"recon",python,selection_mouse +1090,5181721,"train_dynamics.py",12599,0,"",python,selection_mouse +1091,5182934,"train_dynamics.py",12561,0,"",python,selection_mouse +1092,5183835,"train_dynamics.py",12563,0,"",python,selection_mouse +1093,5183967,"train_dynamics.py",12559,6,"gt_seq",python,selection_mouse +1094,5185917,"train_dynamics.py",12616,0,"",python,selection_mouse +1095,5185919,"train_dynamics.py",12615,0,"",python,selection_command +1096,5186136,"train_dynamics.py",12615,1,"0",python,selection_mouse +1097,5186169,"train_dynamics.py",12616,0,"",python,selection_command +1098,5186203,"train_dynamics.py",12615,1,"0",python,selection_mouse +1099,5186218,"train_dynamics.py",12614,2,".0",python,selection_mouse +1100,5186251,"train_dynamics.py",12613,3,"5.0",python,selection_mouse +1101,5186252,"train_dynamics.py",12612,4,"55.0",python,selection_mouse +1102,5186269,"train_dynamics.py",12611,5,"255.0",python,selection_mouse +1103,5186735,"train_dynamics.py",12610,6," 255.0",python,selection_mouse +1104,5186736,"train_dynamics.py",12609,7,"/ 255.0",python,selection_mouse +1105,5186801,"train_dynamics.py",12608,8,"#/ 255.0",python,selection_mouse +1106,5187101,"train_dynamics.py",12608,0,"",python,selection_mouse +1107,5187601,"train_dynamics.py",12608,2,"#/",python,selection_mouse +1108,5187602,"train_dynamics.py",12608,3,"#/ ",python,selection_mouse +1109,5187618,"train_dynamics.py",12608,5,"#/ 25",python,selection_mouse +1110,5187651,"train_dynamics.py",12608,6,"#/ 255",python,selection_mouse +1111,5187652,"train_dynamics.py",12608,7,"#/ 
255.",python,selection_mouse +1112,5187685,"train_dynamics.py",12608,8,"#/ 255.0",python,selection_mouse +1113,5188002,"train_dynamics.py",12616,0,"",python,selection_mouse +1114,5188003,"train_dynamics.py",12615,0,"",python,selection_command +1115,5188468,"train_dynamics.py",12616,0,"",python,selection_mouse +1116,5188469,"train_dynamics.py",12615,0,"",python,selection_command +1117,5188668,"train_dynamics.py",12615,1,"0",python,selection_mouse +1118,5188669,"train_dynamics.py",12616,0,"",python,selection_command +1119,5188684,"train_dynamics.py",12615,1,"0",python,selection_mouse +1120,5188718,"train_dynamics.py",12614,2,".0",python,selection_mouse +1121,5188751,"train_dynamics.py",12613,3,"5.0",python,selection_mouse +1122,5188786,"train_dynamics.py",12612,4,"55.0",python,selection_mouse +1123,5188818,"train_dynamics.py",12611,5,"255.0",python,selection_mouse +1124,5188868,"train_dynamics.py",12610,6," 255.0",python,selection_mouse +1125,5188917,"train_dynamics.py",12609,7,"/ 255.0",python,selection_mouse +1126,5188934,"train_dynamics.py",12608,8,"#/ 255.0",python,selection_mouse +1127,5189618,"train_dynamics.py",12608,0,"",python,selection_mouse +1128,5189784,"train_dynamics.py",12607,1," ",python,selection_mouse +1129,5190020,"train_dynamics.py",12607,4," #/ ",python,selection_mouse +1130,5190051,"train_dynamics.py",12607,7," #/ 255",python,selection_mouse +1131,5190052,"train_dynamics.py",12607,9," #/ 255.0",python,selection_mouse +1132,5190387,"train_dynamics.py",12616,0,"",python,selection_mouse +1133,5190451,"train_dynamics.py",12615,0,"",python,selection_command +1134,5190734,"train_dynamics.py",12615,1,"0",python,selection_mouse +1135,5190735,"train_dynamics.py",12616,0,"",python,selection_command +1136,5190969,"train_dynamics.py",12615,1,"0",python,selection_mouse +1137,5190985,"train_dynamics.py",12538,78,"\n gt_seq = inputs[""videos""][0].astype(jnp.float32) #/ 255.0",python,selection_mouse +1138,5191269,"train_dynamics.py",12538,0,"",python,selection_mouse +1139,5191271,"train_dynamics.py",12537,0,"",python,selection_command +1140,5192118,"train_dynamics.py",12652,0,"",python,selection_mouse +1141,5192984,"train_dynamics.py",12644,0,"",python,selection_mouse +1142,5193135,"train_dynamics.py",12637,9,"recon_seq",python,selection_mouse +1143,5199301,"train_dynamics.py",3336,0,"",python,selection_mouse +1144,5199451,"train_dynamics.py",3333,5,"recon",python,selection_mouse +1145,5204651,"train_dynamics.py",3304,0,"",python,selection_mouse +1146,5204802,"train_dynamics.py",3301,6,"inputs",python,selection_mouse +1147,5309056,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",0,0,"",python,tab +1148,5313656,"genie.py",0,0,"",python,tab +1149,5315901,"genie.py",6058,0,"",python,selection_mouse +1150,5320018,"genie.py",6057,0,"",python,selection_command +1151,5353617,"genie.py",6058,0,"",python,selection_command +1152,5354884,"genie.py",6058,0,"-",python,content +1153,5354886,"genie.py",6059,0,"",python,selection_keyboard +1154,5354900,"genie.py",6059,0,"1",python,content +1155,5354901,"genie.py",6060,0,"",python,selection_keyboard +1156,5355284,"genie.py",6059,0,"",python,selection_command +1157,5361401,"TERMINAL",0,0,"bash",,terminal_focus +1158,5399714,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"",shellscript,tab +1159,5406590,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",720,0,"",shellscript,selection_mouse +1160,5407515,"TERMINAL",0,0,"salloc",,terminal_focus +1161,5409201,"TERMINAL",0,0,"bash",,terminal_focus 
+1162,5418050,"TERMINAL",0,0,"watch",,terminal_focus +1163,5422766,"TERMINAL",0,0,"ls *.gif",,terminal_command +1164,5422777,"TERMINAL",0,0,"]633;E;2025-07-18 18:50:55 ls *.gif;ec0e1a7c-4669-4536-8bdb-633880f5f144]633;Cgeneration_1752832080.5531173.gif generation_1752832239.8356233.gif\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +1165,5428704,"TERMINAL",0,0,"mv *.gif gifs",,terminal_command +1166,5428732,"TERMINAL",0,0,"]633;E;2025-07-18 18:51:01 mv *.gif gifs;ec0e1a7c-4669-4536-8bdb-633880f5f144]633;C]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +1167,5429755,"TERMINAL",0,0,"bash",,terminal_focus +1168,5558120,"TERMINAL",0,0,"salloc",,terminal_focus +1169,5559357,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"",shellscript,tab +1170,5564684,"genie.py",0,0,"",python,tab +1171,5566833,"genie.py",6058,0,"",python,selection_command +1172,5566991,"genie.py",6058,1,"",python,content +1173,5567134,"genie.py",6058,1,"",python,content +1174,5570454,"slurm/jobs/mihir/horeka/yolo-runs/sampling.sh",0,0,"",shellscript,tab +1175,5574383,"TERMINAL",0,0,"bash",,terminal_focus +1176,5808603,"utils/nn.py",0,0,"",python,tab +1177,5809252,"train_dynamics.py",0,0,"",python,tab +1178,5809877,"models/tokenizer.py",0,0,"",python,tab +1179,5810393,"models/dynamics.py",0,0,"",python,tab +1180,5815692,"genie.py",0,0,"",python,tab +1181,5817108,"models/dynamics.py",0,0,"",python,tab +1182,5817109,"models/dynamics.py",3198,0,"",python,selection_mouse +1183,5817721,"models/dynamics.py",3134,0,"",python,selection_mouse +1184,5818553,"models/dynamics.py",3260,0,"",python,selection_mouse +1185,5819099,"models/dynamics.py",3228,0,"",python,selection_mouse +1186,5819620,"models/dynamics.py",3204,0,"\n ",python,content +1187,5821044,"models/dynamics.py",3213,1,"",python,content +1188,5821682,"models/dynamics.py",3213,1,"",python,content +1189,5821849,"models/dynamics.py",3213,1,"",python,content +1190,5822001,"models/dynamics.py",3213,1,"",python,content +1191,5822129,"models/dynamics.py",3213,1,"",python,content +1192,5822255,"models/dynamics.py",3213,1,"",python,content +1193,5822407,"models/dynamics.py",3213,1,"",python,content +1194,5822560,"models/dynamics.py",3213,1,"",python,content +1195,5822685,"models/dynamics.py",3213,1,"",python,content +1196,5822822,"models/dynamics.py",3213,1,"",python,content +1197,5822960,"models/dynamics.py",3213,1,"",python,content +1198,5823395,"models/dynamics.py",3212,0,"",python,selection_command +1199,5823650,"models/dynamics.py",3177,0,"",python,selection_command +1200,5824468,"models/dynamics.py",3170,34," # print(next_tokens.shape)",python,selection_command +1201,5824667,"models/dynamics.py",3118,86," # next_tokens = jnp.argmax(logits, axis=-1)\n # print(next_tokens.shape)",python,selection_command +1202,5824890,"models/dynamics.py",3126,0,"",python,selection_command +1203,5825391,"models/dynamics.py",3178,1,"",python,content +1204,5825392,"models/dynamics.py",3126,1,"",python,content +1205,5825539,"models/dynamics.py",3177,1,"",python,content +1206,5825539,"models/dynamics.py",3126,1,"",python,content +1207,5825647,"models/dynamics.py",3125,0,"",python,selection_command +1208,5827707,"TERMINAL",0,0,"salloc",,terminal_focus +1209,5828149,"TERMINAL",0,0,"bash",,terminal_focus +1210,5888163,"models/dynamics.py",0,0,"",python,tab +1211,5888165,"models/dynamics.py",2836,0,"",python,selection_mouse +1212,5889195,"models/dynamics.py",2778,0,"",python,selection_mouse 
+1213,5889406,"models/dynamics.py",2775,5,"batch",python,selection_mouse +1214,5889551,"models/dynamics.py",2775,19,"batch[""video_tokens",python,selection_mouse +1215,5889604,"models/dynamics.py",2696,84,"training: bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch",python,selection_mouse +1216,5889643,"models/dynamics.py",2704,76,": bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch",python,selection_mouse +1217,5889690,"models/dynamics.py",2705,75," bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch",python,selection_mouse +1218,5889773,"models/dynamics.py",2706,74,"bool = True) -> Dict[str, Any]:\n vid_embed = self.patch_embed(batch",python,selection_mouse +1219,5889929,"models/dynamics.py",2775,20,"batch[""video_tokens""",python,selection_mouse +1220,5890238,"models/dynamics.py",2775,21,"batch[""video_tokens""]",python,selection_mouse +1221,5902049,"models/dynamics.py",0,0,"",python,tab +1222,5902050,"models/dynamics.py",3034,0,"",python,selection_mouse +1223,5904124,"models/dynamics.py",3150,0,"",python,selection_mouse +1224,5905005,"models/dynamics.py",3191,0,"",python,selection_mouse +1225,5905160,"models/dynamics.py",3182,11,"next_tokens",python,selection_mouse +1226,6029444,"models/dynamics.py",0,0,"",python,tab +1227,6029446,"models/dynamics.py",2777,0,"",python,selection_mouse +1228,6029582,"models/dynamics.py",2775,5,"batch",python,selection_mouse +1229,6029723,"models/dynamics.py",2775,7,"batch[""",python,selection_mouse +1230,6029734,"models/dynamics.py",2775,79,"batch[""video_tokens""])\n act_embed = self.action_up(batch[""latent_actions",python,selection_mouse +1231,6029935,"models/dynamics.py",2775,19,"batch[""video_tokens",python,selection_mouse +1232,6030362,"models/dynamics.py",2775,20,"batch[""video_tokens""",python,selection_mouse +1233,6030602,"models/dynamics.py",2775,21,"batch[""video_tokens""]",python,selection_mouse diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-73ddfe20-a667-477d-9924-94f7208128f81752186339186-2025_07_11-00.25.58.835/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-73ddfe20-a667-477d-9924-94f7208128f81752186339186-2025_07_11-00.25.58.835/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..e7a42e8d7d9dd6cf52d3173a4dd454a12e548657 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-73ddfe20-a667-477d-9924-94f7208128f81752186339186-2025_07_11-00.25.58.835/source.csv @@ -0,0 +1,12 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,6,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=2\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/big-runs/dynamics-cotraining-modelsize-scaling/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/big-runs/dynamics-cotraining-modelsize-scaling/%x_%j.log\n#SBATCH --job-name=train_dynamics_modelsize_scaling_36M_2_node\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dynamics-cotraining-modelsize-scaling/$job_name\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/train_tokenizer_batch_size_scaling_16_node/3321526/tokenizer_22000/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-36M-$slurm_job_id \\n --tags dynamics modelsize-scaling 36M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n",shellscript,tab +2,766,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"12:25:58 AM [info] Activating crowd-code\n12:25:58 AM [info] Recording started\n12:25:58 AM [info] Initializing git provider using file system watchers...\n12:25:59 AM [info] Git repository found\n12:25:59 AM [info] Git provider initialized successfully\n12:25:59 AM [info] Initial git state: [object Object]\n",Log,tab +3,4116,"TERMINAL",0,0,"/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_command +4,4194,"TERMINAL",0,0,"]633;E;2025-07-11 00:26:02 /bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt;d1c8480d-5bbb-4ee3-b67f-eb04590abc9f]633;C]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash]633;D;0",,terminal_output +5,76596,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",0,0,"",shellscript,tab +6,76600,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1303,0,"",shellscript,selection_mouse +7,76617,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1302,0,"",shellscript,selection_command +8,77837,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",1620,0,"",shellscript,selection_mouse +9,217736,"TERMINAL",0,0,"bash",,terminal_focus +10,218636,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch",0,0,"",shellscript,tab +11,226720,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics_cotraining/sqrt_lr/train_dynamics_2_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=2\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/big-runs/dynamics-cotraining-batchsize-scaling/%x_%j.log\n#SBATCH 
--error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/big-runs/dynamics-cotraining-batchsize-scaling/%x_%j.log\n#SBATCH --job-name=train_tokenizer_batch_size_scaling_2_node\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dynamics-cotraining-batchsize-scaling/$job_name\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/train_tokenizer_batch_size_scaling_16_node/3321526/tokenizer_22000/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-batch-size-scaling-2-node-$slurm_job_id \\n --tags dynamics batch-size-scaling 2-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n",shellscript,tab diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-7c1bdcf0-d594-4018-8499-7d2ed33930611752094287328-2025_07_09-22.51.39.315/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-7c1bdcf0-d594-4018-8499-7d2ed33930611752094287328-2025_07_09-22.51.39.315/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..0c01ca184d644b696b463f95b1705533101d30a1 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-7c1bdcf0-d594-4018-8499-7d2ed33930611752094287328-2025_07_09-22.51.39.315/source.csv @@ -0,0 +1,216 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,5,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(self.model_dim)\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # before: with mask token\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n # my change, with gaussian noise\n noise = jax.random.normal(rng2, self.mask_token.shape) * 1.0 # stddev=1.0, adjust if needed\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n \n\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = 
self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n",python,tab +2,410,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"10:51:39 PM [info] Activating crowd-code\n10:51:39 PM [info] Recording started\n10:51:39 PM [info] Initializing git provider using file system watchers...\n10:51:39 PM [info] Git repository found\n10:51:39 PM [info] Git provider initialized successfully\n",Log,tab +3,569,"extension-output-pdoom-org.crowd-code-#1-crowd-code",250,0,"10:51:39 PM [info] Initial git state: [object Object]\n",Log,content +4,3410,"TERMINAL",0,0,"/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_command +5,3484,"TERMINAL",0,0,"]633;E;2025-07-09 22:51:42 /bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt;dbf2f7cf-c02e-4ed1-93dc-847ffbf8836e]633;C",,terminal_output +6,3495,"TERMINAL",0,0,"]0;tum_cte0515@hkn1990:/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash]633;D;0",,terminal_output +7,5445,"models/dynamics.py",0,0,"",python,tab +8,5449,"models/dynamics.py",1436,0,"",python,selection_mouse +9,5460,"models/dynamics.py",1435,0,"",python,selection_command +10,7028,"models/dynamics.py",1436,0,"\n ",python,content +11,8322,"models/dynamics.py",1437,12,"",python,content +12,8764,"models/dynamics.py",1043,0,"",python,selection_mouse +13,10291,"models/dynamics.py",1437,0,"",python,selection_mouse +14,10947,"models/dynamics.py",1437,0,"\n rng1, rng2 = jax.random.split(batch[""mask_rng""])",python,content +15,10975,"models/dynamics.py",1450,0,"",python,selection_command +16,11582,"models/dynamics.py",1437,0,"",python,selection_command +17,12054,"models/dynamics.py",1437,1,"",python,content +18,12066,"models/dynamics.py",1449,0,"",python,selection_command +19,12469,"models/dynamics.py",1450,0,"",python,selection_command +20,12594,"models/dynamics.py",1451,0,"",python,selection_command +21,12742,"models/dynamics.py",1452,0,"",python,selection_command +22,12880,"models/dynamics.py",1453,0,"",python,selection_command +23,13017,"models/dynamics.py",1454,0,"",python,selection_command +24,13410,"models/dynamics.py",1455,0,"",python,selection_command +25,13862,"models/dynamics.py",1455,5,"",python,content +26,14581,"models/dynamics.py",1455,0,"r",python,content +27,14583,"models/dynamics.py",1456,0,"",python,selection_keyboard +28,14768,"models/dynamics.py",1456,0,"n",python,content +29,14770,"models/dynamics.py",1457,0,"",python,selection_keyboard +30,14888,"models/dynamics.py",1457,0,"g",python,content +31,14890,"models/dynamics.py",1458,0,"",python,selection_keyboard +32,15046,"models/dynamics.py",1458,0,"_",python,content +33,15048,"models/dynamics.py",1459,0,"",python,selection_keyboard +34,15668,"models/dynamics.py",1458,1,"",python,content +35,15795,"models/dynamics.py",1457,1,"",python,content 
+36,15977,"models/dynamics.py",1456,1,"",python,content +37,16094,"models/dynamics.py",1455,1,"",python,content +38,16361,"models/dynamics.py",1455,0,"_",python,content +39,16362,"models/dynamics.py",1456,0,"",python,selection_keyboard +40,16741,"models/dynamics.py",1456,0,"r",python,content +41,16742,"models/dynamics.py",1457,0,"",python,selection_keyboard +42,16881,"models/dynamics.py",1457,0,"n",python,content +43,16883,"models/dynamics.py",1458,0,"",python,selection_keyboard +44,17195,"models/dynamics.py",1458,0,"g",python,content +45,17196,"models/dynamics.py",1459,0,"",python,selection_keyboard +46,17324,"models/dynamics.py",1459,0," ",python,content +47,17326,"models/dynamics.py",1460,0,"",python,selection_keyboard +48,17677,"models/dynamics.py",1459,0,"",python,selection_command +49,18158,"models/dynamics.py",1460,0,"",python,selection_command +50,18628,"models/dynamics.py",1497,0,"",python,selection_command +51,18905,"models/dynamics.py",1496,0,"",python,selection_command +52,19342,"models/dynamics.py",1494,2,"",python,content +53,19678,"models/dynamics.py",1486,8,"",python,content +54,19957,"models/dynamics.py",1484,2,"",python,content +55,20366,"models/dynamics.py",1479,5,"",python,content +56,20872,"models/dynamics.py",1479,0,"r",python,content +57,20873,"models/dynamics.py",1480,0,"",python,selection_keyboard +58,21049,"models/dynamics.py",1480,0,"n",python,content +59,21050,"models/dynamics.py",1481,0,"",python,selection_keyboard +60,21640,"models/dynamics.py",1481,0,"g",python,content +61,21642,"models/dynamics.py",1482,0,"",python,selection_keyboard +62,22058,"models/dynamics.py",1482,0,"1",python,content +63,22059,"models/dynamics.py",1483,0,"",python,selection_keyboard +64,22539,"models/dynamics.py",1482,0,"",python,selection_command +65,24952,"models/dynamics.py",1530,0,"",python,selection_command +66,25648,"models/dynamics.py",1529,0,"",python,selection_command +67,25800,"models/dynamics.py",1528,0,"",python,selection_command +68,25938,"models/dynamics.py",1527,0,"",python,selection_command +69,26098,"models/dynamics.py",1526,0,"",python,selection_command +70,26219,"models/dynamics.py",1525,0,"",python,selection_command +71,26363,"models/dynamics.py",1524,0,"",python,selection_command +72,26519,"models/dynamics.py",1523,0,"",python,selection_command +73,26856,"models/dynamics.py",1523,4,"",python,content +74,27627,"models/dynamics.py",1523,0,"_",python,content +75,27629,"models/dynamics.py",1524,0,"",python,selection_keyboard +76,27875,"models/dynamics.py",1524,0,"r",python,content +77,27876,"models/dynamics.py",1525,0,"",python,selection_keyboard +78,27987,"models/dynamics.py",1525,0,"n",python,content +79,27990,"models/dynamics.py",1526,0,"",python,selection_keyboard +80,28146,"models/dynamics.py",1526,0,"g",python,content +81,28147,"models/dynamics.py",1527,0,"",python,selection_keyboard +82,28633,"models/dynamics.py",1526,0,"",python,selection_command +83,28772,"models/dynamics.py",1527,0,"",python,selection_command +84,29254,"models/dynamics.py",1528,0,"",python,selection_command +85,29298,"models/dynamics.py",1529,0,"",python,selection_command +86,29341,"models/dynamics.py",1530,0,"",python,selection_command +87,29342,"models/dynamics.py",1531,0,"",python,selection_command +88,29395,"models/dynamics.py",1532,0,"",python,selection_command +89,29401,"models/dynamics.py",1533,0,"",python,selection_command +90,29446,"models/dynamics.py",1534,0,"",python,selection_command +91,29489,"models/dynamics.py",1535,0,"",python,selection_command 
+92,29502,"models/dynamics.py",1536,0,"",python,selection_command +93,29563,"models/dynamics.py",1537,0,"",python,selection_command +94,29604,"models/dynamics.py",1538,0,"",python,selection_command +95,29606,"models/dynamics.py",1539,0,"",python,selection_command +96,29607,"models/dynamics.py",1540,0,"",python,selection_command +97,29649,"models/dynamics.py",1541,0,"",python,selection_command +98,29690,"models/dynamics.py",1542,0,"",python,selection_command +99,29702,"models/dynamics.py",1543,0,"",python,selection_command +100,29735,"models/dynamics.py",1544,0,"",python,selection_command +101,29778,"models/dynamics.py",1545,0,"",python,selection_command +102,29793,"models/dynamics.py",1546,0,"",python,selection_command +103,29834,"models/dynamics.py",1547,0,"",python,selection_command +104,29865,"models/dynamics.py",1548,0,"",python,selection_command +105,29905,"models/dynamics.py",1549,0,"",python,selection_command +106,29999,"models/dynamics.py",1550,0,"",python,selection_command +107,30170,"models/dynamics.py",1551,0,"",python,selection_command +108,30301,"models/dynamics.py",1552,0,"",python,selection_command +109,30497,"models/dynamics.py",1554,0,"",python,selection_command +110,32175,"models/dynamics.py",1554,2,"",python,content +111,33125,"models/dynamics.py",1553,0,"",python,selection_command +112,33308,"models/dynamics.py",1552,0,"",python,selection_command +113,33722,"models/dynamics.py",1552,2,"",python,content +114,34101,"models/dynamics.py",1552,1,"",python,content +115,34397,"models/dynamics.py",1552,1,"",python,content +116,35045,"models/dynamics.py",1552,1,"",python,content +117,35331,"models/dynamics.py",1552,1,"",python,content +118,35905,"models/dynamics.py",1552,1,"",python,content +119,36383,"models/dynamics.py",1552,1,"",python,content +120,36428,"models/dynamics.py",1552,1,"",python,content +121,36440,"models/dynamics.py",1552,1,"",python,content +122,36481,"models/dynamics.py",1552,1,"",python,content +123,36513,"models/dynamics.py",1552,1,"",python,content +124,36554,"models/dynamics.py",1552,1,"",python,content +125,36563,"models/dynamics.py",1552,1,"",python,content +126,36595,"models/dynamics.py",1552,1,"",python,content +127,36638,"models/dynamics.py",1552,1,"",python,content +128,36688,"models/dynamics.py",1552,1,"",python,content +129,36689,"models/dynamics.py",1552,1,"",python,content +130,36732,"models/dynamics.py",1552,1,"",python,content +131,36774,"models/dynamics.py",1552,1,"",python,content +132,36776,"models/dynamics.py",1552,1,"",python,content +133,36822,"models/dynamics.py",1552,1,"",python,content +134,36863,"models/dynamics.py",1552,1,"",python,content +135,36889,"models/dynamics.py",1552,1,"",python,content +136,36932,"models/dynamics.py",1552,1,"",python,content +137,36975,"models/dynamics.py",1552,1,"",python,content +138,36976,"models/dynamics.py",1552,1,"",python,content +139,36997,"models/dynamics.py",1552,1,"",python,content +140,37170,"models/dynamics.py",1552,1,"",python,content +141,37363,"models/dynamics.py",1552,1,"",python,content +142,37534,"models/dynamics.py",1552,1,"",python,content +143,37726,"models/dynamics.py",1552,1,"",python,content +144,37903,"models/dynamics.py",1552,1,"",python,content +145,38092,"models/dynamics.py",1552,1,"",python,content +146,38267,"models/dynamics.py",1552,1,"",python,content +147,38278,"models/dynamics.py",1551,0,"",python,selection_command +148,42166,"models/dynamics.py",1616,0,"",python,selection_mouse +149,42779,"models/dynamics.py",1615,0,"",python,selection_mouse 
+150,42928,"models/dynamics.py",1614,4,"self",python,selection_mouse +151,43144,"models/dynamics.py",1614,5,"self.",python,selection_mouse +152,43144,"models/dynamics.py",1614,15,"self.mask_token",python,selection_mouse +153,45149,"models/dynamics.py",1614,15,"",python,content +154,45559,"models/dynamics.py",1614,0,"n",python,content +155,45560,"models/dynamics.py",1615,0,"",python,selection_keyboard +156,45762,"models/dynamics.py",1615,0,"o",python,content +157,45764,"models/dynamics.py",1616,0,"",python,selection_keyboard +158,45932,"models/dynamics.py",1616,0,"i",python,content +159,45934,"models/dynamics.py",1617,0,"",python,selection_keyboard +160,46041,"models/dynamics.py",1617,0,"s",python,content +161,46042,"models/dynamics.py",1618,0,"",python,selection_keyboard +162,46230,"models/dynamics.py",1618,0,"e",python,content +163,46232,"models/dynamics.py",1619,0,"",python,selection_keyboard +164,46792,"models/dynamics.py",1618,0,"",python,selection_command +165,51459,"models/dynamics.py",1626,0,"",python,selection_mouse +166,52068,"models/dynamics.py",1644,0,"",python,selection_mouse +167,52072,"models/dynamics.py",1643,0,"",python,selection_command +168,52748,"models/dynamics.py",1644,0,"",python,selection_mouse +169,52762,"models/dynamics.py",1643,0,"",python,selection_command +170,53477,"models/dynamics.py",1629,0,"",python,selection_mouse +171,54060,"models/dynamics.py",1631,0,"",python,selection_mouse +172,54062,"models/dynamics.py",1630,0,"",python,selection_command +173,54246,"models/dynamics.py",1630,1,")",python,selection_mouse +174,54247,"models/dynamics.py",1631,0,"",python,selection_command +175,54370,"models/dynamics.py",1549,82,"e) \n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)",python,selection_mouse +176,54371,"models/dynamics.py",1534,97,"mask_token.shape) \n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)",python,selection_mouse +177,54371,"models/dynamics.py",1472,159,".split(rng1)\n noise = jax.random.normal(_rng, self.mask_token.shape) \n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)",python,selection_mouse +178,54372,"models/dynamics.py",1418,213,"ith gaussian noise\n rng1, _rng = jax.random.split(rng1)\n noise = jax.random.normal(_rng, self.mask_token.shape) \n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)",python,selection_mouse +179,54372,"models/dynamics.py",1411,220,"ange, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n noise = jax.random.normal(_rng, self.mask_token.shape) \n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)",python,selection_mouse +180,54372,"models/dynamics.py",1409,222,"change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n noise = jax.random.normal(_rng, self.mask_token.shape) \n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)",python,selection_mouse +181,54412,"models/dynamics.py",1405,226," my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n noise = jax.random.normal(_rng, self.mask_token.shape) \n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)",python,selection_mouse +182,54413,"models/dynamics.py",1402,229," # my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n noise = jax.random.normal(_rng, self.mask_token.shape) \n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)",python,selection_mouse +183,54427,"models/dynamics.py",1391,240,"\n # my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n noise = 
jax.random.normal(_rng, self.mask_token.shape) \n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)",python,selection_mouse +184,54548,"models/dynamics.py",1300,331," # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n # my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n noise = jax.random.normal(_rng, self.mask_token.shape) \n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)",python,selection_mouse +185,54599,"models/dynamics.py",1262,369," # before: with mask token\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n # my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n noise = jax.random.normal(_rng, self.mask_token.shape) \n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)",python,selection_mouse +186,54691,"models/dynamics.py",1218,413," mask = mask.at[:, 0].set(False)\n # before: with mask token\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n # my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n noise = jax.random.normal(_rng, self.mask_token.shape) \n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)",python,selection_mouse +187,54729,"models/dynamics.py",1139,492," mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # before: with mask token\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n # my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n noise = jax.random.normal(_rng, self.mask_token.shape) \n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)",python,selection_mouse +188,54753,"models/dynamics.py",1066,565," mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # before: with mask token\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n # my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n noise = jax.random.normal(_rng, self.mask_token.shape) \n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)",python,selection_mouse +189,54808,"models/dynamics.py",1005,626," rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # before: with mask token\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n # my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n noise = jax.random.normal(_rng, self.mask_token.shape) \n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)",python,selection_mouse +190,54849,"models/dynamics.py",984,647," if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # before: with mask token\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n # my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n noise = jax.random.normal(_rng, self.mask_token.shape) \n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)",python,selection_mouse +191,54979,"models/dynamics.py",924,707," vid_embed = 
self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # before: with mask token\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n # my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n noise = jax.random.normal(_rng, self.mask_token.shape) \n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)",python,selection_mouse +192,56344,"models/dynamics.py",1009,0,"",python,selection_mouse +193,56345,"models/dynamics.py",1005,12," ",python,selection_mouse +194,56935,"models/dynamics.py",1005,65," rng1, rng2 = jax.random.split(batch[""mask_rng""])\n ",python,selection_mouse +195,56983,"models/dynamics.py",1005,138," rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n ",python,selection_mouse +196,56984,"models/dynamics.py",1005,218," rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n ",python,selection_mouse +197,57025,"models/dynamics.py",1005,303," rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # before: with mask token\n ",python,selection_mouse +198,57028,"models/dynamics.py",1005,396," rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # before: with mask token\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n ",python,selection_mouse +199,57065,"models/dynamics.py",1005,491," rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # before: with mask token\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n # my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n ",python,selection_mouse +200,57066,"models/dynamics.py",1005,492," rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # before: with mask token\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n # my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n ",python,selection_mouse +201,57070,"models/dynamics.py",1005,497," rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # before: with mask token\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n # my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n noise",python,selection_mouse +202,57088,"models/dynamics.py",1005,569," rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = 
jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # before: with mask token\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n # my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n noise = jax.random.normal(_rng, self.mask_token.shape) \n vid_embed",python,selection_mouse +203,57228,"models/dynamics.py",1005,560," rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # before: with mask token\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n # my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n noise = jax.random.normal(_rng, self.mask_token.shape) \n ",python,selection_mouse +204,57229,"models/dynamics.py",1005,492," rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # before: with mask token\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n # my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n ",python,selection_mouse +205,57231,"models/dynamics.py",1005,491," rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # before: with mask token\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n # my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n ",python,selection_mouse +206,57634,"models/dynamics.py",1496,0,"",python,selection_mouse +207,59595,"models/dynamics.py",128,0,"",python,selection_mouse +208,59744,"models/dynamics.py",127,5,"class",python,selection_mouse +209,59933,"models/dynamics.py",127,67,"class DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n",python,selection_mouse +210,59976,"models/dynamics.py",127,196,"class DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n\n def",python,selection_mouse +211,59977,"models/dynamics.py",127,627,"class DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +212,60095,"models/dynamics.py",127,833,"class DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n 
nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(self.model_dim)\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed",python,selection_mouse +213,60096,"models/dynamics.py",127,1263,"class DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(self.model_dim)\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # before: with mask token\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)",python,selection_mouse +214,60097,"models/dynamics.py",127,1557,"class DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(self.model_dim)\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # before: with mask token\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n # my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n noise = jax.random.normal(_rng, self.mask_token.shape) \n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)\n \n\n else:\n mask = None\n",python,selection_mouse +215,60097,"models/dynamics.py",127,1823,"class DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(self.model_dim)\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if 
training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # before: with mask token\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n # my change, with gaussian noise\n rng1, _rng = jax.random.split(rng1)\n noise = jax.random.normal(_rng, self.mask_token.shape) \n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)\n \n\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n",python,selection_mouse diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-7d09022e-0451-4d5a-95fd-fe8f629e1b4b1757071522446-2025_09_05-13.26.09.836/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-7d09022e-0451-4d5a-95fd-fe8f629e1b4b1757071522446-2025_09_05-13.26.09.836/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..e252b77ef44915bbba606fcf656dcd54c9bf01a5 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-7d09022e-0451-4d5a-95fd-fe8f629e1b4b1757071522446-2025_09_05-13.26.09.836/source.csv @@ -0,0 +1,20977 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,333,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"",Log,tab +3,511,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"1:26:09 PM [info] Activating crowd-code\n1:26:09 PM [info] Recording started\n1:26:09 PM [info] Initializing git provider using file system watchers...\n1:26:10 PM [info] Git repository found\n1:26:10 PM [info] Git provider initialized successfully\n1:26:10 PM [info] Initial git state: [object Object]\n",Log,content +4,15038,"utils/dataloader.py",0,0,"import jax\nimport numpy as np\nimport grain\nfrom typing import Any\nimport pickle\nimport os\n\n\nclass EpisodeLengthFilter(grain.transforms.Filter):\n """"""\n A Grain Filter that keeps only episodes with sufficient length.\n """"""\n\n def __init__(self, seq_len: int, image_h: int, image_w: int, image_c: int):\n """"""Initializes the filter with sequence length requirements.""""""\n self.seq_len = seq_len\n self.image_h = image_h\n self.image_w = image_w\n self.image_c = image_c\n\n def filter(self, element: Any) -> bool:\n """"""\n Filters episodes based on length.\n\n Args:\n element: A dictionary representing one record from the DataSource.\n Expected to contain 'raw_video' (bytes) and 'sequence_length' (int)\n\n Returns:\n True if the episode has sufficient length, False otherwise.\n """"""\n assert isinstance(element, bytes)\n element = pickle.loads(element)\n\n current_episode_len = element[""sequence_length""]\n if current_episode_len < self.seq_len:\n print(\n f""Filtering out episode with length {current_episode_len}, which is ""\n f""shorter than the requested sequence length {self.seq_len}.""\n )\n return False\n\n return True\n\n\nclass ProcessEpisodeAndSlice(grain.transforms.RandomMap):\n """"""\n A Grain Transformation that combines parsing, slicing, and normalizing.\n """"""\n\n def __init__(self, seq_len: int, image_h: int, image_w: int, image_c: int):\n """"""Initializes the transformation with processing parameters.""""""\n self.seq_len = seq_len\n self.image_h = 
image_h\n self.image_w = image_w\n self.image_c = image_c\n\n def random_map(self, element: dict, rng: np.random.Generator) -> Any:\n """"""\n Processes a single raw episode from the data source.\n\n Args:\n element: A dictionary representing one record from the DataSource.\n Expected to contain 'raw_video' (bytes) and 'sequence_length' (int)\n rng: A per-record random number generator provided by the Grain sampler.\n\n Returns:\n A processed video sequence as a NumPy array with shape\n (seq_len, height, width, channels) and dtype float32.\n """"""\n assert isinstance(element, bytes)\n element = pickle.loads(element)\n\n video_shape = (\n element[""sequence_length""],\n self.image_h,\n self.image_w,\n self.image_c,\n )\n episode_tensor = np.frombuffer(element[""raw_video""], dtype=np.uint8)\n episode_tensor = episode_tensor.reshape(video_shape)\n\n current_episode_len = episode_tensor.shape[0]\n if current_episode_len < self.seq_len:\n raise ValueError(\n f""Episode length {current_episode_len} is shorter than ""\n f""requested sequence length {self.seq_len}. This should ""\n f""have been filtered out.""\n )\n\n max_start_idx = current_episode_len - self.seq_len\n\n start_idx = rng.integers(0, max_start_idx + 1)\n\n seq = episode_tensor[start_idx : start_idx + self.seq_len]\n\n return seq\n\n\ndef get_dataloader(\n array_record_paths: list[str],\n seq_len: int,\n global_batch_size: int,\n image_h: int,\n image_w: int,\n image_c: int,\n num_workers: int = 1,\n prefetch_buffer_size: int = 1,\n seed: int = 42,\n):\n """"""\n Creates a data loading pipeline using Grain.\n """"""\n if not array_record_paths:\n raise ValueError(""array_record_paths list cannot be empty."")\n\n num_processes = jax.process_count()\n\n if global_batch_size % num_processes != 0:\n raise ValueError(\n f""Global batch size {global_batch_size} must be divisible by ""\n f""the number of JAX processes {num_processes} for proper sharding.""\n )\n per_process_batch_size = global_batch_size // num_processes\n\n source = grain.sources.ArrayRecordDataSource(array_record_paths)\n\n sampler = grain.samplers.IndexSampler(\n num_records=len(source),\n shard_options=grain.sharding.ShardByJaxProcess(drop_remainder=True),\n shuffle=True,\n num_epochs=None,\n seed=seed,\n )\n\n operations = [\n EpisodeLengthFilter(\n seq_len=seq_len, image_h=image_h, image_w=image_w, image_c=image_c\n ),\n ProcessEpisodeAndSlice(\n seq_len=seq_len, image_h=image_h, image_w=image_w, image_c=image_c\n ),\n grain.transforms.Batch(batch_size=per_process_batch_size, drop_remainder=True),\n ]\n\n read_options = grain.ReadOptions(\n prefetch_buffer_size=prefetch_buffer_size,\n num_threads=1,\n )\n dataloader = grain.DataLoader(\n data_source=source,\n sampler=sampler,\n operations=operations,\n worker_count=num_workers,\n worker_buffer_size=1,\n read_options=read_options,\n )\n\n return dataloader\n\ndef create_dataloader_iterator(\n data_dir: str, \n image_shape: tuple[int, int, int], \n seq_len: int, \n batch_size: int, \n seed: int = 42\n) -> grain.DataLoaderIterator:\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = 
grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n return grain_iterator",python,tab +5,15041,"utils/dataloader.py",3253,0,"",python,selection_mouse +6,60062,"TERMINAL",0,0,"git diff main > diff.diff",,terminal_command +7,60142,"TERMINAL",0,0,"]633;C",,terminal_output +8,60169,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +9,151595,"train_tokenizer.py",0,0,"from dataclasses import dataclass, field\nimport os\nfrom typing import cast\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import create_dataloader_iterator\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n init_lr: float = 0.0\n max_lr: float = 3e-4\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 4\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n wandb_id: str = """"\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\n\ndef tokenizer_loss_fn(\n model: TokenizerVQVAE, inputs: dict\n) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n # --- Compute loss ---\n # FIXME (f.srambical): Can we even do native int8 training without casting the video at all?\n # FIXME (f.srambical): If the tokenizer is the reason for the dynamics model being memory-bound,\n # should we at least train the tokenizer natively in int8?\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training=True)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n mse = jnp.square(gt - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt, recon)).mean()\n ssim = 
jnp.asarray(pix.ssim(gt, recon)).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n\n@nnx.jit\ndef train_step(\n tokenizer: TokenizerVQVAE, optimizer: nnx.Optimizer, inputs: dict\n) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n tokenizer\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return loss, recon, metrics\n\n@nnx.jit\ndef val_step(tokenizer: TokenizerVQVAE, inputs: dict) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n\ndef calculate_validation_metrics(val_dataloader):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n print(f""Calculating validation metrics..."")\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(tokenizer, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}"")\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, recon, val_metrics\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n\n _, params, _ = nnx.split(tokenizer, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n optimizer = nnx.Optimizer(tokenizer, tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n 
""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n image_shape = (args.image_height, args.image_width, args.image_channels)\n train_iterator = create_dataloader_iterator(args.data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n\n \n # --- TRAIN LOOP ---\n dataloader_train = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in train_iterator\n )\n dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n for videos in dataloader_train:\n # --- Train step ---\n inputs = dict(videos=videos)\n loss, recon, metrics = train_step(tokenizer, optimizer, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n val_loss, val_recon, val_metrics = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n # TODO mihir: add validation recons here\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) 
c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +10,158916,"train_lam.py",0,0,"from dataclasses import dataclass, field\nimport os\nfrom typing import cast\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.lam import LatentActionModel\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n vq_beta: float = 0.25\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n vq_reset_thresh: int = 50\n # LAM\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 6\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_lam""\n tags: list[str] = field(default_factory=lambda: [""lam""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_checkpoint_keep_period: int = 20000\n wandb_id: str = """"\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\n\ndef lam_loss_fn(\n model: LatentActionModel, inputs: dict\n) -> tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n # --- Compute loss ---\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n model.train()\n outputs = model(inputs, training=True)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n gt_future_frames = gt[:, 1:]\n mse = jnp.square(gt_future_frames - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n 
commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = gt_future_frames.clip(0, 1).reshape(-1, *gt_future_frames.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt, recon)).mean()\n count_fn = jax.vmap(lambda i: (outputs[""indices""] == i).sum())\n index_counts = count_fn(jnp.arange(args.num_latents))\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),\n )\n return loss, (outputs[""recon""], index_counts, metrics)\n\n\n@nnx.jit\ndef train_step(\n lam: LatentActionModel,\n optimizer: nnx.Optimizer,\n inputs: dict,\n action_last_active: jax.Array,\n rng: jax.Array,\n) -> tuple[jax.Array, jax.Array, jax.Array, dict]:\n def loss_fn(\n model: LatentActionModel,\n ) -> tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n return lam_loss_fn(model, inputs)\n\n # --- Update model ---\n (loss, (recon, idx_counts, metrics)), grads = nnx.value_and_grad(\n loss_fn, has_aux=True\n )(lam)\n optimizer.update(grads)\n\n # --- Reset inactive latent actions ---\n codebook = lam.vq.codebook\n num_codes = len(codebook)\n active_codes = idx_counts != 0.0\n action_last_active = jnp.where(active_codes, 0, action_last_active + 1)\n p_code = active_codes / active_codes.sum()\n reset_idxs = jax.random.choice(rng, num_codes, shape=(num_codes,), p=p_code)\n do_reset = action_last_active >= args.vq_reset_thresh\n new_codebook = jnp.where(\n jnp.expand_dims(do_reset, -1), codebook[reset_idxs], codebook.value\n )\n lam.vq.codebook.value = new_codebook\n action_last_active = jnp.where(do_reset, 0, action_last_active)\n return loss, recon, action_last_active, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n\n # Count parameters\n _, params, _ = nnx.split(lam, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter 
counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n optimizer = nnx.Optimizer(lam, tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored 
dataloader and model state from step {step}"")\n\n # --- TRAIN LOOP ---\n dataloader = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in grain_iterator\n )\n print(f""Starting training from step {step}..."")\n action_last_active = jnp.zeros(args.num_latents, dtype=jnp.int32)\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng = jax.random.split(rng)\n\n inputs = dict(videos=videos, rng=_rng)\n rng, _rng = jax.random.split(rng)\n loss, recon, action_last_active, metrics = train_step(\n lam, optimizer, inputs, action_last_active, _rng\n )\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +11,207382,"train_tokenizer.py",0,0,"",python,tab +12,213450,"train_tokenizer.py",3917,0,"",python,selection_mouse +13,214173,"train_tokenizer.py",3923,0,"",python,selection_mouse +14,214324,"train_tokenizer.py",3920,5,"train",python,selection_mouse +15,215057,"train_tokenizer.py",3917,0,"",python,selection_mouse +16,215192,"train_tokenizer.py",3914,5,"model",python,selection_mouse +17,215779,"train_tokenizer.py",3921,0,"",python,selection_mouse +18,220683,"train_lam.py",0,0,"",python,tab +19,225844,"train_lam.py",3667,0,"",python,selection_mouse +20,227062,"train_lam.py",3706,0,"\n model.train()",python,content +21,227110,"train_lam.py",3715,0,"",python,selection_command +22,241399,"train_lam.py",2225,0,"",python,selection_mouse +23,242051,"train_lam.py",2214,18,"",python,content +24,242096,"train_lam.py",2218,0,"",python,selection_command +25,248802,"train_lam.py",2230,0,"",python,selection_mouse +26,249105,"train_lam.py",2228,5,"model",python,selection_mouse +27,249699,"train_lam.py",2219,0,"",python,selection_mouse +28,249866,"train_lam.py",2218,7,"outputs",python,selection_mouse +29,252463,"train_lam.py",2246,0,"",python,selection_mouse +30,253422,"train_lam.py",2244,0,"",python,selection_mouse +31,253543,"train_lam.py",2242,8,"training",python,selection_mouse +32,254258,"train_lam.py",2295,0,"",python,selection_mouse +33,254419,"train_lam.py",2294,3,"""].",python,selection_mouse 
+34,255015,"train_lam.py",2252,0,"",python,selection_mouse +35,255194,"train_lam.py",2251,4,"True",python,selection_mouse +36,260139,"train_lam.py",2246,0,"",python,selection_mouse +37,260278,"train_lam.py",2242,8,"training",python,selection_mouse +38,261058,"train_lam.py",2228,0,"",python,selection_mouse +39,261683,"train_lam.py",2228,5,"model",python,selection_mouse +40,274213,"models/lam.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass LatentActionModel(nnx.Module):\n """"""Latent Action ST-ViVit VQ-VAE\n \n Dimension keys:\n B: batch size\n T: sequence length\n N: number of patches per frame\n M: model dimension\n L: latent dimension\n E: B * (T - 1)\n H: height\n W: width\n C: number of channels (n_dim)\n P: patch token dimension (patch_size^2 * C)\n\n Tm1: T - 1\n Np1: N + 1\n """"""\n\n def __init__(\n self,\n in_dim: int,\n model_dim: int,\n ffn_dim: int,\n latent_dim: int,\n num_latents: int,\n patch_size: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n codebook_dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.in_dim = in_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.patch_size = patch_size\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.codebook_dropout = codebook_dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.patch_token_dim = self.in_dim * self.patch_size**2\n self.encoder = STTransformer(\n self.patch_token_dim,\n self.model_dim,\n self.ffn_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.action_in = nnx.Param(\n nnx.initializers.lecun_uniform()(\n rngs.params(), (1, 1, 1, self.patch_token_dim)\n )\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n rngs=rngs,\n )\n self.patch_up = nnx.Linear(\n self.patch_token_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.action_up = nnx.Linear(\n self.latent_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.decoder = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.patch_token_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n\n def __call__(\n self, batch: Dict[str, jax.Array], training: bool = True\n ) -> Dict[str, jax.Array]:\n # --- Encode + VQ ---\n H, W = batch[""videos""].shape[2:4]\n videos_BTHWC = batch[""videos""]\n outputs = self.vq_encode(videos_BTHWC, training)\n patch_BTNP = outputs[""patches""]\n z_q_BTm11L = outputs[""z_q""]\n action_BTm11M = self.action_up(z_q_BTm11L)\n patch_BTm1NM = self.patch_up(patch_BTNP[:, :-1])\n action_BTm1NM = jnp.broadcast_to(action_BTm11M, patch_BTm1NM.shape)\n video_action_patches_BTm1NM = action_BTm1NM + patch_BTm1NM\n del outputs[""patches""], patch_BTNP, patch_BTm1NM\n\n # --- Decode ---\n video_recon_BTm1P = self.decoder(video_action_patches_BTm1NM)\n video_recon_BTm1P = video_recon_BTm1P.astype(jnp.float32)\n video_recon_BTm1P = 
nnx.sigmoid(video_recon_BTm1P)\n video_recon_BTm1P = video_recon_BTm1P.astype(self.dtype)\n video_recon_BTHWC = unpatchify(video_recon_BTm1P, self.patch_size, H, W)\n outputs[""recon""] = video_recon_BTHWC\n return outputs\n\n def vq_encode(\n self, videos_BTHWC: jax.Array, training: bool = True\n ) -> Dict[str, jax.Array]:\n # --- Preprocess videos ---\n B, T = videos_BTHWC.shape[:2]\n patch_BTNP = patchify(videos_BTHWC, self.patch_size)\n action_pad_BT1P = jnp.broadcast_to(\n self.action_in.value, (B, T, 1, self.patch_token_dim)\n )\n padded_patch_BTNp1P = jnp.concatenate((action_pad_BT1P, patch_BTNP), axis=2)\n\n # --- Encode ---\n z_BTNp1L = self.encoder(padded_patch_BTNp1P)\n # Get latent action for all future frames\n z_BTm1L = z_BTNp1L[:, 1:, 0]\n\n # --- Vector quantize ---\n z_EL = z_BTm1L.reshape(B * (T - 1), self.latent_dim)\n z_q_EL, z_EL, emb_EL, indices_E = self.vq(z_EL, training)\n z_q_BTm11L = z_q_EL.reshape(B, T - 1, 1, self.latent_dim)\n return dict(patches=patch_BTNP, z_q=z_q_BTm11L, z=z_EL, emb=emb_EL, indices=indices_E)\n",python,tab +41,280182,"models/lam.py",3143,0,"",python,selection_mouse +42,280308,"models/lam.py",3137,8,"training",python,selection_mouse +43,285436,"models/lam.py",3380,0,"",python,selection_mouse +44,286391,"models/lam.py",3329,0,"",python,selection_mouse +45,286545,"models/lam.py",4186,0,"",python,selection_command +46,288704,"models/lam.py",4238,0,"",python,selection_mouse +47,288839,"models/lam.py",4236,8,"training",python,selection_mouse +48,292097,"models/lam.py",4939,0,"",python,selection_mouse +49,292234,"models/lam.py",2142,0,"",python,selection_command +50,294289,"models/lam.py",2152,0,"",python,selection_mouse +51,294761,"utils/nn.py",0,0,"import math\nfrom typing import Tuple, Callable, List\n\nfrom flax import nnx\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\nclass SpatioTemporalPositionalEncoding(nnx.Module):\n """"""\n Applies separate sinusoidal positional encodings to the temporal and spatial dimensions.\n """"""\n def __init__(self, d_model: int, max_len: int = 5000):\n self.d_model = d_model\n self.max_len = max_len\n\n pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n pe = pe.at[:, 0::2].set(jnp.sin(position * div_term))\n pe = pe.at[:, 1::2].set(jnp.cos(position * div_term))\n self.pe = nnx.Variable(pe)\n\n def __call__(self, x: jax.Array) -> jax.Array:\n """"""\n Args:\n x: The input tensor of shape (Batch, Time, Space, Dimension).\n\n Returns:\n The input tensor with positional encodings added.\n """"""\n assert x.ndim == 4, f""Input must be 4-dimensional, but got shape {x.shape}""\n\n num_timesteps = x.shape[1]\n num_spatial_patches = x.shape[2]\n\n # Temporal positional encoding: (1, T, 1, D)\n temporal_pe = self.pe.value[None, :num_timesteps, None, :]\n x = x + temporal_pe\n\n # Spatial positional encoding: (1, 1, S, D)\n spatial_pe = self.pe.value[None, None, :num_spatial_patches, :]\n x = x + spatial_pe\n\n return x\n\n\nclass STBlock(nnx.Module):\n def __init__(\n self,\n dim: int,\n ffn_dim: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.dim = dim\n self.ffn_dim = ffn_dim\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.spatial_norm = 
nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.spatial_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=False\n ),\n rngs=rngs,\n decode=False,\n )\n\n self.temporal_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.temporal_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=False,\n )\n\n self.ffn_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense1 = nnx.Linear(\n in_features=self.dim,\n out_features=self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense2 = nnx.Linear(\n in_features=self.ffn_dim,\n out_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n @nnx.remat\n def __call__(self, x_BTNM: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z_BTNM = self.spatial_norm(x_BTNM)\n z_BTNM = self.spatial_attention(z_BTNM)\n x_BTNM = x_BTNM + z_BTNM\n\n # --- Temporal attention ---\n x_BNTM = x_BTNM.swapaxes(1, 2)\n z_BNTM = self.temporal_norm(x_BNTM)\n z_BNTM = self.temporal_attention(z_BNTM)\n x_BNTM = x_BNTM + z_BNTM\n x_BTNM = x_BNTM.swapaxes(1, 2)\n\n # --- Feedforward ---\n z_BTNM = self.ffn_norm(x_BTNM)\n z_BTND = self.ffn_dense1(z_BTNM)\n z_BTND = jax.nn.gelu(z_BTND)\n z_BTNM = self.ffn_dense2(z_BTND)\n x_BTNM = x_BTNM + z_BTNM\n\n return x_BTNM\n\n\nclass STTransformer(nnx.Module):\n """"""\n Dimension keys:\n B: batch size\n T: number of frames\n N: number of patches per frame\n I: number of input features\n M: model dimension\n D: FFN dimension\n O: number of output features\n """"""\n def __init__(\n self,\n input_dim: int,\n model_dim: int,\n ffn_dim: int,\n out_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n max_len: int = 5000,\n ):\n self.input_dim = input_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.out_dim = out_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.input_norm1 = nnx.LayerNorm(\n num_features=self.input_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_dense = nnx.Linear(\n in_features=self.input_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_norm2 = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n self.pos_enc = SpatioTemporalPositionalEncoding(self.model_dim, max_len=max_len)\n\n self.blocks = []\n for _ in range(self.num_blocks):\n self.blocks.append(\n STBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n 
use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n )\n\n self.output_dense = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(self, x_BTNI: jax.Array) -> jax.Array:\n x_BTNI = self.input_norm1(x_BTNI)\n x_BTNM = self.input_dense(x_BTNI)\n x_BTNM = self.input_norm2(x_BTNM)\n x_BTNM = self.pos_enc(x_BTNM)\n for block in self.blocks:\n x_BTNM = block(x_BTNM)\n\n x_BTNO = self.output_dense(x_BTNM)\n return x_BTNO\n\nclass TransformerBlock(nnx.Module):\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n\n self.temporal_norm = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.spatial_norm = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_norm = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.temporal_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.model_dim,\n qkv_features=self.model_dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=self.decode,\n )\n self.spatial_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.model_dim,\n qkv_features=self.model_dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=self.decode,\n )\n self.ffn_dense1 = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense2 = nnx.Linear(\n in_features=self.ffn_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n @nnx.remat\n def __call__(self, x_BTNM: jax.Array, pos_index: Tuple[jax.Array, jax.Array] | None = None) -> jax.Array:\n # --- Spatial attention ---\n B, T, N, M = x_BTNM.shape\n z_FNM = einops.rearrange(x_BTNM, ""b t n m -> (b t) n m"")\n z_FNM = self.spatial_norm(z_FNM)\n z_FNM = self.spatial_attention(z_FNM)\n z_BTNM = einops.rearrange(z_FNM, ""(b t) n m -> b t n m"", t=T)\n x_BTNM = x_BTNM + z_BTNM\n # --- Temporal attention ---\n z_PTM = einops.rearrange(x_BTNM, ""b t n m -> (b n) t m"")\n z_PTM = self.temporal_norm(z_PTM)\n z_PTM = self.temporal_attention(z_PTM)\n z_BTNM = einops.rearrange(z_PTM, ""(b n) t m -> b t n m"", n=N)\n x_BTNM = x_BTNM + z_BTNM\n # --- Feedforward ---\n z_BTNM = self.ffn_norm(x_BTNM)\n z_BTND = self.ffn_dense1(z_BTNM)\n z_BTND = jax.nn.gelu(z_BTND)\n z_BTNM = self.ffn_dense2(z_BTND)\n x_BTNM = x_BTNM + z_BTNM\n\n return x_BTNM\n\nclass Transformer(nnx.Module):\n """"""\n Dimension keys:\n B: batch size\n T: number of frames\n N: number of patches per frame\n I: number of input features\n M: model dimension\n D: FFN dimension\n O: number of output 
features\n F: number of frames in batch\n P: number of patch positions in batch\n """"""\n def __init__(\n self,\n input_dim: int,\n model_dim: int,\n ffn_dim: int,\n out_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n max_len: int = 5000,\n ):\n self.input_dim = input_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.out_dim = out_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.input_norm1 = nnx.LayerNorm(\n num_features=self.input_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_dense = nnx.Linear(\n in_features=self.input_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_norm2 = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n self.pos_enc = SpatioTemporalPositionalEncoding(self.model_dim, max_len=max_len)\n\n self.blocks: List[TransformerBlock] = []\n for _ in range(self.num_blocks):\n self.blocks.append(\n TransformerBlock(\n model_dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=decode,\n rngs=rngs,\n )\n )\n self.output_dense = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(self, x_BTNI: jax.Array, pos_index: Tuple[jax.Array, jax.Array] | None = None) -> jax.Array:\n x_BTNI = self.input_norm1(x_BTNI)\n x_BTNM = self.input_dense(x_BTNI)\n x_BTNM = self.input_norm2(x_BTNM)\n x_BTNM = self.pos_enc(x_BTNM)\n for block in self.blocks:\n x_BTNM = block(x_BTNM, pos_index)\n\n x_BTNV = self.output_dense(x_BTNM)\n return x_BTNV\n\ndef normalize(x: jax.Array) -> jax.Array:\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nnx.Module):\n """"""\n Dimension keys:\n D: B * T * N\n K: number of latents\n L: latent dimension\n """"""\n def __init__(\n self, latent_dim: int, num_latents: int, dropout: float, rngs: nnx.Rngs\n ):\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.dropout = dropout\n\n self.codebook = nnx.Param(\n normalize(\n nnx.initializers.lecun_uniform()(\n rngs.params(), (self.num_latents, self.latent_dim)\n )\n )\n )\n self.drop = nnx.Dropout(self.dropout, rngs=rngs)\n\n def __call__(\n self, x_DL: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x_DL = normalize(x_DL)\n normalized_codebook_KL = normalize(self.codebook.value)\n distance_DK = -jnp.matmul(x_DL, normalized_codebook_KL.T)\n if training:\n distance_DK = self.drop(distance_DK)\n\n # --- Get indices and embeddings ---\n indices_D = jnp.argmin(distance_DK, axis=-1)\n z_DL = self.codebook[indices_D]\n\n # --- Straight through estimator ---\n z_q_DL = x_DL + jax.lax.stop_gradient(z_DL - x_DL)\n return z_q_DL, z_DL, x_DL, indices_D\n\n def get_codes(self, indices_E: jax.Array) -> jax.Array:\n return self.codebook[indices_E]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool) -> Callable:\n """"""\n Create an attention function that uses 
flash attention if enabled.\n\n flax.nnx.MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim),\n but jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim). We reshape to\n ensure compatibility. cuDNN's flash attention additionally requires a sequence length that\n is a multiple of 4. We pad the sequence length to the nearest multiple of 4 and mask\n accordingly. Note that cuDNN requires the mask to be broadcast before calling the attention\n function due to strict shape checking.\n """"""\n\n def attention_fn(query_BTHD, key_BSHD, value_BSHD, bias=None, mask_B111=None, **kwargs):\n implementation = ""cudnn"" if use_flash_attention else None\n\n def _merge_batch_dims(x):\n return einops.rearrange(x, ""... l h k -> (...) l h k"")\n\n def _pad(x, pad_size):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n original_shape = query_BTHD.shape\n T = query_BTHD.shape[-3]\n S = key_BSHD.shape[-3]\n\n # Pad to nearest multiple of 4\n Q = ((T + 3) // 4) * 4\n pad_size_Q = Q - T\n K = ((S + 3) // 4) * 4\n pad_size_K = K - S\n\n query_BQHD = _pad(_merge_batch_dims(query_BTHD), pad_size_Q)\n key_BKHD = _pad(_merge_batch_dims(key_BSHD), pad_size_K)\n value_BKHD = _pad(_merge_batch_dims(value_BSHD), pad_size_K)\n\n attention_mask = jnp.ones((Q, K), dtype=jnp.bool_)\n attention_mask = attention_mask.at[T:, :].set(False)\n attention_mask = attention_mask.at[:, S:].set(False)\n\n mask_11TS = attention_mask[jnp.newaxis, jnp.newaxis, :, :]\n\n bias_4d = jnp.pad(_merge_batch_dims(bias), ((0, 0), (0, 0), (0, pad_size_Q), (0, pad_size_K))) if bias is not None else None\n\n # NOTE: jax.nn.dot_product_attention does not support dropout\n output_4d = jax.nn.dot_product_attention(\n query=query_BQHD,\n key=key_BKHD,\n value=value_BKHD,\n bias=bias_4d,\n mask=mask_11TS,\n implementation=implementation,\n is_causal=is_causal,\n )\n return output_4d[..., :T, :, :].reshape(original_shape)\n\n return attention_fn\n",python,tab +52,294765,"utils/nn.py",14132,0,"",python,selection_command +53,300823,"utils/nn.py",14784,0,"",python,selection_mouse +54,300848,"utils/nn.py",14783,0,"",python,selection_command +55,300976,"utils/nn.py",14784,0,"",python,selection_mouse +56,300976,"utils/nn.py",14783,0,"",python,selection_command +57,301482,"utils/nn.py",14821,0,"",python,selection_mouse +58,301649,"utils/nn.py",14816,8,"training",python,selection_mouse +59,311742,"train_lam.py",0,0,"",python,tab +60,314038,"train_lam.py",2247,0,"",python,selection_mouse +61,314788,"train_lam.py",2102,0,"",python,selection_mouse +62,314799,"train_lam.py",2101,0,"",python,selection_command +63,315374,"train_lam.py",2034,0,"",python,selection_mouse +64,315994,"train_lam.py",2017,0,"",python,selection_mouse +65,315996,"train_lam.py",2016,0,"",python,selection_command +66,316610,"train_lam.py",2051,0,"",python,selection_mouse +67,316874,"train_lam.py",2050,5,"Array",python,selection_mouse +68,317475,"train_lam.py",2008,0,"",python,selection_mouse +69,318148,"train_lam.py",2017,0,"",python,selection_mouse +70,318192,"train_lam.py",2016,0,"",python,selection_command +71,319465,"train_lam.py",2017,0,"",python,selection_command +72,319707,"train_lam.py",2017,0,",",python,content +73,319709,"train_lam.py",2018,0,"",python,selection_keyboard +74,320123,"train_lam.py",2018,0," ",python,content +75,320124,"train_lam.py",2019,0,"",python,selection_keyboard +76,320297,"train_lam.py",2019,0,"t",python,content +77,320298,"train_lam.py",2020,0,"",python,selection_keyboard 
+78,320497,"train_lam.py",2020,0,"r",python,content +79,320498,"train_lam.py",2021,0,"",python,selection_keyboard +80,320696,"train_lam.py",2021,0,"a",python,content +81,320697,"train_lam.py",2022,0,"",python,selection_keyboard +82,320833,"train_lam.py",2022,0,"n",python,content +83,320835,"train_lam.py",2023,0,"",python,selection_keyboard +84,321284,"train_lam.py",2022,1,"",python,content +85,321547,"train_lam.py",2022,0,"i",python,content +86,321548,"train_lam.py",2023,0,"",python,selection_keyboard +87,321638,"train_lam.py",2023,0,"n",python,content +88,321639,"train_lam.py",2024,0,"",python,selection_keyboard +89,321790,"train_lam.py",2024,0,"i",python,content +90,321791,"train_lam.py",2025,0,"",python,selection_keyboard +91,321852,"train_lam.py",2025,0,"n",python,content +92,321853,"train_lam.py",2026,0,"",python,selection_keyboard +93,321940,"train_lam.py",2026,0,"g",python,content +94,321941,"train_lam.py",2027,0,"",python,selection_keyboard +95,322487,"train_lam.py",2027,0," ",python,content +96,322489,"train_lam.py",2028,0,"",python,selection_keyboard +97,324869,"train_lam.py",2027,1,"",python,content +98,325291,"train_lam.py",2027,0,":",python,content +99,325291,"train_lam.py",2028,0,"",python,selection_keyboard +100,325533,"train_lam.py",2028,0," ",python,content +101,325533,"train_lam.py",2029,0,"",python,selection_keyboard +102,325792,"train_lam.py",2029,0,"b",python,content +103,325794,"train_lam.py",2030,0,"",python,selection_keyboard +104,326129,"train_lam.py",2030,0,"o",python,content +105,326130,"train_lam.py",2031,0,"",python,selection_keyboard +106,326251,"train_lam.py",2031,0,"o",python,content +107,326253,"train_lam.py",2032,0,"",python,selection_keyboard +108,326486,"train_lam.py",2032,0,"l",python,content +109,326487,"train_lam.py",2033,0,"",python,selection_keyboard +110,326681,"train_lam.py",2033,0," ",python,content +111,326682,"train_lam.py",2034,0,"",python,selection_keyboard +112,327458,"train_lam.py",2034,0,"=",python,content +113,327458,"train_lam.py",2035,0,"",python,selection_keyboard +114,327728,"train_lam.py",2035,0," ",python,content +115,327728,"train_lam.py",2036,0,"",python,selection_keyboard +116,327982,"train_lam.py",2036,0,"T",python,content +117,327983,"train_lam.py",2037,0,"",python,selection_keyboard +118,328200,"train_lam.py",2037,0,"r",python,content +119,328202,"train_lam.py",2038,0,"",python,selection_keyboard +120,328386,"train_lam.py",2038,0,"u",python,content +121,328388,"train_lam.py",2039,0,"",python,selection_keyboard +122,328719,"train_lam.py",2039,0,"e",python,content +123,328721,"train_lam.py",2040,0,"",python,selection_keyboard +124,329188,"train_lam.py",2039,0,"",python,selection_command +125,329356,"train_lam.py",2097,0,"",python,selection_command +126,329608,"train_lam.py",2124,0,"",python,selection_command +127,329768,"train_lam.py",2190,0,"",python,selection_command +128,329956,"train_lam.py",2235,0,"",python,selection_command +129,330129,"train_lam.py",2278,0,"",python,selection_command +130,330571,"train_lam.py",2277,0,"",python,selection_command +131,330785,"train_lam.py",2276,0,"",python,selection_command +132,330905,"train_lam.py",2275,0,"",python,selection_command +133,330998,"train_lam.py",2274,0,"",python,selection_command +134,331603,"train_lam.py",2274,4,"",python,content +135,332113,"train_lam.py",2274,0,"t",python,content +136,332114,"train_lam.py",2275,0,"",python,selection_keyboard +137,332215,"train_lam.py",2275,0,"r",python,content +138,332216,"train_lam.py",2276,0,"",python,selection_keyboard 
+139,332412,"train_lam.py",2276,0,"a",python,content +140,332412,"train_lam.py",2277,0,"",python,selection_keyboard +141,332432,"train_lam.py",2277,0,"i",python,content +142,332432,"train_lam.py",2278,0,"",python,selection_keyboard +143,332522,"train_lam.py",2278,0,"n",python,content +144,332524,"train_lam.py",2279,0,"",python,selection_keyboard +145,332630,"train_lam.py",2279,0,"i",python,content +146,332631,"train_lam.py",2280,0,"",python,selection_keyboard +147,332680,"train_lam.py",2280,0,"n",python,content +148,332681,"train_lam.py",2281,0,"",python,selection_keyboard +149,332733,"train_lam.py",2281,0,"g",python,content +150,332734,"train_lam.py",2282,0,"",python,selection_keyboard +151,333650,"train_lam.py",2281,0,"",python,selection_command +152,353505,"train_lam.py",3778,0,"",python,selection_mouse +153,355123,"train_lam.py",3778,0,",",python,content +154,355123,"train_lam.py",3779,0,"",python,selection_keyboard +155,355226,"train_lam.py",3779,0," ",python,content +156,355226,"train_lam.py",3780,0,"",python,selection_keyboard +157,356169,"train_lam.py",3780,0,"T",python,content +158,356170,"train_lam.py",3781,0,"",python,selection_keyboard +159,356333,"train_lam.py",3781,0,"r",python,content +160,356334,"train_lam.py",3782,0,"",python,selection_keyboard +161,356477,"train_lam.py",3782,0,"u",python,content +162,356478,"train_lam.py",3783,0,"",python,selection_keyboard +163,356568,"train_lam.py",3783,0,"e",python,content +164,356569,"train_lam.py",3784,0,"",python,selection_keyboard +165,357075,"train_lam.py",3783,0,"",python,selection_command +166,357952,"train_lam.py",3813,0,"",python,selection_mouse +167,357963,"train_lam.py",3812,0,"",python,selection_command +168,359117,"train_lam.py",3780,0,"",python,selection_mouse +169,362874,"train_lam.py",3780,0,"t",python,content +170,362876,"train_lam.py",3781,0,"",python,selection_keyboard +171,362968,"train_lam.py",3781,0,"r",python,content +172,362970,"train_lam.py",3782,0,"",python,selection_keyboard +173,363134,"train_lam.py",3782,0,"a",python,content +174,363135,"train_lam.py",3783,0,"",python,selection_keyboard +175,363239,"train_lam.py",3783,0,"i",python,content +176,363239,"train_lam.py",3784,0,"",python,selection_keyboard +177,363292,"train_lam.py",3784,0,"n",python,content +178,363293,"train_lam.py",3785,0,"",python,selection_keyboard +179,363399,"train_lam.py",3785,0,"i",python,content +180,363401,"train_lam.py",3786,0,"",python,selection_keyboard +181,363500,"train_lam.py",3786,0,"n",python,content +182,363502,"train_lam.py",3787,0,"",python,selection_keyboard +183,363519,"train_lam.py",3787,0,"g",python,content +184,363520,"train_lam.py",3788,0,"",python,selection_keyboard +185,364699,"train_lam.py",3788,0,"=",python,content +186,364701,"train_lam.py",3789,0,"",python,selection_keyboard +187,366797,"train_lam.py",3788,0,"",python,selection_command +188,380070,"train_lam.py",3602,0,"",python,selection_command +189,381646,"train_lam.py",4641,0,"",python,selection_mouse +190,382593,"train_lam.py",4641,0,"\n",python,content +191,383245,"train_lam.py",4641,0,"",python,selection_command +192,389671,"train_lam.py",4641,0,"@nnx.jit\ndef val_step(lam: LatentActionModel, inputs: dict) -> tuple[jax.Array, jax.Array, dict]:\n lam.eval()\n (loss, (recon, idx_counts, metrics)) = lam_loss_fn(lam, inputs)\n return loss, recon, metrics\n",python,content +193,401510,"train_lam.py",4779,0,"",python,selection_mouse +194,401650,"train_lam.py",4773,10,"idx_counts",python,selection_mouse +195,415641,"train_lam.py",3847,0,"",python,selection_mouse 
+196,415806,"train_lam.py",3842,10,"idx_counts",python,selection_mouse +197,420044,"train_lam.py",4080,0,"",python,selection_mouse +198,420185,"train_lam.py",4072,12,"active_codes",python,selection_mouse +199,456440,"train_lam.py",3602,0,"",python,selection_command +200,457598,"train_lam.py",4780,0,"",python,selection_mouse +201,457743,"train_lam.py",4773,10,"idx_counts",python,selection_mouse +202,458636,"train_lam.py",4773,10,"",python,content +203,458897,"train_lam.py",4773,0,"_",python,content +204,458898,"train_lam.py",4774,0,"",python,selection_keyboard +205,459989,"train_lam.py",4773,0,"",python,selection_command +206,460695,"train_lam.py",4845,0,"",python,selection_mouse +207,461916,"train_lam.py",4844,0,"",python,selection_mouse +208,461923,"train_lam.py",4843,0,"",python,selection_command +209,473779,"train_lam.py",4846,0,"",python,selection_mouse +210,474912,"train_lam.py",4846,0,"\n",python,content +211,475336,"train_lam.py",4846,0,"",python,selection_command +212,475712,"train_lam.py",4846,0,"def calculate_validation_metrics(val_dataloader):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n print(f""Calculating validation metrics..."")\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(lam, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}"")\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, recon, val_metrics",python,content +213,480144,"train_lam.py",5257,0,"",python,selection_mouse +214,480827,"train_lam.py",5275,0,"",python,selection_mouse +215,499589,"train_lam.py",6595,0,"",python,selection_mouse +216,500461,"train_lam.py",6594,0,"",python,selection_command +217,503255,"train_lam.py",9985,0,"",python,selection_command +218,519853,"train_tokenizer.py",0,0,"",python,tab +219,528395,"train_tokenizer.py",3821,0,"",python,selection_command +220,537612,"train_tokenizer.py",4872,0,"",python,selection_mouse +221,537615,"train_tokenizer.py",4871,0,"",python,selection_command +222,538130,"train_tokenizer.py",4905,0,"",python,selection_mouse +223,538947,"train_tokenizer.py",4873,48,"",python,content +224,539021,"train_tokenizer.py",4877,0,"",python,selection_command +225,540492,"train_tokenizer.py",4810,0,"",python,selection_command +226,549260,"train_tokenizer.py",12362,0,"",python,selection_mouse +227,550042,"train_tokenizer.py",12398,0,"\n print(f""Calculating validation metrics..."")",python,content +228,550106,"train_tokenizer.py",12403,0,"",python,selection_command +229,550827,"train_tokenizer.py",12403,0," ",python,content +230,551006,"train_tokenizer.py",12407,0," ",python,content +231,551413,"train_tokenizer.py",12411,0," ",python,content +232,551804,"train_tokenizer.py",12414,0,"",python,selection_command +233,576491,"train_tokenizer.py",12622,0,"",python,selection_mouse +234,576801,"train_tokenizer.py",12567,55," print(f""Step {step}, validation loss: {val_loss}"")\n",python,selection_mouse +235,576838,"train_tokenizer.py",12566,56," print(f""Step {step}, validation loss: {val_loss}"")\n",python,selection_mouse +236,576839,"train_tokenizer.py",12567,55," print(f""Step {step}, validation loss: {val_loss}"")\n",python,selection_command 
+237,576840,"train_tokenizer.py",12467,155," val_loss, val_recon, val_metrics = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n",python,selection_mouse +238,576909,"train_tokenizer.py",12405,217," print(f""Calculating validation metrics..."")\n val_loss, val_recon, val_metrics = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n",python,selection_mouse +239,576910,"train_tokenizer.py",12404,218," print(f""Calculating validation metrics..."")\n val_loss, val_recon, val_metrics = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n",python,selection_mouse +240,576910,"train_tokenizer.py",12403,219," print(f""Calculating validation metrics..."")\n val_loss, val_recon, val_metrics = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n",python,selection_mouse +241,576910,"train_tokenizer.py",12402,220," print(f""Calculating validation metrics..."")\n val_loss, val_recon, val_metrics = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n",python,selection_mouse +242,576944,"train_tokenizer.py",12401,221," print(f""Calculating validation metrics..."")\n val_loss, val_recon, val_metrics = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n",python,selection_mouse +243,576979,"train_tokenizer.py",12332,290," if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_recon, val_metrics = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n",python,selection_mouse +244,577024,"train_tokenizer.py",12331,291," if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_recon, val_metrics = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n",python,selection_mouse +245,577202,"train_tokenizer.py",12293,329," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_recon, val_metrics = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n",python,selection_mouse +246,581177,"train_tokenizer.py",13976,0,"",python,selection_mouse +247,582741,"train_tokenizer.py",13285,0,"",python,selection_mouse +248,583645,"train_tokenizer.py",13228,0,"",python,selection_mouse +249,584212,"train_tokenizer.py",13284,0,"",python,selection_mouse +250,588197,"train_tokenizer.py",13202,0,"",python,selection_mouse +251,588199,"train_tokenizer.py",13201,0,"",python,selection_command +252,588516,"train_tokenizer.py",13201,1,")",python,selection_mouse +253,588516,"train_tokenizer.py",13200,1,"t",python,selection_mouse +254,588516,"train_tokenizer.py",13162,39,"\n wandb.log(log_dict",python,selection_mouse +255,588516,"train_tokenizer.py",13111,90," **val_metrics\n })\n wandb.log(log_dict",python,selection_mouse +256,588517,"train_tokenizer.py",13056,145," ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict",python,selection_mouse +257,588591,"train_tokenizer.py",13202,0,"",python,selection_command +258,588592,"train_tokenizer.py",12905,297," }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n 
wandb.log(log_dict)",python,selection_mouse +259,588592,"train_tokenizer.py",12829,373," ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)",python,selection_mouse +260,588651,"train_tokenizer.py",12678,524," if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)",python,selection_mouse +261,588705,"train_tokenizer.py",12623,579," # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)",python,selection_mouse +262,588755,"train_tokenizer.py",12555,647," print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)",python,selection_mouse +263,588788,"train_tokenizer.py",12459,743," val_loss, val_recon, val_metrics = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)",python,selection_mouse +264,588846,"train_tokenizer.py",12399,803," print(f""Calculating validation metrics..."")\n val_loss, val_recon, val_metrics = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)",python,selection_mouse +265,588933,"train_tokenizer.py",12331,871," if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_recon, val_metrics = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)",python,selection_mouse +266,589280,"train_tokenizer.py",12293,909," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_recon, val_metrics = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 
0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)",python,selection_mouse +267,598327,"train_lam.py",0,0,"",python,tab +268,602239,"train_lam.py",12724,0,"",python,selection_mouse +269,602245,"train_lam.py",12723,0,"",python,selection_command +270,602443,"train_lam.py",12723,1,")",python,selection_mouse +271,602471,"train_lam.py",12724,0,"",python,selection_command +272,602598,"train_lam.py",12723,1,")",python,selection_mouse +273,602660,"train_lam.py",12722,2," )",python,selection_mouse +274,602702,"train_lam.py",12721,3," )",python,selection_mouse +275,602807,"train_lam.py",12694,30," }\n )",python,selection_mouse +276,603427,"train_lam.py",12692,32," }\n )",python,selection_mouse +277,603427,"train_lam.py",12650,74," **metrics,\n }\n )",python,selection_mouse +278,603475,"train_lam.py",12603,121," ""step"": step,\n **metrics,\n }\n )",python,selection_mouse +279,603475,"train_lam.py",12531,193," {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )",python,selection_mouse +280,603512,"train_lam.py",12418,306," if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )",python,selection_mouse +281,603552,"train_lam.py",12393,331," if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )",python,selection_mouse +282,603601,"train_lam.py",12362,362,"\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )",python,selection_mouse +283,604141,"train_lam.py",12363,361," # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )",python,selection_mouse +284,605379,"train_lam.py",12363,361,"",python,content +285,606429,"train_lam.py",12363,0," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_recon, val_metrics = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)",python,content +286,615861,"train_tokenizer.py",0,0,"",python,tab +287,618106,"train_tokenizer.py",11900,0,"",python,selection_mouse +288,618124,"train_tokenizer.py",11899,0,"",python,selection_command +289,618349,"train_tokenizer.py",11899,1,")",python,selection_mouse +290,618350,"train_tokenizer.py",11868,31," for elem in val_iterator\n ",python,selection_mouse +291,618351,"train_tokenizer.py",11795,104," jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n ",python,selection_mouse +292,618351,"train_tokenizer.py",11794,105," jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n ",python,selection_mouse +293,618351,"train_tokenizer.py",11770,129," dataloader_val = (\n 
jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n ",python,selection_mouse +294,618396,"train_tokenizer.py",11900,0,"",python,selection_command +295,618396,"train_tokenizer.py",11769,131," dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )",python,selection_mouse +296,618466,"train_tokenizer.py",11763,137," )\n dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )",python,selection_mouse +297,618481,"train_tokenizer.py",11728,172," for elem in train_iterator\n )\n dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )",python,selection_mouse +298,618541,"train_tokenizer.py",11658,242," jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in train_iterator\n )\n dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )",python,selection_mouse +299,618665,"train_tokenizer.py",11633,267," dataloader_train = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in train_iterator\n )\n dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )",python,selection_mouse +300,622267,"train_lam.py",0,0,"",python,tab +301,624909,"train_lam.py",11733,0,"",python,selection_mouse +302,625314,"train_lam.py",11731,2," )",python,selection_mouse +303,625315,"train_lam.py",11694,39," for elem in grain_iterator\n )",python,selection_mouse +304,625315,"train_lam.py",11693,40," for elem in grain_iterator\n )",python,selection_mouse +305,625315,"train_lam.py",11623,110," jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in grain_iterator\n )",python,selection_mouse +306,625488,"train_lam.py",11604,129," dataloader = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in grain_iterator\n )",python,selection_mouse +307,626208,"train_lam.py",11604,129,"",python,content +308,626484,"train_lam.py",11604,0," dataloader_train = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in train_iterator\n )\n dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )",python,content +309,630381,"train_lam.py",12049,0,"",python,selection_mouse +310,633797,"train_lam.py",11618,0,"",python,selection_mouse +311,633881,"train_lam.py",11618,1,"_",python,selection_mouse +312,633929,"train_lam.py",11618,3,"_tr",python,selection_mouse +313,633930,"train_lam.py",11618,4,"_tra",python,selection_mouse +314,634161,"train_lam.py",11618,5,"_trai",python,selection_mouse +315,634557,"train_lam.py",11618,6,"_train",python,selection_mouse +316,635082,"train_lam.py",11618,6,"",python,content +317,635828,"train_lam.py",11614,0,"",python,selection_mouse +318,635975,"train_lam.py",11608,10,"dataloader",python,selection_mouse +319,640513,"train_lam.py",11618,0,"",python,selection_command +320,640558,"train_lam.py",12053,0,"_train",python,content +321,640558,"train_lam.py",11618,0,"_train",python,content +322,647818,"train_tokenizer.py",0,0,"",python,tab +323,655321,"train_tokenizer.py",10600,0,"",python,selection_mouse +324,655561,"train_tokenizer.py",10484,116,"val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n",python,selection_mouse +325,655562,"train_tokenizer.py",10483,117," 
val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n",python,selection_mouse +326,655585,"train_tokenizer.py",10484,116,"val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n",python,selection_command +327,655586,"train_tokenizer.py",10482,118," val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n",python,selection_mouse +328,655626,"train_tokenizer.py",10481,119," val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n",python,selection_mouse +329,655672,"train_tokenizer.py",10363,237," train_iterator = create_dataloader_iterator(args.data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n",python,selection_mouse +330,655672,"train_tokenizer.py",10362,238," train_iterator = create_dataloader_iterator(args.data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n",python,selection_mouse +331,655730,"train_tokenizer.py",10285,315," image_shape = (args.image_height, args.image_width, args.image_channels)\n train_iterator = create_dataloader_iterator(args.data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n",python,selection_mouse +332,655826,"train_tokenizer.py",10229,371," # --- Create DataLoaderIterator from dataloader ---\n image_shape = (args.image_height, args.image_width, args.image_channels)\n train_iterator = create_dataloader_iterator(args.data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n",python,selection_mouse +333,659556,"train_lam.py",0,0,"",python,tab +334,663080,"train_lam.py",10747,0,"",python,selection_mouse +335,663272,"train_lam.py",10681,66,"rator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +336,663311,"train_lam.py",10679,68,"terator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +337,663312,"train_lam.py",10617,130,"l_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +338,663353,"train_lam.py",10615,132,"ial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +339,663354,"train_lam.py",10606,141,"\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +340,663431,"train_lam.py",10583,164," seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +341,663432,"train_lam.py",10526,221," num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, 
initial_state)\n",python,selection_mouse +342,663432,"train_lam.py",10477,270," args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +343,663457,"train_lam.py",10352,395," # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +344,663479,"train_lam.py",10262,485," grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +345,663510,"train_lam.py",10174,573," for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +346,663534,"train_lam.py",10135,612," os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +347,663568,"train_lam.py",10108,639," array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +348,663613,"train_lam.py",10031,716," image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n 
initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +349,664511,"train_lam.py",10031,716,"",python,content +350,664963,"train_lam.py",10031,0," # --- Create DataLoaderIterator from dataloader ---\n image_shape = (args.image_height, args.image_width, args.image_channels)\n train_iterator = create_dataloader_iterator(args.data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n",python,content +351,666190,"train_lam.py",10042,0,"",python,selection_mouse +352,666554,"train_lam.py",10041,0,"",python,selection_command +353,667620,"train_lam.py",10031,56,"",python,content +354,667687,"train_lam.py",10035,0,"",python,selection_command +355,670100,"train_tokenizer.py",0,0,"",python,tab +356,673656,"train_tokenizer.py",472,0,"",python,selection_mouse +357,676227,"train_lam.py",0,0,"",python,tab +358,679302,"train_lam.py",430,0,"",python,selection_mouse +359,679303,"train_lam.py",429,0,"",python,selection_command +360,680032,"train_lam.py",472,0,"",python,selection_mouse +361,680655,"train_lam.py",474,0,"\nfrom utils.dataloader import create_dataloader_iterator",python,content +362,680687,"train_lam.py",475,0,"",python,selection_command +363,680878,"train_lam.py",431,0,"",python,selection_command +364,681229,"train_lam.py",431,44,"",python,content +365,685536,"train_lam.py",0,0,"",python,tab +366,688249,"train_tokenizer.py",0,0,"",python,tab +367,688250,"train_tokenizer.py",352,0,"",python,selection_mouse +368,688259,"train_tokenizer.py",351,0,"",python,selection_command +369,689022,"train_tokenizer.py",597,0,"",python,selection_mouse +370,689677,"train_tokenizer.py",598,0,"",python,selection_mouse +371,695555,"train_tokenizer.py",1925,0,"",python,selection_mouse +372,696333,"train_tokenizer.py",1911,26," val_data_dir: str = """"",python,selection_command +373,696529,"train_tokenizer.py",1911,57," val_data_dir: str = """"\n val_interval: int = 20_000",python,selection_command +374,696664,"train_tokenizer.py",1911,81," val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50",python,selection_command +375,697422,"train_tokenizer.py",1911,0,"",python,selection_command +376,700151,"train_lam.py",0,0,"",python,tab +377,700152,"train_lam.py",1885,0,"",python,selection_mouse +378,701538,"train_lam.py",1944,0,"",python,selection_mouse +379,702962,"train_lam.py",1856,0,"",python,selection_mouse +380,704725,"train_lam.py",1883,0,"\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50",python,content +381,704756,"train_lam.py",1888,0,"",python,selection_command +382,765192,"train_tokenizer.py",0,0,"",python,tab +383,765193,"train_tokenizer.py",4943,0,"",python,selection_mouse +384,765235,"train_tokenizer.py",4942,0,"",python,selection_command +385,766133,"train_tokenizer.py",5179,0,"",python,selection_mouse +386,766158,"train_tokenizer.py",5178,0,"",python,selection_command +387,853600,"train_tokenizer.py",8736,0,"",python,selection_mouse +388,853735,"train_tokenizer.py",8734,4,"step",python,selection_mouse +389,866952,"train_tokenizer.py",9800,0,"",python,selection_mouse +390,866965,"train_tokenizer.py",9799,0,"",python,selection_command +391,867121,"train_tokenizer.py",9799,1,")",python,selection_mouse +392,867126,"train_tokenizer.py",9800,0,"",python,selection_command 
+393,867199,"train_tokenizer.py",9721,79,"cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +394,867200,"train_tokenizer.py",9720,80," cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +395,867201,"train_tokenizer.py",9719,81," cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +396,867319,"train_tokenizer.py",9675,125," grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +397,867319,"train_tokenizer.py",9673,127," grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +398,867320,"train_tokenizer.py",9641,159," ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +399,867320,"train_tokenizer.py",9640,160," ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +400,867320,"train_tokenizer.py",9613,187," handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +401,867321,"train_tokenizer.py",9606,194," )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +402,867343,"train_tokenizer.py",9482,318," grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +403,867368,"train_tokenizer.py",9450,350," ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +404,867387,"train_tokenizer.py",9424,376," handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +405,867433,"train_tokenizer.py",9418,382," )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +406,867484,"train_tokenizer.py",9336,464," cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n 
handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +407,867583,"train_tokenizer.py",9292,508," grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +408,867620,"train_tokenizer.py",9258,542," ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +409,867946,"train_tokenizer.py",9232,568," handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +410,869611,"train_tokenizer.py",9044,0,"",python,selection_mouse +411,869740,"train_tokenizer.py",9043,4," ",python,selection_mouse +412,869918,"train_tokenizer.py",9043,61," handler_registry.add(\n ""train_dataloader_state"",\n ",python,selection_mouse +413,869956,"train_tokenizer.py",9043,62," handler_registry.add(\n ""train_dataloader_state"",\n ",python,selection_mouse +414,869956,"train_tokenizer.py",9043,186," handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n ",python,selection_mouse +415,870031,"train_tokenizer.py",9043,192," handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n ",python,selection_mouse +416,870121,"train_tokenizer.py",9043,299," handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n ",python,selection_mouse +417,870122,"train_tokenizer.py",9043,452," handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain",python,selection_mouse +418,870199,"train_tokenizer.py",9043,650," handler_registry.add(\n 
""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint",python,selection_mouse +419,870272,"train_tokenizer.py",9043,695," handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers",python,selection_mouse +420,870334,"train_tokenizer.py",9043,650," handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint",python,selection_mouse +421,870443,"train_tokenizer.py",9043,687," handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.",python,selection_mouse +422,870444,"train_tokenizer.py",9043,686," handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp",python,selection_mouse +423,870505,"train_tokenizer.py",9043,757," handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n 
grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +424,875448,"train_lam.py",0,0,"",python,tab +425,875448,"train_lam.py",9640,0,"",python,selection_mouse +426,875462,"train_lam.py",9639,0,"",python,selection_command +427,875484,"train_lam.py",9639,1,")",python,selection_mouse +428,875548,"train_lam.py",9640,0,"",python,selection_command +429,875549,"train_lam.py",9512,128," grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +430,875549,"train_lam.py",9449,191," )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +431,875598,"train_lam.py",9367,273," cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +432,875765,"train_lam.py",9326,314," grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +433,875952,"train_lam.py",9298,342," ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +434,876097,"train_lam.py",9272,368," handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +435,877106,"train_lam.py",9272,368,"",python,content +436,877710,"train_lam.py",9272,0," handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,content +437,882374,"train_tokenizer.py",0,0,"",python,tab +438,882375,"train_tokenizer.py",10119,0,"",python,selection_mouse +439,882422,"train_tokenizer.py",10118,0,"",python,selection_command 
+440,893270,"train_tokenizer.py",11139,0,"",python,selection_mouse +441,896118,"train_lam.py",0,0,"",python,tab +442,896119,"train_lam.py",11272,0,"",python,selection_mouse +443,897705,"train_lam.py",11272,0,"p",python,content +444,897706,"train_lam.py",11273,0,"",python,selection_keyboard +445,898655,"train_lam.py",11272,1,"",python,content +446,899385,"train_lam.py",11271,0,"",python,selection_command +447,900120,"train_lam.py",11341,0,"\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore",python,content +448,900160,"train_lam.py",11358,0,"",python,selection_command +449,905100,"train_tokenizer.py",0,0,"",python,tab +450,905100,"train_tokenizer.py",11361,0,"",python,selection_mouse +451,905123,"train_tokenizer.py",11360,0,"",python,selection_command +452,905635,"train_tokenizer.py",11421,0,"",python,selection_mouse +453,905645,"train_tokenizer.py",11420,0,"",python,selection_command +454,906636,"train_tokenizer.py",11417,0,"",python,selection_command +455,908367,"train_lam.py",0,0,"",python,tab +456,908368,"train_lam.py",11612,0,"",python,selection_mouse +457,909301,"train_lam.py",11638,0,"\n train_iterator = restored[""train_dataloader_state""]\n val_iterator = restored[""val_dataloader_state""]",python,content +458,909321,"train_lam.py",11647,0,"",python,selection_command +459,909748,"train_lam.py",11593,0,"",python,selection_command +460,910274,"train_lam.py",11585,54,"",python,content +461,910310,"train_lam.py",11593,0,"",python,selection_command +462,910433,"train_lam.py",11537,0,"",python,selection_command +463,911734,"train_lam.py",11309,0,"",python,selection_mouse +464,912871,"train_lam.py",11309,1,"t",python,content +465,943748,"train_tokenizer.py",0,0,"",python,tab +466,943749,"train_tokenizer.py",14334,0,"",python,selection_mouse +467,943815,"train_tokenizer.py",14333,0,"",python,selection_command +468,943816,"train_tokenizer.py",14333,1,")",python,selection_mouse +469,943869,"train_tokenizer.py",14334,0,"",python,selection_command +470,943916,"train_tokenizer.py",14304,30," ),\n )",python,selection_mouse +471,943970,"train_tokenizer.py",14303,31," ),\n )",python,selection_mouse +472,946614,"train_tokenizer.py",15150,0,"",python,selection_mouse +473,946616,"train_tokenizer.py",15149,0,"",python,selection_command +474,947700,"train_tokenizer.py",15132,0,"",python,selection_mouse +475,947711,"train_tokenizer.py",15131,0,"",python,selection_command +476,948638,"train_tokenizer.py",15132,0,"",python,selection_mouse +477,948650,"train_tokenizer.py",15131,0,"",python,selection_command +478,948826,"train_tokenizer.py",15131,1,",",python,selection_mouse +479,948838,"train_tokenizer.py",15132,0,"",python,selection_command +480,948950,"train_tokenizer.py",15109,23,"\n ),",python,selection_mouse +481,948951,"train_tokenizer.py",15048,84," val_iterator # type: ignore\n ),\n ),",python,selection_mouse +482,948952,"train_tokenizer.py",15046,86," val_iterator # type: ignore\n ),\n ),",python,selection_mouse +483,948952,"train_tokenizer.py",14950,182," val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),",python,selection_mouse +484,948952,"train_tokenizer.py",14919,213," ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),",python,selection_mouse +485,948952,"train_tokenizer.py",14918,214," ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),",python,selection_mouse 
+486,949033,"train_tokenizer.py",14856,276," train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),",python,selection_mouse +487,949034,"train_tokenizer.py",14757,375," train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),",python,selection_mouse +488,949034,"train_tokenizer.py",14755,377," train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),",python,selection_mouse +489,949074,"train_tokenizer.py",14754,378," train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),",python,selection_mouse +490,949097,"train_tokenizer.py",14663,469," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),",python,selection_mouse +491,949122,"train_tokenizer.py",14662,470," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),",python,selection_mouse +492,949188,"train_tokenizer.py",14661,471," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),",python,selection_mouse +493,949273,"train_tokenizer.py",14615,517," args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),",python,selection_mouse +494,953586,"train_lam.py",0,0,"",python,tab +495,953586,"train_lam.py",15352,0,"",python,selection_mouse +496,953587,"train_lam.py",15257,95," grain_iterator # type: ignore\n ),\n ),",python,selection_mouse +497,953587,"train_lam.py",15161,191," dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),",python,selection_mouse +498,953587,"train_lam.py",15159,193," dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),",python,selection_mouse +499,953588,"train_lam.py",15067,285," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),",python,selection_mouse +500,953588,"train_lam.py",15066,286," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),",python,selection_mouse 
+501,953588,"train_lam.py",15065,287," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),",python,selection_mouse +502,953589,"train_lam.py",15064,288," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),",python,selection_mouse +503,953589,"train_lam.py",15019,333," args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),",python,selection_mouse +504,953640,"train_lam.py",15351,0,"",python,selection_command +505,953640,"train_lam.py",15257,95," grain_iterator # type: ignore\n ),\n ),",python,selection_command +506,953641,"train_lam.py",15019,333," args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),",python,selection_mouse +507,954852,"train_lam.py",15019,333,"",python,content +508,955505,"train_lam.py",15019,0," args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),",python,content +509,963064,"train_lam.py",14944,0,"",python,selection_mouse +510,1067866,"train_tokenizer.py",0,0,"",python,tab +511,1067867,"train_tokenizer.py",15082,0,"",python,selection_mouse +512,1067944,"train_tokenizer.py",15081,0,"",python,selection_command +513,1105650,"train_tokenizer.py",5495,0,"",python,selection_mouse +514,1105657,"train_tokenizer.py",5494,0,"",python,selection_command +515,1109337,"train_tokenizer.py",5519,0,"",python,selection_mouse +516,1109747,"train_tokenizer.py",5517,5,"recon",python,selection_mouse +517,1109870,"train_tokenizer.py",5496,40," return val_loss, recon, val_metrics\n",python,selection_mouse +518,1110616,"train_tokenizer.py",5519,0,"",python,selection_mouse +519,1110617,"train_tokenizer.py",5517,5,"recon",python,selection_mouse +520,1134538,"train_tokenizer.py",13288,0,"",python,selection_mouse +521,1137916,"train_tokenizer.py",12491,0,"",python,selection_mouse +522,1138071,"train_tokenizer.py",12485,9,"val_recon",python,selection_mouse +523,1141454,"train_tokenizer.py",13300,0,"",python,selection_mouse +524,1142869,"train_tokenizer.py",13360,0,"",python,selection_mouse +525,1143047,"train_tokenizer.py",13357,6,"videos",python,selection_mouse +526,1143825,"train_tokenizer.py",13676,0,"",python,selection_mouse +527,1143836,"train_tokenizer.py",13675,0,"",python,selection_command +528,1148276,"train_tokenizer.py",13676,0,"\n ",python,content +529,1148687,"train_tokenizer.py",13697,0,"g",python,content +530,1148688,"train_tokenizer.py",13698,0,"",python,selection_keyboard +531,1148830,"train_tokenizer.py",13698,0,"e",python,content +532,1148832,"train_tokenizer.py",13699,0,"",python,selection_keyboard +533,1149293,"train_tokenizer.py",13698,1,"",python,content +534,1149480,"train_tokenizer.py",13698,0,"t",python,content +535,1149482,"train_tokenizer.py",13699,0,"",python,selection_keyboard +536,1149874,"train_tokenizer.py",13699,0,"s",python,content 
+537,1149876,"train_tokenizer.py",13700,0,"",python,selection_keyboard +538,1150413,"train_tokenizer.py",13699,1,"",python,content +539,1150662,"train_tokenizer.py",13699,0,"_",python,content +540,1150663,"train_tokenizer.py",13700,0,"",python,selection_keyboard +541,1150813,"train_tokenizer.py",13700,0,"s",python,content +542,1150814,"train_tokenizer.py",13701,0,"",python,selection_keyboard +543,1150993,"train_tokenizer.py",13701,0,"e",python,content +544,1150995,"train_tokenizer.py",13702,0,"",python,selection_keyboard +545,1151163,"train_tokenizer.py",13702,0,"q",python,content +546,1151165,"train_tokenizer.py",13703,0,"",python,selection_keyboard +547,1151479,"train_tokenizer.py",13703,0,"_",python,content +548,1151481,"train_tokenizer.py",13704,0,"",python,selection_keyboard +549,1151663,"train_tokenizer.py",13704,0,"v",python,content +550,1151665,"train_tokenizer.py",13705,0,"",python,selection_keyboard +551,1151846,"train_tokenizer.py",13705,0,"a",python,content +552,1151848,"train_tokenizer.py",13706,0,"",python,selection_keyboard +553,1151897,"train_tokenizer.py",13706,0,"l",python,content +554,1151898,"train_tokenizer.py",13707,0,"",python,selection_keyboard +555,1152420,"train_tokenizer.py",13707,0," ",python,content +556,1152421,"train_tokenizer.py",13708,0,"",python,selection_keyboard +557,1152649,"train_tokenizer.py",13708,0,"=",python,content +558,1152650,"train_tokenizer.py",13709,0,"",python,selection_keyboard +559,1153046,"train_tokenizer.py",13709,0," ",python,content +560,1153048,"train_tokenizer.py",13710,0,"",python,selection_keyboard +561,1165435,"train_tokenizer.py",13710,0,"N",python,content +562,1165437,"train_tokenizer.py",13711,0,"",python,selection_keyboard +563,1165580,"train_tokenizer.py",13711,0,"o",python,content +564,1165581,"train_tokenizer.py",13712,0,"",python,selection_keyboard +565,1165706,"train_tokenizer.py",13712,0,"n",python,content +566,1165707,"train_tokenizer.py",13713,0,"",python,selection_keyboard +567,1165796,"train_tokenizer.py",13713,0,"e",python,content +568,1165798,"train_tokenizer.py",13714,0,"",python,selection_keyboard +569,1168570,"train_tokenizer.py",13357,0,"",python,selection_mouse +570,1168726,"train_tokenizer.py",13357,6,"videos",python,selection_mouse +571,1169294,"train_tokenizer.py",13352,0,"",python,selection_mouse +572,1169429,"train_tokenizer.py",13349,6,"inputs",python,selection_mouse +573,1185788,"train_tokenizer.py",5523,0,"",python,selection_mouse +574,1186712,"train_tokenizer.py",5516,0,"",python,selection_mouse +575,1187729,"train_tokenizer.py",5517,0,"",python,selection_mouse +576,1188830,"train_tokenizer.py",5517,0,"v",python,content +577,1188830,"train_tokenizer.py",5518,0,"",python,selection_keyboard +578,1189013,"train_tokenizer.py",5518,0,"a",python,content +579,1189014,"train_tokenizer.py",5519,0,"",python,selection_keyboard +580,1189113,"train_tokenizer.py",5519,0,"l",python,content +581,1189114,"train_tokenizer.py",5520,0,"",python,selection_keyboard +582,1189491,"train_tokenizer.py",5520,0,"_",python,content +583,1189493,"train_tokenizer.py",5521,0,"",python,selection_keyboard +584,1189780,"train_tokenizer.py",5521,0,"m",python,content +585,1189782,"train_tokenizer.py",5522,0,"",python,selection_keyboard +586,1189911,"train_tokenizer.py",5522,0,"e",python,content +587,1189914,"train_tokenizer.py",5523,0,"",python,selection_keyboard +588,1190119,"train_tokenizer.py",5523,0,"t",python,content +589,1190120,"train_tokenizer.py",5524,0,"",python,selection_keyboard 
+590,1190386,"train_tokenizer.py",5524,0,"i",python,content +591,1190388,"train_tokenizer.py",5525,0,"",python,selection_keyboard +592,1190732,"train_tokenizer.py",5524,1,"",python,content +593,1190952,"train_tokenizer.py",5524,0,"r",python,content +594,1190953,"train_tokenizer.py",5525,0,"",python,selection_keyboard +595,1191013,"train_tokenizer.py",5525,0,"i",python,content +596,1191014,"train_tokenizer.py",5526,0,"",python,selection_keyboard +597,1191129,"train_tokenizer.py",5526,0,"c",python,content +598,1191130,"train_tokenizer.py",5527,0,"",python,selection_keyboard +599,1191279,"train_tokenizer.py",5527,0,"s",python,content +600,1191280,"train_tokenizer.py",5528,0,"",python,selection_keyboard +601,1192427,"train_tokenizer.py",5528,0,",",python,content +602,1192428,"train_tokenizer.py",5529,0,"",python,selection_keyboard +603,1192842,"train_tokenizer.py",5529,0," ",python,content +604,1192843,"train_tokenizer.py",5530,0,"",python,selection_keyboard +605,1194008,"train_tokenizer.py",5530,5,"",python,content +606,1194520,"train_tokenizer.py",5530,1,"",python,content +607,1194675,"train_tokenizer.py",5530,12,"",python,content +608,1198043,"train_tokenizer.py",5530,0,"i",python,content +609,1198044,"train_tokenizer.py",5531,0,"",python,selection_keyboard +610,1198191,"train_tokenizer.py",5531,0,"n",python,content +611,1198192,"train_tokenizer.py",5532,0,"",python,selection_keyboard +612,1198413,"train_tokenizer.py",5532,0,"p",python,content +613,1198415,"train_tokenizer.py",5533,0,"",python,selection_keyboard +614,1198641,"train_tokenizer.py",5533,0,"u",python,content +615,1198642,"train_tokenizer.py",5534,0,"",python,selection_keyboard +616,1198771,"train_tokenizer.py",5534,0,"t",python,content +617,1198773,"train_tokenizer.py",5535,0,"",python,selection_keyboard +618,1199002,"train_tokenizer.py",5535,0,"s",python,content +619,1199003,"train_tokenizer.py",5536,0,"",python,selection_keyboard +620,1199228,"train_tokenizer.py",5536,0,",",python,content +621,1199229,"train_tokenizer.py",5537,0,"",python,selection_keyboard +622,1199314,"train_tokenizer.py",5537,0," ",python,content +623,1199315,"train_tokenizer.py",5538,0,"",python,selection_keyboard +624,1200480,"train_tokenizer.py",5538,0,"r",python,content +625,1200481,"train_tokenizer.py",5539,0,"",python,selection_keyboard +626,1200646,"train_tokenizer.py",5539,0,"e",python,content +627,1200647,"train_tokenizer.py",5540,0,"",python,selection_keyboard +628,1200846,"train_tokenizer.py",5540,0,"c",python,content +629,1200847,"train_tokenizer.py",5541,0,"",python,selection_keyboard +630,1200907,"train_tokenizer.py",5541,0,"o",python,content +631,1200909,"train_tokenizer.py",5542,0,"",python,selection_keyboard +632,1201046,"train_tokenizer.py",5542,0,"n",python,content +633,1201047,"train_tokenizer.py",5543,0,"",python,selection_keyboard +634,1204586,"train_tokenizer.py",5641,0,"",python,selection_mouse +635,1209964,"train_tokenizer.py",12499,0,"",python,selection_mouse +636,1210077,"train_tokenizer.py",12493,9,"val_recon",python,selection_mouse +637,1210572,"train_tokenizer.py",12493,9,"",python,content +638,1210760,"train_tokenizer.py",12492,1,"",python,content +639,1211162,"train_tokenizer.py",12491,1,"",python,content +640,1212047,"train_tokenizer.py",12504,0,"",python,selection_mouse +641,1212897,"train_tokenizer.py",12504,0,",",python,content +642,1212897,"train_tokenizer.py",12505,0,"",python,selection_keyboard +643,1212905,"train_tokenizer.py",12505,0," ",python,content 
+644,1212906,"train_tokenizer.py",12506,0,"",python,selection_keyboard +645,1214846,"train_tokenizer.py",12506,0,"v",python,content +646,1214847,"train_tokenizer.py",12507,0,"",python,selection_keyboard +647,1214943,"train_tokenizer.py",12507,0,"l",python,content +648,1214944,"train_tokenizer.py",12508,0,"",python,selection_keyboard +649,1215245,"train_tokenizer.py",12508,0,"_",python,content +650,1215246,"train_tokenizer.py",12509,0,"",python,selection_keyboard +651,1215745,"train_tokenizer.py",12508,1,"",python,content +652,1215905,"train_tokenizer.py",12507,1,"",python,content +653,1216070,"train_tokenizer.py",12507,0,"a",python,content +654,1216071,"train_tokenizer.py",12508,0,"",python,selection_keyboard +655,1216169,"train_tokenizer.py",12508,0,"ö",python,content +656,1216169,"train_tokenizer.py",12509,0,"",python,selection_keyboard +657,1217154,"train_tokenizer.py",12508,1,"",python,content +658,1218421,"train_tokenizer.py",12508,0,"l",python,content +659,1218422,"train_tokenizer.py",12509,0,"",python,selection_keyboard +660,1218705,"train_tokenizer.py",12509,0,"_",python,content +661,1218706,"train_tokenizer.py",12510,0,"",python,selection_keyboard +662,1218979,"train_tokenizer.py",12510,0,"g",python,content +663,1218981,"train_tokenizer.py",12511,0,"",python,selection_keyboard +664,1219130,"train_tokenizer.py",12511,0,"t",python,content +665,1219132,"train_tokenizer.py",12512,0,"",python,selection_keyboard +666,1219663,"train_tokenizer.py",12512,0,"_",python,content +667,1219665,"train_tokenizer.py",12513,0,"",python,selection_keyboard +668,1224117,"train_tokenizer.py",12513,0,"b",python,content +669,1224118,"train_tokenizer.py",12514,0,"",python,selection_keyboard +670,1224272,"train_tokenizer.py",12514,0,"a",python,content +671,1224274,"train_tokenizer.py",12515,0,"",python,selection_keyboard +672,1224836,"train_tokenizer.py",12515,0,"t",python,content +673,1224837,"train_tokenizer.py",12516,0,"",python,selection_keyboard +674,1225414,"train_tokenizer.py",12516,0,"c",python,content +675,1225416,"train_tokenizer.py",12517,0,"",python,selection_keyboard +676,1225693,"train_tokenizer.py",12517,0,"h",python,content +677,1225695,"train_tokenizer.py",12518,0,"",python,selection_keyboard +678,1228313,"train_tokenizer.py",12518,0,",",python,content +679,1228314,"train_tokenizer.py",12519,0,"",python,selection_keyboard +680,1228398,"train_tokenizer.py",12519,0," ",python,content +681,1228399,"train_tokenizer.py",12520,0,"",python,selection_keyboard +682,1229231,"train_tokenizer.py",12520,0,"v",python,content +683,1229232,"train_tokenizer.py",12521,0,"",python,selection_keyboard +684,1229397,"train_tokenizer.py",12521,0,"a",python,content +685,1229399,"train_tokenizer.py",12522,0,"",python,selection_keyboard +686,1229492,"train_tokenizer.py",12522,0,"l",python,content +687,1229494,"train_tokenizer.py",12523,0,"",python,selection_keyboard +688,1229975,"train_tokenizer.py",12523,0,"_",python,content +689,1229976,"train_tokenizer.py",12524,0,"",python,selection_keyboard +690,1230139,"train_tokenizer.py",12524,0,"r",python,content +691,1230140,"train_tokenizer.py",12525,0,"",python,selection_keyboard +692,1230297,"train_tokenizer.py",12525,0,"e",python,content +693,1230298,"train_tokenizer.py",12526,0,"",python,selection_keyboard +694,1230438,"train_tokenizer.py",12526,0,"c",python,content +695,1230439,"train_tokenizer.py",12527,0,"",python,selection_keyboard +696,1230496,"train_tokenizer.py",12527,0,"o",python,content +697,1230497,"train_tokenizer.py",12528,0,"",python,selection_keyboard 
+698,1230862,"train_tokenizer.py",12528,0,"n",python,content +699,1230863,"train_tokenizer.py",12529,0,"",python,selection_keyboard +700,1232996,"train_tokenizer.py",12644,0,"",python,selection_mouse +701,1251914,"train_tokenizer.py",13698,0,"",python,selection_mouse +702,1252552,"train_tokenizer.py",13735,0,"",python,selection_mouse +703,1252666,"train_tokenizer.py",13732,4,"None",python,selection_mouse +704,1253180,"train_tokenizer.py",13732,4,"",python,content +705,1254580,"train_tokenizer.py",13732,0,"v",python,content +706,1254581,"train_tokenizer.py",13733,0,"",python,selection_keyboard +707,1254834,"train_tokenizer.py",13733,0,"a",python,content +708,1254836,"train_tokenizer.py",13734,0,"",python,selection_keyboard +709,1254853,"train_tokenizer.py",13734,0,"l",python,content +710,1254854,"train_tokenizer.py",13735,0,"",python,selection_keyboard +711,1255512,"train_tokenizer.py",13735,0,"_",python,content +712,1255513,"train_tokenizer.py",13736,0,"",python,selection_keyboard +713,1256240,"train_tokenizer.py",13736,0,"g",python,content +714,1256241,"train_tokenizer.py",13737,0,"",python,selection_keyboard +715,1256371,"train_tokenizer.py",13737,0,"t",python,content +716,1256372,"train_tokenizer.py",13738,0,"",python,selection_keyboard +717,1256574,"train_tokenizer.py",13738,0,"_",python,content +718,1256575,"train_tokenizer.py",13739,0,"",python,selection_keyboard +719,1256969,"train_tokenizer.py",13739,0,"b",python,content +720,1256969,"train_tokenizer.py",13740,0,"",python,selection_keyboard +721,1257115,"train_tokenizer.py",13740,0,"a",python,content +722,1257117,"train_tokenizer.py",13741,0,"",python,selection_keyboard +723,1257333,"train_tokenizer.py",13741,0,"t",python,content +724,1257335,"train_tokenizer.py",13742,0,"",python,selection_keyboard +725,1257555,"train_tokenizer.py",13742,0,"c",python,content +726,1257556,"train_tokenizer.py",13743,0,"",python,selection_keyboard +727,1258171,"train_tokenizer.py",13732,11,"val_gt_batch",python,content +728,1265570,"train_tokenizer.py",13744,0,"[]",python,content +729,1265571,"train_tokenizer.py",13745,0,"",python,selection_keyboard +730,1265876,"train_tokenizer.py",13745,0,"""""",python,content +731,1265878,"train_tokenizer.py",13746,0,"",python,selection_keyboard +732,1266152,"train_tokenizer.py",13746,0,"v",python,content +733,1266154,"train_tokenizer.py",13747,0,"",python,selection_keyboard +734,1266350,"train_tokenizer.py",13747,0,"i",python,content +735,1266351,"train_tokenizer.py",13748,0,"",python,selection_keyboard +736,1266450,"train_tokenizer.py",13748,0,"d",python,content +737,1266451,"train_tokenizer.py",13749,0,"",python,selection_keyboard +738,1266550,"train_tokenizer.py",13749,0,"e",python,content +739,1266552,"train_tokenizer.py",13750,0,"",python,selection_keyboard +740,1266673,"train_tokenizer.py",13750,0,"o",python,content +741,1266674,"train_tokenizer.py",13751,0,"",python,selection_keyboard +742,1266813,"train_tokenizer.py",13751,0,"s",python,content +743,1266815,"train_tokenizer.py",13752,0,"",python,selection_keyboard +744,1267936,"train_tokenizer.py",13753,0,"",python,selection_command +745,1268129,"train_tokenizer.py",13754,0,"",python,selection_command +746,1271404,"train_tokenizer.py",13386,0,"",python,selection_mouse +747,1271754,"train_tokenizer.py",13386,1,"]",python,selection_mouse +748,1272229,"train_tokenizer.py",13387,0,"",python,selection_mouse +749,1272995,"train_tokenizer.py",13387,1,"[",python,selection_mouse +750,1272996,"train_tokenizer.py",13387,3,"[0]",python,selection_mouse 
+751,1272996,"train_tokenizer.py",13387,5,"[0].a",python,selection_mouse +752,1272996,"train_tokenizer.py",13387,83,"[0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)",python,selection_mouse +753,1273046,"train_tokenizer.py",13387,13,"[0].astype(jn",python,selection_mouse +754,1273047,"train_tokenizer.py",13387,14,"[0].astype(jnp",python,selection_mouse +755,1273079,"train_tokenizer.py",13387,15,"[0].astype(jnp.",python,selection_mouse +756,1273080,"train_tokenizer.py",13387,16,"[0].astype(jnp.f",python,selection_mouse +757,1273103,"train_tokenizer.py",13387,18,"[0].astype(jnp.flo",python,selection_mouse +758,1273145,"train_tokenizer.py",13387,20,"[0].astype(jnp.float",python,selection_mouse +759,1273163,"train_tokenizer.py",13387,21,"[0].astype(jnp.float3",python,selection_mouse +760,1273195,"train_tokenizer.py",13387,22,"[0].astype(jnp.float32",python,selection_mouse +761,1273206,"train_tokenizer.py",13387,23,"[0].astype(jnp.float32)",python,selection_mouse +762,1273218,"train_tokenizer.py",13387,24,"[0].astype(jnp.float32) ",python,selection_mouse +763,1273251,"train_tokenizer.py",13387,25,"[0].astype(jnp.float32) /",python,selection_mouse +764,1273264,"train_tokenizer.py",13387,26,"[0].astype(jnp.float32) / ",python,selection_mouse +765,1273350,"train_tokenizer.py",13387,27,"[0].astype(jnp.float32) / 2",python,selection_mouse +766,1273351,"train_tokenizer.py",13387,28,"[0].astype(jnp.float32) / 25",python,selection_mouse +767,1273405,"train_tokenizer.py",13387,29,"[0].astype(jnp.float32) / 255",python,selection_mouse +768,1273446,"train_tokenizer.py",13387,30,"[0].astype(jnp.float32) / 255.",python,selection_mouse +769,1273447,"train_tokenizer.py",13387,31,"[0].astype(jnp.float32) / 255.0",python,selection_mouse +770,1275020,"train_tokenizer.py",13408,0,"",python,selection_mouse +771,1275229,"train_tokenizer.py",13407,0,"",python,selection_command +772,1276509,"train_tokenizer.py",13387,0,"",python,selection_mouse +773,1276832,"train_tokenizer.py",13387,78,"[0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(",python,selection_mouse +774,1276886,"train_tokenizer.py",13387,79,"[0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0",python,selection_mouse +775,1276886,"train_tokenizer.py",13387,80,"[0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0,",python,selection_mouse +776,1276912,"train_tokenizer.py",13387,133,"[0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concaten",python,selection_mouse +777,1276960,"train_tokenizer.py",13387,134,"[0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatena",python,selection_mouse +778,1276961,"train_tokenizer.py",13387,135,"[0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenat",python,selection_mouse +779,1277003,"train_tokenizer.py",13387,136,"[0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate",python,selection_mouse +780,1277074,"train_tokenizer.py",13387,137,"[0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate(",python,selection_mouse +781,1277116,"train_tokenizer.py",13387,219,"[0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange",python,selection_mouse +782,1277118,"train_tokenizer.py",13387,220,"[0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n 
comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(",python,selection_mouse +783,1277237,"train_tokenizer.py",13387,278,"[0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c ->",python,selection_mouse +784,1277273,"train_tokenizer.py",13387,279,"[0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> ",python,selection_mouse +785,1277321,"train_tokenizer.py",13387,280,"[0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h",python,selection_mouse +786,1277321,"train_tokenizer.py",13387,281,"[0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h ",python,selection_mouse +787,1277372,"train_tokenizer.py",13387,282,"[0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (",python,selection_mouse +788,1277411,"train_tokenizer.py",13387,283,"[0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t",python,selection_mouse +789,1277696,"train_tokenizer.py",13387,311,"[0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )",python,selection_mouse +790,1281393,"train_tokenizer.py",13754,0,"",python,selection_mouse +791,1281399,"train_tokenizer.py",13753,0,"",python,selection_command +792,1282888,"train_tokenizer.py",13754,0,"",python,selection_command +793,1283638,"train_tokenizer.py",13754,0,"[0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )",python,content +794,1286663,"train_tokenizer.py",13756,0,"",python,selection_mouse +795,1308581,"train_tokenizer.py",13757,0,"",python,selection_command +796,1308802,"train_tokenizer.py",13756,1,"",python,content +797,1308975,"train_tokenizer.py",13755,1,"",python,content +798,1309249,"train_tokenizer.py",13754,1,"",python,content +799,1309646,"train_tokenizer.py",13834,0,"",python,selection_command +800,1309999,"train_tokenizer.py",13833,0,"",python,selection_command +801,1310178,"train_tokenizer.py",13832,0,"",python,selection_command +802,1310370,"train_tokenizer.py",13831,0,"",python,selection_command +803,1310883,"train_tokenizer.py",13830,0,"",python,selection_command +804,1310925,"train_tokenizer.py",13829,0,"",python,selection_command +805,1310949,"train_tokenizer.py",13828,0,"",python,selection_command +806,1310973,"train_tokenizer.py",13827,0,"",python,selection_command +807,1311016,"train_tokenizer.py",13826,0,"",python,selection_command 
+808,1311145,"train_tokenizer.py",13825,0,"",python,selection_command +809,1311314,"train_tokenizer.py",13824,0,"",python,selection_command +810,1311490,"train_tokenizer.py",13823,0,"",python,selection_command +811,1312000,"train_tokenizer.py",13822,1,"",python,content +812,1312162,"train_tokenizer.py",13821,1,"",python,content +813,1312430,"train_tokenizer.py",13820,1,"",python,content +814,1339092,"train_tokenizer.py",13754,0,"",python,selection_mouse +815,1340212,"train_tokenizer.py",13754,0,"[]",python,content +816,1340214,"train_tokenizer.py",13755,0,"",python,selection_keyboard +817,1340410,"train_tokenizer.py",13755,0,"0",python,content +818,1340410,"train_tokenizer.py",13756,0,"",python,selection_keyboard +819,1342841,"train_tokenizer.py",13823,0,"",python,selection_mouse +820,1343856,"train_tokenizer.py",13823,0,"[]",python,content +821,1343857,"train_tokenizer.py",13824,0,"",python,selection_keyboard +822,1344038,"train_tokenizer.py",13824,0,"0",python,content +823,1344039,"train_tokenizer.py",13825,0,"",python,selection_keyboard +824,1344788,"train_tokenizer.py",13824,0,"",python,selection_command +825,1345444,"train_tokenizer.py",14021,0,"",python,selection_mouse +826,1348232,"train_tokenizer.py",13876,0,"",python,selection_mouse +827,1349989,"train_tokenizer.py",13815,0,"",python,selection_mouse +828,1351814,"train_tokenizer.py",13815,0,"_",python,content +829,1351815,"train_tokenizer.py",13816,0,"",python,selection_keyboard +830,1351995,"train_tokenizer.py",13816,0,"v",python,content +831,1351997,"train_tokenizer.py",13817,0,"",python,selection_keyboard +832,1352164,"train_tokenizer.py",13817,0,"a",python,content +833,1352166,"train_tokenizer.py",13818,0,"",python,selection_keyboard +834,1352540,"train_tokenizer.py",13818,0,"l",python,content +835,1352541,"train_tokenizer.py",13819,0,"",python,selection_keyboard +836,1354254,"train_tokenizer.py",13828,0,"",python,selection_command +837,1354472,"train_tokenizer.py",13827,0,"",python,selection_command +838,1354883,"train_tokenizer.py",13827,0,"_",python,content +839,1354885,"train_tokenizer.py",13828,0,"",python,selection_keyboard +840,1355004,"train_tokenizer.py",13828,0,"v",python,content +841,1355005,"train_tokenizer.py",13829,0,"",python,selection_keyboard +842,1355163,"train_tokenizer.py",13829,0,"a",python,content +843,1355164,"train_tokenizer.py",13830,0,"",python,selection_keyboard +844,1355256,"train_tokenizer.py",13830,0,"l",python,content +845,1355258,"train_tokenizer.py",13831,0,"",python,selection_keyboard +846,1357229,"train_tokenizer.py",13830,1,"",python,content +847,1357795,"train_tokenizer.py",13829,1,"",python,content +848,1357970,"train_tokenizer.py",13828,1,"",python,content +849,1358125,"train_tokenizer.py",13827,1,"",python,content +850,1361221,"train_tokenizer.py",13822,0,"v",python,content +851,1361221,"train_tokenizer.py",13823,0,"",python,selection_keyboard +852,1361362,"train_tokenizer.py",13823,0,"a",python,content +853,1361363,"train_tokenizer.py",13824,0,"",python,selection_keyboard +854,1361495,"train_tokenizer.py",13824,0,"l",python,content +855,1361496,"train_tokenizer.py",13825,0,"",python,selection_keyboard +856,1361825,"train_tokenizer.py",13825,0,"_",python,content +857,1361826,"train_tokenizer.py",13826,0,"",python,selection_keyboard +858,1363570,"train_tokenizer.py",13880,0,"",python,selection_mouse +859,1364862,"train_tokenizer.py",13866,0,"",python,selection_mouse +860,1365013,"train_tokenizer.py",13866,0,"v",python,content 
+861,1365014,"train_tokenizer.py",13867,0,"",python,selection_keyboard +862,1365218,"train_tokenizer.py",13867,0,"a",python,content +863,1365219,"train_tokenizer.py",13868,0,"",python,selection_keyboard +864,1365274,"train_tokenizer.py",13868,0,"l",python,content +865,1365275,"train_tokenizer.py",13869,0,"",python,selection_keyboard +866,1365562,"train_tokenizer.py",13869,0,"_",python,content +867,1365564,"train_tokenizer.py",13870,0,"",python,selection_keyboard +868,1365908,"train_tokenizer.py",13956,0,"",python,selection_command +869,1367030,"train_tokenizer.py",13952,0,"v",python,content +870,1367031,"train_tokenizer.py",13953,0,"",python,selection_keyboard +871,1367149,"train_tokenizer.py",13953,0,"a",python,content +872,1367150,"train_tokenizer.py",13954,0,"",python,selection_keyboard +873,1367240,"train_tokenizer.py",13954,0,"l",python,content +874,1367241,"train_tokenizer.py",13955,0,"",python,selection_keyboard +875,1367489,"train_tokenizer.py",13955,0,"_",python,content +876,1367490,"train_tokenizer.py",13956,0,"",python,selection_keyboard +877,1367908,"train_tokenizer.py",14015,0,"",python,selection_command +878,1368730,"train_tokenizer.py",14015,0,"v",python,content +879,1368731,"train_tokenizer.py",14016,0,"",python,selection_keyboard +880,1368858,"train_tokenizer.py",14016,0,"a",python,content +881,1368860,"train_tokenizer.py",14017,0,"",python,selection_keyboard +882,1368954,"train_tokenizer.py",14017,0,"l",python,content +883,1368955,"train_tokenizer.py",14018,0,"",python,selection_keyboard +884,1369144,"train_tokenizer.py",14018,0,"_",python,content +885,1369145,"train_tokenizer.py",14019,0,"",python,selection_keyboard +886,1371747,"train_tokenizer.py",14708,0,"",python,selection_mouse +887,1372379,"train_tokenizer.py",14717,0,"",python,selection_mouse +888,1373597,"train_tokenizer.py",14717,0,"\n ",python,content +889,1375553,"train_tokenizer.py",14746,0,"v",python,content +890,1375554,"train_tokenizer.py",14747,0,"",python,selection_keyboard +891,1375763,"train_tokenizer.py",14747,0,"a",python,content +892,1375764,"train_tokenizer.py",14748,0,"",python,selection_keyboard +893,1375867,"train_tokenizer.py",14748,0,"l",python,content +894,1375869,"train_tokenizer.py",14749,0,"",python,selection_keyboard +895,1376685,"train_tokenizer.py",14749,0,"_",python,content +896,1376686,"train_tokenizer.py",14750,0,"",python,selection_keyboard +897,1377052,"train_tokenizer.py",14750,0,"i",python,content +898,1377053,"train_tokenizer.py",14751,0,"",python,selection_keyboard +899,1377170,"train_tokenizer.py",14751,0,"m",python,content +900,1377170,"train_tokenizer.py",14752,0,"",python,selection_keyboard +901,1377303,"train_tokenizer.py",14752,0,"a",python,content +902,1377305,"train_tokenizer.py",14753,0,"",python,selection_keyboard +903,1377436,"train_tokenizer.py",14753,0,"g",python,content +904,1377437,"train_tokenizer.py",14754,0,"",python,selection_keyboard +905,1377596,"train_tokenizer.py",14754,0,"e",python,content +906,1377597,"train_tokenizer.py",14755,0,"",python,selection_keyboard +907,1378863,"train_tokenizer.py",14754,1,"",python,content +908,1379019,"train_tokenizer.py",14753,1,"",python,content +909,1379265,"train_tokenizer.py",14746,7,"",python,content +910,1379420,"train_tokenizer.py",14718,28,"",python,content +911,1380132,"train_tokenizer.py",14717,1,"",python,content +912,1380416,"train_tokenizer.py",14716,0,"",python,selection_command +913,1381165,"train_tokenizer.py",14687,30," ),",python,selection_command +914,1381373,"train_tokenizer.py",14611,106," 
np.asarray(comparison_seq.astype(np.uint8))\n ),",python,selection_command +915,1381516,"train_tokenizer.py",14556,161," true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),",python,selection_command +916,1381646,"train_tokenizer.py",14483,234," recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),",python,selection_command +917,1381796,"train_tokenizer.py",14413,304," image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),",python,selection_command +918,1382125,"train_tokenizer.py",14413,0,"",python,selection_command +919,1382292,"train_tokenizer.py",14483,0,"",python,selection_command +920,1382431,"train_tokenizer.py",14556,0,"",python,selection_command +921,1382562,"train_tokenizer.py",14611,0,"",python,selection_command +922,1382722,"train_tokenizer.py",14687,0,"",python,selection_command +923,1383084,"train_tokenizer.py",14717,0,"\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),",python,content +924,1383112,"train_tokenizer.py",14746,0,"",python,selection_command +925,1385411,"train_tokenizer.py",14746,1,"i",python,selection_command +926,1385647,"train_tokenizer.py",14746,1,"i",python,selection_command +927,1385777,"train_tokenizer.py",14746,1,"i",python,selection_command +928,1386695,"train_tokenizer.py",14746,0,"",python,selection_command +929,1388197,"train_tokenizer.py",14889,0,"v",python,content +930,1388197,"train_tokenizer.py",14816,0,"v",python,content +931,1388197,"train_tokenizer.py",14746,0,"v",python,content +932,1388198,"train_tokenizer.py",14747,0,"",python,selection_keyboard +933,1388398,"train_tokenizer.py",14892,0,"a",python,content +934,1388399,"train_tokenizer.py",14818,0,"a",python,content +935,1388399,"train_tokenizer.py",14747,0,"a",python,content +936,1388400,"train_tokenizer.py",14748,0,"",python,selection_keyboard +937,1388812,"train_tokenizer.py",14895,0,"l",python,content +938,1388812,"train_tokenizer.py",14820,0,"l",python,content +939,1388812,"train_tokenizer.py",14748,0,"l",python,content +940,1388813,"train_tokenizer.py",14749,0,"",python,selection_keyboard +941,1389029,"train_tokenizer.py",14898,0,"_",python,content +942,1389029,"train_tokenizer.py",14822,0,"_",python,content +943,1389030,"train_tokenizer.py",14749,0,"_",python,content +944,1389031,"train_tokenizer.py",14750,0,"",python,selection_keyboard +945,1389698,"train_tokenizer.py",14749,0,"",python,selection_command +946,1391635,"train_tokenizer.py",14779,0,"",python,selection_mouse +947,1393014,"train_tokenizer.py",14779,0,"v",python,content +948,1393014,"train_tokenizer.py",14780,0,"",python,selection_keyboard +949,1393178,"train_tokenizer.py",14780,0,"a",python,content +950,1393179,"train_tokenizer.py",14781,0,"",python,selection_keyboard +951,1395367,"train_tokenizer.py",14780,1,"",python,content +952,1395528,"train_tokenizer.py",14779,1,"",python,content +953,1396763,"train_tokenizer.py",14785,0,"_",python,content +954,1396764,"train_tokenizer.py",14786,0,"",python,selection_keyboard +955,1396824,"train_tokenizer.py",14786,0,"v",python,content +956,1396825,"train_tokenizer.py",14787,0,"",python,selection_keyboard +957,1396952,"train_tokenizer.py",14787,0,"a",python,content +958,1396953,"train_tokenizer.py",14788,0,"",python,selection_keyboard 
+959,1397045,"train_tokenizer.py",14788,0,"l",python,content +960,1397046,"train_tokenizer.py",14789,0,"",python,selection_keyboard +961,1397259,"train_tokenizer.py",14867,0,"",python,selection_command +962,1397789,"train_tokenizer.py",14866,0,"",python,selection_command +963,1398279,"train_tokenizer.py",14866,0,"_",python,content +964,1398281,"train_tokenizer.py",14867,0,"",python,selection_keyboard +965,1398446,"train_tokenizer.py",14867,0,"v",python,content +966,1398447,"train_tokenizer.py",14868,0,"",python,selection_keyboard +967,1398579,"train_tokenizer.py",14868,0,"a",python,content +968,1398580,"train_tokenizer.py",14869,0,"",python,selection_keyboard +969,1398603,"train_tokenizer.py",14869,0,"l",python,content +970,1398604,"train_tokenizer.py",14870,0,"",python,selection_keyboard +971,1401676,"train_tokenizer.py",14935,0,"",python,selection_keyboard +972,1402204,"train_tokenizer.py",14994,0,"",python,selection_command +973,1402647,"train_tokenizer.py",14993,0,"",python,selection_command +974,1403548,"train_tokenizer.py",14979,0,"v",python,content +975,1403549,"train_tokenizer.py",14980,0,"",python,selection_keyboard +976,1403683,"train_tokenizer.py",14980,0,"a",python,content +977,1403684,"train_tokenizer.py",14981,0,"",python,selection_keyboard +978,1403779,"train_tokenizer.py",14981,0,"l",python,content +979,1403780,"train_tokenizer.py",14982,0,"",python,selection_keyboard +980,1404019,"train_tokenizer.py",14982,0,"_",python,content +981,1404020,"train_tokenizer.py",14983,0,"",python,selection_keyboard +982,1407561,"train_tokenizer.py",14760,0,"",python,selection_mouse +983,1409056,"train_tokenizer.py",14769,0,"",python,selection_mouse +984,1498714,"train_tokenizer.py",14858,0,"",python,selection_mouse +985,1499321,"train_tokenizer.py",14995,0,"",python,selection_mouse +986,1499911,"train_tokenizer.py",15072,0,"",python,selection_mouse +987,1500814,"train_tokenizer.py",15046,0,"",python,selection_mouse +988,1509329,"train_tokenizer.py",15118,0,"",python,selection_mouse +989,1510136,"train_tokenizer.py",15046,0,"",python,selection_mouse +990,1511730,"train_tokenizer.py",14412,0,"",python,selection_mouse +991,1515141,"train_tokenizer.py",13316,0,"",python,selection_mouse +992,1515774,"train_tokenizer.py",13315,0,"",python,selection_command +993,1516676,"train_tokenizer.py",13281,61,"",python,content +994,1516696,"train_tokenizer.py",13301,0,"",python,selection_command +995,1519249,"train_tokenizer.py",15057,0,"",python,selection_mouse +996,1519266,"train_tokenizer.py",15056,0,"",python,selection_command +997,1519362,"train_tokenizer.py",15057,0,"",python,selection_mouse +998,1519396,"train_tokenizer.py",15056,0,"",python,selection_command +999,1519524,"train_tokenizer.py",15056,1,")",python,selection_mouse +1000,1519532,"train_tokenizer.py",15057,0,"",python,selection_command +1001,1519698,"train_tokenizer.py",15056,1,")",python,selection_mouse +1002,1519698,"train_tokenizer.py",14653,404," ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1003,1519699,"train_tokenizer.py",14040,1017," # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n 
recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1004,1519699,"train_tokenizer.py",13796,1261," val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1005,1519699,"train_tokenizer.py",13736,1321," recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1006,1519885,"train_tokenizer.py",13735,1322," recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1007,1519885,"train_tokenizer.py",13648,1409," gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n 
if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1008,1520028,"train_tokenizer.py",13647,1410," gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1009,1520030,"train_tokenizer.py",13624,1433," )\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1010,1520031,"train_tokenizer.py",13498,1559," comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1011,1520031,"train_tokenizer.py",13497,1560," comparison_seq = 
einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1012,1520108,"train_tokenizer.py",13414,1643," comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1013,1520146,"train_tokenizer.py",13413,1644," comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1014,1520184,"train_tokenizer.py",13412,1645," comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = 
val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1015,1520221,"train_tokenizer.py",13360,1697," recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1016,1520270,"train_tokenizer.py",13359,1698," recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1017,1520624,"train_tokenizer.py",13282,1775," gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = 
val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1018,1521146,"train_tokenizer.py",13226,1831," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1019,1525271,"train_tokenizer.py",13225,0,"",python,selection_mouse +1020,1527219,"train_tokenizer.py",15057,0,"",python,selection_mouse +1021,1527225,"train_tokenizer.py",15056,0,"",python,selection_command +1022,1527532,"train_tokenizer.py",15056,1,")",python,selection_mouse +1023,1527532,"train_tokenizer.py",14906,150," np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images",python,selection_mouse +1024,1527533,"train_tokenizer.py",14513,543," true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images",python,selection_mouse +1025,1527533,"train_tokenizer.py",14321,735," log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images",python,selection_mouse +1026,1527606,"train_tokenizer.py",14192,864," # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n 
image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images",python,selection_mouse +1027,1527606,"train_tokenizer.py",14033,1023," # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images",python,selection_mouse +1028,1527607,"train_tokenizer.py",15057,0,"",python,selection_command +1029,1527693,"train_tokenizer.py",14011,1046," )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1030,1527735,"train_tokenizer.py",13879,1178," val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1031,1527796,"train_tokenizer.py",13733,1324," recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n 
wandb.log(log_images)",python,selection_mouse +1032,1527863,"train_tokenizer.py",13624,1433," )\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1033,1527936,"train_tokenizer.py",13415,1642," comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1034,1527966,"train_tokenizer.py",13413,1644," comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1035,1528000,"train_tokenizer.py",13361,1696," recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n gt_seq_val = 
val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1036,1528029,"train_tokenizer.py",13359,1698," recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1037,1528088,"train_tokenizer.py",13358,1699," recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1038,1528174,"train_tokenizer.py",13281,1776," gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n gt_seq_val = 
val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1039,1528599,"train_tokenizer.py",13225,1832," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1040,1539382,"train_lam.py",0,0,"",python,tab +1041,1539383,"train_lam.py",13664,0,"",python,selection_mouse +1042,1539548,"train_lam.py",13664,56," if step % args.log_image_interval == 0:\n",python,selection_mouse +1043,1539566,"train_lam.py",13664,57," if step % args.log_image_interval == 0:\n ",python,selection_mouse +1044,1539624,"train_lam.py",13664,139," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n ",python,selection_mouse +1045,1539687,"train_lam.py",13664,192," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n ",python,selection_mouse +1046,1539762,"train_lam.py",13664,275," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n ",python,selection_mouse +1047,1539812,"train_lam.py",13664,331," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n ",python,selection_mouse +1048,1539856,"train_lam.py",13664,332," if step % args.log_image_interval == 0:\n gt_seq 
= inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n ",python,selection_mouse +1049,1539857,"train_lam.py",13664,401," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n ",python,selection_mouse +1050,1539900,"train_lam.py",13664,402," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n ",python,selection_mouse +1051,1539920,"train_lam.py",13664,425," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n ",python,selection_mouse +1052,1539964,"train_lam.py",13664,426," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n ",python,selection_mouse +1053,1539994,"train_lam.py",13664,506," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n ",python,selection_mouse +1054,1539995,"train_lam.py",13664,507," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n ",python,selection_mouse +1055,1540045,"train_lam.py",13664,508," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n ",python,selection_mouse +1056,1540058,"train_lam.py",13664,587," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n ",python,selection_mouse +1057,1540095,"train_lam.py",13664,667," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n 
recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n ",python,selection_mouse +1058,1540117,"train_lam.py",13664,910," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n ",python,selection_mouse +1059,1540181,"train_lam.py",13664,1155," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing --",python,selection_mouse +1060,1540236,"train_lam.py",13664,1195," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step",python,selection_mouse +1061,1540271,"train_lam.py",13664,1198," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt 
and step % ",python,selection_mouse +1062,1540469,"train_lam.py",13664,1156," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---",python,selection_mouse +1063,1540555,"train_lam.py",13664,1118," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_image",python,selection_mouse +1064,1540555,"train_lam.py",13664,1120," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +1065,1542082,"train_lam.py",13664,1120,"",python,content +1066,1542691,"train_lam.py",13664,0," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n 
val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,content +1067,1546574,"train_lam.py",14115,0,"",python,selection_mouse +1068,1546715,"train_lam.py",14110,12,"val_gt_batch",python,selection_mouse +1069,1551057,"train_tokenizer.py",0,0,"",python,tab +1070,1551058,"train_tokenizer.py",12507,0,"",python,selection_mouse +1071,1551058,"train_tokenizer.py",12506,12,"val_gt_batch",python,selection_mouse +1072,1551279,"train_tokenizer.py",12467,110," val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n",python,selection_mouse +1073,1553880,"train_lam.py",0,0,"",python,tab +1074,1553881,"train_lam.py",12951,0,"",python,selection_mouse +1075,1555035,"train_lam.py",12950,0,"",python,selection_command +1076,1555201,"train_lam.py",13015,0,"\n ",python,content +1077,1555874,"train_lam.py",13028,4,"",python,content +1078,1556081,"train_lam.py",13024,4,"",python,content +1079,1556271,"train_lam.py",13020,4,"",python,content +1080,1556569,"train_lam.py",13016,4,"",python,content +1081,1556917,"train_lam.py",13016,0," val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n",python,content +1082,1557616,"train_lam.py",13016,0,"",python,selection_command +1083,1557997,"train_lam.py",13126,0,"",python,selection_command +1084,1558342,"train_lam.py",13126,1,"",python,content +1085,1558355,"train_lam.py",13142,0,"",python,selection_command +1086,1558365,"train_lam.py",13032,0,"",python,selection_command +1087,1558562,"train_lam.py",12936,0,"",python,selection_command +1088,1558913,"train_lam.py",12920,96,"",python,content +1089,1558955,"train_lam.py",12936,0,"",python,selection_command +1090,1560986,"train_tokenizer.py",0,0,"",python,tab +1091,1560987,"train_tokenizer.py",12551,0,"",python,selection_mouse +1092,1561205,"train_tokenizer.py",4765,0,"",python,selection_command +1093,1562440,"train_lam.py",0,0,"",python,tab +1094,1562441,"train_lam.py",13002,0,"",python,selection_mouse +1095,1562574,"train_lam.py",4944,0,"",python,selection_command +1096,1564146,"train_tokenizer.py",0,0,"",python,tab +1097,1564147,"train_tokenizer.py",5533,0,"",python,selection_mouse +1098,1566519,"train_lam.py",0,0,"",python,tab +1099,1566519,"train_lam.py",5739,0,"",python,selection_mouse +1100,1566846,"train_lam.py",5756,0,"\n return val_loss, val_metrics, inputs, recon",python,content +1101,1566865,"train_lam.py",5761,0,"",python,selection_command +1102,1567279,"train_lam.py",5721,0,"",python,selection_command +1103,1567592,"train_lam.py",5717,40,"",python,content +1104,1567629,"train_lam.py",5721,0,"",python,selection_command +1105,1582647,"TERMINAL",0,0,": sbatch slurm/jobs/mihir/horeka/cau^Cl_big_runs/train_dynamics_8_nodes_filter_dark_req.sbatch",,terminal_command +1106,1585679,"TERMINAL",0,0,"salloc_node",,terminal_command +1107,1585755,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 3468594\r\n",,terminal_output +1108,1585901,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +1109,1612926,"TERMINAL",0,0,"salloc: Nodes hkn0403 are ready for job\r\n",,terminal_output +1110,1613938,"TERMINAL",0,0,"]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h[tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +1111,1616861,"TERMINAL",0,0,"s",,terminal_output +1112,1617060,"TERMINAL",0,0,"o",,terminal_output 
+1113,1617113,"TERMINAL",0,0,"u",,terminal_output +1114,1617206,"TERMINAL",0,0,"r",,terminal_output +1115,1617345,"TERMINAL",0,0,"c",,terminal_output +1116,1617543,"TERMINAL",0,0,"e ",,terminal_output +1117,1617647,"TERMINAL",0,0,".",,terminal_output +1118,1617710,"TERMINAL",0,0,"v",,terminal_output +1119,1618055,"TERMINAL",0,0,"B",,terminal_output +1120,1618612,"TERMINAL",0,0,"",,terminal_output +1121,1619662,"TERMINAL",0,0,"e",,terminal_output +1122,1619859,"TERMINAL",0,0,"nv/",,terminal_output +1123,1620333,"TERMINAL",0,0,"",,terminal_output +1124,1620944,"TERMINAL",0,0,"b",,terminal_output +1125,1621090,"TERMINAL",0,0,"in/",,terminal_output +1126,1621314,"TERMINAL",0,0,"a",,terminal_output +1127,1621557,"TERMINAL",0,0,"ctivate",,terminal_output +1128,1622070,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +1129,1625282,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=10 \\n --log_checkpoint_interval=1000 \\n --log \\n --name=coinrun-tokenizer-dev-$slurm_job_id \\n --tags tokenizer coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 200 \\n --val_data_dir $array_records_dir_val \\n --val_interval 10 \\n --val_steps 50 \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +1130,1634640,"TERMINAL",0,0,"s",,terminal_output +1131,1634710,"TERMINAL",0,0,"h",,terminal_output +1132,1634814,"TERMINAL",0,0," ",,terminal_output +1133,1635110,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",,terminal_output +1134,1636213,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:4\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\r\n#SBATCH 
--error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\r\n#SBATCH --job-name=train_tokenizer_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_tokenizer.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --wandb_id $SLURM_JOB_ID \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=1000 \\r\n --log \\r\n --name=coinrun-tokenizer-dev-$slurm_job_id \\r\n --tags tokenizer coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 200 \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 10 \\r\n --val_steps 50 \\r\n --data_dir $array_records_dir_train &\r\n\r\nchild_pid=$!\r\n\r\nwait $child_pid\r\n\r\n",,terminal_output +1135,1636372,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3441902\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0403\r\nSLURM_JOB_START_TIME=1757073155\r\nSLURM_STEP_NODELIST=hkn0403\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757076755\r\nSLURM_PMI2_SRUN_PORT=46095\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3468594\r\nSLURM_PTY_PORT=45497\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=36\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0403\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=115\r\nSLURM_NODELIST=hkn0403\r\nSLURM_SRUN_COMM_PORT=44731\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3468594\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0403\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=44731\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0403\r\n",,terminal_output +1136,1636514,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +1137,1659579,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +1138,1660570,"TERMINAL",0,0,"wandb: creating run\r\n",,terminal_output +1139,1660734,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_135349-3468594\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-tokenizer-dev-3468594\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/3468594\r\n",,terminal_output +1140,1665577,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['decoder', 'encoder', 'vq']\r\nParameter counts:\r\n{'decoder': 16858736, 'encoder': 16858752, 'vq': 32768, 'total': 33750256}\r\nStarting training from step 0...\r\n",,terminal_output +1141,1673834,"TERMINAL",0,0,"2025-09-05 13:54:03.524159: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 13:54:03.524595: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 13:54:03.524628: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 13:54:03.524756: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 13:54:03.526311: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1142,1678714,"train_lam.py",0,0,"",python,tab +1143,1679586,"train_lam.py",0,0,"",python,tab +1144,1681663,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab +1145,1690074,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu copy.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=10 \\n --log_checkpoint_interval=1000 \\n --log \\n --name=coinrun-tokenizer-dev-$slurm_job_id \\n --tags tokenizer coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 200 \\n --val_data_dir $array_records_dir_val \\n --val_interval 10 \\n --val_steps 50 \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +1146,1702278,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --save_ckpt \\n --image_height=64 \\n 
--image_width=64 \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=10 \\n --log_checkpoint_interval=1000 \\n --log \\n --name=coinrun-tokenizer-dev-$slurm_job_id \\n --tags tokenizer coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 200 \\n --val_data_dir $array_records_dir_val \\n --val_interval 10 \\n --val_steps 50 \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +1147,1704998,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1144,0,"",shellscript,selection_mouse +1148,1705289,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1144,2,"to",shellscript,selection_mouse +1149,1705290,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1144,3,"tok",shellscript,selection_mouse +1150,1705292,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1144,4,"toke",shellscript,selection_mouse +1151,1705293,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1144,5,"token",shellscript,selection_mouse +1152,1705293,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1144,6,"tokeni",shellscript,selection_mouse +1153,1705379,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1144,7,"tokeniz",shellscript,selection_mouse +1154,1705432,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1144,8,"tokenize",shellscript,selection_mouse +1155,1705497,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1144,9,"tokenizer",shellscript,selection_mouse +1156,1706296,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1144,9,"",shellscript,content +1157,1707435,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1144,0,"i",shellscript,content +1158,1707436,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1145,0,"",shellscript,selection_keyboard +1159,1707908,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1144,1,"",shellscript,content +1160,1708125,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1144,0,"l",shellscript,content +1161,1708126,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1145,0,"",shellscript,selection_keyboard +1162,1708196,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1145,0,"a",shellscript,content +1163,1708197,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1146,0,"",shellscript,selection_keyboard +1164,1708313,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1146,0,"m",shellscript,content +1165,1708314,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1147,0,"",shellscript,selection_keyboard +1166,1708703,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1146,0,"",shellscript,selection_command +1167,1711145,"TERMINAL",0,0,"Step 0, loss: 0.2621576189994812\r\nStep 1, loss: 0.19697336852550507\r\nStep 2, loss: 0.2387561947107315\r\nStep 3, loss: 0.2858649492263794\r\nStep 4, loss: 0.23162388801574707\r\nStep 5, loss: 0.23567424714565277\r\nStep 6, loss: 0.19950804114341736\r\nStep 7, loss: 0.20673690736293793\r\nStep 8, loss: 0.20122845470905304\r\nStep 9, loss: 0.19746582210063934\r\nCalculating validation metrics...\r\n",,terminal_output +1168,1711771,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",631,0,"",shellscript,selection_mouse +1169,1714970,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1048,0,"",shellscript,selection_command +1170,1715849,"TERMINAL",0,0,"2025-09-05 13:54:45.579320: W 
external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1171,1717979,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1070,1,"l",shellscript,selection_command +1172,1718621,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1048,1,"t",shellscript,selection_command +1173,1718684,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1048,2,"to",shellscript,selection_command +1174,1718761,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1048,3,"tok",shellscript,selection_command +1175,1718892,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1048,4,"toke",shellscript,selection_command +1176,1718961,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1048,5,"token",shellscript,selection_command +1177,1719138,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1048,6,"tokeni",shellscript,selection_command +1178,1719273,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1048,7,"tokeniz",shellscript,selection_command +1179,1719363,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1048,8,"tokenize",shellscript,selection_command +1180,1719425,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1048,9,"tokenizer",shellscript,selection_command +1181,1723879,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1048,9,"lam",shellscript,content +1182,1723880,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1434,9,"tokenizer",shellscript,selection_command +1183,1725203,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1434,9,"lam",shellscript,content +1184,1725205,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1469,9,"tokenizer",shellscript,selection_command +1185,1725891,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1469,9,"lam",shellscript,content +1186,1725893,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",267,9,"tokenizer",shellscript,selection_command +1187,1726691,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",267,9,"lam",shellscript,content +1188,1726694,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",377,9,"tokenizer",shellscript,selection_command +1189,1727000,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",377,9,"lam",shellscript,content +1190,1727003,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",416,9,"tokenizer",shellscript,selection_command +1191,1727461,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",416,9,"lam",shellscript,content +1192,1748884,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab +1193,1755683,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",112,0,"",shellscript,selection_mouse +1194,1756707,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",111,0,"",shellscript,selection_command +1195,1757050,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",111,11,"",shellscript,content +1196,1757111,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",110,0,"",shellscript,selection_command +1197,1757245,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",110,1,"=",shellscript,selection_command +1198,1758052,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",110,1,"=",shellscript,content 
+1199,1758060,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",110,0,"",shellscript,selection_command +1200,1759332,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",111,0,"accelerated",shellscript,content +1201,1759348,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",111,0,"",shellscript,selection_command +1202,1760400,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",111,0,"d",shellscript,content +1203,1760401,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",112,0,"",shellscript,selection_keyboard +1204,1760495,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",112,0,"e",shellscript,content +1205,1760496,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",113,0,"",shellscript,selection_keyboard +1206,1761043,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",113,0,"v",shellscript,content +1207,1761044,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",114,0,"",shellscript,selection_keyboard +1208,1761177,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",114,0,"_",shellscript,content +1209,1761178,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",115,0,"",shellscript,selection_keyboard +1210,1770058,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3468594.0 task 0: running\r\n\r\n]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +1211,1773420,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +1212,1775509,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",84,0,"",shellscript,selection_mouse +1213,1775958,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",83,1,"8",shellscript,selection_mouse +1214,1776617,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",82,2,"48",shellscript,selection_mouse +1215,1777536,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",82,2,"",shellscript,content +1216,1778251,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",82,0,"0",shellscript,content +1217,1778252,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",83,0,"",shellscript,selection_keyboard +1218,1778374,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",83,0,"0",shellscript,content +1219,1778375,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",84,0,"",shellscript,selection_keyboard +1220,1778750,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",85,0,"",shellscript,selection_command +1221,1778930,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",86,0,"",shellscript,selection_command +1222,1779728,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",85,1,"",shellscript,content +1223,1779818,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",85,0,"2",shellscript,content +1224,1779819,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",86,0,"",shellscript,selection_keyboard +1225,1784576,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",87,0,"",shellscript,selection_mouse +1226,1784793,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",86,0,"",shellscript,selection_command +1227,1787229,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",110,0,"",shellscript,selection_command +1228,1787477,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",111,0,"",shellscript,selection_command +1229,1788678,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",111,0,"d",shellscript,content 
+1230,1788679,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",112,0,"",shellscript,selection_keyboard +1231,1788790,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",112,0,"e",shellscript,content +1232,1788791,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",113,0,"",shellscript,selection_keyboard +1233,1788971,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",113,0,"v",shellscript,content +1234,1788971,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",114,0,"",shellscript,selection_keyboard +1235,1789254,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",114,0,"_",shellscript,content +1236,1789255,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",115,0,"",shellscript,selection_keyboard +1237,1789853,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",114,0,"",shellscript,selection_command +1238,1790643,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",89,0,"",shellscript,selection_command +1239,1792199,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab +1240,1793325,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",90,0,"",shellscript,selection_mouse +1241,1794098,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",89,0,"",shellscript,selection_command +1242,1794241,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",90,0,"\n#SBATCH --time=00:20:00\n#SBATCH --partition=dev_accelerated",shellscript,content +1243,1794271,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",91,0,"",shellscript,selection_command +1244,1794769,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",67,0,"",shellscript,selection_command +1245,1795228,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",67,24,"",shellscript,content +1246,1795322,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",91,0,"",shellscript,selection_command +1247,1795940,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",91,36,"",shellscript,content +1248,1808921,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab +1249,1812195,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",173,0,"",shellscript,selection_mouse +1250,1812196,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",172,0,"",shellscript,selection_command +1251,1812652,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",173,0,"",shellscript,selection_command +1252,1812803,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",172,1,"",shellscript,content +1253,1812946,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",172,0,"1",shellscript,content +1254,1812947,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",173,0,"",shellscript,selection_keyboard +1255,1813415,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",172,0,"",shellscript,selection_command +1256,1815355,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +1257,1816241,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",197,0,"",shellscript,selection_mouse +1258,1817245,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",173,0,"",shellscript,selection_mouse +1259,1817246,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",172,0,"",shellscript,selection_command +1260,1817733,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",173,0,"",shellscript,selection_command 
+1261,1817866,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",172,1,"",shellscript,content +1262,1818007,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",172,0,"1",shellscript,content +1263,1818008,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",173,0,"",shellscript,selection_keyboard +1264,1818376,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",172,0,"",shellscript,selection_command +1265,1820264,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab +1266,1825029,"TERMINAL",0,0,"s",,terminal_output +1267,1825087,"TERMINAL",0,0,"b",,terminal_output +1268,1825228,"TERMINAL",0,0,"a",,terminal_output +1269,1825413,"TERMINAL",0,0,"tc",,terminal_output +1270,1825496,"TERMINAL",0,0,"h",,terminal_output +1271,1825620,"TERMINAL",0,0," ",,terminal_output +1272,1825921,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",,terminal_output +1273,1827159,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh\r\n[?2004l\rSubmitted batch job 3468601\r\n]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +1274,1827528,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",,terminal_output +1275,1827895,"TERMINAL",0,0,"",,terminal_output +1276,1828460,"TERMINAL",0,0,"",,terminal_output +1277,1828558,"TERMINAL",0,0,"",,terminal_output +1278,1828663,"TERMINAL",0,0,"",,terminal_output +1279,1828710,"TERMINAL",0,0,"",,terminal_output +1280,1829025,"TERMINAL",0,0,"",,terminal_output +1281,1829251,"TERMINAL",0,0,"",,terminal_output +1282,1829635,"TERMINAL",0,0,"",,terminal_output +1283,1829754,"TERMINAL",0,0,"",,terminal_output +1284,1829893,"TERMINAL",0,0,"",,terminal_output +1285,1830080,"TERMINAL",0,0,"",,terminal_output +1286,1830218,"TERMINAL",0,0,"",,terminal_output +1287,1830326,"TERMINAL",0,0,"",,terminal_output +1288,1830490,"TERMINAL",0,0,"",,terminal_output +1289,1830626,"TERMINAL",0,0,"",,terminal_output +1290,1830816,"TERMINAL",0,0,"",,terminal_output +1291,1831672,"TERMINAL",0,0,"",,terminal_output +1292,1832473,"TERMINAL",0,0,"_",,terminal_output +1293,1832707,"TERMINAL",0,0,"l",,terminal_output +1294,1832811,"TERMINAL",0,0,"am_single_gpu.sh",,terminal_output +1295,1834831,"TERMINAL",0,0,"\r\n[?2004l\rSubmitted batch job 3468602\r\n]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +1296,1839434,"TERMINAL",0,0,"Step 10, validation loss: 0.18187764286994934\r\nStep 10, loss: 0.14605282247066498\r\nStep 11, loss: 0.1712988018989563\r\nStep 12, loss: 0.14483679831027985\r\nStep 13, loss: 0.17534825205802917\r\nStep 14, loss: 0.17896094918251038\r\nStep 15, loss: 0.19774891436100006\r\nStep 16, loss: 0.16897030174732208\r\nStep 17, loss: 0.19002102315425873\r\nStep 18, loss: 0.1558401733636856\r\nStep 19, loss: 0.15355850756168365\r\nCalculating validation metrics...\r\nStep 20, validation loss: 0.14875298738479614\r\nStep 20, loss: 0.17584741115570068\r\nStep 21, loss: 0.14025254547595978\r\nStep 22, loss: 0.1401795744895935\r\nStep 23, loss: 0.14600728452205658\r\nStep 24, loss: 0.1357179433107376\r\nStep 25, loss: 0.13388031721115112\r\nStep 26, loss: 0.11326973885297775\r\nStep 27, loss: 0.15244996547698975\r\nStep 28, loss: 0.15289849042892456\r\nStep 29, loss: 0.13741251826286316\r\nCalculating validation metrics...\r\nStep 30, validation loss: 0.13774433732032776\r\nStep 30, loss: 0.17070329189300537\r\nStep 31, loss: 
0.12287013977766037\r\nStep 32, loss: 0.11972992867231369\r\nStep 33, loss: 0.14826461672782898\r\nStep 34, loss: 0.12302180379629135\r\nStep 35, loss: 0.16600583493709564\r\nStep 36, loss: 0.11909883469343185\r\nStep 37, loss: 0.1069716215133667\r\nStep 38, loss: 0.09852730482816696\r\nStep 39, loss: 0.09655062854290009\r\nCalculating validation metrics...\r\nStep 40, validation loss: 0.11539511382579803\r\nStep 40, loss: 0.13244281709194183\r\nStep 41, loss: 0.11529324948787689\r\nStep 42, loss: 0.11761131882667542\r\nStep 43, loss: 0.13077986240386963\r\nStep 44, loss: 0.13562451303005219\r\nStep 45, loss: 0.11265181750059128\r\nStep 46, loss: 0.10642780363559723\r\nStep 47, loss: 0.10744699835777283\r\nStep 48, loss: 0.09898741543292999\r\nStep 49, loss: 0.10338829457759857\r\nCalculating validation metrics...\r\nStep 50, validation loss: 0.1128082275390625\r\nStep 50, loss: 0.12391442060470581\r\nStep 51, loss: 0.10071989893913269\r\nStep 52, loss: 0.09898854047060013\r\nStep 53, loss: 0.1408553272485733\r\nStep 54, loss: 0.1017800122499466\r\nStep 55, loss: 0.1455259621143341\r\nStep 56, loss: 0.1540316492319107\r\nStep 57, loss: 0.11099131405353546\r\nStep 58, loss: 0.1390053629875183\r\nStep 59, loss: 0.1242060661315918\r\nCalculating validation metrics...\r\nStep 60, validation loss: 0.12452632188796997\r\nStep 60, loss: 0.1098286584019661\r\nStep 61, loss: 0.1369875967502594\r\nStep 62, loss: 0.09650027006864548\r\nStep 63, loss: 0.11178049445152283\r\nStep 64, loss: 0.10354175418615341\r\nStep 65, loss: 0.13448993861675262\r\nStep 66, loss: 0.10459168255329132\r\nStep 67, loss: 0.10439125448465347\r\nStep 68, loss: 0.12007392197847366\r\nStep 69, loss: 0.11132706701755524\r\nCalculating validation metrics...\r\nStep 70, validation loss: 0.09918176382780075\r\nStep 70, loss: 0.09934762120246887\r\nStep 71, loss: 0.0928165391087532\r\nStep 72, loss: 0.07243075966835022\r\nStep 73, loss: 0.11804302781820297\r\nStep 74, loss: 0.08443430066108704\r\nStep 75, loss: 0.1209489181637764\r\nStep 76, loss: 0.09669414162635803\r\nStep 77, loss: 0.0939900279045105\r\nStep 78, loss: 0.09680383652448654\r\nStep 79, loss: 0.09500480443239212\r\nCalculating validation metrics...\r\nStep 80, validation loss: 0.10830702632665634\r\nStep 80, loss: 0.11008846759796143\r\nStep 81, loss: 0.10895497351884842\r\nStep 82, loss: 0.08713923394680023\r\nStep 83, loss: 0.1076812893152237\r\nStep 84, loss: 0.08687092363834381\r\nStep 85, loss: 0.07846973836421967\r\nStep 86, loss: 0.09073802828788757\r\nStep 87, loss: 0.10593580454587936\r\nStep 88, loss: 0.12240608036518097\r\nStep 89, loss: 0.1121804490685463\r\nCalculating validation metrics...\r\nStep 90, validation loss: 0.09691300988197327\r\nStep 90, loss: 0.094225212931633\r\nStep 91, loss: 0.09277581423521042\r\nStep 92, loss: 0.10233049094676971\r\nStep 93, loss: 0.10277530550956726\r\nStep 94, loss: 0.08379513770341873\r\nStep 95, loss: 0.08725180476903915\r\nStep 96, loss: 0.09715580195188522\r\nStep 97, loss: 0.08388146013021469\r\nStep 98, loss: 0.09100979566574097\r\nStep 99, loss: 0.08268741518259048\r\nCalculating validation metrics...\r\nStep 100, validation loss: 0.0920855924487114\r\nStep 100, loss: 0.07813277095556259\r\nStep 101, loss: 0.08778174966573715\r\nStep 102, loss: 0.10322219878435135\r\nStep 103, loss: 0.08736246079206467\r\nStep 104, loss: 0.07339254021644592\r\nStep 105, loss: 0.10562150180339813\r\nStep 106, loss: 0.09001658856868744\r\nStep 107, loss: 0.09795986115932465\r\nStep 108, loss: 0.1096615344285965\r\nStep 109, 
loss: 0.08526662737131119\r\nCalculating validation metrics...\r\nStep 110, validation loss: 0.09074684977531433\r\nStep 110, loss: 0.10066965222358704\r\nStep 111, loss: 0.09619095176458359\r\nStep 112, loss: 0.07353267818689346\r\nStep 113, loss: 0.08111622929573059\r\nStep 114, loss: 0.08588922023773193\r\nStep 115, loss: 0.08414331823587418\r\nStep 116, loss: 0.08831285685300827\r\nStep 117, loss: 0.07616312056779861\r\nStep 118, loss: 0.10667254775762558\r\nStep 119, loss: 0.07692010700702667\r\nCalculating validation metrics...\r\nStep 120, validation loss: 0.0764869898557663\r\nStep 120, loss: 0.07342058420181274\r\nStep 121, loss: 0.10237505286931992\r\nStep 122, loss: 0.08552222698926926\r\nStep 123, loss: 0.09123130887746811\r\nStep 124, loss: 0.07849271595478058\r\nStep 125, loss: 0.07490137964487076\r\nStep 126, loss: 0.07015444338321686\r\nStep 127, loss: 0.07650169730186462\r\nStep 128, loss: 0.08419942855834961\r\nStep 129, loss: 0.08485883474349976\r\nCalculating validation metrics...\r\nStep 130, validation loss: 0.08070800453424454\r\nStep 130, loss: 0.08979608118534088\r\nStep 131, loss: 0.07452066987752914\r\nStep 132, loss: 0.08611221611499786\r\nStep 133, loss: 0.07664957642555237\r\nStep 134, loss: 0.08507084101438522\r\nStep 135, loss: 0.07210998982191086\r\nStep 136, loss: 0.08963309228420258\r\nStep 137, loss: 0.09107119590044022\r\nStep 138, loss: 0.08308420330286026\r\nStep 139, loss: 0.08361997455358505\r\nCalculating validation metrics...\r\nStep 140, validation loss: 0.07827456295490265\r\nStep 140, loss: 0.08950675278902054\r\nStep 141, loss: 0.07164888083934784\r\nStep 142, loss: 0.07035418599843979\r\nStep 143, loss: 0.07036899030208588\r\nStep 144, loss: 0.07530650496482849\r\nStep 145, loss: 0.06379816681146622\r\nStep 146, loss: 0.07031333446502686\r\nStep 147, loss: 0.06870117038488388\r\nStep 148, loss: 0.06727264076471329\r\nStep 149, loss: 0.07310083508491516\r\nCalculating validation metrics...\r\nStep 150, validation loss: 0.07303757965564728\r\nStep 150, loss: 0.0631091371178627\r\nStep 151, loss: 0.06709204614162445\r\nStep 152, loss: 0.06451910734176636\r\nStep 153, loss: 0.07493389397859573\r\nStep 154, loss: 0.07192081958055496\r\nStep 155, loss: 0.07462528347969055\r\nStep 156, loss: 0.11415237188339233\r\nStep 157, loss: 0.08543785661458969\r\nStep 158, loss: 0.06999842077493668\r\nStep 159, loss: 0.07566685229539871\r\nCalculating validation metrics...\r\nStep 160, validation loss: 0.0731324553489685\r\nStep 160, loss: 0.07527460902929306\r\nStep 161, loss: 0.08358365297317505\r\nStep 162, loss: 0.07796592265367508\r\nStep 163, loss: 0.0758633017539978\r\nStep 164, loss: 0.06582893431186676\r\nStep 165, loss: 0.08371026068925858\r\nStep 166, loss: 0.0688929557800293\r\nStep 167, loss: 0.07535845786333084\r\nStep 168, loss: 0.06775394827127457\r\nStep 169, loss: 0.05915581062436104\r\nCalculating validation metrics...\r\nStep 170, validation loss: 0.07235527038574219\r\nStep 170, loss: 0.06919055432081223\r\nStep 171, loss: 0.08124631643295288\r\nStep 172, loss: 0.07914280891418457\r\nStep 173, loss: 0.08246780931949615\r\nStep 174, loss: 0.06925532966852188\r\nStep 175, loss: 0.07127286493778229\r\nStep 176, loss: 0.07886797189712524\r\nStep 177, loss: 0.059943653643131256\r\nStep 178, loss: 0.06457393616437912\r\nStep 179, loss: 0.06598643213510513\r\nCalculating validation metrics...\r\nStep 180, validation loss: 0.0650985911488533\r\nStep 180, loss: 0.0719107836484909\r\nStep 181, loss: 0.06947534531354904\r\nStep 182, loss: 
0.0642988532781601\r\nStep 183, loss: 0.06736761331558228\r\nStep 184, loss: 0.0653526559472084\r\nStep 185, loss: 0.07156699895858765\r\nStep 186, loss: 0.06451289355754852\r\nStep 187, loss: 0.07264579087495804\r\nStep 188, loss: 0.0747564435005188\r\nStep 189, loss: 0.07572208344936371\r\nCalculating validation metrics...\r\nStep 190, validation loss: 0.07011324167251587\r\nStep 190, loss: 0.07277607172727585\r\nStep 191, loss: 0.0567333847284317\r\nStep 192, loss: 0.0741511881351471\r\nStep 193, loss: 0.06291747838258743\r\nStep 194, loss: 0.07310235500335693\r\nStep 195, loss: 0.07948039472103119\r\nStep 196, loss: 0.07027465850114822\r\nStep 197, loss: 0.06854011863470078\r\nStep 198, loss: 0.05452839285135269\r\n",,terminal_output +1297,1840846,"TERMINAL",0,0,"2",,terminal_output +1298,1841911,"TERMINAL",0,0,"",,terminal_output +1299,1842312,"TERMINAL",0,0,"Step 199, loss: 0.06645355373620987\r\nCalculating validation metrics...\r\nStep 200, validation loss: 0.06776940822601318\r\n",,terminal_output +1300,1844326,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-tokenizer-dev-3468594 at: https://wandb.ai/instant-uv/jafar/runs/3468594\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_135349-3468594/logs\r\n",,terminal_output +1301,1845734,"TERMINAL",0,0,"/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 20 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +1302,1859746,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +1303,1876162,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",,terminal_output +1304,1877274,"TERMINAL",0,0,"tokenizer_single_gpu.sh",,terminal_output +1305,1877907,"TERMINAL",0,0,"lam_single_gpu.sh",,terminal_output +1306,1878110,"TERMINAL",0,0,"\r",,terminal_output +1307,1960292,"TERMINAL",0,0,"q",,terminal_output +1308,1960364,"TERMINAL",0,0,"u",,terminal_output +1309,1960459,"TERMINAL",0,0,"e",,terminal_output +1310,1960611,"TERMINAL",0,0,"ue",,terminal_output +1311,1960758,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0403.localdomain: Fri Sep 5 13:58:50 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3465675 accelerat train_la tum_cte0 R 15:58:07\t 8 hkn[0703,0706-0707,0711-0715]3465676 accelerat train_la tum_cte0 R 15:58:07\t 8 hkn[0521-0528]3465677 accelerat train_la tum_cte0 R 15:58:07\t 8 hkn[0504,0520,0720,0722-0724,0728,0731]3466286 accelerat train_to tum_cte0 R 16:42:55\t 1 hkn07363466287 accelerat train_la tum_cte0 R 16:42:55\t 1 hkn07363468602 dev_accel train_la tum_cte0 PD\t0:00\t 1 (QOSMaxJobsPerUserLimit)3468601 dev_accel train_to tum_cte0 PD\t0:00\t 1 (QOSMaxJobsPerUserLimit)3468594 dev_accel interact tum_cte0 R\t6:15\t 1 hkn0403",,terminal_output +1312,1961722,"TERMINAL",0,0,"1888666",,terminal_output +1313,1962801,"TERMINAL",0,0,"2999777",,terminal_output +1314,1963841,"TERMINAL",0,0,"3101010888",,terminal_output +1315,1964772,"TERMINAL",0,0,"4111999",,terminal_output +1316,1965876,"TERMINAL",0,0,"52223:003:0020",,terminal_output +1317,1966285,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ 
",,terminal_output +1318,1968484,"TERMINAL",0,0,"[?2004l\r\r\nexit\r\n",,terminal_output +1319,1968566,"TERMINAL",0,0,"salloc: Relinquishing job allocation 3468594\r\nsalloc: Job allocation 3468594 has been revoked.\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1320,1970098,"TERMINAL",0,0,"queue",,terminal_command +1321,1970194,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 13:58:59 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3465675 accelerat train_la tum_cte0 R 15:58:16\t 8 hkn[0703,0706-0707,0711-0715]3465676 accelerat train_la tum_cte0 R 15:58:16\t 8 hkn[0521-0528]3465677 accelerat train_la tum_cte0 R 15:58:16\t 8 hkn[0504,0520,0720,0722-0724,0728,0731]3466286 accelerat train_to tum_cte0 R 16:43:04\t 1 hkn07363466287 accelerat train_la tum_cte0 R 16:43:04\t 1 hkn07363468594 dev_accel interact tum_cte0 CG\t6:23\t 1 hkn04033468602 dev_accel train_la tum_cte0 PD\t0:00\t 1 (QOSMaxJobsPerUserLimit)3468601 dev_accel train_to tum_cte0 PD\t0:00\t 1 (QOSMaxJobsPerUserLimit)",,terminal_output +1322,1971204,"TERMINAL",0,0,"9:0077755",,terminal_output +1323,1972239,"TERMINAL",0,0,"188866",,terminal_output +1324,1973289,"TERMINAL",0,0,"220202088",,terminal_output +1325,1974392,"TERMINAL",0,0,"411199",,terminal_output +1326,1975358,"TERMINAL",0,0,"52221010",,terminal_output +1327,1976395,"TERMINAL",0,0,"633311",,terminal_output +1328,1977514,"TERMINAL",0,0,"744422",,terminal_output +1329,1978538,"TERMINAL",0,0,"855533",,terminal_output +1330,1979521,"TERMINAL",0,0,"\r966644",,terminal_output +1331,1980622,"TERMINAL",0,0,"1077755",,terminal_output +1332,1981662,"TERMINAL",0,0,"188866",,terminal_output +1333,1982723,"TERMINAL",0,0,"299977",,terminal_output +1334,1983675,"TERMINAL",0,0,"330303088",,terminal_output +1335,1984713,"TERMINAL",0,0,"411199",,terminal_output +1336,1985779,"TERMINAL",0,0,"52222020",,terminal_output +1337,1986865,"TERMINAL",0,0,"633311",,terminal_output +1338,1987855,"TERMINAL",0,0,"744422",,terminal_output +1339,1989018,"TERMINAL",0,0,"855533",,terminal_output +1340,1989968,"TERMINAL",0,0,"966644",,terminal_output +1341,1990987,"TERMINAL",0,0,"2077755",,terminal_output +1342,1992022,"TERMINAL",0,0,"188866",,terminal_output +1343,1993089,"TERMINAL",0,0,"299977",,terminal_output +1344,1994140,"TERMINAL",0,0,"340404088",,terminal_output +1345,1995175,"TERMINAL",0,0,"411199",,terminal_output +1346,1996197,"TERMINAL",0,0,"52223030",,terminal_output +1347,1997228,"TERMINAL",0,0,"633311",,terminal_output +1348,1998035,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1349,2001079,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_command +1350,2001108,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1351,2003290,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +1352,2007410,"TERMINAL",0,0,"queue",,terminal_command +1353,2007478,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 13:59:37 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3465675 accelerat train_la tum_cte0 R 15:58:54\t 8 hkn[0703,0706-0707,0711-0715]3465676 accelerat train_la tum_cte0 R 15:58:54\t 8 hkn[0521-0528]3465677 accelerat train_la tum_cte0 R 15:58:54\t 8 hkn[0504,0520,0720,0722-0724,0728,0731]3466286 accelerat train_to tum_cte0 R 16:43:42\t 1 hkn07363466287 accelerat train_la tum_cte0 R 16:43:42\t 1 hkn07363468602 dev_accel train_la tum_cte0 PD\t0:00\t 1 
(QOSMaxJobsPerUserLimit)3468601 dev_accel train_to tum_cte0 PD\t0:00\t 1 (QOSMaxJobsPerUserLimit)",,terminal_output +1354,2008546,"TERMINAL",0,0,"855533",,terminal_output +1355,2009554,"TERMINAL",0,0,"966644",,terminal_output +1356,2009816,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1357,2011636,"TERMINAL",0,0,"scancel 3468601",,terminal_command +1358,2011655,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1359,2014476,"TERMINAL",0,0,"scancel 3468602",,terminal_command +1360,2014478,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1361,2016629,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +1362,2022175,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",,terminal_command +1363,2022245,"TERMINAL",0,0,"]633;CSubmitted batch job 3468606\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1364,2022947,"TERMINAL",0,0,"queue",,terminal_command +1365,2023062,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 13:59:52 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3465675 accelerat train_la tum_cte0 R 15:59:09\t 8 hkn[0703,0706-0707,0711-0715]3465676 accelerat train_la tum_cte0 R 15:59:09\t 8 hkn[0521-0528]3465677 accelerat train_la tum_cte0 R 15:59:09\t 8 hkn[0504,0520,0720,0722-0724,0728,0731]3466286 accelerat train_to tum_cte0 R 16:43:57\t 1 hkn07363466287 accelerat train_la tum_cte0 R 16:43:57\t 1 hkn07363468606 dev_accel train_la tum_cte0 R\t0:00\t 1 hkn0403",,terminal_output +1366,2024142,"TERMINAL",0,0,"3101010881",,terminal_output +1367,2025107,"TERMINAL",0,0,"4111992",,terminal_output +1368,2026407,"TERMINAL",0,0,"53334:014:014",,terminal_output +1369,2026951,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab +1370,2027462,"TERMINAL",0,0,"7444225",,terminal_output +1371,2028450,"TERMINAL",0,0,"8555336",,terminal_output +1372,2029514,"TERMINAL",0,0,"9666447",,terminal_output +1373,2030594,"TERMINAL",0,0,"4:00:00777558",,terminal_output +1374,2031615,"TERMINAL",0,0,"1888669",,terminal_output +1375,2032605,"TERMINAL",0,0,"29997710",,terminal_output +1376,2033679,"TERMINAL",0,0,"3202020881",,terminal_output +1377,2035008,"TERMINAL",0,0,"4111992",,terminal_output +1378,2035705,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1379,2036591,"TERMINAL",0,0,"idling",,terminal_command +1380,2036677,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Fri Sep 5 14:00:06 2025Partition dev_cpuonly:\t 6 nodes idle\rPartition cpuonly: 44 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated: 16 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 4 nodes idle\rPartition accelerated-h200:\t 5 nodes idle",,terminal_output +1381,2037684,"TERMINAL",0,0,"7",,terminal_output +1382,2037971,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1383,2038823,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab +1384,2040345,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",114,0,"",shellscript,selection_mouse +1385,2041935,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",115,0,"",shellscript,selection_command 
+1386,2042359,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",114,1,"",shellscript,content +1387,2042479,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",113,1,"",shellscript,content +1388,2042783,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",112,1,"",shellscript,content +1389,2043161,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",111,1,"",shellscript,content +1390,2052612,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",,terminal_command +1391,2052660,"TERMINAL",0,0,"]633;CSubmitted batch job 3468609\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1392,2054226,"TERMINAL",0,0,"queue",,terminal_command +1393,2054311,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 14:00:23 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3468609 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3465675 accelerat train_la tum_cte0 R 15:59:40\t 8 hkn[0703,0706-0707,0711-0715]3465676 accelerat train_la tum_cte0 R 15:59:40\t 8 hkn[0521-0528]3465677 accelerat train_la tum_cte0 R 15:59:40\t 8 hkn[0504,0520,0720,0722-0724,0728,0731]3466286 accelerat train_to tum_cte0 R 16:44:28\t 1 hkn07363466287 accelerat train_la tum_cte0 R 16:44:28\t 1 hkn07363468606 dev_accel train_la tum_cte0 R\t0:31\t 1 hkn0403",,terminal_output +1394,2055289,"TERMINAL",0,0,"422230303",,terminal_output +1395,2056401,"TERMINAL",0,0,"6333114",,terminal_output +1396,2057408,"TERMINAL",0,0,"7444225",,terminal_output +1397,2058411,"TERMINAL",0,0,"8555336",,terminal_output +1398,2059451,"TERMINAL",0,0,"9666447",,terminal_output +1399,2060609,"TERMINAL",0,0,"30777558",,terminal_output +1400,2061625,"TERMINAL",0,0,"1888669",,terminal_output +1401,2062657,"TERMINAL",0,0,"29997740",,terminal_output +1402,2063680,"TERMINAL",0,0,"3505050881",,terminal_output +1403,2064642,"TERMINAL",0,0,"4111992",,terminal_output +1404,2065688,"TERMINAL",0,0,"522240403",,terminal_output +1405,2066723,"TERMINAL",0,0,"6333114",,terminal_output +1406,2067782,"TERMINAL",0,0,"7444225",,terminal_output +1407,2068809,"TERMINAL",0,0,"8555336",,terminal_output +1408,2069915,"TERMINAL",0,0,"9666447",,terminal_output +1409,2070779,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1410,2072465,"TERMINAL",0,0,"scancel 3468609",,terminal_command +1411,2072491,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1412,2073305,"TERMINAL",0,0,"queue",,terminal_command +1413,2073377,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 14:00:43 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3465675 accelerat train_la tum_cte0 R 16:00:00\t 8 hkn[0703,0706-0707,0711-0715]3465676 accelerat train_la tum_cte0 R 16:00:00\t 8 hkn[0521-0528]3465677 accelerat train_la tum_cte0 R 16:00:00\t 8 hkn[0504,0520,0720,0722-0724,0728,0731]3466286 accelerat train_to tum_cte0 R 16:44:48\t 1 hkn07363466287 accelerat train_la tum_cte0 R 16:44:48\t 1 hkn07363468606 dev_accel train_la tum_cte0 R\t0:51\t 1 hkn0403",,terminal_output +1414,2074354,"TERMINAL",0,0,"4111992",,terminal_output +1415,2075392,"TERMINAL",0,0,"522250503",,terminal_output +1416,2076466,"TERMINAL",0,0,"6333114",,terminal_output +1417,2077597,"TERMINAL",0,0,"7444225",,terminal_output +1418,2078624,"TERMINAL",0,0,"8555336",,terminal_output +1419,2079563,"TERMINAL",0,0,"9666447",,terminal_output +1420,2080609,"TERMINAL",0,0,"50777558",,terminal_output 
+1421,2081656,"TERMINAL",0,0,"1888669",,terminal_output +1422,2082795,"TERMINAL",0,0,"2999771:00",,terminal_output +1423,2083740,"TERMINAL",0,0,"3101010881",,terminal_output +1424,2084787,"TERMINAL",0,0,"4111992",,terminal_output +1425,2085832,"TERMINAL",0,0,"52225:005:003",,terminal_output +1426,2086877,"TERMINAL",0,0,"6333114",,terminal_output +1427,2087959,"TERMINAL",0,0,"7444225",,terminal_output +1428,2088960,"TERMINAL",0,0,"8555336",,terminal_output +1429,2089999,"TERMINAL",0,0,"9666447",,terminal_output +1430,2091033,"TERMINAL",0,0,"1:00777558",,terminal_output +1431,2092183,"TERMINAL",0,0,"1888669",,terminal_output +1432,2093157,"TERMINAL",0,0,"29997710",,terminal_output +1433,2094575,"TERMINAL",0,0,"3212121992",,terminal_output +1434,2095581,"TERMINAL",0,0,"522210103",,terminal_output +1435,2096746,"TERMINAL",0,0,"6333114",,terminal_output +1436,2097677,"TERMINAL",0,0,"7444225",,terminal_output +1437,2098804,"TERMINAL",0,0,"8555336",,terminal_output +1438,2099818,"TERMINAL",0,0,"9666447",,terminal_output +1439,2100840,"TERMINAL",0,0,"10777558",,terminal_output +1440,2101898,"TERMINAL",0,0,"1888669",,terminal_output +1441,2102912,"TERMINAL",0,0,"29997720[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1442,2104510,"TERMINAL",0,0,"python",,terminal_command +1443,2104576,"TERMINAL",0,0,"]633;C",,terminal_output +1444,2104801,"TERMINAL",0,0,"Python 3.10.18 (main, Jun 4 2025, 17:36:27) [Clang 20.1.4 ] on linux\r\nType ""help"", ""copyright"", ""credits"" or ""license"" for more information.\r\n",,terminal_output +1445,2105332,"TERMINAL",0,0,"1>>> 1",,terminal_output +1446,2105614,"TERMINAL",0,0,"5",,terminal_output +1447,2105884,"TERMINAL",0,0,"*",,terminal_output +1448,2106266,"TERMINAL",0,0,"3",,terminal_output +1449,2106409,"TERMINAL",0,0,"\r\n45\r\n>>> ",,terminal_output +1450,2110759,"TERMINAL",0,0,"^D\r\n",,terminal_output +1451,2110837,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1452,2160416,"TERMINAL",0,0,"queue",,terminal_command +1453,2160501,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 14:02:10 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3465675 accelerat train_la tum_cte0 R 16:01:27\t 8 hkn[0703,0706-0707,0711-0715]3465676 accelerat train_la tum_cte0 R 16:01:27\t 8 hkn[0521-0528]3465677 accelerat train_la tum_cte0 R 16:01:27\t 8 hkn[0504,0520,0720,0722-0724,0728,0731]3466286 accelerat train_to tum_cte0 R 16:46:15\t 1 hkn07363466287 accelerat train_la tum_cte0 R 16:46:15\t 1 hkn07363468606 dev_accel train_la tum_cte0 R\t2:18\t 1 hkn0403",,terminal_output +1454,2161474,"TERMINAL",0,0,"1888669",,terminal_output +1455,2162690,"TERMINAL",0,0,"29997720",,terminal_output +1456,2163558,"TERMINAL",0,0,"3303030881",,terminal_output +1457,2164512,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1458,2168086,"TERMINAL",0,0,"logs",,terminal_command +1459,2168178,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir",,terminal_output +1460,2168730,"TERMINAL",0,0,"ls",,terminal_command +1461,2168795,"TERMINAL",0,0,"]633;C",,terminal_output +1462,2168949,"TERMINAL",0,0,"atari train_dyn_yolorun_new_arch_3358457.log train_tokenizer_batch_size_scaling_16_node_3321526.log\r\nbig_run train_lam_action_space_scaling_10_3320179.log train_tokenizer_batch_size_scaling_1_node_3318551.log\r\nbig-runs train_lam_action_space_scaling_10_3321529.log 
train_tokenizer_batch_size_scaling_2_node_3318552.log\r\ncausal train_lam_action_space_scaling_10_3329786.log train_tokenizer_batch_size_scaling_2_node_3330806.log\r\ncoinrun train_lam_action_space_scaling_10_3329801.log train_tokenizer_batch_size_scaling_2_node_3330848.log\r\ndata_coinrun train_lam_action_space_scaling_10_3331283.log train_tokenizer_batch_size_scaling_2_node_3331282.log\r\njafar_og_reproduction train_lam_action_space_scaling_12_3318546.log train_tokenizer_batch_size_scaling_4_node_3318553.log\r\nlam train_lam_action_space_scaling_12_3320177.log train_tokenizer_batch_size_scaling_4_node_3320175.log\r\nmaskgit train_lam_action_space_scaling_12_3321527.log train_tokenizer_batch_size_scaling_4_node_3321524.log\r\nmaskgit-maskprob-fix train_lam_action_space_scaling_12_3329787.log train_tokenizer_batch_size_scaling_8_node_3320176.log\r\npreprocess train_lam_action_space_scaling_12_3329802.log train_tokenizer_batch_size_scaling_8_node_3321525.log\r\ntrain_dyn_causal_180M_3372931.log train_lam_action_space_scaling_12_3331284.log train_tokenizer_minecraft_overfit_sample_3309656.log\r\ntrain_dyn_causal_180M_3372963.log train_lam_action_space_scaling_20_3318547.log train_tokenizer_model_size_scaling_127M_3317233.log\r\ntrain_dyn_causal_180M_3372969.log train_lam_action_space_scaling_20_3329788.log train_tokenizer_model_size_scaling_127M_3318554.log\r\ntrain_dyn_causal_180M_3373107.log train_lam_action_space_scaling_20_3329803.log train_tokenizer_model_size_scaling_140M_3313562.log\r\ntrain_dyn_causal_255M_3372932.log train_lam_action_space_scaling_20_3331285.log train_tokenizer_model_size_scaling_140M_3316019.log\r\ntrain_dyn_causal_255M_3372970.log train_lam_action_space_scaling_50_3320180.log train_tokenizer_model_size_scaling_200M_3313563.log\r\ntrain_dyn_causal_255M_3373108.log train_lam_action_space_scaling_50_3329789.log train_tokenizer_model_size_scaling_200M_3316020.log\r\ntrain_dyn_causal_356M_3372934.log train_lam_action_space_scaling_50_3329804.log train_tokenizer_model_size_scaling_227M_3317234.log\r\ntrain_dyn_causal_356M_3372971.log train_lam_action_space_scaling_50_3331286.log train_tokenizer_model_size_scaling_227M_3318555.log\r\ntrain_dyn_causal_356M_3373109.log train_lam_action_space_scaling_6_3318549.log train_tokenizer_model_size_scaling_227M_3320173.log\r\ntrain_dyn_causal_500M_3372936.log train_lam_action_space_scaling_6_3320178.log train_tokenizer_model_size_scaling_227M_3321523.log\r\ntrain_dyn_causal_500M_3372972.log train_lam_action_space_scaling_6_3321528.log train_tokenizer_model_size_scaling_37M_3313565.log\r\ntrain_dyn_causal_500M_3373110.log train_lam_action_space_scaling_6_3329790.log train_tokenizer_model_size_scaling_37M_3316022.log\r\ntrain_dyn_new_arch-bugfixed-spatial-shift_3359343.log train_lam_action_space_scaling_6_3329805.log train_tokenizer_model_size_scaling_37M_3317232.log\r\ntrain_dyn_new_arch-bugfixed-temporal-shift_3359349.log train_lam_action_space_scaling_6_3331287.log train_tokenizer_model_size_scaling_37M_3317239.log\r\ntrain_dyn_yolorun_3333026.log train_lam_action_space_scaling_8_3318550.log train_tokenizer_model_size_scaling_37M_3318556.log\r\ntrain_dyn_yolorun_3333448.log train_lam_action_space_scaling_8_3329791.log train_tokenizer_model_size_scaling_74M_3318557.log\r\ntrain_dyn_yolorun_3335345.log train_lam_action_space_scaling_8_3329806.log train_tokenizer_model_size_scaling_74M_3320174.log\r\ntrain_dyn_yolorun_3335362.log train_lam_action_space_scaling_8_3331288.log 
train_tokenizer_model_size_scaling_74M_3321522.log\r\ntrain_dyn_yolorun_3348592.log train_lam_minecraft_overfit_sample_3309655.log train_tokenizer_model_size_scaling_80M_3313564.log\r\ntrain_dyn_yolorun_new_arch_3351743.log train_lam_model_size_scaling_38M_3317098.log train_tokenizer_model_size_scaling_80M_3316026.log\r\ntrain_dyn_yolorun_new_arch_3352103.log train_lam_model_size_scaling_38M_3317115.log yoloruns\r\ntrain_dyn_yolorun_new_arch_3352115.log train_lam_model_size_scaling_38M_3317231.log\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir",,terminal_output +1463,2171194,"TERMINAL",0,0,"cd lam/",,terminal_command +1464,2171211,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam",,terminal_output +1465,2171519,"TERMINAL",0,0,"ls",,terminal_command +1466,2171576,"TERMINAL",0,0,"]633;C",,terminal_output +1467,2171649,"TERMINAL",0,0,"train_lam_minecraft_8node_3431870.log train_lam_minecraft_8node_3454944.log train_lam_minecraft_8node_darkness_filter_37M_3454953.log\r\ntrain_lam_minecraft_8node_3431875.log train_lam_minecraft_8node_3454948.log train_lam_minecraft_8node_darkness_filter_37M_3457969.log\r\ntrain_lam_minecraft_8node_3431876.log train_lam_minecraft_8node_darkness_filter_133M_3454956.log train_lam_minecraft_8node_darkness_filter_37M_3463210.log\r\ntrain_lam_minecraft_8node_3431885.log train_lam_minecraft_8node_darkness_filter_133M_3465196.log train_lam_minecraft_8node_darkness_filter_37M_3465678.log\r\ntrain_lam_minecraft_8node_3431895.log train_lam_minecraft_8node_darkness_filter_133M_3465677.log train_lam_minecraft_8node_darkness_filter_400M_3454954.log\r\ntrain_lam_minecraft_8node_3454890.log train_lam_minecraft_8node_darkness_filter_311M_3454955.log train_lam_minecraft_8node_darkness_filter_400M_3465198.log\r\ntrain_lam_minecraft_8node_3454917.log train_lam_minecraft_8node_darkness_filter_311M_3465197.log train_lam_minecraft_8node_darkness_filter_400M_3465675.log\r\ntrain_lam_minecraft_8node_3454941.log train_lam_minecraft_8node_darkness_filter_311M_3465676.log\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam",,terminal_output +1468,2181035,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +1469,2184004,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab +1470,2186666,"TERMINAL",0,0,"cd ..",,terminal_command +1471,2186719,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir",,terminal_output +1472,2186974,"TERMINAL",0,0,"ls",,terminal_command +1473,2187022,"TERMINAL",0,0,"]633;Catari train_dyn_yolorun_new_arch_3358457.log train_tokenizer_batch_size_scaling_16_node_3321526.log\r\nbig_run train_lam_action_space_scaling_10_3320179.log train_tokenizer_batch_size_scaling_1_node_3318551.log\r\nbig-runs train_lam_action_space_scaling_10_3321529.log train_tokenizer_batch_size_scaling_2_node_3318552.log\r\ncausal train_lam_action_space_scaling_10_3329786.log train_tokenizer_batch_size_scaling_2_node_3330806.log\r\ncoinrun train_lam_action_space_scaling_10_3329801.log train_tokenizer_batch_size_scaling_2_node_3330848.log\r\ndata_coinrun train_lam_action_space_scaling_10_3331283.log train_tokenizer_batch_size_scaling_2_node_3331282.log\r\njafar_og_reproduction train_lam_action_space_scaling_12_3318546.log train_tokenizer_batch_size_scaling_4_node_3318553.log\r\nlam 
train_lam_action_space_scaling_12_3320177.log train_tokenizer_batch_size_scaling_4_node_3320175.log\r\nmaskgit train_lam_action_space_scaling_12_3321527.log train_tokenizer_batch_size_scaling_4_node_3321524.log\r\nmaskgit-maskprob-fix train_lam_action_space_scaling_12_3329787.log train_tokenizer_batch_size_scaling_8_node_3320176.log\r\npreprocess train_lam_action_space_scaling_12_3329802.log train_tokenizer_batch_size_scaling_8_node_3321525.log\r\ntrain_dyn_causal_180M_3372931.log train_lam_action_space_scaling_12_3331284.log train_tokenizer_minecraft_overfit_sample_3309656.log\r\ntrain_dyn_causal_180M_3372963.log train_lam_action_space_scaling_20_3318547.log train_tokenizer_model_size_scaling_127M_3317233.log\r\ntrain_dyn_causal_180M_3372969.log train_lam_action_space_scaling_20_3329788.log train_tokenizer_model_size_scaling_127M_3318554.log\r\ntrain_dyn_causal_180M_3373107.log train_lam_action_space_scaling_20_3329803.log train_tokenizer_model_size_scaling_140M_3313562.log\r\ntrain_dyn_causal_255M_3372932.log train_lam_action_space_scaling_20_3331285.log train_tokenizer_model_size_scaling_140M_3316019.log\r\ntrain_dyn_causal_255M_3372970.log train_lam_action_space_scaling_50_3320180.log train_tokenizer_model_size_scaling_200M_3313563.log\r\ntrain_dyn_causal_255M_3373108.log train_lam_action_space_scaling_50_3329789.log train_tokenizer_model_size_scaling_200M_3316020.log\r\ntrain_dyn_causal_356M_3372934.log train_lam_action_space_scaling_50_3329804.log train_tokenizer_model_size_scaling_227M_3317234.log\r\ntrain_dyn_causal_356M_3372971.log train_lam_action_space_scaling_50_3331286.log train_tokenizer_model_size_scaling_227M_3318555.log\r\ntrain_dyn_causal_356M_3373109.log train_lam_action_space_scaling_6_3318549.log train_tokenizer_model_size_scaling_227M_3320173.log\r\ntrain_dyn_causal_500M_3372936.log train_lam_action_space_scaling_6_3320178.log train_tokenizer_model_size_scaling_227M_3321523.log\r\ntrain_dyn_causal_500M_3372972.log train_lam_action_space_scaling_6_3321528.log train_tokenizer_model_size_scaling_37M_3313565.log\r\ntrain_dyn_causal_500M_3373110.log train_lam_action_space_scaling_6_3329790.log train_tokenizer_model_size_scaling_37M_3316022.log\r\ntrain_dyn_new_arch-bugfixed-spatial-shift_3359343.log train_lam_action_space_scaling_6_3329805.log train_tokenizer_model_size_scaling_37M_3317232.log\r\ntrain_dyn_new_arch-bugfixed-temporal-shift_3359349.log train_lam_action_space_scaling_6_3331287.log train_tokenizer_model_size_scaling_37M_3317239.log\r\ntrain_dyn_yolorun_3333026.log train_lam_action_space_scaling_8_3318550.log train_tokenizer_model_size_scaling_37M_3318556.log\r\ntrain_dyn_yolorun_3333448.log train_lam_action_space_scaling_8_3329791.log train_tokenizer_model_size_scaling_74M_3318557.log\r\ntrain_dyn_yolorun_3335345.log train_lam_action_space_scaling_8_3329806.log train_tokenizer_model_size_scaling_74M_3320174.log\r\ntrain_dyn_yolorun_3335362.log train_lam_action_space_scaling_8_3331288.log train_tokenizer_model_size_scaling_74M_3321522.log\r\ntrain_dyn_yolorun_3348592.log train_lam_minecraft_overfit_sample_3309655.log train_tokenizer_model_size_scaling_80M_3313564.log\r\ntrain_dyn_yolorun_new_arch_3351743.log train_lam_model_size_scaling_38M_3317098.log train_tokenizer_model_size_scaling_80M_3316026.log\r\ntrain_dyn_yolorun_new_arch_3352103.log train_lam_model_size_scaling_38M_3317115.log yoloruns\r\ntrain_dyn_yolorun_new_arch_3352115.log 
train_lam_model_size_scaling_38M_3317231.log\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir",,terminal_output +1474,2189895,"TERMINAL",0,0,"cd coinrun/lam/",,terminal_command +1475,2189936,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam",,terminal_output +1476,2190852,"TERMINAL",0,0,"ls",,terminal_command +1477,2190912,"TERMINAL",0,0,"]633;Ctrain_lam_1e-4_3468606.log\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam",,terminal_output +1478,2193103,"TERMINAL",0,0,"tail -f train_lam_1e-4_3468606.log",,terminal_command +1479,2193168,"TERMINAL",0,0,"]633;CNodelist: hkn0403\r\nCPU Utilized: 00:00:10\r\nCPU Efficiency: 0.28% of 01:00:24 core-walltime\r\nJob Wall-clock time: 00:02:31\r\nStarttime: Fri Sep 5 13:59:52 2025\r\nEndtime: Fri Sep 5 14:02:23 2025\r\nMemory Utilized: 1.47 GB (estimated maximum)\r\nMemory Efficiency: 0.00% of 0.00 MB (0.00 MB/node)\r\nEnergy Consumed: 61676 Joule / 17.1322222222222 Watthours\r\nAverage node power draw: 408.450331125828 Watt\r\n",,terminal_output +1480,2195777,"TERMINAL",0,0,"^C\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam",,terminal_output +1481,2202575,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/train_lam_1e-4_3468606.log",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=00:20:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=10 \\n --log_checkpoint_interval=1000 \\n --log \\n --name=coinrun-lam-dev-$slurm_job_id \\n --tags lam coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 200 \\n --val_data_dir $array_records_dir_val \\n --val_interval 10 \\n --val_steps 50 \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait 
$child_pid\n\nSLURM_JOB_USER=tum_cte0515\nSLURM_TASKS_PER_NODE=4\nSLURM_JOB_UID=999226\nSLURM_TASK_PID=3448684\nSLURM_JOB_GPUS=0\nSLURM_LOCALID=0\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\nSLURMD_NODENAME=hkn0403\nSLURM_JOB_START_TIME=1757073592\nSLURM_CLUSTER_NAME=hk\nSLURM_JOB_END_TIME=1757074792\nSLURM_CPUS_ON_NODE=24\nSLURM_JOB_CPUS_PER_NODE=24\nSLURM_GPUS_ON_NODE=1\nSLURM_GTIDS=0\nSLURM_JOB_PARTITION=dev_accelerated\nSLURM_TRES_PER_TASK=cpu=5\nSLURM_OOM_KILL_STEP=0\nSLURM_JOB_NUM_NODES=1\nSLURM_JOBID=3468606\nSLURM_JOB_QOS=normal\nSLURM_PROCID=0\nSLURM_CPUS_PER_TASK=5\nSLURM_NTASKS=4\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0403\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\nSLURM_SCRIPT_CONTEXT=prolog_task\nSLURM_NODELIST=hkn0403\nSLURM_JOB_ACCOUNT=hk-project-p0023960\nSLURM_PRIO_PROCESS=0\nSLURM_NPROCS=4\nSLURM_NNODES=1\nSLURM_SUBMIT_HOST=hkn1991.localdomain\nSLURM_JOB_ID=3468606\nSLURM_NODEID=0\nSLURM_CONF=/etc/slurm/slurm.conf\nSLURM_JOB_NAME=train_lam_1e-4\nSLURM_NTASKS_PER_NODE=4\nSLURM_JOB_GID=502226\nSLURM_JOB_NODELIST=hkn0403\nGpuFreq=control_disabled\n2025-09-05 14:00:22.768561: W external/xla/xla/service/platform_util.cc:220] unable to create StreamExecutor for CUDA:2: CUDA error: Failed call to cuDeviceGet: CUDA_ERROR_INVALID_DEVICE: invalid device ordinal\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 742, in backends\n backend = _init_backend(platform)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 828, in _init_backend\n backend = registration.factory()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 528, in factory\n return xla_client.make_c_api_client(\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jaxlib/xla_client.py"", line 153, in make_c_api_client\n return _xla.get_c_api_client(plugin_name, options, distributed_client)\njaxlib._jax.XlaRuntimeError: INTERNAL: no supported devices found for platform CUDA\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py"", line 184, in \n num_devices = jax.device_count()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 907, in device_count\n return int(get_backend(backend).device_count())\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 876, in get_backend\n return _get_backend_uncached(platform)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 855, in _get_backend_uncached\n bs = backends()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 758, in backends\n raise RuntimeError(err_msg)\nRuntimeError: Unable to initialize backend 'cuda': INTERNAL: no supported devices found for platform CUDA (you may need to uninstall the failing plugin package, or set JAX_PLATFORMS=cpu to skip 
this backend.)\n2025-09-05 14:00:22.777184: W external/xla/xla/service/platform_util.cc:220] unable to create StreamExecutor for CUDA:1: CUDA error: Failed call to cuDeviceGet: CUDA_ERROR_INVALID_DEVICE: invalid device ordinal\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 742, in backends\n backend = _init_backend(platform)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 828, in _init_backend\n backend = registration.factory()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 528, in factory\n return xla_client.make_c_api_client(\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jaxlib/xla_client.py"", line 153, in make_c_api_client\n return _xla.get_c_api_client(plugin_name, options, distributed_client)\njaxlib._jax.XlaRuntimeError: INTERNAL: no supported devices found for platform CUDA\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py"", line 184, in \n num_devices = jax.device_count()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 907, in device_count\n return int(get_backend(backend).device_count())\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 876, in get_backend\n return _get_backend_uncached(platform)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 855, in _get_backend_uncached\n bs = backends()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 758, in backends\n raise RuntimeError(err_msg)\nRuntimeError: Unable to initialize backend 'cuda': INTERNAL: no supported devices found for platform CUDA (you may need to uninstall the failing plugin package, or set JAX_PLATFORMS=cpu to skip this backend.)\n2025-09-05 14:00:22.779382: W external/xla/xla/service/platform_util.cc:220] unable to create StreamExecutor for CUDA:3: CUDA error: Failed call to cuDeviceGet: CUDA_ERROR_INVALID_DEVICE: invalid device ordinal\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 742, in backends\n backend = _init_backend(platform)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 828, in _init_backend\n backend = registration.factory()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 528, in factory\n return xla_client.make_c_api_client(\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jaxlib/xla_client.py"", line 153, in make_c_api_client\n return _xla.get_c_api_client(plugin_name, options, distributed_client)\njaxlib._jax.XlaRuntimeError: 
INTERNAL: no supported devices found for platform CUDA\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py"", line 184, in \n num_devices = jax.device_count()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 907, in device_count\n return int(get_backend(backend).device_count())\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 876, in get_backend\n return _get_backend_uncached(platform)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 855, in _get_backend_uncached\n bs = backends()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 758, in backends\n raise RuntimeError(err_msg)\nRuntimeError: Unable to initialize backend 'cuda': INTERNAL: no supported devices found for platform CUDA (you may need to uninstall the failing plugin package, or set JAX_PLATFORMS=cpu to skip this backend.)\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 742, in backends\n backend = _init_backend(platform)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 828, in _init_backend\n backend = registration.factory()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 528, in factory\n return xla_client.make_c_api_client(\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jaxlib/xla_client.py"", line 153, in make_c_api_client\n return _xla.get_c_api_client(plugin_name, options, distributed_client)\njaxlib._jax.XlaRuntimeError: INTERNAL: Getting local topologies failed: Error 1: GetKeyValue() timed out with key: cuda:local_topology/cuda/1 and duration: 2m\n\nError 2: GetKeyValue() timed out with key: cuda:local_topology/cuda/2 and duration: 2m\n\nError 3: GetKeyValue() timed out with key: cuda:local_topology/cuda/3 and duration: 2m\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py"", line 184, in \n num_devices = jax.device_count()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 907, in device_count\n return int(get_backend(backend).device_count())\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 876, in get_backend\n return _get_backend_uncached(platform)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 855, in _get_backend_uncached\n bs = backends()\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/xla_bridge.py"", line 758, in backends\n raise 
RuntimeError(err_msg)\nRuntimeError: Unable to initialize backend 'cuda': INTERNAL: Getting local topologies failed: Error 1: GetKeyValue() timed out with key: cuda:local_topology/cuda/1 and duration: 2m\n\nError 2: GetKeyValue() timed out with key: cuda:local_topology/cuda/2 and duration: 2m\n\nError 3: GetKeyValue() timed out with key: cuda:local_topology/cuda/3 and duration: 2m (you may need to uninstall the failing plugin package, or set JAX_PLATFORMS=cpu to skip this backend.)\nsrun: error: hkn0403: tasks 1-3: Exited with exit code 1\nsrun: error: hkn0403: task 0: Exited with exit code 1\n\n============================= JOB FEEDBACK =============================\n\nJob ID: 3468606\nCluster: hk\nUser/Group: tum_cte0515/hk-project-p0023960\nAccount: hk-project-p0023960\nState: FAILED (exit code 1)\nPartition: dev_accelerated\nNodes: 1\nCores per node: 24\nNodelist: hkn0403\nCPU Utilized: 00:00:10\nCPU Efficiency: 0.28% of 01:00:24 core-walltime\nJob Wall-clock time: 00:02:31\nStarttime: Fri Sep 5 13:59:52 2025\nEndtime: Fri Sep 5 14:02:23 2025\nMemory Utilized: 1.47 GB (estimated maximum)\nMemory Efficiency: 0.00% of 0.00 MB (0.00 MB/node)\nEnergy Consumed: 61676 Joule / 17.1322222222222 Watthours\nAverage node power draw: 408.450331125828 Watt\n",log,tab +1482,2218119,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +1483,2219592,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",614,0,"",shellscript,selection_mouse +1484,2220156,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",616,0,"",shellscript,selection_mouse +1485,2220178,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",615,0,"",shellscript,selection_command +1486,2220702,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",590,0,"",shellscript,selection_mouse +1487,2220718,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",589,0,"",shellscript,selection_command +1488,2221154,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",590,0,"",shellscript,selection_mouse +1489,2221155,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",589,0,"",shellscript,selection_command +1490,2221278,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",589,1,"4",shellscript,selection_mouse +1491,2221288,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",590,0,"",shellscript,selection_command +1492,2221420,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",589,1,"4",shellscript,selection_mouse +1493,2221421,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",581,9,"cuda/12.4",shellscript,selection_mouse +1494,2221421,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",579,11,"l/cuda/12.4",shellscript,selection_mouse +1495,2221421,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",578,12,"el/cuda/12.4",shellscript,selection_mouse +1496,2221422,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",576,14,"evel/cuda/12.4",shellscript,selection_mouse +1497,2221422,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",575,15,"devel/cuda/12.4",shellscript,selection_mouse +1498,2221422,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",573,17,"d devel/cuda/12.4",shellscript,selection_mouse +1499,2221461,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",572,18,"ad devel/cuda/12.4",shellscript,selection_mouse +1500,2221484,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",571,19,"oad devel/cuda/12.4",shellscript,selection_mouse +1501,2221536,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",569,21,"nload 
devel/cuda/12.4",shellscript,selection_mouse +1502,2221635,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",567,23," unload devel/cuda/12.4",shellscript,selection_mouse +1503,2221636,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",566,24,"e unload devel/cuda/12.4",shellscript,selection_mouse +1504,2221636,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",565,25,"le unload devel/cuda/12.4",shellscript,selection_mouse +1505,2221664,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",564,26,"ule unload devel/cuda/12.4",shellscript,selection_mouse +1506,2221753,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",563,27,"dule unload devel/cuda/12.4",shellscript,selection_mouse +1507,2221754,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",533,57,"dule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4",shellscript,selection_mouse +1508,2221757,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",532,58,"odule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4",shellscript,selection_mouse +1509,2222137,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",532,0,"",shellscript,selection_mouse +1510,2223114,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",112,0,"",shellscript,selection_mouse +1511,2226474,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +1512,2230737,"TERMINAL",0,0,"dev",,terminal_command +1513,2230768,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1514,2232531,"TERMINAL",0,0,"am ""added val lo^C logic to tokenizer""",,terminal_command +1515,2232576,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1516,2234360,"TERMINAL",0,0,"salloc_node",,terminal_command +1517,2234411,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 3468617\r\n",,terminal_output +1518,2234508,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +1519,2239128,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +1520,2244942,"TERMINAL",0,0,"s",,terminal_output +1521,2245079,"TERMINAL",0,0,"ou",,terminal_output +1522,2245186,"TERMINAL",0,0,"r",,terminal_output +1523,2245317,"TERMINAL",0,0,"c",,terminal_output +1524,2245459,"TERMINAL",0,0,"e",,terminal_output +1525,2245597,"TERMINAL",0,0," ",,terminal_output +1526,2245850,"TERMINAL",0,0,".",,terminal_output +1527,2246045,"TERMINAL",0,0,"v",,terminal_output +1528,2246279,"TERMINAL",0,0,"\t",,terminal_output +1529,2247309,"TERMINAL",0,0,"b",,terminal_output +1530,2247478,"TERMINAL",0,0,"\t",,terminal_output +1531,2248103,"TERMINAL",0,0,"\t",,terminal_output +1532,2250701,"TERMINAL",0,0,"a",,terminal_output +1533,2250894,"TERMINAL",0,0,"c",,terminal_output +1534,2251214,"TERMINAL",0,0,"\t",,terminal_output +1535,2261558,"TERMINAL",0,0,"salloc: Nodes hkn0403 are ready for job\r\n",,terminal_output +1536,2261699,"TERMINAL",0,0,"source .v\tb\t\tac\t",,terminal_output +1537,2262380,"TERMINAL",0,0,"]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h[tum_cte0515@hkn0403 jasmine]$ source .venv/bin/activate",,terminal_output +1538,2266291,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +1539,2271217,"TERMINAL",0,0,"s",,terminal_output +1540,2271390,"TERMINAL",0,0,"h",,terminal_output +1541,2271454,"TERMINAL",0,0," ",,terminal_output +1542,2271768,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",,terminal_output 
+1543,2273168,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --job-name=train_lam_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_lam.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --wandb_id $SLURM_JOB_ID \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=1000 \\r\n --log \\r\n --name=coinrun-lam-dev-$slurm_job_id \\r\n --tags lam coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 200 \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 10 \\r\n --val_steps 50 \\r\n --data_dir $array_records_dir_train &\r\n\r\nchild_pid=$!\r\n\r\nwait $child_pid\r\n\r\n",,terminal_output 
+1544,2273323,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3453268\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0403\r\nSLURM_JOB_START_TIME=1757073804\r\nSLURM_STEP_NODELIST=hkn0403\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757077404\r\nSLURM_PMI2_SRUN_PORT=36059\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3468617\r\nSLURM_PTY_PORT=33607\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=36\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0403\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=196\r\nSLURM_NODELIST=hkn0403\r\nSLURM_SRUN_COMM_PORT=43381\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3468617\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0403\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=43381\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0403\r\n",,terminal_output +1545,2273464,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +1546,2280564,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +1547,2281268,"TERMINAL",0,0,"wandb: creating run\r\n",,terminal_output +1548,2281877,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_140410-3468617\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-lam-dev-3468617\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/3468617\r\n",,terminal_output +1549,2285712,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['action_in', 'action_up', 'decoder', 'encoder', 'patch_up', 'vq']\r\nParameter counts:\r\n{'action_in': 768, 'action_up': 16896, 'decoder': 17474816, 'encoder': 17228832, 'patch_up': 393728, 'vq': 192, 'total': 35115232}\r\nStarting training from step 0...\r\n",,terminal_output +1550,2294711,"TERMINAL",0,0,"2025-09-05 14:04:24.346926: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 14:04:24.348081: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 14:04:24.348099: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 14:04:24.348564: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1551,2343528,"TERMINAL",0,0,"Step 0, loss: 0.29082542657852173\r\nStep 1, loss: 0.2048691064119339\r\nStep 2, loss: 0.19221149384975433\r\nStep 3, loss: 0.20193631947040558\r\nStep 4, loss: 0.1634262055158615\r\nStep 5, loss: 0.16224892437458038\r\nStep 6, loss: 0.1465519517660141\r\nStep 7, loss: 0.13969092071056366\r\nStep 8, loss: 0.13379880785942078\r\nStep 9, loss: 0.13682898879051208\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\n",,terminal_output +1552,2361660,"TERMINAL",0,0,"Step 10, validation loss: 0.11593025177717209\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py"", line 400, in \r\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 4625, in concatenate\r\n arrays_out = [lax.concatenate(arrays_out[i:i+k], axis)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 4625, in \r\n arrays_out = [lax.concatenate(arrays_out[i:i+k], axis)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 2006, in concatenate\r\n return concatenate_p.bind(*operands, dimension=dimension)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 536, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 552, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 562, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1066, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 91, in apply_primitive\r\n outs = fun(*args)\r\nTypeError: 
Cannot concatenate arrays with shapes that differ in dimensions other than the one being concatenated: concatenating along dimension 1 for shapes (16, 64, 64, 3), (15, 64, 64, 3).\r\n",,terminal_output +1553,2363418,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-lam-dev-3468617 at: https://wandb.ai/instant-uv/jafar/runs/3468617\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_140410-3468617/logs\r\n",,terminal_output +1554,2364178,"TERMINAL",0,0,"/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 20 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +1555,2364462,"TERMINAL",0,0,"srun: error: hkn0403: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +1556,2502183,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",0,0,"from dataclasses import dataclass, field\nimport os\nfrom typing import cast\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.lam import LatentActionModel\nfrom utils.dataloader import create_dataloader_iterator\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n vq_beta: float = 0.25\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n vq_reset_thresh: int = 50\n # LAM\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 6\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_lam""\n tags: list[str] = field(default_factory=lambda: [""lam""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_checkpoint_keep_period: int = 20000\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n wandb_id: str = """"\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\n\ndef lam_loss_fn(\n model: LatentActionModel, inputs: dict, training: bool = True\n) -> tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n # --- Compute loss ---\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training=training)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n 
gt_future_frames = gt[:, 1:]\n mse = jnp.square(gt_future_frames - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = gt_future_frames.clip(0, 1).reshape(-1, *gt_future_frames.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt, recon)).mean()\n count_fn = jax.vmap(lambda i: (outputs[""indices""] == i).sum())\n index_counts = count_fn(jnp.arange(args.num_latents))\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),\n )\n return loss, (outputs[""recon""], index_counts, metrics)\n\n\n@nnx.jit\ndef train_step(\n lam: LatentActionModel,\n optimizer: nnx.Optimizer,\n inputs: dict,\n action_last_active: jax.Array,\n rng: jax.Array,\n) -> tuple[jax.Array, jax.Array, jax.Array, dict]:\n def loss_fn(\n model: LatentActionModel,\n ) -> tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n model.train()\n return lam_loss_fn(model, inputs, training=True)\n\n # --- Update model ---\n (loss, (recon, idx_counts, metrics)), grads = nnx.value_and_grad(\n loss_fn, has_aux=True\n )(lam)\n optimizer.update(grads)\n\n # --- Reset inactive latent actions ---\n codebook = lam.vq.codebook\n num_codes = len(codebook)\n active_codes = idx_counts != 0.0\n action_last_active = jnp.where(active_codes, 0, action_last_active + 1)\n p_code = active_codes / active_codes.sum()\n reset_idxs = jax.random.choice(rng, num_codes, shape=(num_codes,), p=p_code)\n do_reset = action_last_active >= args.vq_reset_thresh\n new_codebook = jnp.where(\n jnp.expand_dims(do_reset, -1), codebook[reset_idxs], codebook.value\n )\n lam.vq.codebook.value = new_codebook\n action_last_active = jnp.where(do_reset, 0, action_last_active)\n return loss, recon, action_last_active, metrics\n\n@nnx.jit\ndef val_step(lam: LatentActionModel, inputs: dict) -> tuple[jax.Array, jax.Array, dict]:\n lam.eval()\n (loss, (recon, _, metrics)) = lam_loss_fn(lam, inputs)\n return loss, recon, metrics\n\ndef calculate_validation_metrics(val_dataloader):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n print(f""Calculating validation metrics..."")\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(lam, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}"")\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n\n # Count parameters\n _, params, _ = nnx.split(lam, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n optimizer = nnx.Optimizer(lam, tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n 
""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n image_shape = (args.image_height, args.image_width, args.image_channels)\n train_iterator = create_dataloader_iterator(args.data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in train_iterator\n )\n dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )\n print(f""Starting training from step {step}..."")\n action_last_active = jnp.zeros(args.num_latents, dtype=jnp.int32)\n while step < args.num_steps:\n for videos in dataloader_train:\n # --- Train step ---\n rng, _rng = jax.random.split(rng)\n\n inputs = dict(videos=videos, rng=_rng)\n rng, _rng = jax.random.split(rng)\n loss, recon, action_last_active, metrics = train_step(\n lam, optimizer, inputs, action_last_active, _rng\n )\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = 
inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +1557,2502183,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13871,0,"",python,selection_command +1558,2513005,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13928,0,"",python,selection_mouse +1559,2513233,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13762,0,"",python,selection_command +1560,2516443,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13774,0,"",python,selection_mouse +1561,2516624,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12414,0,"",python,selection_command +1562,2524578,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12527,0,"",python,selection_mouse +1563,2525178,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12535,0,"",python,selection_mouse +1564,2525788,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12490,0,"",python,selection_mouse +1565,2535919,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",4702,0,"",python,selection_mouse +1566,2536061,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",4699,5,"recon",python,selection_mouse +1567,2564585,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",0,0,"",python,tab +1568,2564586,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13871,0,"",python,selection_command +1569,2568167,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13952,0,"",python,selection_mouse 
+1570,2568195,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13951,0,"",python,selection_command +1571,2568644,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13952,0,"",python,selection_mouse +1572,2568650,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13951,0,"",python,selection_command +1573,2568893,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13951,1,")",python,selection_mouse +1574,2568893,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13950,1,"1",python,selection_mouse +1575,2568894,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13945,6,"axis=1",python,selection_mouse +1576,2568894,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13942,9,"), axis=1",python,selection_mouse +1577,2568894,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13938,13,"_seq), axis=1",python,selection_mouse +1578,2568895,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13935,16,"con_seq), axis=1",python,selection_mouse +1579,2568895,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13952,0,"",python,selection_command +1580,2568952,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13929,23,"eq, recon_seq), axis=1)",python,selection_mouse +1581,2568968,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13918,34,"enate((gt_seq, recon_seq), axis=1)",python,selection_mouse +1582,2568983,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13912,40,"concatenate((gt_seq, recon_seq), axis=1)",python,selection_mouse +1583,2569011,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13908,44,"jnp.concatenate((gt_seq, recon_seq), axis=1)",python,selection_mouse +1584,2569043,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13952,34,"\n comparison_se",python,selection_mouse +1585,2569061,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13952,31,"\n comparison",python,selection_mouse +1586,2569148,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13952,29,"\n comparis",python,selection_mouse +1587,2569149,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13952,27,"\n compar",python,selection_mouse +1588,2569149,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13952,26,"\n compa",python,selection_mouse +1589,2569150,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13952,25,"\n comp",python,selection_mouse +1590,2569174,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13952,24,"\n com",python,selection_mouse +1591,2569193,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13952,23,"\n co",python,selection_mouse +1592,2569228,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13952,22,"\n c",python,selection_mouse +1593,2569238,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13952,21,"\n ",python,selection_mouse +1594,2569271,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13952,20,"\n 
",python,selection_mouse +1595,2569289,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13952,19,"\n ",python,selection_mouse +1596,2569328,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13952,18,"\n ",python,selection_mouse +1597,2569374,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13952,17,"\n ",python,selection_mouse +1598,2570224,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13889,0,"",python,selection_mouse +1599,2570369,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13871,20," ",python,selection_mouse +1600,2570544,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13871,82," comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n",python,selection_mouse +1601,2571384,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13889,0,"",python,selection_mouse +1602,2571562,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13871,20," ",python,selection_mouse +1603,2576155,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13929,0,"",python,selection_mouse +1604,2576325,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13925,6,"gt_seq",python,selection_mouse +1605,2578964,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13774,0,"",python,selection_mouse +1606,2579067,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13771,6,"inputs",python,selection_mouse +1607,2580227,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13939,0,"",python,selection_mouse +1608,2580843,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13938,0,"",python,selection_mouse +1609,2581010,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13933,9,"recon_seq",python,selection_mouse +1610,2582345,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13854,0,"",python,selection_mouse +1611,2582506,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13851,5,"recon",python,selection_mouse +1612,2619265,"train_tokenizer.py",0,0,"",python,tab +1613,2620937,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",0,0,"",python,tab +1614,2621920,"train_tokenizer.py",0,0,"",python,tab +1615,2622676,"train_lam.py",0,0,"",python,tab +1616,2643145,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab +1617,2645123,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1760,0,"",shellscript,selection_mouse +1618,2646321,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1792,0,"",shellscript,selection_mouse +1619,2646468,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1791,1,"\n",shellscript,selection_mouse +1620,2646501,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1775,17,"\nwait $child_pid\n",shellscript,selection_mouse +1621,2646627,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1771,21,"=$!\n\nwait $child_pid\n",shellscript,selection_mouse +1622,2646628,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1769,23,"id=$!\n\nwait $child_pid\n",shellscript,selection_mouse 
+1623,2646658,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1768,24,"pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +1624,2646668,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1767,25,"_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +1625,2646689,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1766,26,"d_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +1626,2646735,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1765,27,"ld_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +1627,2646783,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1764,28,"ild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +1628,2646822,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1763,29,"hild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +1629,2646873,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1762,30,"child_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +1630,2647297,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1762,30,"",shellscript,content +1631,2647434,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1761,1,"",shellscript,content +1632,2647725,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1760,1,"",shellscript,content +1633,2648133,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1759,1,"",shellscript,content +1634,2648554,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1758,1,"",shellscript,content +1635,2649999,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +1636,2651231,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1754,0,"",shellscript,selection_mouse +1637,2652142,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1752,0,"",shellscript,selection_command +1638,2652943,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1737,17,"",shellscript,content +1639,2653311,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1735,0,"",shellscript,selection_command +1640,2653810,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1724,13,"",shellscript,content +1641,2654460,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1723,0,"",shellscript,selection_command +1642,2654810,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1723,1,"",shellscript,content +1643,2655441,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1721,0,"",shellscript,selection_command +1644,2656643,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1722,0,"",shellscript,selection_command +1645,2656803,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1721,1,"",shellscript,content +1646,2656877,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1720,1,"",shellscript,content +1647,2657255,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1719,0,"",shellscript,selection_command +1648,2659801,"models/lam.py",0,0,"",python,tab +1649,2716870,"models/lam.py",2257,0,"",python,selection_mouse +1650,2716890,"models/lam.py",2256,0,"",python,selection_command +1651,2720065,"TERMINAL",0,0,"",,terminal_focus +1652,2722202,"TERMINAL",0,0,"source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate",,terminal_command +1653,2722208,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1654,2743587,"TERMINAL",0,0,"srun",,terminal_focus 
+1655,2753617,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",0,0,"",python,tab +1656,2753618,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13871,0,"",python,selection_command +1657,2755446,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13928,0,"",python,selection_mouse +1658,2755618,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13925,6,"gt_seq",python,selection_mouse +1659,2806228,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14038,0,"",python,selection_mouse +1660,2806749,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14098,0,"",python,selection_mouse +1661,2806758,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14097,0,"",python,selection_command +1662,2818094,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13439,0,"",python,selection_mouse +1663,2820360,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13515,0,"",python,selection_command +1664,2820524,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13557,0,"",python,selection_command +1665,2820683,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13607,0,"",python,selection_command +1666,2820833,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13644,0,"",python,selection_command +1667,2821336,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13676,0,"",python,selection_command +1668,2821367,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13716,0,"",python,selection_command +1669,2821426,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13772,0,"",python,selection_command +1670,2821453,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13849,0,"",python,selection_command +1671,2821473,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13901,0,"",python,selection_command +1672,2821494,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13983,0,"",python,selection_command +1673,2821593,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14038,0,"",python,selection_command +1674,2821742,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14097,0,"",python,selection_command +1675,2822152,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14098,0,"\n if args.val_data_dir and step % args.val_interval == 0:",python,content +1676,2822197,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14119,0,"",python,selection_command +1677,2822776,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14195,0,"",python,selection_command +1678,2823106,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14119,0,"",python,selection_command +1679,2824096,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14195,0,"",python,selection_command +1680,2824769,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14175,86," gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0",python,selection_command 
+1681,2824968,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14175,146," gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)",python,selection_command +1682,2825134,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14175,232," gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)",python,selection_command +1683,2825248,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14175,291," gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(",python,selection_command +1684,2825378,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14175,364," gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""",python,selection_command +1685,2825531,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14175,386," gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )",python,selection_command +1686,2826036,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14195,0,"",python,selection_command +1687,2826279,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14560,0," ",python,content +1688,2826279,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14491,0," ",python,content +1689,2826279,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14428,0," ",python,content +1690,2826279,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14342,0," ",python,content +1691,2826279,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14282,0," ",python,content +1692,2826279,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14195,0," ",python,content +1693,2826876,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14198,0,"",python,selection_command +1694,2827342,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14289,0,"",python,selection_command +1695,2827527,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14353,0,"",python,selection_command +1696,2827694,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14443,0,"",python,selection_command +1697,2827846,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14506,0,"",python,selection_command +1698,2828043,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14583,0,"",python,selection_command +1699,2836964,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15546,0,"",python,selection_mouse 
+1700,2836976,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15545,0,"",python,selection_command +1701,2837644,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15332,0,"",python,selection_mouse +1702,2838157,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15217,0,"",python,selection_mouse +1703,2838197,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15216,0,"",python,selection_command +1704,2841490,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15247,0,"",python,selection_command +1705,2842437,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15218,77," val_image=wandb.Image(np.asarray(gt_seq_val[0])),",python,selection_command +1706,2843058,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15247,0,"",python,selection_command +1707,2843259,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15325,0,"",python,selection_command +1708,2843399,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15406,0,"",python,selection_command +1709,2843538,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15465,0,"",python,selection_command +1710,2843684,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15545,0,"",python,selection_command +1711,2843846,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15571,0,"",python,selection_command +1712,2844896,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15572,0,"\n ",python,content +1713,2845304,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15597,0,"i",python,content +1714,2845305,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15598,0,"",python,selection_keyboard +1715,2845394,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15598,0,"f",python,content +1716,2845395,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15599,0,"",python,selection_keyboard +1717,2845523,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15599,0," ",python,content +1718,2845527,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15600,0,"",python,selection_keyboard +1719,2846201,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15599,1,"",python,content +1720,2846480,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15598,1,"",python,content +1721,2846820,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15597,1,"",python,content +1722,2847076,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15597,0,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",python,content +1723,2848327,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15597,55,"",python,content +1724,2848775,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15596,0,"",python,selection_command +1725,2848969,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15597,0,"\n if args.val_data_dir and step % args.val_interval == 0:",python,content 
+1726,2848991,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15618,0,"",python,selection_command +1727,2850340,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15618,0," ",python,content +1728,2850906,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15621,0,"",python,selection_command +1729,2851236,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15596,0,"",python,selection_command +1730,2851575,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15573,25,"",python,content +1731,2851617,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15597,0,"",python,selection_command +1732,2852064,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15652,0,"",python,selection_command +1733,2852495,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15652,0,"\n ",python,content +1734,2854957,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15681,0,"l",python,content +1735,2854958,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15682,0,"",python,selection_keyboard +1736,2855137,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15682,0,"o",python,content +1737,2855138,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15683,0,"",python,selection_keyboard +1738,2855260,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15683,0,"g",python,content +1739,2855261,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15684,0,"",python,selection_keyboard +1740,2856280,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15684,0,"_",python,content +1741,2856281,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15685,0,"",python,selection_keyboard +1742,2856559,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15685,0,"i",python,content +1743,2856560,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15686,0,"",python,selection_keyboard +1744,2856657,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15686,0,"m",python,content +1745,2856658,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15687,0,"",python,selection_keyboard +1746,2857381,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15687,0,"a",python,content +1747,2857383,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15688,0,"",python,selection_keyboard +1748,2857477,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15688,0,"g",python,content +1749,2857478,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15689,0,"",python,selection_keyboard +1750,2857593,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15689,0,"e",python,content +1751,2857594,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15690,0,"",python,selection_keyboard +1752,2857750,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15690,0,"s",python,content 
+1753,2857751,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15691,0,"",python,selection_keyboard +1754,2858207,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15691,0,".",python,content +1755,2858207,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15692,0,"",python,selection_keyboard +1756,2859000,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15692,0,"u",python,content +1757,2859001,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15693,0,"",python,selection_keyboard +1758,2859186,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15693,0,"p",python,content +1759,2859187,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15694,0,"",python,selection_keyboard +1760,2859321,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15694,0,"d",python,content +1761,2859322,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15695,0,"",python,selection_keyboard +1762,2859539,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15695,0,"a",python,content +1763,2859540,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15696,0,"",python,selection_keyboard +1764,2859877,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15696,0,"t",python,content +1765,2859879,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15697,0,"",python,selection_keyboard +1766,2860030,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15697,0,"e",python,content +1767,2860031,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15698,0,"",python,selection_keyboard +1768,2860619,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15698,0,"()",python,content +1769,2860620,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15699,0,"",python,selection_keyboard +1770,2861762,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15700,0,"",python,selection_command +1771,2864086,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15699,0,"",python,selection_mouse +1772,2864411,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15699,0,"\n \n ",python,content +1773,2865507,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15546,0,"",python,selection_mouse +1774,2866145,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15545,0,"",python,selection_command +1775,2867561,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15516,30," ),",python,selection_command +1776,2867797,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15436,110," np.asarray(val_comparison_seq.astype(np.uint8))\n ),",python,selection_command +1777,2867942,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15377,169," val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),",python,selection_command +1778,2868107,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15296,250," 
val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),",python,selection_command +1779,2868211,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15218,328," val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),",python,selection_command +1780,2869328,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15187,359," ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),",python,selection_command +1781,2870165,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15218,328," val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),",python,selection_command +1782,2872010,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15700,32,"",python,content +1783,2872010,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15218,329,"",python,content +1784,2872076,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15242,0,"",python,selection_command +1785,2872856,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15268,0,"",python,selection_command +1786,2873042,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15348,0,"",python,selection_command +1787,2873210,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15371,0,"",python,selection_command +1788,2874640,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15396,0,"",python,selection_command +1789,2875032,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15371,0,"",python,selection_command +1790,2875242,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15348,0,"",python,selection_command +1791,2875550,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15371,0,"",python,selection_command +1792,2875747,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15371,0,"\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),",python,content +1793,2875794,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15400,0,"",python,selection_command +1794,2876355,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15371,0,"",python,selection_command +1795,2877606,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15371,1,"",python,content +1796,2877626,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15399,0,"",python,selection_command +1797,2879542,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15352,0,"",python,selection_command 
+1798,2880028,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15370,0,"",python,selection_command +1799,2881530,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15370,0,"d",python,content +1800,2881532,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15371,0,"",python,selection_keyboard +1801,2881866,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15371,0,"i",python,content +1802,2881868,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15372,0,"",python,selection_keyboard +1803,2881975,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15372,0,"c",python,content +1804,2881976,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15373,0,"",python,selection_keyboard +1805,2882170,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15373,0,"t",python,content +1806,2882171,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15374,0,"",python,selection_keyboard +1807,2882846,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15374,0,"()",python,content +1808,2882847,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15375,0,"",python,selection_keyboard +1809,2883193,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15374,0,"",python,selection_command +1810,2883387,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15427,0,"",python,selection_command +1811,2883690,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15505,0,"",python,selection_command +1812,2883824,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15586,0,"",python,selection_command +1813,2883993,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15645,0,"",python,selection_command +1814,2884171,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15704,0,"",python,selection_command +1815,2885481,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15705,0,"",python,selection_command +1816,2887161,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15704,1,"",python,content +1817,2887608,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15704,0,")",python,content +1818,2887609,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15705,0,"",python,selection_keyboard +1819,2888008,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15704,0,"",python,selection_command +1820,2889311,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15624,0,"",python,selection_command +1821,2889800,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15704,0,"",python,selection_command +1822,2890472,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15675,30," ))",python,selection_command +1823,2890660,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15595,110," np.asarray(val_comparison_seq.astype(np.uint8))\n ))",python,selection_command 
+1824,2890793,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15536,169," val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ))",python,selection_command +1825,2891004,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15455,250," val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ))",python,selection_command +1826,2891092,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15377,328," val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ))",python,selection_command +1827,2891385,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15405,0,"",python,selection_command +1828,2891544,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15703,0," ",python,content +1829,2891545,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15627,0," ",python,content +1830,2891545,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15564,0," ",python,content +1831,2891545,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15483,0," ",python,content +1832,2891545,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15405,0," ",python,content +1833,2891934,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15408,0,"",python,selection_command +1834,2892143,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15490,0,"",python,selection_command +1835,2892512,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15575,0,"",python,selection_command +1836,2892716,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15638,0,"",python,selection_command +1837,2892859,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15722,0,"",python,selection_command +1838,2893180,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15638,0,"",python,selection_command +1839,2893355,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15575,0,"",python,selection_command +1840,2893523,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15490,0,"",python,selection_command +1841,2893660,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15408,0,"",python,selection_command +1842,2893793,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15355,0,"",python,selection_command +1843,2894139,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15376,0,"",python,selection_command +1844,2894318,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15375,1,"",python,content +1845,2894703,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15374,0,"",python,selection_command +1846,2894890,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15426,0,"",python,selection_command 
+1847,2895243,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15374,0,"",python,selection_command +1848,2895693,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15373,0,"",python,selection_command +1849,2895822,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15372,0,"",python,selection_command +1850,2895952,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15371,0,"",python,selection_command +1851,2896082,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15370,0,"",python,selection_command +1852,2897523,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15370,0,"\n ",python,content +1853,2897954,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15402,0,"",python,selection_command +1854,2898126,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15440,0,"",python,selection_command +1855,2898997,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15409,81," val_image=wandb.Image(np.asarray(gt_seq_val[0])),",python,selection_command +1856,2899209,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15409,166," val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),",python,selection_command +1857,2899343,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15409,229," val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(",python,selection_command +1858,2899459,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15409,313," val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))",python,selection_command +1859,2900002,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15441,0,"",python,selection_command +1860,2900428,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15675,0," ",python,content +1861,2900428,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15608,0," ",python,content +1862,2900428,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15523,0," ",python,content +1863,2900428,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15441,0," ",python,content +1864,2900699,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15444,0,"",python,selection_command +1865,2900882,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15530,0,"",python,selection_command +1866,2901084,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15619,0,"",python,selection_command +1867,2901239,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15686,0,"",python,selection_command +1868,2901395,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15772,0,"",python,selection_command +1869,2902275,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15771,1,"",python,content 
+1870,2903795,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15770,0,"",python,selection_command +1871,2903943,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15682,0,"",python,selection_command +1872,2904610,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15770,0,"",python,selection_command +1873,2904774,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15801,0,"",python,selection_command +1874,2911136,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15772,0,"",python,selection_mouse +1875,2911139,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15771,0,"",python,selection_command +1876,2912251,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15683,0,"",python,selection_command +1877,2913479,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15738,0,"",python,selection_command +1878,2913744,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15738,0,"\n ",python,content +1879,2914928,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15739,40," )",python,content +1880,2914929,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15776,0,"",python,selection_keyboard +1881,2915200,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15775,0,"",python,selection_command +1882,2928645,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15013,0,"",python,selection_mouse +1883,2929233,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15144,0,"",python,selection_mouse +1884,2929752,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15154,0,"",python,selection_mouse +1885,2930386,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15243,0,"",python,selection_mouse +1886,2930415,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15242,0,"",python,selection_command +1887,2933443,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",,terminal_output +1888,2934038,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --job-name=train_lam_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\r\nmkdir -p 
$CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_lam.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --wandb_id $SLURM_JOB_ID \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=1000 \\r\n --log \\r\n --name=coinrun-lam-dev-$slurm_job_id \\r\n --tags lam coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 200 \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 10 \\r\n --val_steps 50 \\r\n --data_dir $array_records_dir_train\r\n\r\n",,terminal_output +1889,2934161,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3453268\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0403\r\nSLURM_JOB_START_TIME=1757073804\r\nSLURM_STEP_NODELIST=hkn0403\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757077404\r\nSLURM_PMI2_SRUN_PORT=36059\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3468617\r\nSLURM_PTY_PORT=33607\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=36\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0403\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=196\r\nSLURM_NODELIST=hkn0403\r\nSLURM_SRUN_COMM_PORT=43381\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3468617\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0403\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=43381\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0403\r\n",,terminal_output +1890,2934305,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +1891,2935112,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",0,0,"",python,tab +1892,2937812,"train_lam.py",0,0,"",python,tab +1893,2941591,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +1894,2941913,"train_lam.py",3451,0,"",python,selection_mouse +1895,2942068,"train_lam.py",3447,7,"outputs",python,selection_mouse +1896,2942581,"TERMINAL",0,0,"wandb: creating run\r\nwandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_141511-3468617\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Resuming run coinrun-lam-dev-3468617\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/3468617\r\n",,terminal_output +1897,2946437,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['action_in', 'action_up', 'decoder', 'encoder', 'patch_up', 'vq']\r\nParameter counts:\r\n{'action_in': 768, 'action_up': 16896, 'decoder': 17474816, 'encoder': 17228832, 'patch_up': 393728, 'vq': 192, 'total': 35115232}\r\nStarting training from step 0...\r\n",,terminal_output +1898,2955252,"TERMINAL",0,0,"2025-09-05 14:15:24.946120: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 14:15:24.947256: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 14:15:24.947274: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 14:15:24.947732: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1899,2978344,"train_lam.py",4906,0,"",python,selection_mouse +1900,2978362,"train_lam.py",4905,0,"",python,selection_command +1901,2985793,"train_lam.py",4905,0,",",python,content +1902,2985795,"train_lam.py",4906,0,"",python,selection_keyboard +1903,2985918,"train_lam.py",4906,0," ",python,content +1904,2985918,"train_lam.py",4907,0,"",python,selection_keyboard +1905,2986110,"train_lam.py",4907,0,"t",python,content +1906,2986110,"train_lam.py",4908,0,"",python,selection_keyboard +1907,2989269,"train_lam.py",4908,0,"r",python,content +1908,2989270,"train_lam.py",4909,0,"",python,selection_keyboard +1909,2990396,"train_lam.py",4909,0,"a",python,content +1910,2990396,"train_lam.py",4910,0,"",python,selection_keyboard +1911,2990397,"train_lam.py",4910,0,"i",python,content +1912,2990397,"train_lam.py",4911,0,"",python,selection_keyboard +1913,2990447,"train_lam.py",4911,0,"n",python,content +1914,2990449,"train_lam.py",4912,0,"",python,selection_keyboard +1915,2991388,"train_lam.py",4912,0,"i",python,content +1916,2991389,"train_lam.py",4913,0,"",python,selection_keyboard +1917,2991488,"train_lam.py",4913,0,"n",python,content +1918,2991489,"train_lam.py",4914,0,"",python,selection_keyboard +1919,2991610,"train_lam.py",4914,0,"g",python,content +1920,2991611,"train_lam.py",4915,0,"",python,selection_keyboard +1921,2992178,"train_lam.py",4907,8,"training=",python,content +1922,2992659,"train_lam.py",4916,0,"F",python,content +1923,2992660,"train_lam.py",4917,0,"",python,selection_keyboard +1924,2992838,"train_lam.py",4917,0,"a",python,content +1925,2992840,"train_lam.py",4918,0,"",python,selection_keyboard +1926,2992986,"train_lam.py",4918,0,"l",python,content +1927,2992986,"train_lam.py",4919,0,"",python,selection_keyboard +1928,2993011,"train_lam.py",4919,0,"s",python,content +1929,2993012,"train_lam.py",4920,0,"",python,selection_keyboard +1930,2993158,"train_lam.py",4920,0,"e",python,content +1931,2993159,"train_lam.py",4921,0,"",python,selection_keyboard +1932,2993676,"train_lam.py",4920,0,"",python,selection_command +1933,3001007,"TERMINAL",0,0,"Step 0, loss: 0.29082542657852173\r\nStep 1, loss: 0.2048691064119339\r\nStep 2, loss: 0.19221149384975433\r\nStep 3, loss: 0.20193631947040558\r\nStep 4, loss: 0.1634262055158615\r\nStep 5, loss: 0.16224892437458038\r\nStep 6, loss: 0.1465519517660141\r\nStep 7, loss: 0.13969092071056366\r\nStep 8, loss: 0.13379880785942078\r\nStep 9, loss: 0.13682898879051208\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\n",,terminal_output +1934,3012504,"train_lam.py",0,0,"",python,tab +1935,3017538,"TERMINAL",0,0,"Step 10, validation loss: 0.11593025177717209\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py"", line 400, in \r\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 4625, in concatenate\r\n arrays_out = [lax.concatenate(arrays_out[i:i+k], axis)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 4625, in \r\n arrays_out = [lax.concatenate(arrays_out[i:i+k], axis)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 2006, in concatenate\r\n return concatenate_p.bind(*operands, dimension=dimension)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 536, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 552, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 562, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1066, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 91, in apply_primitive\r\n outs = fun(*args)\r\nTypeError: Cannot concatenate arrays with shapes that differ in dimensions other than the one being concatenated: concatenating along dimension 1 for shapes (16, 64, 64, 3), (15, 64, 64, 3).\r\n",,terminal_output +1936,3019368,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-lam-dev-3468617 at: https://wandb.ai/instant-uv/jafar/runs/3468617\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_141511-3468617/logs\r\n",,terminal_output +1937,3020049,"TERMINAL",0,0,"/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 20 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +1938,3020372,"TERMINAL",0,0,"srun: error: hkn0403: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +1939,3067007,"train_tokenizer.py",0,0,"",python,tab +1940,3113585,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab +1941,3119991,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1267,0,"",shellscript,selection_mouse +1942,3121451,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1271,0,"",shellscript,selection_mouse 
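Note: the TypeError above comes from jnp.concatenate receiving sequences of different lengths, 16 ground-truth frames versus 15 reconstructed frames, while concatenating along axis 1. A minimal sketch that reproduces the mismatch and one possible way to align the time axis first (the trimming shown is an assumption, not necessarily the fix chosen in train_lam.py):

```python
import jax.numpy as jnp

gt_seq = jnp.zeros((16, 64, 64, 3))     # shapes taken from the traceback
recon_seq = jnp.zeros((15, 64, 64, 3))

# jnp.concatenate((gt_seq, recon_seq), axis=1) fails: the arrays differ along
# axis 0, which is not the axis being concatenated.

# One option: drop the ground-truth frame that has no reconstruction before
# building the side-by-side comparison.
comparison_seq = jnp.concatenate((gt_seq[1:], recon_seq), axis=1)
print(comparison_seq.shape)  # (15, 128, 64, 3)
```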
+1943,3121976,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1244,0,"",shellscript,selection_mouse +1944,3122072,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1243,0,"",shellscript,selection_command +1945,3122893,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1224,31,"",shellscript,content +1946,3122936,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1228,0,"",shellscript,selection_command +1947,3124121,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +1948,3124986,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1213,0,"",shellscript,selection_mouse +1949,3125694,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1198,31,"",shellscript,content +1950,3125727,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1202,0,"",shellscript,selection_command +1951,3174009,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",0,0,"",python,tab +1952,3174009,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13887,0,"",python,selection_command +1953,3180599,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13918,0,"",python,selection_mouse +1954,3182702,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14222,0,"",python,selection_mouse +1955,3185490,"train_lam.py",0,0,"",python,tab +1956,3187938,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +1957,3188977,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1337,0,"",shellscript,selection_mouse +1958,3189793,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1484,0,"",shellscript,selection_mouse +1959,3190388,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1554,0,"",shellscript,selection_mouse +1960,3191153,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1576,0,"",shellscript,selection_mouse +1961,3193371,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1561,44,"",shellscript,content +1962,3193411,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1565,0,"",shellscript,selection_command +1963,3196831,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",,terminal_output +1964,3197573,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --job-name=train_lam_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\nsrun python 
train_lam.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=1000 \\r\n --log \\r\n --name=coinrun-lam-dev-$slurm_job_id \\r\n --tags lam coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 200 \\r\n --val_interval 10 \\r\n --val_steps 50 \\r\n --data_dir $array_records_dir_train\r\n\r\n",,terminal_output +1965,3197710,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3453268\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0403\r\nSLURM_JOB_START_TIME=1757073804\r\nSLURM_STEP_NODELIST=hkn0403\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757077404\r\nSLURM_PMI2_SRUN_PORT=36059\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3468617\r\nSLURM_PTY_PORT=33607\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=36\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0403\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=196\r\nSLURM_NODELIST=hkn0403\r\nSLURM_SRUN_COMM_PORT=43381\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3468617\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0403\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=43381\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0403\r\n",,terminal_output +1966,3197819,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +1967,3201374,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +1968,3205002,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +1969,3205858,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_141934-s9w1z3xb\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-lam-dev-3468617\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/s9w1z3xb\r\n",,terminal_output +1970,3207780,"train_lam.py",0,0,"",python,tab +1971,3209728,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +1972,3209877,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['action_in', 'action_up', 'decoder', 'encoder', 'patch_up', 'vq']\r\nParameter counts:\r\n{'action_in': 768, 'action_up': 16896, 'decoder': 17474816, 'encoder': 17228832, 'patch_up': 393728, 'vq': 192, 'total': 35115232}\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py"", line 331, in \r\n val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/utils/dataloader.py"", line 170, in create_dataloader_iterator\r\n for x in os.listdir(data_dir)\r\nFileNotFoundError: [Errno 2] No such file or directory: ''\r\n",,terminal_output +1973,3210418,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-lam-dev-3468617 at: https://wandb.ai/instant-uv/jafar/runs/s9w1z3xb\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_141934-s9w1z3xb/logs\r\n",,terminal_output +1974,3210999,"TERMINAL",0,0,"srun: error: hkn0403: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +1975,3217336,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +1976,3218261,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1561,0," --val_data_dir $array_records_dir_val \\n",shellscript,content +1977,3218325,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1565,0,"",shellscript,selection_command +1978,3225566,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1561,44,"",shellscript,content +1979,3225609,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1565,0,"",shellscript,selection_command +1980,3225929,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1589,0,"",shellscript,selection_command +1981,3226109,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1610,0,"",shellscript,selection_command +1982,3226259,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1646,0,"",shellscript,selection_command +1983,3227680,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",0,0,"",python,tab +1984,3229093,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14114,0,"",python,selection_mouse +1985,3229107,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14113,0,"",python,selection_command +1986,3234027,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14143,0,"",python,selection_command +1987,3235060,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13453,0,"",python,selection_command 
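Note: the FileNotFoundError above is triggered because args.val_data_dir is an empty string when the sbatch script omits --val_data_dir, so os.listdir('') fails inside create_dataloader_iterator. The recording restores the flag in train_lam_single_gpu.sh; a defensive guard on the Python side might look like the fragment below (a sketch only, reusing the argument and function names from the recording, not a verbatim copy of train_lam.py):

```python
# Fragment: args, image_shape and create_dataloader_iterator are defined in
# train_lam.py; only build the validation iterator when a directory is given.
val_iterator = None
if args.val_data_dir:
    val_iterator = create_dataloader_iterator(
        args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed
    )
```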
+1988,3235442,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12836,0,"",python,selection_command +1989,3236680,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10784,0,"",python,selection_command +1990,3238227,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10666,0,"",python,selection_command +1991,3239837,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10614,0,"\n ",python,content +1992,3240969,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10615,4,"",python,content +1993,3241520,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10615,1,"",python,content +1994,3241549,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10619,0,"",python,selection_command +1995,3242027,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10614,0,"\n ",python,content +1996,3242911,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10619,0,"d",python,content +1997,3242912,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10620,0,"",python,selection_keyboard +1998,3243322,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10619,1,"",python,content +1999,3243575,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10615,4,"",python,content +2000,3243769,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10614,1,"",python,content +2001,3244016,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10613,0,"",python,selection_command +2002,3244258,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10690,0,"",python,selection_command +2003,3244642,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10732,0,"\n ",python,content +2004,3245505,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10737,0,"i",python,content +2005,3245506,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10738,0,"",python,selection_keyboard +2006,3245556,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10738,0,"f",python,content +2007,3245557,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10739,0,"",python,selection_keyboard +2008,3245759,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10739,0," ",python,content +2009,3245760,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10740,0,"",python,selection_keyboard +2010,3246772,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10740,0,"a",python,content +2011,3246773,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10741,0,"",python,selection_keyboard +2012,3246974,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10741,0,"r",python,content +2013,3246975,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10742,0,"",python,selection_keyboard +2014,3247175,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10742,0,"g",python,content 
+2015,3247176,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10743,0,"",python,selection_keyboard +2016,3247304,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10743,0,"s",python,content +2017,3247306,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10744,0,"",python,selection_keyboard +2018,3247484,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10744,0,".",python,content +2019,3247485,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10745,0,"",python,selection_keyboard +2020,3248607,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10745,0,"v",python,content +2021,3248607,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10746,0,"",python,selection_keyboard +2022,3248773,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10746,0,"a",python,content +2023,3248773,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10747,0,"",python,selection_keyboard +2024,3248821,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10747,0,"l",python,content +2025,3248822,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10748,0,"",python,selection_keyboard +2026,3249139,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10748,0,"_",python,content +2027,3249139,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10749,0,"",python,selection_keyboard +2028,3249352,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10749,0,"d",python,content +2029,3249352,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10750,0,"",python,selection_keyboard +2030,3249505,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10750,0,"a",python,content +2031,3249506,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10751,0,"",python,selection_keyboard +2032,3249631,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10751,0,"t",python,content +2033,3249631,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10752,0,"",python,selection_keyboard +2034,3249718,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10752,0,"a",python,content +2035,3249719,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10753,0,"",python,selection_keyboard +2036,3251563,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10745,8,"val_data_dir",python,content +2037,3252489,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10757,0,":",python,content +2038,3252490,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10758,0,"",python,selection_keyboard +2039,3252705,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10757,0,"",python,selection_command +2040,3253308,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10783,0,"",python,selection_command +2041,3253601,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10763,0,"",python,selection_command 
+2042,3253940,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10763,0," ",python,content +2043,3254304,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10766,0,"",python,selection_command +2044,3255100,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10745,0,"",python,selection_command +2045,3255580,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",1888,0,"",python,selection_command +2046,3256959,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10745,0,"",python,selection_command +2047,3260528,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10767,0,"",python,selection_command +2048,3260983,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10814,0,"",python,selection_command +2049,3261887,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10767,0,"",python,selection_command +2050,3262039,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10745,0,"",python,selection_command +2051,3262368,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10163,0,"",python,selection_command +2052,3263083,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10128,0,"",python,selection_command +2053,3263438,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9899,0,"",python,selection_command +2054,3265552,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9712,0,"",python,selection_command +2055,3266677,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9899,0,"",python,selection_command +2056,3267176,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9873,0,"",python,selection_command +2057,3267456,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9862,0,"",python,selection_command +2058,3267509,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9785,0,"",python,selection_command +2059,3267773,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9744,0,"",python,selection_command +2060,3267942,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9712,0,"",python,selection_command +2061,3268064,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9686,0,"",python,selection_command +2062,3268260,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9675,0,"",python,selection_command +2063,3268718,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9676,0,"\n ",python,content +2064,3269617,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9681,0,"i",python,content +2065,3269618,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9682,0,"",python,selection_keyboard +2066,3269734,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9682,0,"f",python,content +2067,3269735,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9683,0,"",python,selection_keyboard +2068,3270042,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9683,0," ",python,content 
+2069,3270043,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9684,0,"",python,selection_keyboard +2070,3270511,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9684,0,"a",python,content +2071,3270512,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9685,0,"",python,selection_keyboard +2072,3270677,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9685,0,"r",python,content +2073,3270678,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9686,0,"",python,selection_keyboard +2074,3270824,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9686,0,"g",python,content +2075,3270825,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9687,0,"",python,selection_keyboard +2076,3270959,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9687,0,"s",python,content +2077,3270960,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9688,0,"",python,selection_keyboard +2078,3271078,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9688,0,".",python,content +2079,3271079,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9689,0,"",python,selection_keyboard +2080,3271272,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9689,0,"v",python,content +2081,3271273,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9690,0,"",python,selection_keyboard +2082,3271409,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9690,0,"a",python,content +2083,3271410,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9691,0,"",python,selection_keyboard +2084,3271509,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9691,0,"l",python,content +2085,3271510,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9692,0,"",python,selection_keyboard +2086,3271806,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9692,0,"_",python,content +2087,3271808,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9693,0,"",python,selection_keyboard +2088,3271940,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9693,0,"d",python,content +2089,3271941,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9694,0,"",python,selection_keyboard +2090,3272112,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9694,0,"a",python,content +2091,3272114,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9695,0,"",python,selection_keyboard +2092,3272225,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9695,0,"t",python,content +2093,3272226,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9696,0,"",python,selection_keyboard +2094,3272310,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9696,0,"a",python,content +2095,3272312,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9697,0,"",python,selection_keyboard 
+2096,3272578,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9697,0,"_",python,content +2097,3272579,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9698,0,"",python,selection_keyboard +2098,3272774,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9698,0,"d",python,content +2099,3272775,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9699,0,"",python,selection_keyboard +2100,3272910,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9699,0,"i",python,content +2101,3272912,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9700,0,"",python,selection_keyboard +2102,3272987,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9700,0,"r",python,content +2103,3272989,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9701,0,"",python,selection_keyboard +2104,3273506,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9700,0,"",python,selection_command +2105,3274423,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9701,0,"",python,selection_command +2106,3274926,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9701,0,":",python,content +2107,3274927,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9702,0,"",python,selection_keyboard +2108,3275062,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9701,0,"",python,selection_command +2109,3275824,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9727,0,"",python,selection_command +2110,3276130,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9703,25," handler_registry.add(",python,selection_command +2111,3276343,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9703,57," handler_registry.add(\n ""val_dataloader_state"",",python,selection_command +2112,3276844,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9703,98," handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,",python,selection_command +2113,3276884,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9703,180," handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),",python,selection_command +2114,3276929,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9703,186," handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_command +2115,3276946,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9703,212," handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(",python,selection_command +2116,3277004,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9703,244," handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n 
)\n handler_registry.add(\n ""val_dataloader_state"",",python,selection_command +2117,3277054,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9703,288," handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,",python,selection_command +2118,3277149,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9703,370," handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),",python,selection_command +2119,3277326,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9703,376," handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_command +2120,3277564,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9707,0,"",python,selection_command +2121,3277757,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10078,0," ",python,content +2122,3277757,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10000,0," ",python,content +2123,3277757,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9956,0," ",python,content +2124,3277757,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9924,0," ",python,content +2125,3277757,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9894,0," ",python,content +2126,3277757,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9888,0," ",python,content +2127,3277757,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9810,0," ",python,content +2128,3277757,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9769,0," ",python,content +2129,3277757,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9737,0," ",python,content +2130,3277757,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9707,0," ",python,content +2131,3278090,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9710,0,"",python,selection_command +2132,3280450,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9746,0,"",python,selection_command +2133,3280743,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9953,0,"",python,selection_command +2134,3281094,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10194,0,"",python,selection_command +2135,3281839,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9953,0,"",python,selection_command 
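Note: the edits above wrap the registration of the "val_dataloader_state" checkpoint handlers in an if args.val_data_dir: guard and re-indent the block. A sketch of the resulting shape, based on the handler-registry calls visible in the recording (handler_registry, grain, ocp and args are as used in train_lam.py; the guard itself is the change being made):

```python
from typing import cast

# Only track validation dataloader state in checkpoints when a validation
# directory was actually provided.
if args.val_data_dir:
    handler_registry.add(
        "val_dataloader_state",
        grain.checkpoint.CheckpointSave,
        cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),
    )
    handler_registry.add(
        "val_dataloader_state",
        grain.checkpoint.CheckpointRestore,
        cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),
    )
```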
+2136,3281986,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9746,0,"",python,selection_command +2137,3282188,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9689,0,"",python,selection_command +2138,3282633,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",6060,0,"",python,selection_command +2139,3284884,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9689,0,"",python,selection_command +2140,3285022,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9746,0,"",python,selection_command +2141,3285575,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",9953,0,"",python,selection_command +2142,3285876,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10194,0,"",python,selection_command +2143,3286863,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10229,0,"",python,selection_command +2144,3287244,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10811,0,"",python,selection_command +2145,3288292,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10833,0,"",python,selection_command +2146,3288559,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",10880,0,"",python,selection_command +2147,3289058,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11043,0,"",python,selection_command +2148,3290276,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11478,0,"",python,selection_command +2149,3305765,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11377,0,"",python,selection_command +2150,3305957,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11283,0,"",python,selection_command +2151,3306458,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11246,0,"",python,selection_command +2152,3306496,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11200,0,"",python,selection_command +2153,3306542,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11153,0,"",python,selection_command +2154,3306622,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11088,0,"",python,selection_command +2155,3307183,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11153,0,"",python,selection_command +2156,3307504,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11088,0,"",python,selection_command +2157,3307856,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11136,0,"\n ",python,content +2158,3309307,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11145,0,"c",python,content +2159,3309307,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11146,0,"",python,selection_keyboard +2160,3309409,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11146,0,"o",python,content +2161,3309410,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11147,0,"",python,selection_keyboard +2162,3309517,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11147,0,"m",python,content 
+2163,3309518,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11148,0,"",python,selection_keyboard +2164,3309710,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11148,0,"p",python,content +2165,3309711,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11149,0,"",python,selection_keyboard +2166,3309828,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11149,0,"o",python,content +2167,3309830,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11150,0,"",python,selection_keyboard +2168,3310044,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11150,0,"s",python,content +2169,3310045,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11151,0,"",python,selection_keyboard +2170,3310301,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11151,0,"i",python,content +2171,3310302,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11152,0,"",python,selection_keyboard +2172,3310409,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11152,0,"t",python,content +2173,3310409,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11153,0,"",python,selection_keyboard +2174,3310660,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11153,0,"e",python,content +2175,3310661,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11154,0,"",python,selection_keyboard +2176,3310904,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11154,0,"_",python,content +2177,3310904,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11155,0,"",python,selection_keyboard +2178,3311192,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11155,0,"a",python,content +2179,3311193,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11156,0,"",python,selection_keyboard +2180,3311926,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11145,11,"",python,content +2181,3312187,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11137,8,"",python,content +2182,3312776,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11137,0," ",python,content +2183,3313239,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11141,0," ",python,content +2184,3313826,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11145,0,"r",python,content +2185,3313827,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11146,0,"",python,selection_keyboard +2186,3314026,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11146,0,"e",python,content +2187,3314027,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11147,0,"",python,selection_keyboard +2188,3314472,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11147,0,"s",python,content +2189,3314473,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11148,0,"",python,selection_keyboard 
+2190,3314569,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11148,0,"t",python,content +2191,3314570,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11149,0,"",python,selection_keyboard +2192,3314764,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11149,0,"o",python,content +2193,3314765,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11150,0,"",python,selection_keyboard +2194,3314854,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11150,0,"r",python,content +2195,3314854,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11151,0,"",python,selection_keyboard +2196,3314987,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11151,0,"e",python,content +2197,3314988,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11152,0,"",python,selection_keyboard +2198,3315172,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11152,0,"_",python,content +2199,3315173,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11153,0,"",python,selection_keyboard +2200,3315501,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11153,0,"a",python,content +2201,3315502,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11154,0,"",python,selection_keyboard +2202,3315741,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11154,0,"r",python,content +2203,3315741,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11155,0,"",python,selection_keyboard +2204,3315900,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11155,0,"g",python,content +2205,3315901,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11156,0,"",python,selection_keyboard +2206,3316054,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11156,0,"s",python,content +2207,3316055,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11157,0,"",python,selection_keyboard +2208,3319434,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11157,0," ",python,content +2209,3319436,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11158,0,"",python,selection_keyboard +2210,3319625,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11158,0,"=",python,content +2211,3319626,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11159,0,"",python,selection_keyboard +2212,3319721,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11159,0," ",python,content +2213,3319721,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11160,0,"",python,selection_keyboard +2214,3321584,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11159,0,"",python,selection_command +2215,3322159,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11603,0,"",python,selection_mouse +2216,3322160,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11602,0,"",python,selection_command 
+2217,3322403,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11602,1,",",python,selection_mouse +2218,3322409,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11603,0,"",python,selection_command +2219,3322508,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11501,102," val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),",python,selection_mouse +2220,3322509,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11500,103," val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),",python,selection_mouse +2221,3322509,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11499,104," val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),",python,selection_mouse +2222,3322544,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11398,205," dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),",python,selection_mouse +2223,3322643,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11304,299," model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),",python,selection_mouse +2224,3322821,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11303,300," model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),",python,selection_mouse +2225,3322821,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11265,338," args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),",python,selection_mouse +2226,3322822,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11264,339," args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),",python,selection_mouse +2227,3322822,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11262,341," args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),",python,selection_mouse +2228,3322856,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11260,343," args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n 
dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),",python,selection_mouse +2229,3322882,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11259,344," args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),",python,selection_mouse +2230,3322905,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11258,345," args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),",python,selection_mouse +2231,3322969,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11257,346," args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),",python,selection_mouse +2232,3323352,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11257,0,"",python,selection_mouse +2233,3324702,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11272,0,"",python,selection_mouse +2234,3324984,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11272,37,"cp.args.Composite(\n mo",python,selection_mouse +2235,3325510,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11309,0,"",python,selection_mouse +2236,3326287,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11402,0,"",python,selection_mouse +2237,3326449,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11401,16,"dataloader_state",python,selection_mouse +2238,3326815,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11401,121,"dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state",python,selection_mouse +2239,3327203,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11512,0,"",python,selection_mouse +2240,3327701,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11312,0,"",python,selection_mouse +2241,3333471,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11411,0,"",python,selection_mouse +2242,3334475,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11098,0,"",python,selection_mouse +2243,3334991,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11160,0,"",python,selection_mouse +2244,3334992,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11159,0,"",python,selection_command +2245,3336174,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11603,0,"",python,selection_mouse 
+2246,3336187,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11602,0,"",python,selection_command +2247,3336426,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11602,1,",",python,selection_mouse +2248,3336426,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11499,103," val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )",python,selection_mouse +2249,3336427,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11498,104," val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )",python,selection_mouse +2250,3336427,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11496,106," val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )",python,selection_mouse +2251,3336427,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11393,209," dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )",python,selection_mouse +2252,3336427,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11391,211," dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )",python,selection_mouse +2253,3336427,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11390,212," dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )",python,selection_mouse +2254,3336488,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11603,0,"",python,selection_command +2255,3336489,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11294,309," model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),",python,selection_mouse +2256,3336489,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11292,311," model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),",python,selection_mouse +2257,3336489,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11291,312," model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),",python,selection_mouse +2258,3336560,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11254,349," args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n 
),",python,selection_mouse +2259,3339325,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11160,0,"",python,selection_mouse +2260,3339350,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11159,0,"",python,selection_command +2261,3339787,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11160,0,"",python,selection_mouse +2262,3339793,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11159,0,"",python,selection_command +2263,3341294,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11160,0,"",python,selection_command +2264,3342939,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11160,0," args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),",python,content +2265,3344446,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11235,0,"",python,selection_mouse +2266,3346977,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11593,0,"",python,selection_mouse +2267,3347908,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11175,0,"",python,selection_mouse +2268,3348062,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11172,4,"args",python,selection_mouse +2269,3348476,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11172,4,"",python,content +2270,3348687,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11171,1,"",python,content +2271,3348859,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11170,1,"",python,content +2272,3349003,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11169,1,"",python,content +2273,3349301,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11168,1,"",python,content +2274,3349825,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11159,9,"",python,content +2275,3350309,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11158,1,"",python,content +2276,3351570,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11159,0,"",python,selection_mouse +2277,3351690,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11159,0," ",python,content +2278,3351691,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11160,0,"",python,selection_keyboard +2279,3352292,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11159,0,"",python,selection_command +2280,3353100,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11492,0,"",python,selection_mouse +2281,3353113,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11491,0,"",python,selection_command +2282,3355130,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11492,0,"",python,selection_command +2283,3355410,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11491,1,"",python,content 
+2284,3357642,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11490,0,"",python,selection_mouse +2285,3357863,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11490,0," ",python,content +2286,3359494,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11490,4,"",python,content +2287,3360506,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11136,0,"",python,selection_mouse +2288,3360923,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11136,0,"\n ",python,content +2289,3361271,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11145,0,"i",python,content +2290,3361272,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11146,0,"",python,selection_keyboard +2291,3361371,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11146,0,"f",python,content +2292,3361372,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11147,0,"",python,selection_keyboard +2293,3361492,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11147,0," ",python,content +2294,3361493,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11148,0,"",python,selection_keyboard +2295,3361909,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11148,0,"a",python,content +2296,3361910,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11149,0,"",python,selection_keyboard +2297,3362136,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11149,0,"r",python,content +2298,3362137,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11150,0,"",python,selection_keyboard +2299,3362249,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11150,0,"g",python,content +2300,3362251,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11151,0,"",python,selection_keyboard +2301,3362376,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11151,0,"s",python,content +2302,3362377,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11152,0,"",python,selection_keyboard +2303,3362526,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11152,0,".",python,content +2304,3362527,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11153,0,"",python,selection_keyboard +2305,3364026,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11153,0,"v",python,content +2306,3364027,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11154,0,"",python,selection_keyboard +2307,3364159,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11154,0,"a",python,content +2308,3364160,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11155,0,"",python,selection_keyboard +2309,3364292,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11155,0,"l",python,content +2310,3364294,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11156,0,"",python,selection_keyboard 
+2311,3364610,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11156,0," ",python,content +2312,3364611,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11157,0,"",python,selection_keyboard +2313,3365125,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11156,1,"",python,content +2314,3365483,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11156,0,"_",python,content +2315,3365484,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11157,0,"",python,selection_keyboard +2316,3366173,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11153,4,"val_data_dir",python,content +2317,3366845,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11165,0,":",python,content +2318,3366847,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11166,0,"",python,selection_keyboard +2319,3367070,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11165,0,"",python,selection_command +2320,3367372,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11195,0,"",python,selection_command +2321,3367942,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11175,0,"",python,selection_command +2322,3368708,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11174,0,"",python,selection_command +2323,3369307,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11167,42," restore_args = ocp.args.Composite(",python,selection_command +2324,3369539,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11167,136," restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore",python,selection_command +2325,3369705,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11167,237," restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore",python,selection_command +2326,3369826,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11167,340," restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore",python,selection_command +2327,3369988,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11167,354," restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )",python,selection_command +2328,3370250,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11175,0,"",python,selection_command +2329,3370441,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11520,0," ",python,content +2330,3370441,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11421,0," ",python,content 
+2331,3370441,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11320,0," ",python,content +2332,3370441,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11226,0," ",python,content +2333,3370441,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11175,0," ",python,content +2334,3370665,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11178,0,"",python,selection_command +2335,3370873,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11225,0,"",python,selection_command +2336,3371065,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11323,0,"",python,selection_command +2337,3371184,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11428,0,"",python,selection_command +2338,3371344,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11535,0,"",python,selection_command +2339,3371476,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11553,0,"",python,selection_command +2340,3372109,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11535,0,"",python,selection_command +2341,3372367,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11541,0,"\n ",python,content +2342,3372833,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11554,4,"",python,content +2343,3373359,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11550,4,"",python,content +2344,3373677,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11550,0,"e",python,content +2345,3373678,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11551,0,"",python,selection_keyboard +2346,3373834,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11551,0,"l",python,content +2347,3373836,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11552,0,"",python,selection_keyboard +2348,3374053,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11552,0,"s",python,content +2349,3374054,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11553,0,"",python,selection_keyboard +2350,3374259,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11553,0,"e",python,content +2351,3374261,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11554,0,"",python,selection_keyboard +2352,3374339,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11554,0,":",python,content +2353,3374340,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11555,0,"",python,selection_keyboard +2354,3374526,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11555,0," ",python,content +2355,3374527,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11556,0,"",python,selection_keyboard +2356,3374966,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11556,0,"\n ",python,content +2357,3375792,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11557,12,"",python,content 
+2358,3375976,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11542,0,"",python,selection_command +2359,3376090,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11524,0,"",python,selection_command +2360,3376225,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11417,0,"",python,selection_command +2361,3376637,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11524,0,"",python,selection_command +2362,3376995,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11524,17," )",python,selection_command +2363,3377191,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11417,124," val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )",python,selection_command +2364,3377340,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11312,229," dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )",python,selection_command +2365,3377462,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11214,327," model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )",python,selection_command +2366,3377591,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11167,374," restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )",python,selection_command +2367,3377823,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11167,0,"",python,selection_command +2368,3378018,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11137,0,"",python,selection_command +2369,3378140,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11167,0,"",python,selection_command +2370,3378370,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11214,0,"",python,selection_command +2371,3378526,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11312,0,"",python,selection_command +2372,3378736,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11417,0,"",python,selection_command +2373,3378846,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11524,0,"",python,selection_command +2374,3379017,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11542,0,"",python,selection_command +2375,3379433,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11557,0,"",python,selection_command +2376,3379598,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11557,0,"\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n 
val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )",python,content +2377,3379642,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11570,0,"",python,selection_command +2378,3380041,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11557,0,"",python,selection_command +2379,3380375,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11557,1,"",python,content +2380,3380435,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11569,0,"",python,selection_command +2381,3380492,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11616,0,"",python,selection_command +2382,3380642,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11714,0,"",python,selection_command +2383,3380851,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11819,0,"",python,selection_command +2384,3381407,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11807,107,"",python,content +2385,3381443,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11823,0,"",python,selection_command +2386,3381534,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11718,0,"",python,selection_command +2387,3381787,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11823,0,"",python,selection_command +2388,3482021,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11935,0,"",python,selection_mouse +2389,3482122,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11935,37,"ocp.args.Composite(\n m",python,selection_mouse +2390,3482124,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11935,36,"ocp.args.Composite(\n ",python,selection_mouse +2391,3482164,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11935,130,"ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n ",python,selection_mouse +2392,3482258,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11935,231,"ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n ",python,selection_mouse +2393,3482422,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11935,332,"ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),",python,selection_mouse +2394,3485548,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11935,332,"",python,content +2395,3485566,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11934,0,"",python,selection_command +2396,3486033,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11935,0,"",python,selection_command +2397,3486540,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11935,0,"r",python,content 
+2398,3486541,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11936,0,"",python,selection_keyboard +2399,3486750,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11936,0,"e",python,content +2400,3486751,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11937,0,"",python,selection_keyboard +2401,3487094,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11937,0,"s",python,content +2402,3487095,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11938,0,"",python,selection_keyboard +2403,3488090,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11938,0,"t",python,content +2404,3488090,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11939,0,"",python,selection_keyboard +2405,3488307,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11939,0,"o",python,content +2406,3488308,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11940,0,"",python,selection_keyboard +2407,3488362,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11940,0,"r",python,content +2408,3488363,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11941,0,"",python,selection_keyboard +2409,3488626,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11941,0,"e",python,content +2410,3488627,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11942,0,"",python,selection_keyboard +2411,3488862,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11942,0,"_",python,content +2412,3488864,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11943,0,"",python,selection_keyboard +2413,3489067,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11943,0,"a",python,content +2414,3489069,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11944,0,"",python,selection_keyboard +2415,3489268,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11944,0,"r",python,content +2416,3489269,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11945,0,"",python,selection_keyboard +2417,3489662,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11945,0,"g",python,content +2418,3489663,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11946,0,"",python,selection_keyboard +2419,3489705,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11946,0,"s",python,content +2420,3489706,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11947,0,"",python,selection_keyboard +2421,3499616,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11930,0,"",python,selection_mouse +2422,3500459,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11933,0,"",python,selection_mouse +2423,3500636,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11930,4,"args",python,selection_mouse +2424,3501555,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11945,0,"",python,selection_mouse 
+2425,3501703,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11935,12,"restore_args",python,selection_mouse +2426,3508293,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11945,0,"",python,selection_mouse +2427,3509457,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12100,0,"",python,selection_mouse +2428,3510072,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11892,0,"",python,selection_mouse +2429,3510851,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11891,0,"",python,selection_command +2430,3511018,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12141,0,"",python,selection_command +2431,3512740,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12081,0,"",python,selection_command +2432,3513990,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12132,0,"\n ",python,content +2433,3514557,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12141,0,"i",python,content +2434,3514558,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12142,0,"",python,selection_keyboard +2435,3514686,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12142,0,"f",python,content +2436,3514688,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12143,0,"",python,selection_keyboard +2437,3514874,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12143,0," ",python,content +2438,3514875,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12144,0,"",python,selection_keyboard +2439,3516087,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12143,1,"",python,content +2440,3516258,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12142,1,"",python,content +2441,3516366,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12141,1,"",python,content +2442,3516900,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12140,0,"",python,selection_command +2443,3517355,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12080,0,"",python,selection_command +2444,3517855,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12024,0,"",python,selection_command +2445,3517897,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11965,0,"",python,selection_command +2446,3517924,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11955,0,"",python,selection_command +2447,3517958,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11925,0,"",python,selection_command +2448,3518003,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11879,0,"",python,selection_command +2449,3518004,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11832,0,"",python,selection_command +2450,3518041,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11814,0,"",python,selection_command +2451,3518086,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11709,0,"",python,selection_command 
+2452,3518116,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11611,0,"",python,selection_command +2453,3518133,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11564,0,"",python,selection_command +2454,3518368,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11549,0,"",python,selection_command +2455,3518549,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11531,0,"",python,selection_command +2456,3518682,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11424,0,"",python,selection_command +2457,3518833,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11319,0,"",python,selection_command +2458,3518972,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11221,0,"",python,selection_command +2459,3519103,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11174,0,"",python,selection_command +2460,3519217,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11144,0,"",python,selection_command +2461,3519661,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11174,0,"",python,selection_command +2462,3519833,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11221,0,"",python,selection_command +2463,3519958,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11319,0,"",python,selection_command +2464,3520099,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11424,0,"",python,selection_command +2465,3520693,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11531,0,"",python,selection_command +2466,3520728,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11549,0,"",python,selection_command +2467,3520728,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11564,0,"",python,selection_command +2468,3520728,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11611,0,"",python,selection_command +2469,3520734,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11709,0,"",python,selection_command +2470,3520764,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11814,0,"",python,selection_command +2471,3520789,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11832,0,"",python,selection_command +2472,3520815,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11879,0,"",python,selection_command +2473,3520932,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11925,0,"",python,selection_command +2474,3521081,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11955,0,"",python,selection_command +2475,3521232,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",11965,0,"",python,selection_command +2476,3521399,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12024,0,"",python,selection_command +2477,3521566,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12080,0,"",python,selection_command 
+2478,3521747,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12140,0,"",python,selection_command +2479,3522053,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12141,0,"\n if args.val_data_dir:",python,content +2480,3522074,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12150,0,"",python,selection_command +2481,3522594,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12140,0,"",python,selection_command +2482,3522959,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12133,9,"",python,content +2483,3523006,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12141,0,"",python,selection_command +2484,3523035,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12171,0,"",python,selection_command +2485,3524650,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12171,0," ",python,content +2486,3524977,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12174,0,"",python,selection_command +2487,3525190,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12144,0,"",python,selection_command +2488,3631200,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12016,0,"",python,selection_mouse +2489,3631201,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12015,0,"",python,selection_command +2490,3631865,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12162,0,"",python,selection_mouse +2491,3631870,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12161,0,"",python,selection_command +2492,3632698,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12154,0,"",python,selection_mouse +2493,3632839,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12149,12,"val_data_dir",python,selection_mouse +2494,3633352,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12182,0,"",python,selection_mouse +2495,3633932,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12162,0,"",python,selection_mouse +2496,3633937,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12161,0,"",python,selection_command +2497,3634533,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12191,0,"",python,selection_mouse +2498,3635208,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12194,0,"",python,selection_mouse +2499,3635326,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12190,8,"restored",python,selection_mouse +2500,3637175,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12239,0,"",python,selection_mouse +2501,3637858,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12183,0,"",python,selection_mouse +2502,3638358,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12156,0,"",python,selection_mouse +2503,3640232,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12175,0,"",python,selection_command 
+2504,3640927,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12200,0,"",python,selection_command +2505,3641825,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12524,0,"",python,selection_command +2506,3643416,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12507,0,"",python,selection_command +2507,3643976,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12508,0,"\n ",python,content +2508,3645907,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12509,4,"",python,content +2509,3646015,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12509,0,"\n if args.val_data_dir:",python,content +2510,3646047,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12518,0,"",python,selection_command +2511,3647074,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12514,4,"",python,content +2512,3647657,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12513,0,"",python,selection_command +2513,3647743,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12539,0,"",python,selection_command +2514,3648585,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12536,22," dataloader_val = (",python,selection_command +2515,3648754,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12536,92," dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)",python,selection_command +2516,3648912,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12536,125," dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator",python,selection_command +2517,3649057,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12536,131," dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )",python,selection_command +2518,3650244,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12540,0,"",python,selection_command +2519,3650469,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12666,0," ",python,content +2520,3650470,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12637,0," ",python,content +2521,3650470,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12567,0," ",python,content +2522,3650470,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12540,0," ",python,content +2523,3650966,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12543,0,"",python,selection_command +2524,3651694,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12555,0,"",python,selection_command +2525,3652674,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",12661,0,"",python,selection_command +2526,3653334,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13337,0,"",python,selection_command +2527,3654413,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13377,0,"",python,selection_command 
+2528,3655124,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13406,0,"",python,selection_command +2529,3655614,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13415,0,"",python,selection_command +2530,3655969,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13461,0,"",python,selection_command +2531,3656893,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13501,0,"",python,selection_command +2532,3657089,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13511,0,"",python,selection_command +2533,3657388,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13524,0,"",python,selection_command +2534,3657624,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13538,0,"",python,selection_command +2535,3657792,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13560,0,"",python,selection_command +2536,3658008,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13590,0,"",python,selection_command +2537,3658325,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13632,0,"",python,selection_command +2538,3658738,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13650,0,"",python,selection_command +2539,3659084,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13758,0,"",python,selection_command +2540,3660293,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13994,0,"",python,selection_command +2541,3660693,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14023,0,"",python,selection_command +2542,3661549,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14032,0,"",python,selection_command +2543,3661924,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14113,0,"",python,selection_command +2544,3663087,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14124,0,"",python,selection_command +2545,3663531,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14164,0,"",python,selection_command +2546,3663959,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14289,0,"",python,selection_command +2547,3665098,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14684,0,"",python,selection_command +2548,3667761,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14713,0,"",python,selection_command +2549,3668113,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14722,0,"",python,selection_command +2550,3668491,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14763,0,"",python,selection_command +2551,3668992,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14769,0,"",python,selection_command +2552,3669342,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14857,0,"",python,selection_command +2553,3669500,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14863,0,"",python,selection_command 
+2554,3669683,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14911,0,"",python,selection_command +2555,3670470,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15001,0,"",python,selection_command +2556,3670667,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15068,0,"",python,selection_command +2557,3670848,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15833,0,"",python,selection_command +2558,3671603,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15862,0,"",python,selection_command +2559,3671967,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15871,0,"",python,selection_command +2560,3672769,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16002,0,"",python,selection_command +2561,3673783,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16042,0,"",python,selection_command +2562,3674150,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16088,0,"",python,selection_command +2563,3674557,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16131,0,"",python,selection_command +2564,3674863,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16177,0,"",python,selection_command +2565,3676100,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16259,0,"",python,selection_command +2566,3676772,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16546,0,"",python,selection_command +2567,3677893,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17019,0,"",python,selection_command +2568,3679293,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16992,0,"",python,selection_command +2569,3689882,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16589,0,"",python,selection_mouse +2570,3692615,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16610,0,"\n ",python,content +2571,3696699,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16627,0,"r",python,content +2572,3696700,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16628,0,"",python,selection_keyboard +2573,3696902,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16628,0,"e",python,content +2574,3696903,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16629,0,"",python,selection_keyboard +2575,3697092,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16629,0,"s",python,content +2576,3697093,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16630,0,"",python,selection_keyboard +2577,3697632,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16630,0,"o",python,content +2578,3697633,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16631,0,"",python,selection_keyboard +2579,3697743,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16631,0,"r",python,content +2580,3697744,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16632,0,"",python,selection_keyboard 
+2581,3697956,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16632,0,"e",python,content +2582,3697957,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16633,0,"",python,selection_keyboard +2583,3698220,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16632,1,"",python,content +2584,3698347,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16631,1,"",python,content +2585,3698930,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16627,4,"",python,content +2586,3699430,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16627,0,"c",python,content +2587,3699431,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16628,0,"",python,selection_keyboard +2588,3700159,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16628,0,"k",python,content +2589,3700161,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16629,0,"",python,selection_keyboard +2590,3700376,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16629,0,"p",python,content +2591,3700377,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16630,0,"",python,selection_keyboard +2592,3700477,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16630,0,"t",python,content +2593,3700478,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16631,0,"",python,selection_keyboard +2594,3700755,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16631,0,"_",python,content +2595,3700756,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16632,0,"",python,selection_keyboard +2596,3700975,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16632,0,"a",python,content +2597,3700977,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16633,0,"",python,selection_keyboard +2598,3701142,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16633,0,"r",python,content +2599,3701143,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16634,0,"",python,selection_keyboard +2600,3701275,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16634,0,"g",python,content +2601,3701276,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16635,0,"",python,selection_keyboard +2602,3701366,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16635,0,"s",python,content +2603,3701367,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16636,0,"",python,selection_keyboard +2604,3702578,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16635,1,"",python,content +2605,3702719,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16634,1,"",python,content +2606,3702866,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16633,1,"",python,content +2607,3702983,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16632,1,"",python,content 
+2608,3703513,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16632,0,"m",python,content +2609,3703514,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16633,0,"",python,selection_keyboard +2610,3703654,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16633,0,"a",python,content +2611,3703655,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16634,0,"",python,selection_keyboard +2612,3703797,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16634,0,"g",python,content +2613,3703799,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16635,0,"",python,selection_keyboard +2614,3703875,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16635,0,"a",python,content +2615,3703877,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16636,0,"",python,selection_keyboard +2616,3704205,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16635,1,"",python,content +2617,3704333,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16634,1,"",python,content +2618,3704498,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16634,0,"n",python,content +2619,3704499,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16635,0,"",python,selection_keyboard +2620,3704542,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16635,0,"a",python,content +2621,3704542,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16636,0,"",python,selection_keyboard +2622,3704793,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16636,0,"g",python,content +2623,3704794,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16637,0,"",python,selection_keyboard +2624,3704918,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16637,0,"e",python,content +2625,3704919,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16638,0,"",python,selection_keyboard +2626,3705077,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16638,0,"r",python,content +2627,3705078,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16639,0,"",python,selection_keyboard +2628,3705175,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16639,0,"_",python,content +2629,3705175,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16640,0,"",python,selection_keyboard +2630,3705402,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16640,0,"a",python,content +2631,3705403,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16641,0,"",python,selection_keyboard +2632,3705569,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16641,0,"r",python,content +2633,3705570,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16642,0,"",python,selection_keyboard +2634,3705689,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16642,0,"g",python,content 
+2635,3705690,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16643,0,"",python,selection_keyboard +2636,3705825,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16643,0,"s",python,content +2637,3705826,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16644,0,"",python,selection_keyboard +2638,3706947,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16644,0,"=",python,content +2639,3706948,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16645,0,"",python,selection_keyboard +2640,3709073,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17207,0,"",python,selection_mouse +2641,3709459,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17206,0,"",python,selection_command +2642,3710242,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17229,0,"",python,selection_command +2643,3710716,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17206,0,"",python,selection_command +2644,3712302,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17181,26," ),",python,selection_command +2645,3712518,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17124,83," val_iterator # type: ignore\n ),",python,selection_command +2646,3712588,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17030,177," val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),",python,selection_command +2647,3712731,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17003,204," ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),",python,selection_command +2648,3712895,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16944,263," train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),",python,selection_command +2649,3713044,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16848,359," train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),",python,selection_command +2650,3713170,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16758,449," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),",python,selection_command +2651,3713301,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16713,494," args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),",python,selection_command 
+2652,3713703,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16713,495,"",python,content +2653,3713733,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16733,0,"",python,selection_command +2654,3713804,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16707,0,"",python,selection_command +2655,3713964,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16666,0,"",python,selection_command +2656,3714092,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16631,0,"",python,selection_command +2657,3714740,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16645,0,"",python,selection_command +2658,3715355,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16644,0,"",python,selection_command +2659,3715356,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16645,0,"\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),",python,content +2660,3715377,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16666,0,"",python,selection_command +2661,3717626,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16666,1,"",python,content +2662,3717718,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16666,1,"",python,content +2663,3717868,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16666,1,"",python,content +2664,3718177,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16666,1,"",python,content +2665,3718743,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16662,4,"",python,content +2666,3718905,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16658,4,"",python,content +2667,3719043,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16654,4,"",python,content +2668,3719366,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16650,4,"",python,content +2669,3719766,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16646,4,"",python,content +2670,3720117,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16645,1,"",python,content +2671,3720511,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16644,1,"",python,content +2672,3721172,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16643,0,"",python,selection_command +2673,3721276,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16697,0,"",python,selection_command +2674,3721408,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16787,0,"",python,selection_command +2675,3721709,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16883,0,"",python,selection_command +2676,3721944,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16935,0,"",python,selection_command 
+2677,3722094,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16969,0,"",python,selection_command +2678,3722260,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17063,0,"",python,selection_command +2679,3722425,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17113,0,"",python,selection_command +2680,3722833,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17114,0,"",python,selection_command +2681,3722966,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17113,1,"",python,content +2682,3723545,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17112,0,"",python,selection_command +2683,3726449,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17113,0,"",python,selection_command +2684,3727410,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17113,0,")",python,content +2685,3727411,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17114,0,"",python,selection_keyboard +2686,3727635,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17113,0,"",python,selection_command +2687,3729041,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17140,0,"",python,selection_command +2688,3729205,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17180,0,"",python,selection_command +2689,3729359,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17203,0,"",python,selection_command +2690,3729550,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17221,0,"",python,selection_command +2691,3730005,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17205,18,"",python,content +2692,3730021,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17221,0,"",python,selection_command +2693,3730142,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17198,0,"",python,selection_command +2694,3731702,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17172,0,"",python,selection_command +2695,3732085,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17181,0,"\n ",python,content +2696,3733602,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17202,0,"a",python,content +2697,3733603,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17203,0,"",python,selection_keyboard +2698,3733776,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17203,0,"r",python,content +2699,3733777,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17204,0,"",python,selection_keyboard +2700,3733963,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17204,0,"g",python,content +2701,3733964,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17205,0,"",python,selection_keyboard +2702,3734060,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17205,0,"s",python,content +2703,3734061,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17206,0,"",python,selection_keyboard 
+2704,3734451,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17206,0,"=",python,content +2705,3734452,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17207,0,"",python,selection_keyboard +2706,3735367,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17207,0,"c",python,content +2707,3735368,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17208,0,"",python,selection_keyboard +2708,3735541,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17208,0,"k",python,content +2709,3735542,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17209,0,"",python,selection_keyboard +2710,3735795,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17209,0,"p",python,content +2711,3735796,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17210,0,"",python,selection_keyboard +2712,3736041,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17210,0,"t",python,content +2713,3736042,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17211,0,"",python,selection_keyboard +2714,3736845,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17211,0,"_",python,content +2715,3736846,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17212,0,"",python,selection_keyboard +2716,3737137,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17212,0,"m",python,content +2717,3737138,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17213,0,"",python,selection_keyboard +2718,3738720,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17213,0,"a",python,content +2719,3738721,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17214,0,"",python,selection_keyboard +2720,3738807,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17214,0,"n",python,content +2721,3738808,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17215,0,"",python,selection_keyboard +2722,3742198,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17207,8,"ckpt_manager_args",python,content +2723,3743508,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17181,0,"",python,selection_mouse +2724,3744178,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17065,0,"",python,selection_mouse +2725,3744875,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17114,0,"",python,selection_mouse +2726,3746544,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16689,0,"",python,selection_mouse +2727,3748075,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16637,0,"",python,selection_mouse +2728,3748629,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16584,0,"",python,selection_mouse +2729,3748708,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16583,0,"",python,selection_command +2730,3749049,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16610,0,"\n ",python,content 
+2731,3750467,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16627,0,"i",python,content +2732,3750468,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16628,0,"",python,selection_keyboard +2733,3750576,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16628,0,"f",python,content +2734,3750577,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16629,0,"",python,selection_keyboard +2735,3750625,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16629,0," ",python,content +2736,3750626,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16630,0,"",python,selection_keyboard +2737,3751764,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16630,0,"a",python,content +2738,3751765,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16631,0,"",python,selection_keyboard +2739,3752008,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16631,0,"r",python,content +2740,3752009,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16632,0,"",python,selection_keyboard +2741,3752077,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16632,0,"g",python,content +2742,3752079,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16633,0,"",python,selection_keyboard +2743,3752238,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16633,0,"s",python,content +2744,3752240,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16634,0,"",python,selection_keyboard +2745,3752387,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16634,0,".",python,content +2746,3752388,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16635,0,"",python,selection_keyboard +2747,3753416,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16635,0,"v",python,content +2748,3753419,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16636,0,"",python,selection_keyboard +2749,3753574,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16636,0,"a",python,content +2750,3753575,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16637,0,"",python,selection_keyboard +2751,3753642,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16637,0,"l",python,content +2752,3753643,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16638,0,"",python,selection_keyboard +2753,3753946,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16638,0,"_",python,content +2754,3753948,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16639,0,"",python,selection_keyboard +2755,3754302,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16639,0,"d",python,content +2756,3754303,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16640,0,"",python,selection_keyboard +2757,3754475,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16640,0,"a",python,content 
+2758,3754477,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16641,0,"",python,selection_keyboard +2759,3754575,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16641,0,"t",python,content +2760,3754576,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16642,0,"",python,selection_keyboard +2761,3754693,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16642,0,"a",python,content +2762,3754694,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16643,0,"",python,selection_keyboard +2763,3758369,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16635,8,"val_data_dir",python,content +2764,3759292,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16647,0,":",python,content +2765,3759293,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16648,0,"",python,selection_keyboard +2766,3759594,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16647,0,"",python,selection_command +2767,3759967,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16685,0,"",python,selection_command +2768,3760672,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16649,53," ckpt_manager_args=ocp.args.Composite(",python,selection_command +2769,3760858,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16649,143," ckpt_manager_args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore",python,selection_command +2770,3760994,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16649,239," ckpt_manager_args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore",python,selection_command +2771,3761126,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16649,298," ckpt_manager_args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore",python,selection_command +2772,3761293,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16649,325," ckpt_manager_args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),",python,selection_command +2773,3761422,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16649,419," ckpt_manager_args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore",python,selection_command +2774,3761559,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16649,476," ckpt_manager_args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n 
val_iterator # type: ignore",python,selection_command +2775,3761703,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16649,503," ckpt_manager_args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ))",python,selection_command +2776,3762025,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16665,0,"",python,selection_command +2777,3762191,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17150,0," ",python,content +2778,3762191,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17097,0," ",python,content +2779,3762192,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16999,0," ",python,content +2780,3762192,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16972,0," ",python,content +2781,3762192,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16917,0," ",python,content +2782,3762192,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16817,0," ",python,content +2783,3762192,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16727,0," ",python,content +2784,3762192,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16665,0," ",python,content +2785,3762420,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16668,0,"",python,selection_command +2786,3765586,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16726,0,"",python,selection_command +2787,3766086,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16820,0,"",python,selection_command +2788,3766168,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16920,0,"",python,selection_command +2789,3766287,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16983,0,"",python,selection_command +2790,3766287,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17014,0,"",python,selection_command +2791,3766287,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17112,0,"",python,selection_command +2792,3766343,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17173,0,"",python,selection_command +2793,3766400,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17204,0,"",python,selection_command +2794,3766566,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17245,0,"",python,selection_command +2795,3766849,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17204,0,"",python,selection_command +2796,3766987,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17173,0,"",python,selection_command +2797,3767316,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17184,0,"\n ",python,content +2798,3767776,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17209,4,"",python,content 
+2799,3768134,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17205,4,"",python,content +2800,3768926,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17201,4,"",python,content +2801,3769171,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17201,0,"e",python,content +2802,3769171,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17202,0,"",python,selection_keyboard +2803,3769276,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17202,0,"l",python,content +2804,3769278,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17203,0,"",python,selection_keyboard +2805,3769478,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17203,0,"s",python,content +2806,3769479,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17204,0,"",python,selection_keyboard +2807,3769625,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17204,0,"e",python,content +2808,3769627,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17205,0,"",python,selection_keyboard +2809,3769737,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17205,0,":",python,content +2810,3769738,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17206,0,"",python,selection_keyboard +2811,3769958,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17206,0," ",python,content +2812,3769958,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17207,0,"",python,selection_keyboard +2813,3770742,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17207,0,"\n ",python,content +2814,3770871,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17208,20,"",python,content +2815,3771060,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17185,0,"",python,selection_command +2816,3771264,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17154,0,"",python,selection_command +2817,3771398,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17093,0,"",python,selection_command +2818,3771541,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16995,0,"",python,selection_command +2819,3771675,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16964,0,"",python,selection_command +2820,3771800,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16901,0,"",python,selection_command +2821,3771941,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16801,0,"",python,selection_command +2822,3772074,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16707,0,"",python,selection_command +2823,3772238,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16649,0,"",python,selection_command +2824,3772959,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16650,0,"",python,selection_command +2825,3773425,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16651,0,"",python,selection_command 
+2826,3773455,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16652,0,"",python,selection_command +2827,3773540,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16653,0,"",python,selection_command +2828,3773549,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16654,0,"",python,selection_command +2829,3773591,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16655,0,"",python,selection_command +2830,3773591,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16656,0,"",python,selection_command +2831,3773616,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16657,0,"",python,selection_command +2832,3773645,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16658,0,"",python,selection_command +2833,3773687,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16659,0,"",python,selection_command +2834,3773709,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16660,0,"",python,selection_command +2835,3773724,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16661,0,"",python,selection_command +2836,3773793,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16662,0,"",python,selection_command +2837,3773794,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16663,0,"",python,selection_command +2838,3773825,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16664,0,"",python,selection_command +2839,3773875,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16665,0,"",python,selection_command +2840,3773876,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16666,0,"",python,selection_command +2841,3773915,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16667,0,"",python,selection_command +2842,3773949,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16668,0,"",python,selection_command +2843,3774013,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16669,0,"",python,selection_command +2844,3774068,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16670,0,"",python,selection_command +2845,3774075,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16671,0,"",python,selection_command +2846,3774076,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16672,0,"",python,selection_command +2847,3774086,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16673,0,"",python,selection_command +2848,3774124,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16674,0,"",python,selection_command +2849,3774183,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16675,0,"",python,selection_command +2850,3774189,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16676,0,"",python,selection_command +2851,3774207,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16677,0,"",python,selection_command 
+2852,3774297,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16678,0,"",python,selection_command +2853,3774326,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16679,0,"",python,selection_command +2854,3774338,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16680,0,"",python,selection_command +2855,3774380,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16681,0,"",python,selection_command +2856,3774387,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16682,0,"",python,selection_command +2857,3774391,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16683,0,"",python,selection_command +2858,3774537,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16684,0,"",python,selection_command +2859,3774664,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16685,0,"",python,selection_command +2860,3775533,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16686,0,"",python,selection_command +2861,3775719,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16686,0," ",python,content +2862,3775720,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16687,0,"",python,selection_keyboard +2863,3775895,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16688,0,"",python,selection_command +2864,3776093,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16689,0,"",python,selection_command +2865,3776465,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16688,0,"",python,selection_command +2866,3776641,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16688,0," ",python,content +2867,3776642,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16689,0,"",python,selection_keyboard +2868,3776981,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16749,0,"",python,selection_command +2869,3778198,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16843,0,"",python,selection_command +2870,3781825,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17185,0,"",python,selection_mouse +2871,3782526,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17185,0,"\n ",python,content +2872,3784618,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16737,0,"",python,selection_mouse +2873,3784905,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16737,1,"m",python,selection_mouse +2874,3784906,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16737,97,"model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n tra",python,selection_mouse +2875,3784906,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16737,199,"model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n t",python,selection_mouse +2876,3784907,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16737,259,"model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n 
train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),",python,selection_mouse +2877,3784981,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16737,299,"model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloa",python,selection_mouse +2878,3785044,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16737,300,"model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataload",python,selection_mouse +2879,3785075,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16737,399,"model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_itera",python,selection_mouse +2880,3785125,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16737,400,"model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterat",python,selection_mouse +2881,3785974,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16737,448,"model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )",python,selection_mouse +2882,3786325,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17156,28," ",python,content +2883,3786326,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17095,32," ",python,content +2884,3786326,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16997,28," ",python,content +2885,3786326,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16966,28," ",python,content +2886,3786326,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16903,32," ",python,content +2887,3786326,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16803,28," ",python,content +2888,3786326,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16709,28," ",python,content +2889,3787000,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17212,0,"",python,selection_command +2890,3787425,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17210,0,"",python,selection_mouse +2891,3787861,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17266,0,"",python,selection_mouse +2892,3787892,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17265,0,"",python,selection_command +2893,3788483,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16846,0,"",python,selection_mouse 
+2894,3789519,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17242,0,"",python,selection_mouse +2895,3790808,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17238,4,"",python,content +2896,3791255,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17204,0,"",python,selection_command +2897,3792437,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17203,0,"",python,selection_command +2898,3793126,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17180,33," )",python,selection_command +2899,3793337,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17115,98," val_iterator # type: ignore\n )",python,selection_command +2900,3793468,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17013,200," val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )",python,selection_command +2901,3793591,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16978,235," ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )",python,selection_command +2902,3793730,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16911,302," train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )",python,selection_command +2903,3793847,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16807,406," train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )",python,selection_command +2904,3794017,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16709,504," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )",python,selection_command +2905,3794411,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16741,0,"",python,selection_command +2906,3794702,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17208,4,"",python,content +2907,3794702,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17147,4,"",python,content +2908,3794702,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17041,4,"",python,content +2909,3794702,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17006,4,"",python,content +2910,3794702,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16943,4,"",python,content +2911,3794702,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16835,4,"",python,content +2912,3794702,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16737,4,"",python,content +2913,3795061,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16736,0,"",python,selection_command 
+2914,3803032,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16709,93," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore",python,selection_command +2915,3803276,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16709,193," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore",python,selection_command +2916,3803783,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16709,256," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore",python,selection_command +2917,3803825,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16709,287," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),",python,selection_command +2918,3803858,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16709,385," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore",python,selection_command +2919,3803957,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16709,446," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore",python,selection_command +2920,3804108,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16709,476," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )",python,selection_command +2921,3804293,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16709,502," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )",python,selection_command +2922,3804585,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16737,0,"",python,selection_command +2923,3804850,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17206,4,"",python,content +2924,3804850,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17180,4,"",python,content +2925,3804850,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17123,4,"",python,content +2926,3804850,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17021,4,"",python,content +2927,3804850,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16990,4,"",python,content 
+2928,3804850,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16931,4,"",python,content +2929,3804850,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16827,4,"",python,content +2930,3804850,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16733,4,"",python,content +2931,3805019,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16732,0,"",python,selection_command +2932,3810558,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",15926,0,"",python,selection_mouse +2933,3813718,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17179,0,"",python,selection_mouse +2934,3813757,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17178,0,"",python,selection_command +2935,3814782,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17158,21," )",python,selection_command +2936,3815105,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17132,47," )\n )",python,selection_command +2937,3815232,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17075,104," val_iterator # type: ignore\n )\n )",python,selection_command +2938,3815397,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16981,198," val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )",python,selection_command +2939,3815508,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16954,225," ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )",python,selection_command +2940,3815642,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16895,284," train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )",python,selection_command +2941,3815793,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16799,380," train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )",python,selection_command +2942,3815899,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16709,470," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )",python,selection_command +2943,3816022,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16649,530," ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )",python,selection_command +2944,3816242,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16649,0,"",python,selection_command 
+2945,3816372,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16709,0,"",python,selection_command +2946,3816507,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16799,0,"",python,selection_command +2947,3816640,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16895,0,"",python,selection_command +2948,3817139,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16954,0,"",python,selection_command +2949,3817169,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",16981,0,"",python,selection_command +2950,3817204,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17075,0,"",python,selection_command +2951,3817300,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17132,0,"",python,selection_command +2952,3817461,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17158,0,"",python,selection_command +2953,3817667,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17180,0,"",python,selection_command +2954,3817989,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17202,0,"\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )",python,content +2955,3818025,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17223,0,"",python,selection_command +2956,3818608,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17200,0,"",python,selection_command +2957,3818809,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17223,0,"",python,selection_command +2958,3818992,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17283,0,"",python,selection_command +2959,3819115,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17373,0,"",python,selection_command +2960,3819291,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17469,0,"",python,selection_command +2961,3819428,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17528,0,"",python,selection_command +2962,3819575,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17555,0,"",python,selection_command +2963,3820121,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17535,151,"",python,content +2964,3820225,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17559,0,"",python,selection_command +2965,3820636,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17535,26,"",python,content +2966,3820690,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17555,0,"",python,selection_command +2967,3823268,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17528,0,"",python,selection_command +2968,3823518,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17534,0,"",python,selection_command 
+2969,3823614,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17533,1,"",python,content +2970,3824100,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17532,0,"",python,selection_command +2971,3827127,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17578,0,"",python,selection_mouse +2972,3828402,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17556,0,"",python,selection_command +2973,3829457,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17556,1,"",python,content +2974,3829523,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",17572,0,"",python,selection_command +2975,3833569,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +2976,3837275,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",,terminal_output +2977,3838139,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --job-name=train_lam_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_lam.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=1000 \\r\n --log \\r\n --name=coinrun-lam-dev-$slurm_job_id \\r\n --tags lam coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 200 \\r\n --val_interval 10 \\r\n --val_steps 50 \\r\n --data_dir $array_records_dir_train\r\n\r\n",,terminal_output 
+2978,3838307,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3453268\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0403\r\nSLURM_JOB_START_TIME=1757073804\r\nSLURM_STEP_NODELIST=hkn0403\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757077404\r\nSLURM_PMI2_SRUN_PORT=36059\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3468617\r\nSLURM_PTY_PORT=33607\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=36\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0403\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=196\r\nSLURM_NODELIST=hkn0403\r\nSLURM_SRUN_COMM_PORT=43381\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3468617\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0403\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=43381\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0403\r\n",,terminal_output +2979,3838374,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +2980,3846411,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +2981,3847178,"TERMINAL",0,0,"wandb: creating run\r\n",,terminal_output +2982,3847235,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_143016-r1m8au6o\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-lam-dev-3468617\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/r1m8au6o\r\n",,terminal_output +2983,3851201,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['action_in', 'action_up', 'decoder', 'encoder', 'patch_up', 'vq']\r\nParameter counts:\r\n{'action_in': 768, 'action_up': 16896, 'decoder': 17474816, 'encoder': 17228832, 'patch_up': 393728, 'vq': 192, 'total': 35115232}\r\nStarting training from step 0...\r\n",,terminal_output +2984,3859719,"TERMINAL",0,0,"2025-09-05 14:30:29.358975: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 14:30:29.360132: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 14:30:29.360150: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 14:30:29.360616: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2985,3879006,"TERMINAL",0,0,"bash",,terminal_focus +2986,3882875,"TERMINAL",0,0,"runner",,terminal_command +2987,3905391,"TERMINAL",0,0,"vim slurm/jobs/mihir/horeka/lam/train_lam_minecraft_8node-darkness-filter-base.sbatch",,terminal_command +2988,3905446,"TERMINAL",0,0,"]633;C",,terminal_output +2989,3905526,"TERMINAL",0,0,"[?1049h[>4;2m[?1h=[?2004h[?1004h[?12h[?12l",,terminal_output +2990,3905616,"TERMINAL",0,0,"[?25l""slurm/jobs/mihir/horeka/lam/train_lam_minecraft_8node-darkness-filter-base.sbatch"" 72L, 2051B▽ Pzz\[0%m [>c]10;?]11;?#!/usr/bin/env bash#SBATCH --nodes=8#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:41,1Top[?25h",,terminal_output +2991,3905817,"TERMINAL",0,0,"P+q436f\P+q6b75\P+q6b64\P+q6b72\P+q6b6c\P+q2332\P+q2334\P+q2569\P+q2a37\P+q6b31\[?12$p[?25l/3333/3333 [?25h[?25l/f6f6/e3e3 [?25h",,terminal_output +2992,3906226,"TERMINAL",0,0,"Step 0, loss: 0.29082542657852173\r\nStep 1, loss: 0.2048691064119339\r\nStep 2, loss: 0.19221149384975433\r\nStep 3, loss: 0.20193631947040558\r\nStep 4, loss: 0.1634262055158615\r\nStep 5, loss: 0.16224892437458038\r\nStep 6, loss: 0.1465519517660141\r\nStep 7, loss: 0.13969092071056366\r\nStep 8, loss: 0.13379880785942078\r\nStep 9, loss: 0.13682898879051208\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py"", line 412, in \r\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 4625, in concatenate\r\n arrays_out = [lax.concatenate(arrays_out[i:i+k], axis)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 4625, in \r\n arrays_out = [lax.concatenate(arrays_out[i:i+k], axis)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 2006, in concatenate\r\n return concatenate_p.bind(*operands, dimension=dimension)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 536, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 552, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 562, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1066, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 91, in apply_primitive\r\n outs = fun(*args)\r\nTypeError: Cannot concatenate arrays with shapes that differ in dimensions other than the one being concatenated: concatenating along dimension 1 for shapes (16, 64, 64, 3), (15, 64, 64, 3).\r\n",,terminal_output +2993,3907115,"TERMINAL",0,0,"[?25lj 2,0-1[?25h",,terminal_output +2994,3907603,"TERMINAL",0,0,"[?25lj 3,1 [?25h[?25lj 4[?25h[?25lj 5[?25h[?25lj #SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log6,13%[?25h",,terminal_output +2995,3907670,"TERMINAL",0,0,"[?25lj \r\n#SBATCH --job-name=train_lam_minecraft_8node_darkness_filter_37M7,14%[?25h[?25lj \r\n#SBATCH --requeue8,16%[?25h",,terminal_output +2996,3907719,"TERMINAL",0,0,"[?25lj \r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout9,17%[?25h",,terminal_output +2997,3907820,"TERMINAL",0,0,"[?25lj \r\n10,19%[?25h[?25lj \r\n# --- signal trap to requeue job before timeout ---11,110%[?25h",,terminal_output +2998,3907879,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-lam-dev-3468617 at: https://wandb.ai/instant-uv/jafar/runs/r1m8au6o\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_143016-r1m8au6o/logs\r\n",,terminal_output 
+2999,3908636,"TERMINAL",0,0,"/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 10 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +3000,3908873,"TERMINAL",0,0,"srun: error: hkn0403: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +3001,3910366,"TERMINAL",0,0,"[?25lk #SBATCH --cpus-per-task=510,19%[?25h",,terminal_output +3002,3910517,"TERMINAL",0,0,"[?25lk #SBATCH --partition=accelerated9,17%[?25h",,terminal_output +3003,3911002,"TERMINAL",0,0,"[?25lk #SBATCH --time=48:00:008,16%[?25h",,terminal_output +3004,3911609,"TERMINAL",0,0,"[?25lk #SBATCH --ntasks-per-node=47,14%[?25h",,terminal_output +3005,3912082,"TERMINAL",0,0,"[?25lj \r\n#SBATCH --requeue8,16%[?25h",,terminal_output +3006,3912241,"TERMINAL",0,0,"[?25lj \r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout9,17%[?25h",,terminal_output +3007,3914092,"TERMINAL",0,0,"[?25lj \r\n10,19%[?25h",,terminal_output +3008,3914272,"TERMINAL",0,0,"[?25lj \r\n# --- signal trap to requeue job before timeout ---11,110%[?25h",,terminal_output +3009,3914833,"TERMINAL",0,0,"[?25lj \r\nrequeue_job() {12,112%[?25h",,terminal_output +3010,3915298,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output +3011,3916454,"TERMINAL",0,0,"[?25l: :requeueslurm/jobs/mihir/horeka/lam/train_lam_minecraft_8node-darkness-filter-base.sbatch 12,1 15%:Wq:w:q!:wq\r\n:[Command Line] 6,0-1 Bot[?25h",,terminal_output +3012,3916506,"TERMINAL",0,0,"[?25l:q[?25h",,terminal_output +3013,3917659,"TERMINAL",0,0,"[?25l^[",,terminal_output +3014,3917718,"TERMINAL",0,0," ^[ [?25h",,terminal_output +3015,3917782,"TERMINAL",0,0,"[?25l^[",,terminal_output +3016,3917899,"TERMINAL",0,0," ^[ [?25h",,terminal_output +3017,3919070,"TERMINAL",0,0,"[?25lgq[?25h",,terminal_output +3018,3919888,"TERMINAL",0,0,"[?25lq [?25h",,terminal_output +3019,3922408,"TERMINAL",0,0,"[?25l::[?25h",,terminal_output +3020,3923192,"TERMINAL",0,0,"q",,terminal_output +3021,3923632,"TERMINAL",0,0,"!",,terminal_output +3022,3924045,"TERMINAL",0,0,"\r[?25loutput=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log#SBATCH --job-name=train_lam_minecraft_8node_darkness_filter_37M\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout# --- signal trap to requeue job before timeout ---\r\nrequeue_job() {12,112%[?25h",,terminal_output +3023,3925869,"TERMINAL",0,0,"[?25l::[?25h",,terminal_output +3024,3925970,"TERMINAL",0,0,"q",,terminal_output +3025,3926225,"TERMINAL",0,0,"!",,terminal_output +3026,3926453,"TERMINAL",0,0,"\r",,terminal_output +3027,3926883,"TERMINAL",0,0,"[?25l[?2004l[>4;m[?1004l[?2004l[?1l>[?25h[>4;m[?1049l]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +3028,3933769,"TERMINAL",0,0,"vim slurm/jobs/mihir/horeka/lam/train_lam_minecraft_8node-darkness-filter-400M.sbatch",,terminal_command +3029,3933884,"TERMINAL",0,0,"]633;C[?1049h[>4;2m[?1h=[?2004h[?1004h[?12h[?12l[?25l""slurm/jobs/mihir/horeka/lam/train_lam_minecraft_8node-darkness-filter-400M.sbatch"" 78L, 2192B▽ Pzz\[0%m [>c]10;?]11;?#!/usr/bin/env bash#SBATCH --nodes=8#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=48:00:00\r\n#SBATCH 
--partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:41,1Top[?25h",,terminal_output +3030,3934025,"TERMINAL",0,0,"P+q436f\P+q6b75\P+q6b64\P+q6b72\P+q6b6c\P+q2332\P+q2334\P+q2569\P+q2a37\P+q6b31\[?12$p[?25l/3333/3333 [?25h[?25l/f6f6/e3e3 [?25h",,terminal_output +3031,3934696,"TERMINAL",0,0,"[?25lj 2,0-1[?25h",,terminal_output +3032,3935283,"TERMINAL",0,0,"[?25lj 3,1 [?25h[?25lj 4[?25h[?25lj 5[?25h",,terminal_output +3033,3935379,"TERMINAL",0,0,"[?25lj #SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log6,12%[?25h",,terminal_output +3034,3935689,"TERMINAL",0,0,"[?25lj \r\n#SBATCH --job-name=train_lam_minecraft_8node_darkness_filter_400M7,14%[?25h",,terminal_output +3035,3936132,"TERMINAL",0,0,"[?25lj \r\n#SBATCH --reservation=llmtum8,15%[?25h",,terminal_output +3036,3936316,"TERMINAL",0,0,"[?25lj \r\n#SBATCH --requeue9,17%[?25h",,terminal_output +3037,3936964,"TERMINAL",0,0,"[?25lj \r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout10,18%[?25h",,terminal_output +3038,3937103,"TERMINAL",0,0,"[?25lj \r\n11,110%[?25h",,terminal_output +3039,3937742,"TERMINAL",0,0,"[?25lj \r\n# --- signal trap to requeue job before timeout ---12,111%[?25h",,terminal_output +3040,3946291,"TERMINAL",0,0,"[?25l::[?25h",,terminal_output +3041,3946838,"TERMINAL",0,0,"q",,terminal_output +3042,3947123,"TERMINAL",0,0,"!",,terminal_output +3043,3947342,"TERMINAL",0,0,"\r[?25l[?2004l[>4;m[?1004l[?2004l[?1l>[?25h[>4;m[?1049l]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +3044,3952364,"TERMINAL",0,0,"vim slurm/jobs/mihir/horeka/lam/train_lam_minecraft_8node-darkness-filter-400M.sbatch",,terminal_command +3045,3952441,"TERMINAL",0,0,"]633;C[?1049h[>4;2m[?1h=[?2004h[?1004h[?12h[?12l[?25l""slurm/jobs/mihir/horeka/lam/train_lam_minecraft_8node-darkness-filter-400M.sbatch"" 78L, 2192B▽ Pzz\[0%m [>c]10;?]11;?#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log#SBATCH --job-name=train_lam_minecraft_8node_darkness_filter_400M#SBATCH --reservation=llmtum\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# --- signal trap to requeue job before timeout ---12,111%[?25h",,terminal_output +3046,3952592,"TERMINAL",0,0,"P+q436f\P+q6b75\P+q6b64\P+q6b72\P+q6b6c\P+q2332\P+q2334\P+q2569\P+q2a37\P+q6b31\[?12$p[?25l/3333/3333 [?25h[?25l/f6f6/e3e3 [?25h",,terminal_output +3047,3953290,"TERMINAL",0,0,"[?25lj \r\nrequeue_job() {13,112%[?25h",,terminal_output +3048,3953732,"TERMINAL",0,0,"[?25lk #SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log12,111%[?25h",,terminal_output +3049,3954477,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +3050,3954691,"TERMINAL",0,0,"[?25ld \r\nrequeue_job() {12,111%[?25h",,terminal_output +3051,3955459,"TERMINAL",0,0,"[?25l::[?25h",,terminal_output +3052,3955567,"TERMINAL",0,0,"q",,terminal_output +3053,3955835,"TERMINAL",0,0,"!",,terminal_output +3054,3956030,"TERMINAL",0,0,"\r[?25l[?2004l[>4;m[?1004l[?2004l[?1l>[?25h[>4;m[?1049l]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +3055,3961533,"TERMINAL",0,0,"vim slurm/jobs/mihir/horeka/lam/train_lam_minecraft_8node-darkness-filter-311M.sbatch",,terminal_command 
+3056,3961609,"TERMINAL",0,0,"]633;C[?1049h[>4;2m[?1h=[?2004h[?1004h[?12h[?12l[?25l""slurm/jobs/mihir/horeka/lam/train_lam_minecraft_8node-darkness-filter-311M.sbatch"" 78L, 2191B▽ Pzz\[0%m [>c]10;?]11;?#!/usr/bin/env bash#SBATCH --nodes=8#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:41,1Top[?25h",,terminal_output +3057,3961749,"TERMINAL",0,0,"P+q436f\P+q6b75\P+q6b64\P+q6b72\P+q6b6c\P+q2332\P+q2334\P+q2569\P+q2a37\P+q6b31\[?12$p[?25l/3333/3333 [?25h[?25l/f6f6/e3e3 [?25h",,terminal_output +3058,3962315,"TERMINAL",0,0,"[?25lj 2,0-1[?25h",,terminal_output +3059,3963075,"TERMINAL",0,0,"[?25lj 3,1 [?25h",,terminal_output +3060,3963129,"TERMINAL",0,0,"[?25lj 4[?25h",,terminal_output +3061,3963204,"TERMINAL",0,0,"[?25lj 5[?25h[?25lj #SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log6,12%[?25h",,terminal_output +3062,3963392,"TERMINAL",0,0,"[?25lj \r\n#SBATCH --job-name=train_lam_minecraft_8node_darkness_filter_311M7,14%[?25h",,terminal_output +3063,3963509,"TERMINAL",0,0,"[?25l\r\n#SBATCH --reservation=llmtum8,15%[?25h[?25lj \r\n#SBATCH --requeue9,17%[?25h",,terminal_output +3064,3963692,"TERMINAL",0,0,"[?25lj \r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout10,18%[?25h",,terminal_output +3065,3963835,"TERMINAL",0,0,"[?25lj \r\n11,110%[?25h",,terminal_output +3066,3964358,"TERMINAL",0,0,"[?25lj \r\n# --- signal trap to requeue job before timeout ---12,111%[?25h",,terminal_output +3067,3965835,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +3068,3965908,"TERMINAL",0,0,"[?25ld \r\nrequeue_job() {12,111%[?25h",,terminal_output +3069,3966599,"TERMINAL",0,0,"[?25l::[?25h",,terminal_output +3070,3967131,"TERMINAL",0,0,"q",,terminal_output +3071,3967478,"TERMINAL",0,0,"!",,terminal_output +3072,3967676,"TERMINAL",0,0,"\r[?25l[?2004l[>4;m[?1004l[?2004l[?1l>[?25h[>4;m[?1049l]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +3073,3981099,"TERMINAL",0,0,"vim slurm/jobs/mihir/horeka/lam/train_lam_minecraft_8node-darkness-filter-133M.sbatch",,terminal_command +3074,3981209,"TERMINAL",0,0,"]633;C[?1049h[>4;2m[?1h=[?2004h[?1004h[?12h[?12l[?25l""slurm/jobs/mihir/horeka/lam/train_lam_minecraft_8node-darkness-filter-133M.sbatch"" 78L, 2191B▽ Pzz\[0%m [>c]10;?]11;?#!/usr/bin/env bash#SBATCH --nodes=8#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:41,1Top[?25h",,terminal_output +3075,3981676,"TERMINAL",0,0,"P+q436f\P+q6b75\P+q6b64\P+q6b72\P+q6b6c\P+q2332\P+q2334\P+q2569\P+q2a37\P+q6b31\[?12$p[?25l/3333/3333 [?25h[?25l/f6f6/e3e3 [?25h",,terminal_output +3076,3982125,"TERMINAL",0,0,"[?25lj 2,0-1[?25h",,terminal_output +3077,3982265,"TERMINAL",0,0,"[?25lj 3,1 [?25h",,terminal_output +3078,3982456,"TERMINAL",0,0,"[?25lj 4[?25h",,terminal_output +3079,3982595,"TERMINAL",0,0,"[?25lj 5[?25h",,terminal_output +3080,3982741,"TERMINAL",0,0,"[?25lj #SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log6,12%[?25h",,terminal_output +3081,3982825,"TERMINAL",0,0,"[?25lj \r\n#SBATCH --job-name=train_lam_minecraft_8node_darkness_filter_133M7,14%[?25h[?25lj \r\n#SBATCH --reservation=llmtum8,15%[?25h",,terminal_output 
+3082,3983518,"TERMINAL",0,0,"[?25lj \r\n#SBATCH --requeue9,17%[?25h",,terminal_output +3083,3984031,"TERMINAL",0,0,"[?25lj \r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout10,18%[?25h",,terminal_output +3084,3984342,"TERMINAL",0,0,"[?25lj \r\n11,110%[?25h",,terminal_output +3085,3984691,"TERMINAL",0,0,"[?25lj \r\n# --- signal trap to requeue job before timeout ---12,111%[?25h",,terminal_output +3086,3985109,"TERMINAL",0,0,"[?25lj \r\nrequeue_job() {13,112%[?25h",,terminal_output +3087,3986425,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +3088,3986575,"TERMINAL",0,0,"[?25ld \r\necho ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""13,113%[?25h",,terminal_output +3089,3988168,"TERMINAL",0,0,"[?25lu1 more line; before #1 2 seconds ago#SBATCH --requeue13,112%\r1 more line; before #1 2 seconds ago13,112%[?25h",,terminal_output +3090,3989224,"TERMINAL",0,0,"[?25lk #SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log12,111%[?25h",,terminal_output +3091,3990853,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +3092,3990989,"TERMINAL",0,0,"[?25ld \r\nrequeue_job() {12,111%[?25h",,terminal_output +3093,3992477,"TERMINAL",0,0,"[?25l::[?25h",,terminal_output +3094,3992800,"TERMINAL",0,0,"q",,terminal_output +3095,3993115,"TERMINAL",0,0,"!",,terminal_output +3096,3993807,"TERMINAL",0,0,"\r[?25l[?2004l[>4;m[?1004l[?2004l[?1l>[?25h[>4;m[?1049l]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +3097,4000551,"TERMINAL",0,0,"queue",,terminal_command +3098,4000609,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 14:32:49 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3465675 accelerat train_la tum_cte0 R 16:32:06\t 8 hkn[0703,0706-0707,0711-0715]3465676 accelerat train_la tum_cte0 R 16:32:06\t 8 hkn[0521-0528]3465677 accelerat train_la tum_cte0 R 16:32:06\t 8 hkn[0504,0520,0720,0722-0724,0728,0731]3466286 accelerat train_to tum_cte0 R 17:16:54\t 1 hkn07363466287 accelerat train_la tum_cte0 R 17:16:54\t 1 hkn07363468617 dev_accel interact tum_cte0 R29:25\t 1 hkn0403",,terminal_output +3099,4001717,"TERMINAL",0,0,"50777556",,terminal_output +3100,4002857,"TERMINAL",0,0,"1888667",,terminal_output +3101,4003682,"TERMINAL",0,0,"2999778",,terminal_output +3102,4003949,"TERMINAL",0,0,"srun",,terminal_focus +3103,4004280,"TERMINAL",0,0,"3101010889",,terminal_output +3104,4004413,"TERMINAL",0,0,"s",,terminal_output +3105,4005115,"TERMINAL",0,0,"c",,terminal_output +3106,4005257,"TERMINAL",0,0,"a",,terminal_output +3107,4005345,"TERMINAL",0,0,"n",,terminal_output +3108,4005490,"TERMINAL",0,0,"c",,terminal_output +3109,4005566,"TERMINAL",0,0,"e",,terminal_output +3110,4005710,"TERMINAL",0,0,"l",,terminal_output +3111,4005823,"TERMINAL",0,0,"41119930",,terminal_output +3112,4005866,"TERMINAL",0,0," ",,terminal_output +3113,4006093,"TERMINAL",0,0,"3465675",,terminal_output +3114,4006787,"TERMINAL",0,0,"52227:007:001",,terminal_output +3115,4007166,"TERMINAL",0,0,"3465675\r\n[?2004l\r]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +3116,4007871,"TERMINAL",0,0,"6CG344223",,terminal_output +3117,4008567,"TERMINAL",0,0,"855334",,terminal_output +3118,4009339,"TERMINAL",0,0,"watch",,terminal_focus +3119,4009806,"TERMINAL",0,0,"966445",,terminal_output +3120,4010685,"TERMINAL",0,0,"srun",,terminal_focus +3121,4010955,"TERMINAL",0,0,"3:0077556",,terminal_output +3122,4011334,"TERMINAL",0,0,"sc",,terminal_output 
+3123,4011444,"TERMINAL",0,0,"n",,terminal_output +3124,4011524,"TERMINAL",0,0,"188667",,terminal_output +3125,4012306,"TERMINAL",0,0,"",,terminal_output +3126,4012483,"TERMINAL",0,0,"a",,terminal_output +3127,4012632,"TERMINAL",0,0,"n",,terminal_output +3128,4012683,"TERMINAL",0,0,"c",,terminal_output +3129,4012742,"TERMINAL",0,0,"el ",,terminal_output +3130,4012742,"TERMINAL",0,0,"299778",,terminal_output +3131,4013187,"TERMINAL",0,0,"3465676",,terminal_output +3132,4013965,"TERMINAL",0,0,"32020889",,terminal_output +3133,4014065,"TERMINAL",0,0,"3465676\r\n[?2004l\r]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +3134,4014817,"TERMINAL",0,0,"4CG19940",,terminal_output +3135,4016291,"TERMINAL",0,0,"scancel 3465676",,terminal_output +3136,4016844,"TERMINAL",0,0,"5210101",,terminal_output +3137,4016940,"TERMINAL",0,0,"63112",,terminal_output +3138,4017036,"TERMINAL",0,0,"7",,terminal_output +3139,4017624,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +3140,4018142,"TERMINAL",0,0,"7CG223",,terminal_output +3141,4018831,"TERMINAL",0,0,"8334",,terminal_output +3142,4020443,"TERMINAL",0,0,"9445",,terminal_output +3143,4021627,"TERMINAL",0,0,"10556",,terminal_output +3144,4022397,"TERMINAL",0,0,"1667",,terminal_output +3145,4023460,"TERMINAL",0,0,"2778",,terminal_output +3146,4024557,"TERMINAL",0,0,"3889",,terminal_output +3147,4025781,"TERMINAL",0,0,"49950",,terminal_output +3148,4026691,"TERMINAL",0,0,"520201",,terminal_output +3149,4027809,"TERMINAL",0,0,"6112",,terminal_output +3150,4028775,"TERMINAL",0,0,"7223",,terminal_output +3151,4029864,"TERMINAL",0,0,"8334",,terminal_output +3152,4030890,"TERMINAL",0,0,"9445",,terminal_output +3153,4031667,"TERMINAL",0,0,"20667",,terminal_output +3154,4032880,"TERMINAL",0,0,"2778",,terminal_output +3155,4033686,"TERMINAL",0,0,"3889",,terminal_output +3156,4034979,"TERMINAL",0,0,"49930:00",,terminal_output +3157,4035956,"TERMINAL",0,0,"530301",,terminal_output +3158,4037353,"TERMINAL",0,0,"6112",,terminal_output +3159,4038069,"TERMINAL",0,0,"7223",,terminal_output +3160,4039583,"TERMINAL",0,0,"8334",,terminal_output +3161,4040631,"TERMINAL",0,0,"9445",,terminal_output +3162,4041529,"TERMINAL",0,0,"30556",,terminal_output +3163,4042424,"TERMINAL",0,0,"1667",,terminal_output +3164,4043485,"TERMINAL",0,0,"2778",,terminal_output +3165,4044569,"TERMINAL",0,0,"3889",,terminal_output +3166,4045840,"TERMINAL",0,0,"49910",,terminal_output +3167,4046968,"TERMINAL",0,0,"540401",,terminal_output +3168,4048094,"TERMINAL",0,0,"672112",,terminal_output +3169,4048937,"TERMINAL",0,0,"\r7223",,terminal_output +3170,4049781,"TERMINAL",0,0,"8334",,terminal_output +3171,4050876,"TERMINAL",0,0,"9445",,terminal_output +3172,4051779,"TERMINAL",0,0,"40556",,terminal_output +3173,4052496,"TERMINAL",0,0,"1667",,terminal_output +3174,4053930,"TERMINAL",0,0,"2778",,terminal_output +3175,4054854,"TERMINAL",0,0,"\r3889",,terminal_output +3176,4055869,"TERMINAL",0,0,"4505021",,terminal_output +3177,4056926,"TERMINAL",0,0,"6112",,terminal_output +3178,4058025,"TERMINAL",0,0,"71 hkn0504223",,terminal_output +3179,4058948,"TERMINAL",0,0,"\r86286to R 17:17:57367la38617 dev_accel interact 30:24403",,terminal_output +3180,4060074,"TERMINAL",0,0,"9445",,terminal_output +3181,4061097,"TERMINAL",0,0,"50556",,terminal_output +3182,4062224,"TERMINAL",0,0,"1667",,terminal_output +3183,4063250,"TERMINAL",0,0,"2778",,terminal_output 
+3184,4064317,"TERMINAL",0,0,"3889",,terminal_output +3185,4065165,"TERMINAL",0,0,"49930",,terminal_output +3186,4065419,"TERMINAL",0,0,"watch",,terminal_focus +3187,4066489,"TERMINAL",0,0,"58:008:001",,terminal_output +3188,4066834,"TERMINAL",0,0,"6112",,terminal_output +3189,4068379,"TERMINAL",0,0,"7223",,terminal_output +3190,4068625,"TERMINAL",0,0,"srun",,terminal_focus +3191,4068995,"TERMINAL",0,0,"\r(jasmine) [tum_cte0515@hkn0403 jasmine]$ \r(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +3192,4068995,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 14:33:58 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3466286 accelerat train_to tum_cte0 R 17:18:03\t 1 hkn07363466287 accelerat train_la tum_cte0 R 17:18:03\t 1 hkn07363468617 dev_accel interact tum_cte0 R30:34\t 1 hkn0403",,terminal_output +3193,4069520,"TERMINAL",0,0,"9445",,terminal_output +3194,4069876,"TERMINAL",0,0,"q",,terminal_output +3195,4070325,"TERMINAL",0,0,"ueu",,terminal_output +3196,4070372,"TERMINAL",0,0,"e",,terminal_output +3197,4070868,"TERMINAL",0,0,"4:00556",,terminal_output +3198,4071035,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0403.localdomain: Fri Sep 5 14:34:00 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3466286 accelerat train_to tum_cte0 R 17:18:05\t 1 hkn07363466287 accelerat train_la tum_cte0 R 17:18:05\t 1 hkn07363468617 dev_accel interact tum_cte0 R30:36\t 1 hkn0403",,terminal_output +3199,4072265,"TERMINAL",0,0,"1667",,terminal_output +3200,4072308,"TERMINAL",0,0,"1667",,terminal_output +3201,4073273,"TERMINAL",0,0,"2778",,terminal_output +3202,4073282,"TERMINAL",0,0,"2778",,terminal_output +3203,4073813,"TERMINAL",0,0,"3889",,terminal_output +3204,4073892,"TERMINAL",0,0,"3889",,terminal_output +3205,4074818,"TERMINAL",0,0,"49940",,terminal_output +3206,4074920,"TERMINAL",0,0,"49940",,terminal_output +3207,4075967,"TERMINAL",0,0,"510101",,terminal_output +3208,4075967,"TERMINAL",0,0,"510101",,terminal_output +3209,4077424,"TERMINAL",0,0,"6112",,terminal_output +3210,4077527,"TERMINAL",0,0,"6112",,terminal_output +3211,4078775,"TERMINAL",0,0,"7223",,terminal_output +3212,4078775,"TERMINAL",0,0,"7223",,terminal_output +3213,4079531,"TERMINAL",0,0,"8334",,terminal_output +3214,4079639,"TERMINAL",0,0,"8334",,terminal_output +3215,4080572,"TERMINAL",0,0,"9445",,terminal_output +3216,4080574,"TERMINAL",0,0,"9445",,terminal_output +3217,4080949,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +3218,4081210,"TERMINAL",0,0,"10556",,terminal_output +3219,4082705,"TERMINAL",0,0,"1667",,terminal_output +3220,4084098,"TERMINAL",0,0,"2778",,terminal_output +3221,4085230,"TERMINAL",0,0,"39950",,terminal_output +3222,4086211,"TERMINAL",0,0,"520201",,terminal_output +3223,4087003,"TERMINAL",0,0,"6112",,terminal_output +3224,4087698,"TERMINAL",0,0,"7223",,terminal_output +3225,4089157,"TERMINAL",0,0,"8334",,terminal_output +3226,4090243,"TERMINAL",0,0,"9445",,terminal_output +3227,4091390,"TERMINAL",0,0,"20556",,terminal_output +3228,4092329,"TERMINAL",0,0,"1667",,terminal_output +3229,4093121,"TERMINAL",0,0,"2778",,terminal_output +3230,4093854,"TERMINAL",0,0,"3889",,terminal_output +3231,4095216,"TERMINAL",0,0,"4991:00",,terminal_output +3232,4096353,"TERMINAL",0,0,"530301",,terminal_output +3233,4097374,"TERMINAL",0,0,"6112",,terminal_output +3234,4098465,"TERMINAL",0,0,"7223",,terminal_output 
+3235,4099486,"TERMINAL",0,0,"8334",,terminal_output +3236,4100574,"TERMINAL",0,0,"9445",,terminal_output +3237,4101659,"TERMINAL",0,0,"30556",,terminal_output +3238,4102638,"TERMINAL",0,0,"1667",,terminal_output +3239,4103732,"TERMINAL",0,0,"2778",,terminal_output +3240,4104226,"TERMINAL",0,0,"3889",,terminal_output +3241,4105672,"TERMINAL",0,0,"49910",,terminal_output +3242,4106735,"TERMINAL",0,0,"540401",,terminal_output +3243,4107715,"TERMINAL",0,0,"6112",,terminal_output +3244,4108922,"TERMINAL",0,0,"7223",,terminal_output +3245,4109717,"TERMINAL",0,0,"8334",,terminal_output +3246,4110569,"TERMINAL",0,0,"9445",,terminal_output +3247,4111582,"TERMINAL",0,0,"41667",,terminal_output +3248,4112791,"TERMINAL",0,0,"2778",,terminal_output +3249,4113743,"TERMINAL",0,0,"3889",,terminal_output +3250,4114654,"TERMINAL",0,0,"49920",,terminal_output +3251,4115988,"TERMINAL",0,0,"550501",,terminal_output +3252,4117059,"TERMINAL",0,0,"6112",,terminal_output +3253,4118228,"TERMINAL",0,0,"7223",,terminal_output +3254,4118974,"TERMINAL",0,0,"8334",,terminal_output +3255,4120043,"TERMINAL",0,0,"9445",,terminal_output +3256,4121029,"TERMINAL",0,0,"50556",,terminal_output +3257,4121710,"TERMINAL",0,0,"1667",,terminal_output +3258,4123138,"TERMINAL",0,0,"2778",,terminal_output +3259,4124324,"TERMINAL",0,0,"3889",,terminal_output +3260,4125010,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",0,0,"",python,tab +3261,4125011,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14428,0,"",python,selection_command +3262,4125525,"TERMINAL",0,0,"49930",,terminal_output +3263,4126372,"TERMINAL",0,0,"59:009:001",,terminal_output +3264,4127171,"TERMINAL",0,0,"6112",,terminal_output +3265,4128430,"TERMINAL",0,0,"7223",,terminal_output +3266,4129080,"TERMINAL",0,0,"8334",,terminal_output +3267,4130118,"TERMINAL",0,0,"9445",,terminal_output +3268,4131301,"TERMINAL",0,0,"5:00556",,terminal_output +3269,4132189,"TERMINAL",0,0,"1667",,terminal_output +3270,4133265,"TERMINAL",0,0,"2778",,terminal_output +3271,4134208,"TERMINAL",0,0,"3889",,terminal_output +3272,4134895,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14874,0,"",python,selection_mouse +3273,4135324,"TERMINAL",0,0,"49940",,terminal_output +3274,4136373,"TERMINAL",0,0,"511112",,terminal_output +3275,4136868,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14485,0,"",python,selection_mouse +3276,4138037,"TERMINAL",0,0,"7223",,terminal_output +3277,4139106,"TERMINAL",0,0,"8334",,terminal_output +3278,4139950,"TERMINAL",0,0,"9445",,terminal_output +3279,4140899,"TERMINAL",0,0,"10556",,terminal_output +3280,4141949,"TERMINAL",0,0,"1667",,terminal_output +3281,4143123,"TERMINAL",0,0,"2778",,terminal_output +3282,4144258,"TERMINAL",0,0,"3889",,terminal_output +3283,4145220,"TERMINAL",0,0,"49950",,terminal_output +3284,4146404,"TERMINAL",0,0,"520201",,terminal_output +3285,4147338,"TERMINAL",0,0,"6112",,terminal_output +3286,4147741,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13076,0,"",python,selection_mouse +3287,4148357,"TERMINAL",0,0,"7223",,terminal_output +3288,4149365,"TERMINAL",0,0,"8334",,terminal_output +3289,4150405,"TERMINAL",0,0,"9445",,terminal_output +3290,4151520,"TERMINAL",0,0,"20556",,terminal_output +3291,4152516,"TERMINAL",0,0,"1667",,terminal_output +3292,4153341,"TERMINAL",0,0,"2778",,terminal_output +3293,4154567,"TERMINAL",0,0,"3889",,terminal_output 
+3294,4155617,"TERMINAL",0,0,"4992:00",,terminal_output +3295,4156052,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14427,0,"",python,selection_mouse +3296,4156068,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14426,0,"",python,selection_command +3297,4156402,"TERMINAL",0,0,"530301",,terminal_output +3298,4157694,"TERMINAL",0,0,"6112",,terminal_output +3299,4158663,"TERMINAL",0,0,"7223",,terminal_output +3300,4159609,"TERMINAL",0,0,"8334",,terminal_output +3301,4160431,"TERMINAL",0,0,"9445",,terminal_output +3302,4161809,"TERMINAL",0,0,"30667",,terminal_output +3303,4162443,"TERMINAL",0,0,"2778",,terminal_output +3304,4163531,"TERMINAL",0,0,"3889",,terminal_output +3305,4164905,"TERMINAL",0,0,"49910",,terminal_output +3306,4166032,"TERMINAL",0,0,"540401",,terminal_output +3307,4166992,"TERMINAL",0,0,"6112",,terminal_output +3308,4167620,"TERMINAL",0,0,"7223",,terminal_output +3309,4168834,"TERMINAL",0,0,"8334",,terminal_output +3310,4169682,"TERMINAL",0,0,"9445",,terminal_output +3311,4171386,"TERMINAL",0,0,"40556",,terminal_output +3312,4172333,"TERMINAL",0,0,"1667",,terminal_output +3313,4173482,"TERMINAL",0,0,"2778",,terminal_output +3314,4174485,"TERMINAL",0,0,"3889",,terminal_output +3315,4175582,"TERMINAL",0,0,"49920",,terminal_output +3316,4176534,"TERMINAL",0,0,"550501",,terminal_output +3317,4177424,"TERMINAL",0,0,"6112",,terminal_output +3318,4177683,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13201,0,"",python,selection_command +3319,4178202,"TERMINAL",0,0,"7223",,terminal_output +3320,4179306,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13272,0,"",python,selection_mouse +3321,4179630,"TERMINAL",0,0,"8334",,terminal_output +3322,4180271,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13295,0,"\n ",python,content +3323,4180488,"TERMINAL",0,0,"9445",,terminal_output +3324,4180941,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13308,0,"p",python,content +3325,4180941,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13309,0,"",python,selection_keyboard +3326,4181076,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13309,0,"r",python,content +3327,4181077,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13310,0,"",python,selection_keyboard +3328,4181170,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13310,0,"i",python,content +3329,4181172,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13311,0,"",python,selection_keyboard +3330,4181206,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13311,0,"n",python,content +3331,4181207,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13312,0,"",python,selection_keyboard +3332,4181290,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13312,0,"t",python,content +3333,4181291,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13313,0,"",python,selection_keyboard +3334,4182307,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13313,0,"()",python,content +3335,4182308,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13314,0,"",python,selection_keyboard 
+3336,4182525,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13314,0,"r",python,content +3337,4182526,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13315,0,"",python,selection_keyboard +3338,4182614,"TERMINAL",0,0,"50556",,terminal_output +3339,4182691,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13315,0,"e",python,content +3340,4182692,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13316,0,"",python,selection_keyboard +3341,4182699,"TERMINAL",0,0,"1667",,terminal_output +3342,4182864,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13316,0,"c",python,content +3343,4182866,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13317,0,"",python,selection_keyboard +3344,4182927,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13317,0,"o",python,content +3345,4182927,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13318,0,"",python,selection_keyboard +3346,4183122,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13318,0,"n",python,content +3347,4183123,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13319,0,"",python,selection_keyboard +3348,4183278,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13319,0,".",python,content +3349,4183279,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13320,0,"",python,selection_keyboard +3350,4183409,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13320,0,"s",python,content +3351,4183409,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13321,0,"",python,selection_keyboard +3352,4183541,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13321,0,"h",python,content +3353,4183542,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13322,0,"",python,selection_keyboard +3354,4183620,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13322,0,"a",python,content +3355,4183621,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13323,0,"",python,selection_keyboard +3356,4183709,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13323,0,"p",python,content +3357,4183709,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13324,0,"",python,selection_keyboard +3358,4183852,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13324,0,"e",python,content +3359,4183853,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13325,0,"",python,selection_keyboard +3360,4184309,"TERMINAL",0,0,"2778",,terminal_output +3361,4184376,"TERMINAL",0,0,"39930",,terminal_output +3362,4185878,"TERMINAL",0,0,"520:0020:001",,terminal_output +3363,4186864,"TERMINAL",0,0,"6112",,terminal_output +3364,4187960,"TERMINAL",0,0,"7223",,terminal_output +3365,4188664,"TERMINAL",0,0,"8334",,terminal_output +3366,4190237,"TERMINAL",0,0,"9445",,terminal_output +3367,4191426,"TERMINAL",0,0,"6:00556",,terminal_output +3368,4191958,"TERMINAL",0,0,"queue",,terminal_output +3369,4192357,"TERMINAL",0,0,"1667",,terminal_output +3370,4193209,"TERMINAL",0,0,"scancel 3465677",,terminal_output 
+3371,4194040,"TERMINAL",0,0,"6",,terminal_output +3372,4194068,"TERMINAL",0,0,"2778",,terminal_output +3373,4194483,"TERMINAL",0,0,"5",,terminal_output +3374,4194990,"TERMINAL",0,0,"h slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",,terminal_output +3375,4194991,"TERMINAL",0,0,"3889",,terminal_output +3376,4195473,"TERMINAL",0,0,"49940",,terminal_output +3377,4196224,"TERMINAL",0,0,"510101",,terminal_output +3378,4197291,"TERMINAL",0,0,"6112",,terminal_output +3379,4198130,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --job-name=train_lam_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_lam.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=1000 \\r\n --log \\r\n --name=coinrun-lam-dev-$slurm_job_id \\r\n --tags lam coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 200 \\r\n --val_interval 10 \\r\n --val_steps 50 \\r\n --data_dir $array_records_dir_train\r\n\r\n",,terminal_output 
+3380,4198288,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3453268\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0403\r\nSLURM_JOB_START_TIME=1757073804\r\nSLURM_STEP_NODELIST=hkn0403\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757077404\r\nSLURM_PMI2_SRUN_PORT=36059\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3468617\r\nSLURM_PTY_PORT=33607\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=36\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0403\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=196\r\nSLURM_NODELIST=hkn0403\r\nSLURM_SRUN_COMM_PORT=43381\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3468617\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0403\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=43381\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0403\r\n",,terminal_output +3381,4198348,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +3382,4198560,"TERMINAL",0,0,"7223",,terminal_output +3383,4199241,"TERMINAL",0,0,"8334",,terminal_output +3384,4200340,"TERMINAL",0,0,"9445",,terminal_output +3385,4201567,"TERMINAL",0,0,"10556",,terminal_output +3386,4202721,"TERMINAL",0,0,"1667",,terminal_output +3387,4203560,"TERMINAL",0,0,"2778",,terminal_output +3388,4204193,"TERMINAL",0,0,"3889",,terminal_output +3389,4204614,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +3390,4205252,"TERMINAL",0,0,"49950",,terminal_output +3391,4205426,"TERMINAL",0,0,"wandb: creating run\r\nwandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_143614-dq49i06x\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-lam-dev-3468617\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/dq49i06x\r\n",,terminal_output +3392,4206233,"TERMINAL",0,0,"520201",,terminal_output +3393,4207530,"TERMINAL",0,0,"6112",,terminal_output +3394,4208657,"TERMINAL",0,0,"7334",,terminal_output +3395,4209227,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['action_in', 'action_up', 'decoder', 'encoder', 'patch_up', 'vq']\r\nParameter counts:\r\n{'action_in': 768, 'action_up': 16896, 'decoder': 17474816, 'encoder': 17228832, 'patch_up': 393728, 'vq': 192, 'total': 35115232}\r\nStarting training from step 0...\r\n",,terminal_output +3396,4209477,"TERMINAL",0,0,"9445",,terminal_output +3397,4211013,"TERMINAL",0,0,"20556",,terminal_output +3398,4212144,"TERMINAL",0,0,"1667",,terminal_output +3399,4213163,"TERMINAL",0,0,"2778",,terminal_output +3400,4213983,"TERMINAL",0,0,"3889",,terminal_output +3401,4214601,"TERMINAL",0,0,"4993:00",,terminal_output +3402,4215724,"TERMINAL",0,0,"530301",,terminal_output +3403,4217182,"TERMINAL",0,0,"6112",,terminal_output +3404,4218328,"TERMINAL",0,0,"7223",,terminal_output +3405,4218329,"TERMINAL",0,0,"2025-09-05 14:36:27.445001: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 14:36:27.446185: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 14:36:27.446204: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 14:36:27.446679: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3406,4219307,"TERMINAL",0,0,"8334",,terminal_output +3407,4220324,"TERMINAL",0,0,"9445",,terminal_output +3408,4221460,"TERMINAL",0,0,"30556",,terminal_output +3409,4222482,"TERMINAL",0,0,"1667",,terminal_output +3410,4223509,"TERMINAL",0,0,"2778",,terminal_output +3411,4224604,"TERMINAL",0,0,"3889",,terminal_output +3412,4225328,"TERMINAL",0,0,"49910",,terminal_output +3413,4226374,"TERMINAL",0,0,"540401",,terminal_output +3414,4227307,"TERMINAL",0,0,"6112",,terminal_output +3415,4228696,"TERMINAL",0,0,"7223",,terminal_output +3416,4229781,"TERMINAL",0,0,"8334",,terminal_output +3417,4230906,"TERMINAL",0,0,"9445",,terminal_output +3418,4231401,"TERMINAL",0,0,"40556",,terminal_output +3419,4233206,"TERMINAL",0,0,"1778",,terminal_output +3420,4234237,"TERMINAL",0,0,"3889",,terminal_output +3421,4235181,"TERMINAL",0,0,"49920",,terminal_output +3422,4236101,"TERMINAL",0,0,"550501",,terminal_output +3423,4237005,"TERMINAL",0,0,"6112",,terminal_output +3424,4238051,"TERMINAL",0,0,"7223",,terminal_output +3425,4239276,"TERMINAL",0,0,"8334",,terminal_output +3426,4240190,"TERMINAL",0,0,"9445",,terminal_output +3427,4241232,"TERMINAL",0,0,"50556",,terminal_output +3428,4242069,"TERMINAL",0,0,"1667",,terminal_output +3429,4243254,"TERMINAL",0,0,"2778",,terminal_output +3430,4244021,"TERMINAL",0,0,"3889",,terminal_output +3431,4245216,"TERMINAL",0,0,"49930",,terminal_output +3432,4246202,"TERMINAL",0,0,"51:001:001",,terminal_output +3433,4247570,"TERMINAL",0,0,"6112",,terminal_output +3434,4248131,"TERMINAL",0,0,"7223",,terminal_output +3435,4249211,"TERMINAL",0,0,"8334",,terminal_output +3436,4250664,"TERMINAL",0,0,"9445",,terminal_output +3437,4251794,"TERMINAL",0,0,"7:00556",,terminal_output +3438,4252691,"TERMINAL",0,0,"1667",,terminal_output +3439,4253696,"TERMINAL",0,0,"2778",,terminal_output +3440,4254603,"TERMINAL",0,0,"3889",,terminal_output +3441,4255968,"TERMINAL",0,0,"4101041",,terminal_output +3442,4257281,"TERMINAL",0,0,"6112",,terminal_output +3443,4258154,"TERMINAL",0,0,"7223",,terminal_output +3444,4259365,"TERMINAL",0,0,"8334",,terminal_output +3445,4260322,"TERMINAL",0,0,"9445",,terminal_output +3446,4261179,"TERMINAL",0,0,"10556",,terminal_output +3447,4262089,"TERMINAL",0,0,"1667",,terminal_output +3448,4263248,"TERMINAL",0,0,"2778",,terminal_output +3449,4264260,"TERMINAL",0,0,"3889",,terminal_output +3450,4264623,"TERMINAL",0,0,"Step 0, loss: 0.29082542657852173\r\n(12, 15, 64, 64, 3)\r\nStep 1, loss: 0.2048691064119339\r\n(12, 15, 64, 64, 3)\r\nStep 2, loss: 0.19221149384975433\r\n(12, 15, 64, 64, 3)\r\nStep 3, loss: 0.20193631947040558\r\n(12, 15, 64, 64, 3)\r\nStep 4, loss: 0.1634262055158615\r\n(12, 15, 64, 64, 3)\r\nStep 5, loss: 0.16224892437458038\r\n(12, 15, 64, 64, 3)\r\nStep 6, loss: 0.1465519517660141\r\n(12, 15, 64, 64, 3)\r\nStep 7, loss: 0.13969092071056366\r\n(12, 15, 64, 64, 3)\r\nStep 8, loss: 0.13379880785942078\r\n(12, 15, 64, 64, 3)\r\nStep 9, loss: 0.13682898879051208\r\n(12, 15, 64, 64, 3)\r\njax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py"", line 413, in \r\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 4625, in concatenate\r\n arrays_out = [lax.concatenate(arrays_out[i:i+k], axis)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/numpy/lax_numpy.py"", line 4625, in \r\n arrays_out = [lax.concatenate(arrays_out[i:i+k], axis)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/lax/lax.py"", line 2006, in concatenate\r\n return concatenate_p.bind(*operands, dimension=dimension)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 536, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 552, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 562, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1066, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 91, in apply_primitive\r\n outs = fun(*args)\r\nTypeError: Cannot concatenate arrays with shapes that differ in dimensions other than the one being concatenated: concatenating along dimension 1 for shapes (16, 64, 64, 3), (15, 64, 64, 3).\r\n",,terminal_output +3451,4264757,"TERMINAL",0,0,"49950",,terminal_output +3452,4266532,"TERMINAL",0,0,"520201",,terminal_output +3453,4266533,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-lam-dev-3468617 at: https://wandb.ai/instant-uv/jafar/runs/dq49i06x\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_143614-dq49i06x/logs\r\n",,terminal_output +3454,4267243,"TERMINAL",0,0,"/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 10 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +3455,4267313,"TERMINAL",0,0,"srun: error: hkn0403: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +3456,4267313,"TERMINAL",0,0,"6112",,terminal_output +3457,4268250,"TERMINAL",0,0,"7223",,terminal_output +3458,4269297,"TERMINAL",0,0,"8334",,terminal_output +3459,4270508,"TERMINAL",0,0,"9445",,terminal_output +3460,4271493,"TERMINAL",0,0,"20556",,terminal_output +3461,4272147,"TERMINAL",0,0,"1667",,terminal_output 
+3462,4273602,"TERMINAL",0,0,"2778",,terminal_output +3463,4274814,"TERMINAL",0,0,"3889",,terminal_output +3464,4275680,"TERMINAL",0,0,"4994:00",,terminal_output +3465,4276706,"TERMINAL",0,0,"530301",,terminal_output +3466,4277907,"TERMINAL",0,0,"6112",,terminal_output +3467,4279008,"TERMINAL",0,0,"7223",,terminal_output +3468,4280076,"TERMINAL",0,0,"8334",,terminal_output +3469,4281168,"TERMINAL",0,0,"9556",,terminal_output +3470,4281853,"TERMINAL",0,0,"31667",,terminal_output +3471,4283013,"TERMINAL",0,0,"2778",,terminal_output +3472,4284070,"TERMINAL",0,0,"3889",,terminal_output +3473,4284891,"TERMINAL",0,0,"49910",,terminal_output +3474,4286057,"TERMINAL",0,0,"540401",,terminal_output +3475,4287251,"TERMINAL",0,0,"6112",,terminal_output +3476,4288031,"TERMINAL",0,0,"7223",,terminal_output +3477,4289354,"TERMINAL",0,0,"8334",,terminal_output +3478,4290079,"TERMINAL",0,0,"9445",,terminal_output +3479,4291499,"TERMINAL",0,0,"40556",,terminal_output +3480,4292346,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +3481,4292574,"TERMINAL",0,0,"1667",,terminal_output +3482,4293522,"TERMINAL",0,0,"2778",,terminal_output +3483,4294557,"TERMINAL",0,0,"3889",,terminal_output +3484,4295657,"TERMINAL",0,0,"49920",,terminal_output +3485,4296670,"TERMINAL",0,0,"550501",,terminal_output +3486,4297040,"TERMINAL",0,0,"6112",,terminal_output +3487,4298225,"TERMINAL",0,0,"7223",,terminal_output +3488,4299640,"TERMINAL",0,0,"8334",,terminal_output +3489,4300631,"TERMINAL",0,0,"9445",,terminal_output +3490,4301816,"TERMINAL",0,0,"50556",,terminal_output +3491,4302783,"TERMINAL",0,0,"1667",,terminal_output +3492,4303904,"TERMINAL",0,0,"2778",,terminal_output +3493,4305020,"TERMINAL",0,0,"3889",,terminal_output +3494,4306340,"train_lam.py",0,0,"",python,tab +3495,4306598,"TERMINAL",0,0,"49930",,terminal_output +3496,4306937,"TERMINAL",0,0,"52:012:012",,terminal_output +3497,4307992,"TERMINAL",0,0,"7223",,terminal_output +3498,4309123,"TERMINAL",0,0,"8334",,terminal_output +3499,4310079,"TERMINAL",0,0,"9445",,terminal_output +3500,4311049,"TERMINAL",0,0,"8:00556",,terminal_output +3501,4312116,"TERMINAL",0,0,"1667",,terminal_output +3502,4313341,"TERMINAL",0,0,"2778",,terminal_output +3503,4314237,"TERMINAL",0,0,"3889",,terminal_output +3504,4315223,"TERMINAL",0,0,"49940",,terminal_output +3505,4316381,"TERMINAL",0,0,"510101",,terminal_output +3506,4317311,"TERMINAL",0,0,"6112",,terminal_output +3507,4318138,"TERMINAL",0,0,"7223",,terminal_output +3508,4319379,"train_lam.py",4864,0,"",python,selection_mouse +3509,4319565,"train_lam.py",4860,5,"recon",python,selection_mouse +3510,4319640,"TERMINAL",0,0,"8334",,terminal_output +3511,4320598,"TERMINAL",0,0,"9445",,terminal_output +3512,4321224,"TERMINAL",0,0,"10556",,terminal_output +3513,4322182,"TERMINAL",0,0,"1667",,terminal_output +3514,4323126,"TERMINAL",0,0,"2778",,terminal_output +3515,4323464,"train_lam.py",2923,0,"",python,selection_mouse +3516,4323619,"train_lam.py",2918,7,"outputs",python,selection_mouse +3517,4324877,"TERMINAL",0,0,"39950",,terminal_output +3518,4326124,"TERMINAL",0,0,"520201",,terminal_output +3519,4327078,"TERMINAL",0,0,"6112",,terminal_output +3520,4327988,"TERMINAL",0,0,"7223",,terminal_output +3521,4329082,"TERMINAL",0,0,"8334",,terminal_output +3522,4330084,"TERMINAL",0,0,"9445",,terminal_output +3523,4331140,"TERMINAL",0,0,"20556",,terminal_output +3524,4332138,"TERMINAL",0,0,"1667",,terminal_output +3525,4333176,"TERMINAL",0,0,"2778",,terminal_output 
+3526,4334213,"TERMINAL",0,0,"3889",,terminal_output +3527,4334387,"train_lam.py",2922,0,"",python,selection_mouse +3528,4334861,"train_lam.py",3012,0,"",python,selection_mouse +3529,4335345,"TERMINAL",0,0,"4995:00",,terminal_output +3530,4336220,"train_lam.py",2929,0,"",python,selection_mouse +3531,4336336,"TERMINAL",0,0,"531312",,terminal_output +3532,4337254,"train_lam.py",2939,0,"",python,selection_mouse +3533,4337352,"TERMINAL",0,0,"7223",,terminal_output +3534,4338237,"train_lam.py",2949,0,"",python,selection_mouse +3535,4338430,"TERMINAL",0,0,"8334",,terminal_output +3536,4339491,"TERMINAL",0,0,"9445",,terminal_output +3537,4340595,"TERMINAL",0,0,"30556",,terminal_output +3538,4341559,"TERMINAL",0,0,"1667",,terminal_output +3539,4342557,"TERMINAL",0,0,"2778",,terminal_output +3540,4343624,"TERMINAL",0,0,"3889",,terminal_output +3541,4344080,"train_lam.py",3006,0,"",python,selection_mouse +3542,4344281,"train_lam.py",3002,7,"asarray",python,selection_mouse +3543,4344613,"TERMINAL",0,0,"49910",,terminal_output +3544,4344847,"train_lam.py",2923,0,"",python,selection_mouse +3545,4345683,"TERMINAL",0,0,"540401",,terminal_output +3546,4346881,"models/lam.py",0,0,"",python,tab +3547,4346981,"TERMINAL",0,0,"6112",,terminal_output +3548,4347849,"TERMINAL",0,0,"7223",,terminal_output +3549,4348778,"TERMINAL",0,0,"8334",,terminal_output +3550,4349861,"TERMINAL",0,0,"9445",,terminal_output +3551,4350849,"TERMINAL",0,0,"40556",,terminal_output +3552,4351704,"models/lam.py",4176,0,"",python,selection_mouse +3553,4351745,"models/lam.py",4175,0,"",python,selection_command +3554,4351901,"TERMINAL",0,0,"1667",,terminal_output +3555,4352099,"models/lam.py",4150,0,"",python,selection_mouse +3556,4352254,"models/lam.py",4136,17,"video_recon_BTHWC",python,selection_mouse +3557,4352957,"TERMINAL",0,0,"2778",,terminal_output +3558,4354098,"TERMINAL",0,0,"3889",,terminal_output +3559,4355108,"TERMINAL",0,0,"49920",,terminal_output +3560,4356125,"TERMINAL",0,0,"550501",,terminal_output +3561,4357129,"TERMINAL",0,0,"6112",,terminal_output +3562,4358173,"TERMINAL",0,0,"7223",,terminal_output +3563,4359204,"TERMINAL",0,0,"8334",,terminal_output +3564,4360264,"TERMINAL",0,0,"9445",,terminal_output +3565,4361364,"TERMINAL",0,0,"50667",,terminal_output +3566,4362374,"TERMINAL",0,0,"2778",,terminal_output +3567,4363391,"TERMINAL",0,0,"3889",,terminal_output +3568,4364492,"TERMINAL",0,0,"49930",,terminal_output +3569,4365540,"TERMINAL",0,0,"53:003:001",,terminal_output +3570,4366575,"TERMINAL",0,0,"6112",,terminal_output +3571,4367594,"TERMINAL",0,0,"7223",,terminal_output +3572,4368618,"TERMINAL",0,0,"8334",,terminal_output +3573,4369607,"TERMINAL",0,0,"9445",,terminal_output +3574,4370769,"TERMINAL",0,0,"9:00556",,terminal_output +3575,4371798,"TERMINAL",0,0,"1667",,terminal_output +3576,4372810,"TERMINAL",0,0,"2778",,terminal_output +3577,4373841,"TERMINAL",0,0,"3889",,terminal_output +3578,4374866,"TERMINAL",0,0,"49940",,terminal_output +3579,4375890,"TERMINAL",0,0,"510101",,terminal_output +3580,4377122,"TERMINAL",0,0,"6112",,terminal_output +3581,4378133,"TERMINAL",0,0,"7223",,terminal_output +3582,4379016,"TERMINAL",0,0,"8334",,terminal_output +3583,4380190,"TERMINAL",0,0,"9445",,terminal_output +3584,4381111,"TERMINAL",0,0,"10556",,terminal_output +3585,4382161,"TERMINAL",0,0,"1667",,terminal_output +3586,4383204,"TERMINAL",0,0,"2778",,terminal_output +3587,4384246,"TERMINAL",0,0,"3889",,terminal_output +3588,4385290,"TERMINAL",0,0,"4202051",,terminal_output 
+3589,4386340,"TERMINAL",0,0,"6112",,terminal_output +3590,4387449,"TERMINAL",0,0,"7223",,terminal_output +3591,4388428,"TERMINAL",0,0,"8334",,terminal_output +3592,4389476,"TERMINAL",0,0,"9445",,terminal_output +3593,4390635,"TERMINAL",0,0,"20556",,terminal_output +3594,4391648,"TERMINAL",0,0,"1667",,terminal_output +3595,4392672,"TERMINAL",0,0,"2778",,terminal_output +3596,4393673,"TERMINAL",0,0,"3889",,terminal_output +3597,4394842,"TERMINAL",0,0,"4996:00",,terminal_output +3598,4395777,"TERMINAL",0,0,"530301",,terminal_output +3599,4396871,"TERMINAL",0,0,"6112",,terminal_output +3600,4397859,"TERMINAL",0,0,"7223",,terminal_output +3601,4399023,"TERMINAL",0,0,"8334",,terminal_output +3602,4400045,"TERMINAL",0,0,"9445",,terminal_output +3603,4401072,"TERMINAL",0,0,"30556",,terminal_output +3604,4402103,"TERMINAL",0,0,"1667",,terminal_output +3605,4403153,"TERMINAL",0,0,"2778",,terminal_output +3606,4404245,"TERMINAL",0,0,"3889",,terminal_output +3607,4405259,"TERMINAL",0,0,"49910",,terminal_output +3608,4406218,"TERMINAL",0,0,"540401",,terminal_output +3609,4407325,"TERMINAL",0,0,"6112",,terminal_output +3610,4408319,"TERMINAL",0,0,"7334",,terminal_output +3611,4409429,"TERMINAL",0,0,"9445",,terminal_output +3612,4410379,"TERMINAL",0,0,"40556",,terminal_output +3613,4411525,"TERMINAL",0,0,"1667",,terminal_output +3614,4412502,"TERMINAL",0,0,"2778",,terminal_output +3615,4413572,"TERMINAL",0,0,"3889",,terminal_output +3616,4414557,"TERMINAL",0,0,"49920",,terminal_output +3617,4415728,"TERMINAL",0,0,"550501",,terminal_output +3618,4416736,"TERMINAL",0,0,"6112",,terminal_output +3619,4417751,"TERMINAL",0,0,"7223",,terminal_output +3620,4418891,"TERMINAL",0,0,"8334",,terminal_output +3621,4419795,"TERMINAL",0,0,"9445",,terminal_output +3622,4420951,"TERMINAL",0,0,"50556",,terminal_output +3623,4422077,"TERMINAL",0,0,"1667",,terminal_output +3624,4422989,"TERMINAL",0,0,"2778",,terminal_output +3625,4424224,"TERMINAL",0,0,"3889",,terminal_output +3626,4425274,"TERMINAL",0,0,"49930",,terminal_output +3627,4426382,"TERMINAL",0,0,"54:004:001",,terminal_output +3628,4427499,"TERMINAL",0,0,"6112",,terminal_output +3629,4428340,"TERMINAL",0,0,"7223",,terminal_output +3630,4429424,"TERMINAL",0,0,"8334",,terminal_output +3631,4429603,"models/lam.py",4062,0,"",python,selection_mouse +3632,4430407,"TERMINAL",0,0,"9445",,terminal_output +3633,4431301,"utils/preprocess.py",0,0,"import einops\nimport jax\nimport jax.numpy as jnp\n\n\ndef patchify(videos: jax.Array, size: int) -> jax.Array:\n B, T, H, W, C = videos.shape\n x = jnp.pad(videos, ((0, 0), (0, 0), (0, -H % size), (0, -W % size), (0, 0)))\n return einops.rearrange(\n x, ""b t (hn hp) (wn wp) c -> b t (hn wn) (hp wp c)"", hp=size, wp=size\n )\n\n\ndef unpatchify(patches: jax.Array, size: int, h_out: int, w_out: int) -> jax.Array:\n h_pad = -h_out % size\n hn = (h_out + h_pad) // size\n x = einops.rearrange(\n patches,\n ""b t (hn wn) (hp wp c) -> b t (hn hp) (wn wp) c"",\n hp=size,\n wp=size,\n hn=hn,\n )\n return x[:, :, :h_out, :w_out]\n",python,tab +3634,4431302,"utils/preprocess.py",342,0,"",python,selection_command +3635,4431416,"TERMINAL",0,0,"40:00556",,terminal_output +3636,4432471,"TERMINAL",0,0,"2778",,terminal_output +3637,4433523,"TERMINAL",0,0,"3889",,terminal_output +3638,4434311,"utils/preprocess.py",535,0,"",python,selection_command +3639,4434635,"TERMINAL",0,0,"49940",,terminal_output +3640,4435640,"TERMINAL",0,0,"510101",,terminal_output +3641,4435811,"utils/preprocess.py",521,0,"",python,selection_command 
+3642,4436705,"TERMINAL",0,0,"6112",,terminal_output +3643,4437770,"TERMINAL",0,0,"7223",,terminal_output +3644,4438916,"TERMINAL",0,0,"8334",,terminal_output +3645,4439982,"TERMINAL",0,0,"9445",,terminal_output +3646,4440309,"models/lam.py",0,0,"",python,tab +3647,4440935,"TERMINAL",0,0,"10556",,terminal_output +3648,4441732,"TERMINAL",0,0,"1667",,terminal_output +3649,4442838,"TERMINAL",0,0,"2778",,terminal_output +3650,4443877,"TERMINAL",0,0,"3889",,terminal_output +3651,4445820,"TERMINAL",0,0,"4202051",,terminal_output +3652,4446773,"TERMINAL",0,0,"6112",,terminal_output +3653,4447838,"TERMINAL",0,0,"7223",,terminal_output +3654,4448890,"TERMINAL",0,0,"8334",,terminal_output +3655,4449966,"TERMINAL",0,0,"9445",,terminal_output +3656,4451047,"TERMINAL",0,0,"20556",,terminal_output +3657,4452113,"TERMINAL",0,0,"1667",,terminal_output +3658,4453039,"TERMINAL",0,0,"2778",,terminal_output +3659,4454115,"TERMINAL",0,0,"3889",,terminal_output +3660,4455138,"TERMINAL",0,0,"4997:00",,terminal_output +3661,4456171,"TERMINAL",0,0,"530301",,terminal_output +3662,4457314,"models/lam.py",3389,0,"",python,selection_command +3663,4457470,"TERMINAL",0,0,"6112",,terminal_output +3664,4458262,"TERMINAL",0,0,"7223",,terminal_output +3665,4458737,"models/lam.py",3326,0,"",python,selection_command +3666,4459338,"TERMINAL",0,0,"8445",,terminal_output +3667,4460348,"TERMINAL",0,0,"30556",,terminal_output +3668,4461360,"models/lam.py",3322,0,"",python,selection_command +3669,4461423,"TERMINAL",0,0,"1667",,terminal_output +3670,4462520,"TERMINAL",0,0,"2778",,terminal_output +3671,4463407,"models/lam.py",3327,0,"",python,selection_mouse +3672,4463480,"TERMINAL",0,0,"3889",,terminal_output +3673,4464539,"TERMINAL",0,0,"49910",,terminal_output +3674,4464915,"models/lam.py",4186,0,"",python,selection_command +3675,4465564,"TERMINAL",0,0,"540401",,terminal_output +3676,4466709,"TERMINAL",0,0,"6112",,terminal_output +3677,4467340,"models/lam.py",4788,0,"",python,selection_mouse +3678,4467657,"TERMINAL",0,0,"7223",,terminal_output +3679,4468766,"TERMINAL",0,0,"8334",,terminal_output +3680,4469790,"TERMINAL",0,0,"9445",,terminal_output +3681,4470906,"TERMINAL",0,0,"40556",,terminal_output +3682,4471940,"TERMINAL",0,0,"1667",,terminal_output +3683,4472886,"TERMINAL",0,0,"2778",,terminal_output +3684,4473990,"TERMINAL",0,0,"3889",,terminal_output +3685,4475005,"TERMINAL",0,0,"49920",,terminal_output +3686,4476118,"TERMINAL",0,0,"550501",,terminal_output +3687,4477058,"TERMINAL",0,0,"6112",,terminal_output +3688,4478282,"TERMINAL",0,0,"7223",,terminal_output +3689,4479213,"TERMINAL",0,0,"8334",,terminal_output +3690,4479708,"train_tokenizer.py",0,0,"",python,tab +3691,4480206,"TERMINAL",0,0,"9445",,terminal_output +3692,4481235,"TERMINAL",0,0,"50556",,terminal_output +3693,4482306,"TERMINAL",0,0,"1778",,terminal_output +3694,4483353,"TERMINAL",0,0,"3889",,terminal_output +3695,4484543,"models/lam.py",0,0,"",python,tab +3696,4484720,"TERMINAL",0,0,"49930",,terminal_output +3697,4485447,"TERMINAL",0,0,"55:005:001",,terminal_output +3698,4486482,"TERMINAL",0,0,"6112",,terminal_output +3699,4487537,"TERMINAL",0,0,"7223",,terminal_output +3700,4488564,"TERMINAL",0,0,"8334",,terminal_output +3701,4489650,"TERMINAL",0,0,"9445",,terminal_output +3702,4490669,"TERMINAL",0,0,"1:00556",,terminal_output +3703,4491754,"TERMINAL",0,0,"1667",,terminal_output +3704,4492727,"TERMINAL",0,0,"2778",,terminal_output +3705,4493773,"TERMINAL",0,0,"3889",,terminal_output +3706,4494827,"TERMINAL",0,0,"49940",,terminal_output 
+3707,4495899,"TERMINAL",0,0,"510101",,terminal_output +3708,4496947,"TERMINAL",0,0,"6112",,terminal_output +3709,4498030,"TERMINAL",0,0,"7223",,terminal_output +3710,4498998,"TERMINAL",0,0,"8334",,terminal_output +3711,4500108,"TERMINAL",0,0,"9445",,terminal_output +3712,4501222,"TERMINAL",0,0,"10556",,terminal_output +3713,4502149,"TERMINAL",0,0,"1667",,terminal_output +3714,4503186,"TERMINAL",0,0,"2778",,terminal_output +3715,4504226,"TERMINAL",0,0,"3889",,terminal_output +3716,4505259,"TERMINAL",0,0,"49950",,terminal_output +3717,4506291,"TERMINAL",0,0,"521212",,terminal_output +3718,4507344,"TERMINAL",0,0,"7223",,terminal_output +3719,4508499,"TERMINAL",0,0,"8334",,terminal_output +3720,4509420,"TERMINAL",0,0,"9445",,terminal_output +3721,4509600,"models/lam.py",4068,0,"",python,selection_mouse +3722,4510540,"TERMINAL",0,0,"20556",,terminal_output +3723,4511510,"models/lam.py",3717,0,"",python,selection_mouse +3724,4511570,"TERMINAL",0,0,"1667",,terminal_output +3725,4512584,"TERMINAL",0,0,"2778",,terminal_output +3726,4512826,"models/lam.py",3330,0,"",python,selection_mouse +3727,4513598,"TERMINAL",0,0,"3889",,terminal_output +3728,4514745,"TERMINAL",0,0,"4998:00",,terminal_output +3729,4515757,"TERMINAL",0,0,"530301",,terminal_output +3730,4516780,"TERMINAL",0,0,"6112",,terminal_output +3731,4517791,"TERMINAL",0,0,"7223",,terminal_output +3732,4518934,"TERMINAL",0,0,"8334",,terminal_output +3733,4519878,"TERMINAL",0,0,"9445",,terminal_output +3734,4520982,"TERMINAL",0,0,"30556",,terminal_output +3735,4522006,"TERMINAL",0,0,"1667",,terminal_output +3736,4523040,"TERMINAL",0,0,"2778",,terminal_output +3737,4524138,"TERMINAL",0,0,"3889",,terminal_output +3738,4525207,"TERMINAL",0,0,"49910",,terminal_output +3739,4526132,"TERMINAL",0,0,"540401",,terminal_output +3740,4527228,"TERMINAL",0,0,"6112",,terminal_output +3741,4528223,"TERMINAL",0,0,"7223",,terminal_output +3742,4529287,"TERMINAL",0,0,"8334",,terminal_output +3743,4530364,"TERMINAL",0,0,"9556",,terminal_output +3744,4531424,"TERMINAL",0,0,"41667",,terminal_output +3745,4532449,"TERMINAL",0,0,"2778",,terminal_output +3746,4533485,"TERMINAL",0,0,"3889",,terminal_output +3747,4534611,"TERMINAL",0,0,"49920",,terminal_output +3748,4535627,"TERMINAL",0,0,"550501",,terminal_output +3749,4536755,"TERMINAL",0,0,"6112",,terminal_output +3750,4537675,"TERMINAL",0,0,"7223",,terminal_output +3751,4538798,"TERMINAL",0,0,"8334",,terminal_output +3752,4539764,"TERMINAL",0,0,"9445",,terminal_output +3753,4540847,"TERMINAL",0,0,"50556",,terminal_output +3754,4541884,"TERMINAL",0,0,"1667",,terminal_output +3755,4542851,"TERMINAL",0,0,"2778",,terminal_output +3756,4543918,"TERMINAL",0,0,"3889",,terminal_output +3757,4545048,"TERMINAL",0,0,"49930",,terminal_output +3758,4545997,"TERMINAL",0,0,"56:006:001",,terminal_output +3759,4547057,"TERMINAL",0,0,"6112",,terminal_output +3760,4548100,"TERMINAL",0,0,"7223",,terminal_output +3761,4549253,"TERMINAL",0,0,"8334",,terminal_output +3762,4550324,"TERMINAL",0,0,"9445",,terminal_output +3763,4551220,"TERMINAL",0,0,"2:00556",,terminal_output +3764,4552702,"TERMINAL",0,0,"1778",,terminal_output +3765,4553854,"TERMINAL",0,0,"3889",,terminal_output +3766,4554841,"TERMINAL",0,0,"49940",,terminal_output +3767,4555904,"TERMINAL",0,0,"510101",,terminal_output +3768,4557020,"TERMINAL",0,0,"6112",,terminal_output +3769,4558040,"TERMINAL",0,0,"7223",,terminal_output +3770,4559188,"TERMINAL",0,0,"8334",,terminal_output +3771,4560100,"TERMINAL",0,0,"9445",,terminal_output 
+3772,4561136,"TERMINAL",0,0,"10556",,terminal_output +3773,4562157,"TERMINAL",0,0,"1667",,terminal_output +3774,4563181,"TERMINAL",0,0,"2778",,terminal_output +3775,4564387,"TERMINAL",0,0,"3889",,terminal_output +3776,4565324,"TERMINAL",0,0,"49950",,terminal_output +3777,4566591,"TERMINAL",0,0,"521212",,terminal_output +3778,4567677,"TERMINAL",0,0,"7223",,terminal_output +3779,4568807,"TERMINAL",0,0,"8334",,terminal_output +3780,4569750,"TERMINAL",0,0,"9445",,terminal_output +3781,4570715,"TERMINAL",0,0,"20556",,terminal_output +3782,4571777,"TERMINAL",0,0,"1667",,terminal_output +3783,4572776,"TERMINAL",0,0,"2778",,terminal_output +3784,4573813,"TERMINAL",0,0,"3889",,terminal_output +3785,4574956,"TERMINAL",0,0,"4999:00",,terminal_output +3786,4575897,"TERMINAL",0,0,"530301",,terminal_output +3787,4577099,"TERMINAL",0,0,"6112",,terminal_output +3788,4578000,"TERMINAL",0,0,"7223",,terminal_output +3789,4579156,"TERMINAL",0,0,"8334",,terminal_output +3790,4580178,"TERMINAL",0,0,"9445",,terminal_output +3791,4581197,"TERMINAL",0,0,"30556",,terminal_output +3792,4582320,"TERMINAL",0,0,"1667",,terminal_output +3793,4583250,"TERMINAL",0,0,"2778",,terminal_output +3794,4584287,"TERMINAL",0,0,"3889",,terminal_output +3795,4585299,"TERMINAL",0,0,"4404011",,terminal_output +3796,4586366,"TERMINAL",0,0,"6112",,terminal_output +3797,4587370,"TERMINAL",0,0,"7223",,terminal_output +3798,4588469,"TERMINAL",0,0,"8334",,terminal_output +3799,4589481,"TERMINAL",0,0,"9445",,terminal_output +3800,4590623,"TERMINAL",0,0,"40556",,terminal_output +3801,4591639,"TERMINAL",0,0,"1667",,terminal_output +3802,4592609,"TERMINAL",0,0,"2778",,terminal_output +3803,4593700,"TERMINAL",0,0,"3889",,terminal_output +3804,4594720,"TERMINAL",0,0,"49920",,terminal_output +3805,4595849,"TERMINAL",0,0,"550501",,terminal_output +3806,4596797,"TERMINAL",0,0,"6112",,terminal_output +3807,4597826,"TERMINAL",0,0,"7223",,terminal_output +3808,4598930,"TERMINAL",0,0,"8334",,terminal_output +3809,4599943,"TERMINAL",0,0,"9445",,terminal_output +3810,4601057,"TERMINAL",0,0,"50556",,terminal_output +3811,4602082,"TERMINAL",0,0,"1667",,terminal_output +3812,4603124,"TERMINAL",0,0,"2778",,terminal_output +3813,4604140,"TERMINAL",0,0,"3889",,terminal_output +3814,4605270,"TERMINAL",0,0,"49930",,terminal_output +3815,4606180,"TERMINAL",0,0,"57:007:001",,terminal_output +3816,4607236,"TERMINAL",0,0,"6112",,terminal_output +3817,4608330,"TERMINAL",0,0,"7223",,terminal_output +3818,4609324,"TERMINAL",0,0,"8445",,terminal_output +3819,4610367,"TERMINAL",0,0,"3:00556",,terminal_output +3820,4611420,"TERMINAL",0,0,"1667",,terminal_output +3821,4612529,"TERMINAL",0,0,"2778",,terminal_output +3822,4613553,"TERMINAL",0,0,"3889",,terminal_output +3823,4614689,"TERMINAL",0,0,"49940",,terminal_output +3824,4615664,"TERMINAL",0,0,"510101",,terminal_output +3825,4616647,"TERMINAL",0,0,"6112",,terminal_output +3826,4617735,"TERMINAL",0,0,"7223",,terminal_output +3827,4618948,"TERMINAL",0,0,"8334",,terminal_output +3828,4619906,"TERMINAL",0,0,"9445",,terminal_output +3829,4620873,"TERMINAL",0,0,"10556",,terminal_output +3830,4621953,"TERMINAL",0,0,"1667",,terminal_output +3831,4622926,"TERMINAL",0,0,"2778",,terminal_output +3832,4624109,"TERMINAL",0,0,"3889",,terminal_output +3833,4625030,"TERMINAL",0,0,"49950",,terminal_output +3834,4626039,"TERMINAL",0,0,"520201",,terminal_output +3835,4627182,"TERMINAL",0,0,"6112",,terminal_output +3836,4628138,"TERMINAL",0,0,"7223",,terminal_output +3837,4629230,"TERMINAL",0,0,"8334",,terminal_output 
+3838,4630253,"TERMINAL",0,0,"9445",,terminal_output +3839,4631243,"TERMINAL",0,0,"20556",,terminal_output +3840,4632310,"TERMINAL",0,0,"1778",,terminal_output +3841,4633348,"TERMINAL",0,0,"3889",,terminal_output +3842,4634385,"TERMINAL",0,0,"49940:00",,terminal_output +3843,4635433,"TERMINAL",0,0,"530301",,terminal_output +3844,4636592,"TERMINAL",0,0,"6112",,terminal_output +3845,4637523,"TERMINAL",0,0,"7223",,terminal_output +3846,4638649,"TERMINAL",0,0,"8334",,terminal_output +3847,4639661,"TERMINAL",0,0,"9445",,terminal_output +3848,4640697,"TERMINAL",0,0,"30556",,terminal_output +3849,4641683,"TERMINAL",0,0,"1667",,terminal_output +3850,4642754,"TERMINAL",0,0,"2778",,terminal_output +3851,4643868,"TERMINAL",0,0,"3889",,terminal_output +3852,4644828,"TERMINAL",0,0,"49910",,terminal_output +3853,4645914,"TERMINAL",0,0,"540401",,terminal_output +3854,4647040,"TERMINAL",0,0,"6112",,terminal_output +3855,4647956,"TERMINAL",0,0,"7223",,terminal_output +3856,4648961,"TERMINAL",0,0,"8334",,terminal_output +3857,4650118,"TERMINAL",0,0,"9445",,terminal_output +3858,4651170,"TERMINAL",0,0,"40556",,terminal_output +3859,4652193,"TERMINAL",0,0,"1667",,terminal_output +3860,4653193,"TERMINAL",0,0,"2778",,terminal_output +3861,4654207,"TERMINAL",0,0,"3889",,terminal_output +3862,4655219,"TERMINAL",0,0,"49920",,terminal_output +3863,4656350,"TERMINAL",0,0,"550501",,terminal_output +3864,4657394,"TERMINAL",0,0,"6223",,terminal_output +3865,4658442,"TERMINAL",0,0,"8334",,terminal_output +3866,4659457,"TERMINAL",0,0,"9445",,terminal_output +3867,4660582,"TERMINAL",0,0,"50556",,terminal_output +3868,4661690,"TERMINAL",0,0,"1667",,terminal_output +3869,4662575,"TERMINAL",0,0,"2778",,terminal_output +3870,4663624,"TERMINAL",0,0,"3889",,terminal_output +3871,4664858,"TERMINAL",0,0,"49930",,terminal_output +3872,4665876,"TERMINAL",0,0,"58:008:001",,terminal_output +3873,4666907,"TERMINAL",0,0,"6112",,terminal_output +3874,4667922,"TERMINAL",0,0,"7223",,terminal_output +3875,4668847,"TERMINAL",0,0,"8334",,terminal_output +3876,4669919,"TERMINAL",0,0,"9445",,terminal_output +3877,4671003,"TERMINAL",0,0,"4:00556",,terminal_output +3878,4672020,"TERMINAL",0,0,"1667",,terminal_output +3879,4672937,"TERMINAL",0,0,"2778",,terminal_output +3880,4673973,"TERMINAL",0,0,"3889",,terminal_output +3881,4675113,"TERMINAL",0,0,"49940",,terminal_output +3882,4676231,"TERMINAL",0,0,"510101",,terminal_output +3883,4677120,"TERMINAL",0,0,"6112",,terminal_output +3884,4678179,"TERMINAL",0,0,"7223",,terminal_output +3885,4679202,"TERMINAL",0,0,"8334",,terminal_output +3886,4680215,"TERMINAL",0,0,"9445",,terminal_output +3887,4681249,"TERMINAL",0,0,"10556",,terminal_output +3888,4682280,"TERMINAL",0,0,"1778",,terminal_output +3889,4683318,"TERMINAL",0,0,"3889",,terminal_output +3890,4684390,"TERMINAL",0,0,"49950",,terminal_output +3891,4685651,"TERMINAL",0,0,"520201",,terminal_output +3892,4686567,"TERMINAL",0,0,"6112",,terminal_output +3893,4688188,"TERMINAL",0,0,"7223",,terminal_output +3894,4689231,"TERMINAL",0,0,"8334",,terminal_output +3895,4690272,"TERMINAL",0,0,"9445",,terminal_output +3896,4691309,"TERMINAL",0,0,"20667",,terminal_output +3897,4692349,"TERMINAL",0,0,"2778",,terminal_output +3898,4693388,"TERMINAL",0,0,"3889",,terminal_output +3899,4694487,"TERMINAL",0,0,"4991:00",,terminal_output +3900,4695573,"TERMINAL",0,0,"530301",,terminal_output +3901,4696600,"TERMINAL",0,0,"6112",,terminal_output +3902,4697594,"TERMINAL",0,0,"7223",,terminal_output +3903,4698793,"TERMINAL",0,0,"8334",,terminal_output 
+3904,4699873,"TERMINAL",0,0,"9445",,terminal_output +3905,4700798,"TERMINAL",0,0,"30556",,terminal_output +3906,4702035,"TERMINAL",0,0,"1667",,terminal_output +3907,4702814,"TERMINAL",0,0,"2778",,terminal_output +3908,4703874,"TERMINAL",0,0,"3889",,terminal_output +3909,4704939,"TERMINAL",0,0,"49910",,terminal_output +3910,4706030,"TERMINAL",0,0,"540401",,terminal_output +3911,4706950,"TERMINAL",0,0,"6112",,terminal_output +3912,4708006,"TERMINAL",0,0,"7223",,terminal_output +3913,4709040,"TERMINAL",0,0,"8334",,terminal_output +3914,4710138,"TERMINAL",0,0,"9445",,terminal_output +3915,4711139,"TERMINAL",0,0,"40556",,terminal_output +3916,4712282,"TERMINAL",0,0,"1667",,terminal_output +3917,4713290,"TERMINAL",0,0,"2778",,terminal_output +3918,4714366,"TERMINAL",0,0,"3889",,terminal_output +3919,4715322,"TERMINAL",0,0,"49920",,terminal_output +3920,4716373,"TERMINAL",0,0,"551512",,terminal_output +3921,4717401,"TERMINAL",0,0,"7223",,terminal_output +3922,4718439,"TERMINAL",0,0,"8334",,terminal_output +3923,4719635,"TERMINAL",0,0,"9445",,terminal_output +3924,4721072,"TERMINAL",0,0,"50556",,terminal_output +3925,4721798,"TERMINAL",0,0,"1667",,terminal_output +3926,4722596,"TERMINAL",0,0,"2778",,terminal_output +3927,4723759,"TERMINAL",0,0,"3889",,terminal_output +3928,4724863,"TERMINAL",0,0,"49930",,terminal_output +3929,4725885,"TERMINAL",0,0,"59:009:001",,terminal_output +3930,4726914,"TERMINAL",0,0,"6112",,terminal_output +3931,4727932,"TERMINAL",0,0,"7223",,terminal_output +3932,4728929,"TERMINAL",0,0,"8334",,terminal_output +3933,4729876,"TERMINAL",0,0,"9445",,terminal_output +3934,4731124,"TERMINAL",0,0,"5:00556",,terminal_output +3935,4732032,"TERMINAL",0,0,"1667",,terminal_output +3936,4733011,"TERMINAL",0,0,"2778",,terminal_output +3937,4734077,"TERMINAL",0,0,"3889",,terminal_output +3938,4735152,"TERMINAL",0,0,"49940",,terminal_output +3939,4736105,"TERMINAL",0,0,"510101",,terminal_output +3940,4737214,"TERMINAL",0,0,"6112",,terminal_output +3941,4738277,"TERMINAL",0,0,"7223",,terminal_output +3942,4739327,"TERMINAL",0,0,"8334",,terminal_output +3943,4740410,"TERMINAL",0,0,"9445",,terminal_output +3944,4741410,"TERMINAL",0,0,"10667",,terminal_output +3945,4742490,"TERMINAL",0,0,"2778",,terminal_output +3946,4743603,"TERMINAL",0,0,"3889",,terminal_output +3947,4744728,"TERMINAL",0,0,"49950",,terminal_output +3948,4745618,"TERMINAL",0,0,"520201",,terminal_output +3949,4746682,"TERMINAL",0,0,"6112",,terminal_output +3950,4747597,"TERMINAL",0,0,"7223",,terminal_output +3951,4748628,"TERMINAL",0,0,"8334",,terminal_output +3952,4749652,"TERMINAL",0,0,"9445",,terminal_output +3953,4750769,"TERMINAL",0,0,"20556",,terminal_output +3954,4751723,"TERMINAL",0,0,"1667",,terminal_output +3955,4752860,"TERMINAL",0,0,"2778",,terminal_output +3956,4753927,"TERMINAL",0,0,"3889",,terminal_output +3957,4754973,"TERMINAL",0,0,"4992:00",,terminal_output +3958,4756198,"TERMINAL",0,0,"530301",,terminal_output +3959,4757265,"TERMINAL",0,0,"6112",,terminal_output +3960,4758437,"TERMINAL",0,0,"7223",,terminal_output +3961,4759399,"TERMINAL",0,0,"8334",,terminal_output +3962,4760426,"TERMINAL",0,0,"9445",,terminal_output +3963,4761444,"TERMINAL",0,0,"30556",,terminal_output +3964,4762584,"TERMINAL",0,0,"1667",,terminal_output +3965,4763672,"TERMINAL",0,0,"2778",,terminal_output +3966,4764609,"TERMINAL",0,0,"3889",,terminal_output +3967,4765717,"TERMINAL",0,0,"49910",,terminal_output +3968,4766939,"TERMINAL",0,0,"541412",,terminal_output +3969,4767963,"TERMINAL",0,0,"7223",,terminal_output 
+3970,4769097,"TERMINAL",0,0,"8334",,terminal_output +3971,4770139,"TERMINAL",0,0,"9445",,terminal_output +3972,4771361,"TERMINAL",0,0,"40556",,terminal_output +3973,4772391,"TERMINAL",0,0,"1667",,terminal_output +3974,4773473,"TERMINAL",0,0,"2778",,terminal_output +3975,4774189,"TERMINAL",0,0,"3889",,terminal_output +3976,4775243,"TERMINAL",0,0,"49920",,terminal_output +3977,4776373,"TERMINAL",0,0,"550501",,terminal_output +3978,4777276,"TERMINAL",0,0,"6112",,terminal_output +3979,4778059,"TERMINAL",0,0,"7223",,terminal_output +3980,4779351,"TERMINAL",0,0,"8334",,terminal_output +3981,4780057,"TERMINAL",0,0,"9445",,terminal_output +3982,4781183,"TERMINAL",0,0,"50556",,terminal_output +3983,4782309,"TERMINAL",0,0,"1667",,terminal_output +3984,4783439,"TERMINAL",0,0,"2778",,terminal_output +3985,4784413,"TERMINAL",0,0,"3889",,terminal_output +3986,4785483,"TERMINAL",0,0,"49930",,terminal_output +3987,4786248,"TERMINAL",0,0,"530:0030:001",,terminal_output +3988,4787239,"TERMINAL",0,0,"6112",,terminal_output +3989,4788271,"TERMINAL",0,0,"7223",,terminal_output +3990,4789348,"TERMINAL",0,0,"8334",,terminal_output +3991,4790410,"TERMINAL",0,0,"9445",,terminal_output +3992,4791478,"TERMINAL",0,0,"6:00556",,terminal_output +3993,4792573,"TERMINAL",0,0,"1778",,terminal_output +3994,4793775,"TERMINAL",0,0,"3889",,terminal_output +3995,4794902,"TERMINAL",0,0,"49940",,terminal_output +3996,4795804,"TERMINAL",0,0,"510101",,terminal_output +3997,4796849,"TERMINAL",0,0,"6112",,terminal_output +3998,4797917,"TERMINAL",0,0,"7223",,terminal_output +3999,4799026,"TERMINAL",0,0,"8334",,terminal_output +4000,4800001,"TERMINAL",0,0,"9445",,terminal_output +4001,4801456,"TERMINAL",0,0,"10556",,terminal_output +4002,4802008,"TERMINAL",0,0,"1667",,terminal_output +4003,4803153,"TERMINAL",0,0,"2778",,terminal_output +4004,4804126,"TERMINAL",0,0,"3889",,terminal_output +4005,4805140,"TERMINAL",0,0,"49950",,terminal_output +4006,4806370,"TERMINAL",0,0,"520201",,terminal_output +4007,4807429,"TERMINAL",0,0,"6112",,terminal_output +4008,4808574,"TERMINAL",0,0,"7223",,terminal_output +4009,4809650,"TERMINAL",0,0,"8334",,terminal_output +4010,4810671,"TERMINAL",0,0,"9445",,terminal_output +4011,4811606,"TERMINAL",0,0,"20556",,terminal_output +4012,4812364,"TERMINAL",0,0,"1667",,terminal_output +4013,4813642,"TERMINAL",0,0,"2778",,terminal_output +4014,4814573,"TERMINAL",0,0,"3889",,terminal_output +4015,4815711,"TERMINAL",0,0,"4993:00",,terminal_output +4016,4816589,"TERMINAL",0,0,"530301",,terminal_output +4017,4817417,"TERMINAL",0,0,"6112",,terminal_output +4018,4818584,"TERMINAL",0,0,"7334",,terminal_output +4019,4819555,"TERMINAL",0,0,"9445",,terminal_output +4020,4820756,"TERMINAL",0,0,"30556",,terminal_output +4021,4821833,"TERMINAL",0,0,"1667",,terminal_output +4022,4822801,"TERMINAL",0,0,"2778",,terminal_output +4023,4823741,"TERMINAL",0,0,"3889",,terminal_output +4024,4824760,"TERMINAL",0,0,"49910",,terminal_output +4025,4825790,"TERMINAL",0,0,"540401",,terminal_output +4026,4827056,"TERMINAL",0,0,"6112",,terminal_output +4027,4828004,"TERMINAL",0,0,"7223",,terminal_output +4028,4828789,"TERMINAL",0,0,"8334",,terminal_output +4029,4830025,"TERMINAL",0,0,"9445",,terminal_output +4030,4831163,"TERMINAL",0,0,"40556",,terminal_output +4031,4832050,"TERMINAL",0,0,"1667",,terminal_output +4032,4833097,"TERMINAL",0,0,"2778",,terminal_output +4033,4834127,"TERMINAL",0,0,"3889",,terminal_output +4034,4835175,"TERMINAL",0,0,"49920",,terminal_output +4035,4836272,"TERMINAL",0,0,"550501",,terminal_output 
+4036,4837502,"TERMINAL",0,0,"6112",,terminal_output +4037,4838423,"TERMINAL",0,0,"7223",,terminal_output +4038,4839391,"TERMINAL",0,0,"8334",,terminal_output +4039,4840413,"TERMINAL",0,0,"9445",,terminal_output +4040,4841316,"TERMINAL",0,0,"50556",,terminal_output +4041,4842175,"TERMINAL",0,0,"1667",,terminal_output +4042,4842673,"models/lam.py",4177,0,"",python,selection_mouse +4043,4843237,"TERMINAL",0,0,"2778",,terminal_output +4044,4844359,"TERMINAL",0,0,"3889",,terminal_output +4045,4845329,"models/lam.py",4141,0,"",python,selection_mouse +4046,4845363,"TERMINAL",0,0,"41:001:0031",,terminal_output +4047,4846330,"TERMINAL",0,0,"6112",,terminal_output +4048,4847390,"TERMINAL",0,0,"7223",,terminal_output +4049,4848085,"models/lam.py",4063,0,"",python,selection_mouse +4050,4848487,"TERMINAL",0,0,"8334",,terminal_output +4051,4849452,"TERMINAL",0,0,"9445",,terminal_output +4052,4849664,"models/lam.py",4076,0,"",python,selection_mouse +4053,4850612,"TERMINAL",0,0,"7:00556",,terminal_output +4054,4851597,"TERMINAL",0,0,"1667",,terminal_output +4055,4852599,"TERMINAL",0,0,"2778",,terminal_output +4056,4853785,"TERMINAL",0,0,"3889",,terminal_output +4057,4854689,"TERMINAL",0,0,"49940",,terminal_output +4058,4855671,"TERMINAL",0,0,"510101",,terminal_output +4059,4856756,"TERMINAL",0,0,"6112",,terminal_output +4060,4857779,"TERMINAL",0,0,"7223",,terminal_output +4061,4858846,"TERMINAL",0,0,"8334",,terminal_output +4062,4859876,"TERMINAL",0,0,"9445",,terminal_output +4063,4860894,"TERMINAL",0,0,"10556",,terminal_output +4064,4861966,"TERMINAL",0,0,"1667",,terminal_output +4065,4863022,"TERMINAL",0,0,"2778",,terminal_output +4066,4864064,"TERMINAL",0,0,"3889",,terminal_output +4067,4865153,"TERMINAL",0,0,"49950",,terminal_output +4068,4866076,"TERMINAL",0,0,"520201",,terminal_output +4069,4867177,"TERMINAL",0,0,"6112",,terminal_output +4070,4868164,"TERMINAL",0,0,"7223",,terminal_output +4071,4869277,"TERMINAL",0,0,"8334",,terminal_output +4072,4870309,"TERMINAL",0,0,"9445",,terminal_output +4073,4871265,"TERMINAL",0,0,"20556",,terminal_output +4074,4872327,"TERMINAL",0,0,"1778",,terminal_output +4075,4873444,"TERMINAL",0,0,"3889",,terminal_output +4076,4874420,"TERMINAL",0,0,"4994:00",,terminal_output +4077,4875480,"TERMINAL",0,0,"530301",,terminal_output +4078,4876514,"TERMINAL",0,0,"6112",,terminal_output +4079,4877543,"TERMINAL",0,0,"7223",,terminal_output +4080,4878576,"TERMINAL",0,0,"8334",,terminal_output +4081,4879600,"TERMINAL",0,0,"9445",,terminal_output +4082,4880716,"TERMINAL",0,0,"30556",,terminal_output +4083,4881732,"TERMINAL",0,0,"1667",,terminal_output +4084,4882758,"TERMINAL",0,0,"2778",,terminal_output +4085,4883889,"TERMINAL",0,0,"3889",,terminal_output +4086,4884913,"TERMINAL",0,0,"49910",,terminal_output +4087,4885939,"TERMINAL",0,0,"540401",,terminal_output +4088,4886884,"TERMINAL",0,0,"6112",,terminal_output +4089,4887920,"TERMINAL",0,0,"7223",,terminal_output +4090,4889060,"TERMINAL",0,0,"8334",,terminal_output +4091,4889978,"TERMINAL",0,0,"9445",,terminal_output +4092,4891005,"TERMINAL",0,0,"40556",,terminal_output +4093,4892054,"TERMINAL",0,0,"1667",,terminal_output +4094,4893116,"TERMINAL",0,0,"2778",,terminal_output +4095,4894192,"TERMINAL",0,0,"3889",,terminal_output +4096,4895255,"TERMINAL",0,0,"49920",,terminal_output +4097,4896210,"TERMINAL",0,0,"550501",,terminal_output +4098,4897303,"TERMINAL",0,0,"6112",,terminal_output +4099,4898326,"TERMINAL",0,0,"7223",,terminal_output +4100,4899405,"TERMINAL",0,0,"8334",,terminal_output 
+4101,4900412,"TERMINAL",0,0,"9556",,terminal_output +4102,4901456,"TERMINAL",0,0,"51667",,terminal_output +4103,4902550,"TERMINAL",0,0,"2778",,terminal_output +4104,4903655,"TERMINAL",0,0,"3889",,terminal_output +4105,4904716,"TERMINAL",0,0,"49930",,terminal_output +4106,4905915,"TERMINAL",0,0,"52:002:001",,terminal_output +4107,4907008,"TERMINAL",0,0,"6112",,terminal_output +4108,4908023,"TERMINAL",0,0,"7223",,terminal_output +4109,4908992,"TERMINAL",0,0,"8334",,terminal_output +4110,4909910,"TERMINAL",0,0,"9445",,terminal_output +4111,4911026,"TERMINAL",0,0,"8:00556",,terminal_output +4112,4912162,"TERMINAL",0,0,"1667",,terminal_output +4113,4913035,"TERMINAL",0,0,"2778",,terminal_output +4114,4914204,"TERMINAL",0,0,"3889",,terminal_output +4115,4915283,"TERMINAL",0,0,"49940",,terminal_output +4116,4916168,"TERMINAL",0,0,"510101",,terminal_output +4117,4917179,"TERMINAL",0,0,"6112",,terminal_output +4118,4918189,"TERMINAL",0,0,"7223",,terminal_output +4119,4919347,"TERMINAL",0,0,"8334",,terminal_output +4120,4920342,"TERMINAL",0,0,"9445",,terminal_output +4121,4921281,"TERMINAL",0,0,"10556",,terminal_output +4122,4922396,"TERMINAL",0,0,"1778",,terminal_output +4123,4923426,"TERMINAL",0,0,"3889",,terminal_output +4124,4924471,"TERMINAL",0,0,"49950",,terminal_output +4125,4925509,"TERMINAL",0,0,"520201",,terminal_output +4126,4926183,"models/lam.py",4081,0,"",python,selection_mouse +4127,4926328,"models/lam.py",4067,17,"video_recon_BTm1P",python,selection_mouse +4128,4926544,"TERMINAL",0,0,"6112",,terminal_output +4129,4927624,"TERMINAL",0,0,"7223",,terminal_output +4130,4928671,"TERMINAL",0,0,"8334",,terminal_output +4131,4929805,"TERMINAL",0,0,"9445",,terminal_output +4132,4930851,"TERMINAL",0,0,"20556",,terminal_output +4133,4932016,"TERMINAL",0,0,"1667",,terminal_output +4134,4932957,"TERMINAL",0,0,"2778",,terminal_output +4135,4934173,"TERMINAL",0,0,"3889",,terminal_output +4136,4935099,"TERMINAL",0,0,"4995:00",,terminal_output +4137,4936158,"TERMINAL",0,0,"530301",,terminal_output +4138,4937189,"TERMINAL",0,0,"6112",,terminal_output +4139,4938326,"TERMINAL",0,0,"7223",,terminal_output +4140,4939305,"TERMINAL",0,0,"8334",,terminal_output +4141,4939677,"TERMINAL",0,0,"g",,terminal_output +4142,4940337,"TERMINAL",0,0,"9445",,terminal_output +4143,4940835,"TERMINAL",0,0,"t",,terminal_output +4144,4941407,"TERMINAL",0,0,"30556",,terminal_output +4145,4941474,"TERMINAL",0,0,"",,terminal_output +4146,4942148,"TERMINAL",0,0,"i",,terminal_output +4147,4942210,"TERMINAL",0,0,"t",,terminal_output +4148,4942320,"TERMINAL",0,0," ",,terminal_output +4149,4942633,"TERMINAL",0,0,"1778",,terminal_output +4150,4942678,"TERMINAL",0,0,"st",,terminal_output +4151,4942930,"TERMINAL",0,0,"a",,terminal_output +4152,4943019,"TERMINAL",0,0,"t",,terminal_output +4153,4943113,"TERMINAL",0,0,"u",,terminal_output +4154,4943232,"TERMINAL",0,0,"s",,terminal_output +4155,4943468,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4156,4943669,"TERMINAL",0,0,"On branch validation-loss\r\nYour branch is up to date with 'origin/validation-loss'.\r\n\r\nChanges to be committed:\r\n (use ""git restore --staged ..."" to unstage)\r\n\tmodified: train_tokenizer.py\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: train_lam.py\r\n\tmodified: train_tokenizer.py\r\n\tmodified: utils/dataloader.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be 
committed)\r\n\tdiff.diff\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\tlogs/\r\n\toverfit_dir.zip\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\tutils/visualizer.py\r\n\r\n]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +4157,4943822,"TERMINAL",0,0,"3889",,terminal_output +4158,4944489,"TERMINAL",0,0,"49910",,terminal_output +4159,4945586,"TERMINAL",0,0,"540401",,terminal_output +4160,4945790,"TERMINAL",0,0,"g",,terminal_output +4161,4945925,"TERMINAL",0,0,"it",,terminal_output +4162,4946038,"TERMINAL",0,0," ",,terminal_output +4163,4946207,"TERMINAL",0,0,"c",,terminal_output +4164,4946305,"TERMINAL",0,0,"o",,terminal_output +4165,4946506,"TERMINAL",0,0,"m",,terminal_output +4166,4946611,"TERMINAL",0,0,"m",,terminal_output +4167,4946695,"TERMINAL",0,0,"6112",,terminal_output +4168,4946736,"TERMINAL",0,0,"i",,terminal_output +4169,4946822,"TERMINAL",0,0,"t",,terminal_output +4170,4946945,"TERMINAL",0,0," ",,terminal_output +4171,4947014,"TERMINAL",0,0,"-",,terminal_output +4172,4947173,"TERMINAL",0,0,"a",,terminal_output +4173,4947309,"TERMINAL",0,0,"m",,terminal_output +4174,4947438,"TERMINAL",0,0," ",,terminal_output +4175,4947593,"TERMINAL",0,0,"""",,terminal_output +4176,4947786,"TERMINAL",0,0,"7223",,terminal_output +4177,4948918,"TERMINAL",0,0,"8334",,terminal_output +4178,4949880,"TERMINAL",0,0,"9445",,terminal_output +4179,4950247,"TERMINAL",0,0,"a",,terminal_output +4180,4950370,"TERMINAL",0,0,"d",,terminal_output +4181,4950587,"TERMINAL",0,0,"d",,terminal_output +4182,4950739,"TERMINAL",0,0,"40556",,terminal_output +4183,4950844,"TERMINAL",0,0,"e",,terminal_output +4184,4950901,"TERMINAL",0,0,"d",,terminal_output +4185,4951080,"TERMINAL",0,0," ",,terminal_output +4186,4951206,"TERMINAL",0,0,"v",,terminal_output +4187,4951471,"TERMINAL",0,0,"al",,terminal_output +4188,4951656,"TERMINAL",0,0," l",,terminal_output +4189,4951791,"TERMINAL",0,0,"o",,terminal_output +4190,4951826,"TERMINAL",0,0,"1667",,terminal_output +4191,4951958,"TERMINAL",0,0,"s",,terminal_output +4192,4952112,"TERMINAL",0,0,"s ",,terminal_output +4193,4952382,"TERMINAL",0,0,"to",,terminal_output +4194,4952471,"TERMINAL",0,0," ",,terminal_output +4195,4952528,"TERMINAL",0,0,"l",,terminal_output +4196,4952689,"TERMINAL",0,0,"a",,terminal_output +4197,4952805,"TERMINAL",0,0,"m",,terminal_output +4198,4952806,"TERMINAL",0,0,"2778",,terminal_output +4199,4953039,"TERMINAL",0,0,"""",,terminal_output +4200,4953332,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4201,4953605,"TERMINAL",0,0,"[validation-loss d53848c] added val loss to lam\r\n 3 files changed, 178 insertions(+), 80 deletions(-)\r\n]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +4202,4953898,"TERMINAL",0,0,"3889",,terminal_output +4203,4954212,"TERMINAL",0,0,"g",,terminal_output +4204,4954289,"TERMINAL",0,0,"i",,terminal_output +4205,4954406,"TERMINAL",0,0,"t",,terminal_output +4206,4954473,"TERMINAL",0,0," ",,terminal_output +4207,4954548,"TERMINAL",0,0,"c",,terminal_output +4208,4954673,"TERMINAL",0,0,"h",,terminal_output +4209,4954774,"TERMINAL",0,0,"e",,terminal_output +4210,4954959,"TERMINAL",0,0,"ck",,terminal_output +4211,4954960,"TERMINAL",0,0,"49920",,terminal_output +4212,4955136,"TERMINAL",0,0,"ou",,terminal_output +4213,4955241,"TERMINAL",0,0,"t",,terminal_output +4214,4955340,"TERMINAL",0,0," ",,terminal_output +4215,4955922,"TERMINAL",0,0,"git checkout -b 
""train_lam_coinrun_ablation_wsd_3e-6_28747"" bbef694ce01972ca6d625aa8e4d9e5c0d3f95305",,terminal_output +4216,4955974,"TERMINAL",0,0,"550501",,terminal_output +4217,4956990,"TERMINAL",0,0,"6112",,terminal_output +4218,4957401,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +4219,4958232,"TERMINAL",0,0,"7223",,terminal_output +4220,4958652,"TERMINAL",0,0,"git checkout -b ""train_lam_coinrun_ablation_wsd_3e-6_28747"" bbef694ce01972ca6d625aa8e4d9e5c0d3f95305",,terminal_output +4221,4959369,"TERMINAL",0,0,"8334",,terminal_output +4222,4959436,"TERMINAL",0,0,"\rgit checkout -b ""train_lam_coinrun_ablation_wsd_3e-6_28747"" bbef694ce01972ca6d625aa8e4d9e5c0d3f95305\r\n[?2004l\r",,terminal_output +4223,4960405,"TERMINAL",0,0,"Switched to a new branch 'train_lam_coinrun_ablation_wsd_3e-6_28747'\r\n]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +4224,4960423,"TERMINAL",0,0,"9445",,terminal_output +4225,4960656,"",0,0,"Switched from branch 'validation-loss' to 'train_lam_coinrun_ablation_wsd_3e-6_28747'",,git_branch_checkout +4226,4961622,"TERMINAL",0,0,"50556",,terminal_output +4227,4962739,"TERMINAL",0,0,"1667",,terminal_output +4228,4963825,"TERMINAL",0,0,"2778",,terminal_output +4229,4964633,"TERMINAL",0,0,"3889",,terminal_output +4230,4965268,"models/lam.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass LatentActionModel(nnx.Module):\n """"""Latent Action ST-ViVit VQ-VAE\n \n Dimension keys:\n B: batch size\n T: sequence length\n N: number of patches per frame\n M: model dimension\n L: latent dimension\n E: B * (T - 1)\n H: height\n W: width\n C: number of channels (n_dim)\n P: patch token dimension (patch_size^2 * C)\n\n Tm1: T - 1\n Np1: N + 1\n """"""\n\n def __init__(\n self,\n in_dim: int,\n model_dim: int,\n ffn_dim: int,\n latent_dim: int,\n num_latents: int,\n patch_size: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n codebook_dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.in_dim = in_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.patch_size = patch_size\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.codebook_dropout = codebook_dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.patch_token_dim = self.in_dim * self.patch_size**2\n self.encoder = STTransformer(\n self.patch_token_dim,\n self.model_dim,\n self.ffn_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.action_in = nnx.Param(\n nnx.initializers.lecun_uniform()(\n rngs.params(), (1, 1, 1, self.patch_token_dim)\n )\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n rngs=rngs,\n )\n self.patch_up = nnx.Linear(\n self.patch_token_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.action_up = nnx.Linear(\n self.latent_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n 
)\n self.decoder = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.patch_token_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n\n def __call__(\n self, batch: Dict[str, jax.Array], training: bool = True\n ) -> Dict[str, jax.Array]:\n # --- Encode + VQ ---\n H, W = batch[""videos""].shape[2:4]\n videos_BTHWC = batch[""videos""]\n outputs = self.vq_encode(videos_BTHWC, training)\n patch_BTNP = outputs[""patches""]\n z_q_BTm11L = outputs[""z_q""]\n action_BTm11M = self.action_up(z_q_BTm11L)\n patch_BTm1NM = self.patch_up(patch_BTNP[:, :-1])\n action_BTm1NM = jnp.broadcast_to(action_BTm11M, patch_BTm1NM.shape)\n video_action_patches_BTm1NM = action_BTm1NM + patch_BTm1NM\n del outputs[""patches""], patch_BTNP, patch_BTm1NM\n\n # --- Decode ---\n video_recon_BTm1P = self.decoder(video_action_patches_BTm1NM)\n video_recon_BTm1P = video_recon_BTm1P.astype(jnp.float32)\n video_recon_BTm1P = nnx.sigmoid(video_recon_BTm1P)\n video_recon_BTm1P = video_recon_BTm1P.astype(self.dtype)\n video_recon_BTHWC = unpatchify(video_recon_BTm1P, self.patch_size, H, W)\n outputs[""recon""] = video_recon_BTHWC\n return outputs\n\n def vq_encode(\n self, videos_BTHWC: jax.Array, training: bool = True\n ) -> Dict[str, jax.Array]:\n # --- Preprocess videos ---\n B, T = videos_BTHWC.shape[:2]\n patch_BTNP = patchify(videos_BTHWC, self.patch_size)\n action_pad_BT1P = jnp.broadcast_to(\n self.action_in.value, (B, T, 1, self.patch_token_dim)\n )\n padded_patch_BTNp1P = jnp.concatenate((action_pad_BT1P, patch_BTNP), axis=2)\n\n # --- Encode ---\n z_BTNp1L = self.encoder(padded_patch_BTNp1P)\n # Get latent action for all future frames\n z_BTm1L = z_BTNp1L[:, 1:, 0]\n\n # --- Vector quantize ---\n z_EL = z_BTm1L.reshape(B * (T - 1), self.latent_dim)\n z_q_EL, z_EL, emb_EL, indices_E = self.vq(z_EL, training)\n z_q_BTm11L = z_q_EL.reshape(B, T - 1, 1, self.latent_dim)\n return dict(patches=patch_BTNP, z_q=z_q_BTm11L, z=z_EL, emb=emb_EL, indices=indices_E)\n",python,tab +4231,4965621,"TERMINAL",0,0,"49930",,terminal_output +4232,4966774,"models/lam.py",262,4856,"\n Dimension keys:\n B: batch size\n T: sequence length\n N: number of patches per frame\n M: model dimension\n L: latent dimension\n E: B * (T - 1)\n H: height\n W: width\n C: number of channels (n_dim)\n P: patch token dimension (patch_size^2 * C)\n\n Tm1: T - 1\n Np1: N + 1\n """"""\n\n def __init__(\n self,\n in_dim: int,\n model_dim: int,\n ffn_dim: int,\n latent_dim: int,\n num_latents: int,\n patch_size: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n codebook_dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.in_dim = in_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.patch_size = patch_size\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.codebook_dropout = codebook_dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.patch_token_dim = self.in_dim * self.patch_size**2\n self.encoder = STTransformer(\n self.patch_token_dim,\n self.model_dim,\n self.ffn_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.action_in = nnx.Param(\n 
nnx.initializers.lecun_uniform()(\n rngs.params(), (1, 1, 1, self.patch_token_dim)\n )\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n self.dtype,\n rngs=rngs,\n )\n self.patch_up = nnx.Linear(\n self.patch_token_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.action_up = nnx.Linear(\n self.latent_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.decoder = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.patch_token_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n\n def __call__(\n self, batch: Dict[str, jax.Array], training: bool = True\n ) -> Dict[str, jax.Array]:\n # --- Encode + VQ ---\n H, W = batch[""videos""].shape[2:4]\n videos_BTHWC = batch[""videos""]\n outputs = self.vq_encode(videos_BTHWC, training)\n patch_BTNP = outputs[""patches""]\n z_q_BTm11L = outputs[""z_q""]\n action_BTm11M = self.action_up(z_q_BTm11L)\n patch_BTm1NM = self.patch_up(patch_BTNP[:, :-1])\n action_BTm1NM = jnp.broadcast_to(action_BTm11M, patch_BTm1NM.shape)\n video_action_patches_BTm1NM = action_BTm1NM + patch_BTm1NM\n del outputs[""patches""], patch_BTNP, patch_BTm1NM\n\n # --- Decode ---\n video_recon_BTm1P = self.decoder(video_action_patches_BTm1NM)\n video_recon_BTm1P = video_recon_BTm1P.astype(jnp.float32)\n video_recon_BTm1P = nnx.sigmoid(video_recon_BTm1P)\n video_recon_BTm1P = video_recon_BTm1P.astype(self.dtype)\n video_recon_BTHWC = unpatchify(video_recon_BTm1P, self.patch_size, H, W)\n outputs[""recon""] = video_recon_BTHWC\n return outputs\n\n def vq_encode(\n self, videos_BTHWC: jax.Array, training: bool = True\n ) -> Dict[str, jax.Array]:\n # --- Preprocess videos ---\n B, T = videos_BTHWC.shape[:2]\n patch_BTNP = patchify(videos_BTHWC, self.patch_size)\n action_pad_BT1P = jnp.broadcast_to(\n self.action_in.value, (B, T, 1, self.patch_token_dim)\n )\n padded_patch_BTNp1P = jnp.concatenate((action_pad_BT1P, patch_BTNP), axis=2)\n\n # --- Encode ---\n z_BTNp1L = self.encoder(padded_patch_BTNp1P)\n # Get latent action for all future frames\n z_BTm1L = z_BTNp1L[:, 1:, 0]\n\n # --- Vector quantize ---\n z_EL = z_BTm1L.reshape(B * (T - 1), self.latent_dim)\n z_q_EL, z_EL, emb_EL, indices_E = self.vq(z_EL, training)\n z_q_BTm11L = z_q_EL.reshape(B, T - 1, 1, self.latent_dim)\n return dict(\n patches=patch_BTNP, z_q=z_q_BTm11L, z=z_EL, emb=emb_EL, indices=indices_E\n )\n",python,content +4233,4966887,"TERMINAL",0,0,"53:003:001",,terminal_output +4234,4968006,"TERMINAL",0,0,"6112",,terminal_output +4235,4969090,"TERMINAL",0,0,"7223",,terminal_output +4236,4970225,"TERMINAL",0,0,"8445",,terminal_output +4237,4971038,"TERMINAL",0,0,"9:00556",,terminal_output +4238,4971692,"TERMINAL",0,0,"1667",,terminal_output +4239,4972920,"TERMINAL",0,0,"2778",,terminal_output +4240,4974256,"TERMINAL",0,0,"3889",,terminal_output +4241,4975357,"TERMINAL",0,0,"49940",,terminal_output +4242,4975627,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=00:20:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH 
--error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=10 \\n --log_checkpoint_interval=1000 \\n --log \\n --name=coinrun-lam-dev-$slurm_job_id \\n --tags lam coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 200 \\n --val_interval 10 \\n --val_steps 50 \\n --data_dir $array_records_dir_train\n\n",shellscript,tab +4243,4976420,"TERMINAL",0,0,"510101",,terminal_output +4244,4976908,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1605,0,"",shellscript,selection_mouse +4245,4976922,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1604,0,"",shellscript,selection_command +4246,4977073,"TERMINAL",0,0,"6112",,terminal_output +4247,4978397,"TERMINAL",0,0,"7223",,terminal_output +4248,4979467,"TERMINAL",0,0,"8334",,terminal_output +4249,4980284,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1561,45,"",shellscript,content +4250,4980290,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1565,0,"",shellscript,selection_command +4251,4980619,"TERMINAL",0,0,"9445",,terminal_output +4252,4981565,"TERMINAL",0,0,"10556",,terminal_output +4253,4982533,"TERMINAL",0,0,"1667",,terminal_output +4254,4983273,"TERMINAL",0,0,"2778",,terminal_output +4255,4984152,"TERMINAL",0,0,"3889",,terminal_output +4256,4985348,"TERMINAL",0,0,"49950",,terminal_output +4257,4986584,"TERMINAL",0,0,"520201",,terminal_output +4258,4986739,"TERMINAL",0,0,"git checkout -b ""train_lam_coinrun_ablation_wsd_3e-6_28747"" bbef694ce01972ca6d625aa8e4d9e5c0d3f95305",,terminal_output +4259,4987300,"TERMINAL",0,0,"\rommit -am ""added val loss to lam""",,terminal_output +4260,4987631,"TERMINAL",0,0,"6112",,terminal_output +4261,4988383,"TERMINAL",0,0,"status",,terminal_output +4262,4988897,"TERMINAL",0,0,"7223",,terminal_output +4263,4989157,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",,terminal_output +4264,4989653,"TERMINAL",0,0,"\rqueue",,terminal_output +4265,4989806,"TERMINAL",0,0,"8334",,terminal_output +4266,4989963,"TERMINAL",0,0,"scancel 3465677",,terminal_output +4267,4990481,"TERMINAL",0,0,"queue",,terminal_output +4268,4990530,"TERMINAL",0,0,"9445",,terminal_output +4269,4991679,"TERMINAL",0,0,"20556",,terminal_output +4270,4992402,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",,terminal_output +4271,4992639,"TERMINAL",0,0,"1667",,terminal_output +4272,4993918,"TERMINAL",0,0,"2778",,terminal_output +4273,4994466,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH 
--ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --job-name=train_lam_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_lam.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=1000 \\r\n --log \\r\n --name=coinrun-lam-dev-$slurm_job_id \\r\n --tags lam coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 200 \\r\n --data_dir $array_records_dir_train\r\n\r\nSLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3453268\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0403\r\nSLURM_JOB_START_TIME=1757073804\r\nSLURM_STEP_NODELIST=hkn0403\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757077404\r\nSLURM_PMI2_SRUN_PORT=36059\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3468617\r\nSLURM_PTY_PORT=33607\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=36\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0403\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=196\r\nSLURM_NODELIST=hkn0403\r\nSLURM_SRUN_COMM_PORT=43381\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3468617\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0403\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=43381\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0403\r\nGpuFreq=control_disabled\r\n",,terminal_output +4274,4994502,"TERMINAL",0,0,"3889",,terminal_output +4275,4995613,"TERMINAL",0,0,"430306:01",,terminal_output +4276,4996622,"TERMINAL",0,0,"6112",,terminal_output 
+4277,4997651,"TERMINAL",0,0,"7223",,terminal_output +4278,4998770,"TERMINAL",0,0,"8334",,terminal_output +4279,4999625,"TERMINAL",0,0,"9445",,terminal_output +4280,5000637,"TERMINAL",0,0,"30556",,terminal_output +4281,5001473,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +4282,5001753,"TERMINAL",0,0,"1667",,terminal_output +4283,5002266,"TERMINAL",0,0,"wandb: creating run\r\n",,terminal_output +4284,5002411,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_144931-z21qnvj4\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-lam-dev-3468617\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/z21qnvj4\r\n",,terminal_output +4285,5002934,"TERMINAL",0,0,"2778",,terminal_output +4286,5004420,"TERMINAL",0,0,"3889",,terminal_output +4287,5005936,"TERMINAL",0,0,"49910",,terminal_output +4288,5006161,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['action_in', 'action_up', 'decoder', 'encoder', 'patch_up', 'vq']\r\nParameter counts:\r\n{'action_in': 768, 'action_up': 16896, 'decoder': 17474816, 'encoder': 17228832, 'patch_up': 393728, 'vq': 192, 'total': 35115232}\r\n",,terminal_output +4289,5006224,"TERMINAL",0,0,"540401",,terminal_output +4290,5007285,"TERMINAL",0,0,"6112",,terminal_output +4291,5008113,"TERMINAL",0,0,"7223",,terminal_output +4292,5009130,"TERMINAL",0,0,"8334",,terminal_output +4293,5010175,"TERMINAL",0,0,"9445",,terminal_output +4294,5011276,"TERMINAL",0,0,"40556",,terminal_output +4295,5012402,"TERMINAL",0,0,"1667",,terminal_output +4296,5013503,"TERMINAL",0,0,"2778",,terminal_output +4297,5014451,"TERMINAL",0,0,"2025-09-05 14:49:43.783638: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 14:49:43.784795: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 14:49:43.784813: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 14:49:43.785280: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4298,5014452,"TERMINAL",0,0,"3889",,terminal_output +4299,5015375,"TERMINAL",0,0,"49920",,terminal_output +4300,5016465,"TERMINAL",0,0,"550501",,terminal_output +4301,5017327,"TERMINAL",0,0,"6112",,terminal_output +4302,5018660,"TERMINAL",0,0,"7223",,terminal_output +4303,5019553,"TERMINAL",0,0,"8445",,terminal_output +4304,5020814,"TERMINAL",0,0,"50556",,terminal_output +4305,5021642,"TERMINAL",0,0,"1667",,terminal_output +4306,5022668,"TERMINAL",0,0,"2778",,terminal_output +4307,5023779,"TERMINAL",0,0,"3889",,terminal_output +4308,5024741,"TERMINAL",0,0,"49930",,terminal_output +4309,5025957,"TERMINAL",0,0,"54:004:001",,terminal_output +4310,5027056,"TERMINAL",0,0,"6112",,terminal_output +4311,5027875,"TERMINAL",0,0,"7223",,terminal_output +4312,5029036,"TERMINAL",0,0,"8334",,terminal_output +4313,5030323,"TERMINAL",0,0,"9445",,terminal_output +4314,5031348,"TERMINAL",0,0,"50:00556",,terminal_output +4315,5032250,"TERMINAL",0,0,"1667",,terminal_output +4316,5032973,"TERMINAL",0,0,"2778",,terminal_output +4317,5034264,"TERMINAL",0,0,"3889",,terminal_output +4318,5035408,"TERMINAL",0,0,"49940",,terminal_output +4319,5036113,"TERMINAL",0,0,"510101",,terminal_output +4320,5037368,"TERMINAL",0,0,"6112",,terminal_output +4321,5038275,"TERMINAL",0,0,"7223",,terminal_output +4322,5039373,"TERMINAL",0,0,"8334",,terminal_output +4323,5040572,"TERMINAL",0,0,"9445",,terminal_output +4324,5041423,"TERMINAL",0,0,"10556",,terminal_output +4325,5042368,"TERMINAL",0,0,"1667",,terminal_output +4326,5043487,"TERMINAL",0,0,"2778",,terminal_output +4327,5044549,"TERMINAL",0,0,"3889",,terminal_output +4328,5045699,"TERMINAL",0,0,"4202051",,terminal_output +4329,5046697,"TERMINAL",0,0,"6112",,terminal_output +4330,5047696,"TERMINAL",0,0,"7223",,terminal_output +4331,5048861,"TERMINAL",0,0,"8334",,terminal_output +4332,5049789,"TERMINAL",0,0,"9445",,terminal_output +4333,5051530,"TERMINAL",0,0,"20556",,terminal_output +4334,5052656,"TERMINAL",0,0,"1667",,terminal_output +4335,5053671,"TERMINAL",0,0,"2778",,terminal_output +4336,5054384,"TERMINAL",0,0,"3889",,terminal_output +4337,5055365,"TERMINAL",0,0,"4997:00",,terminal_output +4338,5056138,"TERMINAL",0,0,"530301",,terminal_output +4339,5057264,"TERMINAL",0,0,"6112",,terminal_output +4340,5058343,"TERMINAL",0,0,"7223",,terminal_output +4341,5059352,"TERMINAL",0,0,"8334",,terminal_output +4342,5060398,"TERMINAL",0,0,"9445",,terminal_output +4343,5061324,"TERMINAL",0,0,"30556",,terminal_output +4344,5062338,"TERMINAL",0,0,"1667",,terminal_output +4345,5063168,"TERMINAL",0,0,"2778",,terminal_output +4346,5064138,"TERMINAL",0,0,"3889",,terminal_output +4347,5065139,"TERMINAL",0,0,"49910",,terminal_output +4348,5066179,"TERMINAL",0,0,"540401",,terminal_output +4349,5067348,"TERMINAL",0,0,"6112",,terminal_output +4350,5068338,"TERMINAL",0,0,"7223",,terminal_output +4351,5069339,"TERMINAL",0,0,"8334",,terminal_output +4352,5070317,"TERMINAL",0,0,"9556",,terminal_output +4353,5071330,"TERMINAL",0,0,"41667",,terminal_output +4354,5072420,"TERMINAL",0,0,"2778",,terminal_output +4355,5073433,"TERMINAL",0,0,"3889",,terminal_output +4356,5074473,"TERMINAL",0,0,"49920",,terminal_output +4357,5075482,"TERMINAL",0,0,"550501",,terminal_output +4358,5076530,"TERMINAL",0,0,"6112",,terminal_output +4359,5077633,"TERMINAL",0,0,"7223",,terminal_output +4360,5078756,"TERMINAL",0,0,"8334",,terminal_output 
+4361,5079752,"TERMINAL",0,0,"9445",,terminal_output +4362,5080805,"TERMINAL",0,0,"50556",,terminal_output +4363,5081829,"TERMINAL",0,0,"1667",,terminal_output +4364,5082865,"TERMINAL",0,0,"2778",,terminal_output +4365,5083855,"TERMINAL",0,0,"3889",,terminal_output +4366,5084959,"TERMINAL",0,0,"49930",,terminal_output +4367,5086134,"TERMINAL",0,0,"55:005:001",,terminal_output +4368,5086953,"TERMINAL",0,0,"6112",,terminal_output +4369,5087989,"TERMINAL",0,0,"7223",,terminal_output +4370,5089052,"TERMINAL",0,0,"8334",,terminal_output +4371,5090087,"TERMINAL",0,0,"9445",,terminal_output +4372,5091122,"TERMINAL",0,0,"1:00556",,terminal_output +4373,5092273,"TERMINAL",0,0,"1667",,terminal_output +4374,5093320,"TERMINAL",0,0,"2778",,terminal_output +4375,5094231,"TERMINAL",0,0,"3889",,terminal_output +4376,5095265,"TERMINAL",0,0,"49940",,terminal_output +4377,5096299,"TERMINAL",0,0,"510101",,terminal_output +4378,5097338,"TERMINAL",0,0,"6223",,terminal_output +4379,5098421,"TERMINAL",0,0,"8334",,terminal_output +4380,5099250,"TERMINAL",0,0,"Total memory size: 0.6 GB, Output size: 0.4 GB, Temp size: 0.2 GB, Argument size: 0.4 GB, Host temp size: 0.0 GB.\r\nFLOPs: 2.516e+11, Bytes: 7.303e+09 (6.8 GB), Intensity: 34.4 FLOPs/byte\r\nStarting training from step 0...\r\n\r\nMemstats: After params initialized:\r\n\tUsing (GB) 0.43 / 38.7 (1.111111%) on cuda:0\r\nStep 0, loss: 0.2907809615135193\r\nStep 1, loss: 0.20487669110298157\r\nStep 2, loss: 0.19224461913108826\r\nStep 3, loss: 0.20198248326778412\r\nStep 4, loss: 0.16347838938236237\r\nStep 5, loss: 0.16230832040309906\r\nStep 6, loss: 0.14663851261138916\r\nStep 7, loss: 0.13977152109146118\r\nStep 8, loss: 0.13386794924736023\r\nStep 9, loss: 0.13691286742687225\r\nStep 10, loss: 0.10192667692899704\r\nStep 11, loss: 0.10446860641241074\r\nStep 12, loss: 0.09416987746953964\r\nStep 13, loss: 0.09780814498662949\r\nStep 14, loss: 0.09558838605880737\r\nStep 15, loss: 0.09635493904352188\r\nStep 16, loss: 0.08359640836715698\r\nStep 17, loss: 0.09577649086713791\r\nStep 18, loss: 0.07302761822938919\r\nStep 19, loss: 0.07869668304920197\r\nStep 20, loss: 0.07822348922491074\r\nStep 21, loss: 0.0664888247847557\r\nStep 22, loss: 0.06101291626691818\r\nStep 23, loss: 0.06660832464694977\r\nStep 24, loss: 0.05325968191027641\r\nStep 25, loss: 0.05030716955661774\r\nStep 26, loss: 0.0461876317858696\r\nStep 27, loss: 0.05877241492271423\r\nStep 28, loss: 0.05669289454817772\r\nStep 29, loss: 0.0490875318646431\r\nStep 30, loss: 0.054680608212947845\r\nStep 31, loss: 0.038881272077560425\r\nStep 32, loss: 0.04368234798312187\r\nStep 33, loss: 0.04699988663196564\r\nStep 34, loss: 0.03861392289400101\r\nStep 35, loss: 0.04747316241264343\r\nStep 36, loss: 0.03618457913398743\r\nStep 37, loss: 0.03713495656847954\r\nStep 38, loss: 0.03410891816020012\r\nStep 39, loss: 0.03037068247795105\r\nStep 40, loss: 0.04023834317922592\r\nStep 41, loss: 0.0378466434776783\r\nStep 42, loss: 0.03600723668932915\r\nStep 43, loss: 0.04228172078728676\r\nStep 44, loss: 0.04032917693257332\r\nStep 45, loss: 0.03685379773378372\r\nStep 46, loss: 0.029053105041384697\r\nStep 47, loss: 0.03426255285739899\r\nStep 48, loss: 0.029112931340932846\r\nStep 49, loss: 0.029259715229272842\r\nStep 50, loss: 0.03822842240333557\r\nStep 51, loss: 0.025380410254001617\r\nStep 52, loss: 0.02545599266886711\r\nStep 53, loss: 0.030934832990169525\r\nStep 54, loss: 0.030533991754055023\r\nStep 55, loss: 0.030320778489112854\r\nStep 56, loss: 0.035128168761730194\r\nStep 57, loss: 
0.026840997859835625\r\nStep 58, loss: 0.02393060363829136\r\nStep 59, loss: 0.025138404220342636\r\nStep 60, loss: 0.025882255285978317\r\nStep 61, loss: 0.0240098275244236\r\nStep 62, loss: 0.018979666754603386\r\nStep 63, loss: 0.02961442992091179\r\nStep 64, loss: 0.0189448744058609\r\nStep 65, loss: 0.02829093486070633\r\nStep 66, loss: 0.02481415495276451\r\nStep 67, loss: 0.026056364178657532\r\nStep 68, loss: 0.023815015330910683\r\nStep 69, loss: 0.026936529204249382\r\nStep 70, loss: 0.02226216159760952\r\nStep 71, loss: 0.025462446734309196\r\nStep 72, loss: 0.02014605700969696\r\nStep 73, loss: 0.027149276807904243\r\nStep 74, loss: 0.0226758923381567\r\nStep 75, loss: 0.02523641847074032\r\nStep 76, loss: 0.023796407505869865\r\nStep 77, loss: 0.017406370490789413\r\nStep 78, loss: 0.021783504635095596\r\nStep 79, loss: 0.019695529714226723\r\nStep 80, loss: 0.020754938945174217\r\nStep 81, loss: 0.023621993139386177\r\nStep 82, loss: 0.026429520919919014\r\nStep 83, loss: 0.02595333382487297\r\nStep 84, loss: 0.01526771392673254\r\nStep 85, loss: 0.018546177074313164\r\nStep 86, loss: 0.019712960347533226\r\nStep 87, loss: 0.02242395281791687\r\nStep 88, loss: 0.021083161234855652\r\nStep 89, loss: 0.023148395121097565\r\nStep 90, loss: 0.02137535624206066\r\nStep 91, loss: 0.017097987234592438\r\nStep 92, loss: 0.019456028938293457\r\nStep 93, loss: 0.02404623106122017\r\nStep 94, loss: 0.016838455572724342\r\nStep 95, loss: 0.020395776256918907\r\nStep 96, loss: 0.024831360206007957\r\nStep 97, loss: 0.01299219485372305\r\nStep 98, loss: 0.020889991894364357\r\nStep 99, loss: 0.017093077301979065\r\nStep 100, loss: 0.014664755202829838\r\nStep 101, loss: 0.016240935772657394\r\nStep 102, loss: 0.022209880873560905\r\nStep 103, loss: 0.018546078354120255\r\nStep 104, loss: 0.015767868608236313\r\nStep 105, loss: 0.022501584142446518\r\nStep 106, loss: 0.017610210925340652\r\nStep 107, loss: 0.017259778454899788\r\nStep 108, loss: 0.016788098961114883\r\nStep 109, loss: 0.01672743633389473\r\nStep 110, loss: 0.016221890226006508\r\nStep 111, loss: 0.02049020305275917\r\nStep 112, loss: 0.017517628148198128\r\nStep 113, loss: 0.01657887175679207\r\nStep 114, loss: 0.01597391441464424\r\nStep 115, loss: 0.01778361015021801\r\nStep 116, loss: 0.020706741139292717\r\nStep 117, loss: 0.013687815517187119\r\nStep 118, loss: 0.017475508153438568\r\nStep 119, loss: 0.0185208972543478\r\nStep 120, loss: 0.014776168391108513\r\nStep 121, loss: 0.01928330957889557\r\nStep 122, loss: 0.02140713669359684\r\nStep 123, loss: 0.0163588784635067\r\nStep 124, loss: 0.016816476359963417\r\nStep 125, loss: 0.013746051117777824\r\nStep 126, loss: 0.012534981593489647\r\nStep 127, loss: 0.016329964622855186\r\nStep 128, loss: 0.021131787449121475\r\nStep 129, loss: 0.017962772399187088\r\nStep 130, loss: 0.019580507650971413\r\nStep 131, loss: 0.018021678552031517\r\nStep 132, loss: 0.017191709950566292\r\nStep 133, loss: 0.016906671226024628\r\nStep 134, loss: 0.0164163988083601\r\nStep 135, loss: 0.01737116649746895\r\nStep 136, loss: 0.015271490439772606\r\nStep 137, loss: 0.016522753983736038\r\nStep 138, loss: 0.01641821302473545\r\nStep 139, loss: 0.01414334587752819\r\nStep 140, loss: 0.014087391085922718\r\nStep 141, loss: 0.013279623351991177\r\nStep 142, loss: 0.014984103851020336\r\nStep 143, loss: 0.016354860737919807\r\nStep 144, loss: 0.013469722121953964\r\nStep 145, loss: 0.01503074448555708\r\nStep 146, loss: 0.014219673350453377\r\nStep 147, loss: 0.014816194772720337\r\nStep 
148, loss: 0.01313858199864626\r\nStep 149, loss: 0.015858979895710945\r\nStep 150, loss: 0.015456815250217915\r\nStep 151, loss: 0.015389707870781422\r\nStep 152, loss: 0.012091447599232197\r\nStep 153, loss: 0.015203968621790409\r\nStep 154, loss: 0.015182387083768845\r\nStep 155, loss: 0.015577806159853935\r\nStep 156, loss: 0.020180070772767067\r\nStep 157, loss: 0.015581539832055569\r\nStep 158, loss: 0.01271317433565855\r\nStep 159, loss: 0.01539587415754795\r\nStep 160, loss: 0.014603250660002232\r\nStep 161, loss: 0.014268789440393448\r\nStep 162, loss: 0.016044508665800095\r\nStep 163, loss: 0.015424052253365517\r\nStep 164, loss: 0.011639541015028954\r\nStep 165, loss: 0.016989832744002342\r\nStep 166, loss: 0.016074586659669876\r\nStep 167, loss: 0.01450294815003872\r\nStep 168, loss: 0.01633497141301632\r\nStep 169, loss: 0.013324996456503868\r\nStep 170, loss: 0.013189850375056267\r\nStep 171, loss: 0.015074877068400383\r\nStep 172, loss: 0.014682481996715069\r\nStep 173, loss: 0.018100008368492126\r\nStep 174, loss: 0.013400370255112648\r\nStep 175, loss: 0.01567847654223442\r\nStep 176, loss: 0.015122741460800171\r\nStep 177, loss: 0.013786758296191692\r\nStep 178, loss: 0.011682386510074139\r\nStep 179, loss: 0.013185624964535236\r\nStep 180, loss: 0.01608995348215103\r\nStep 181, loss: 0.015077189542353153\r\nStep 182, loss: 0.010910343378782272\r\nStep 183, loss: 0.012310906313359737\r\nStep 184, loss: 0.012917791493237019\r\nStep 185, loss: 0.013658295385539532\r\nStep 186, loss: 0.012522728182375431\r\nStep 187, loss: 0.012858663685619831\r\nStep 188, loss: 0.013033694587647915\r\nStep 189, loss: 0.014052755199372768\r\nStep 190, loss: 0.01771222986280918\r\nStep 191, loss: 0.013067700900137424\r\nStep 192, loss: 0.011359093710780144\r\nStep 193, loss: 0.011020203121006489\r\nStep 194, loss: 0.013568686321377754\r\nStep 195, loss: 0.020322274416685104\r\nStep 196, loss: 0.011942881159484386\r\nStep 197, loss: 0.0171518437564373\r\nStep 198, loss: 0.011508755385875702\r\nStep 199, loss: 0.01579204574227333\r\n",,terminal_output +4381,5099489,"TERMINAL",0,0,"9445",,terminal_output +4382,5099813,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-lam-dev-3468617 at: https://wandb.ai/instant-uv/jafar/runs/z21qnvj4\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_144931-z21qnvj4/logs\r\n",,terminal_output +4383,5100425,"TERMINAL",0,0,"10556",,terminal_output +4384,5100857,"TERMINAL",0,0,"]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +4385,5101555,"TERMINAL",0,0,"1667",,terminal_output +4386,5102530,"TERMINAL",0,0,"2778",,terminal_output +4387,5103531,"TERMINAL",0,0,"3889",,terminal_output +4388,5104718,"TERMINAL",0,0,"49950",,terminal_output +4389,5105706,"TERMINAL",0,0,"520201",,terminal_output +4390,5106819,"TERMINAL",0,0,"6112",,terminal_output +4391,5107884,"TERMINAL",0,0,"7223",,terminal_output +4392,5108879,"TERMINAL",0,0,"8334",,terminal_output +4393,5109892,"TERMINAL",0,0,"9445",,terminal_output +4394,5110930,"TERMINAL",0,0,"20556",,terminal_output +4395,5112155,"TERMINAL",0,0,"1667",,terminal_output +4396,5113179,"TERMINAL",0,0,"2778",,terminal_output +4397,5114206,"TERMINAL",0,0,"3889",,terminal_output +4398,5115423,"TERMINAL",0,0,"4998:00",,terminal_output +4399,5116448,"TERMINAL",0,0,"530301",,terminal_output +4400,5117476,"TERMINAL",0,0,"6112",,terminal_output +4401,5118377,"TERMINAL",0,0,"7223",,terminal_output 
+4402,5119347,"TERMINAL",0,0,"8334",,terminal_output +4403,5120555,"TERMINAL",0,0,"9445",,terminal_output +4404,5121405,"TERMINAL",0,0,"30556",,terminal_output +4405,5122605,"TERMINAL",0,0,"1667",,terminal_output +4406,5123722,"TERMINAL",0,0,"2889",,terminal_output +4407,5124737,"TERMINAL",0,0,"49910",,terminal_output +4408,5125565,"TERMINAL",0,0,"540401",,terminal_output +4409,5126537,"TERMINAL",0,0,"6112",,terminal_output +4410,5127600,"TERMINAL",0,0,"7223",,terminal_output +4411,5128582,"TERMINAL",0,0,"8334",,terminal_output +4412,5129551,"TERMINAL",0,0,"9445",,terminal_output +4413,5130572,"TERMINAL",0,0,"40556",,terminal_output +4414,5131719,"TERMINAL",0,0,"1667",,terminal_output +4415,5132739,"TERMINAL",0,0,"2778",,terminal_output +4416,5134066,"TERMINAL",0,0,"3889",,terminal_output +4417,5134981,"TERMINAL",0,0,"49920",,terminal_output +4418,5135903,"TERMINAL",0,0,"550501",,terminal_output +4419,5136922,"TERMINAL",0,0,"6112",,terminal_output +4420,5137955,"TERMINAL",0,0,"7223",,terminal_output +4421,5139234,"TERMINAL",0,0,"8334",,terminal_output +4422,5140106,"TERMINAL",0,0,"9445",,terminal_output +4423,5141324,"TERMINAL",0,0,"50556",,terminal_output +4424,5142257,"TERMINAL",0,0,"1667",,terminal_output +4425,5143289,"TERMINAL",0,0,"2778",,terminal_output +4426,5144399,"TERMINAL",0,0,"3889",,terminal_output +4427,5145423,"TERMINAL",0,0,"49930",,terminal_output +4428,5146332,"TERMINAL",0,0,"56:006:001",,terminal_output +4429,5147473,"TERMINAL",0,0,"6112",,terminal_output +4430,5148417,"TERMINAL",0,0,"7334",,terminal_output +4431,5149516,"TERMINAL",0,0,"9445",,terminal_output +4432,5150546,"TERMINAL",0,0,"2:00556",,terminal_output +4433,5151472,"TERMINAL",0,0,"1667",,terminal_output +4434,5152512,"TERMINAL",0,0,"2778",,terminal_output +4435,5153506,"TERMINAL",0,0,"3889",,terminal_output +4436,5154568,"TERMINAL",0,0,"49940",,terminal_output +4437,5155671,"TERMINAL",0,0,"510101",,terminal_output +4438,5156725,"TERMINAL",0,0,"6112",,terminal_output +4439,5157690,"TERMINAL",0,0,"7223",,terminal_output +4440,5158766,"TERMINAL",0,0,"8334",,terminal_output +4441,5159818,"TERMINAL",0,0,"9445",,terminal_output +4442,5161014,"TERMINAL",0,0,"10556",,terminal_output +4443,5161918,"TERMINAL",0,0,"1667",,terminal_output +4444,5162968,"TERMINAL",0,0,"2778",,terminal_output +4445,5164073,"TERMINAL",0,0,"3889",,terminal_output +4446,5164953,"TERMINAL",0,0,"49950",,terminal_output +4447,5166126,"TERMINAL",0,0,"520201",,terminal_output +4448,5167235,"TERMINAL",0,0,"6112",,terminal_output +4449,5168172,"TERMINAL",0,0,"7223",,terminal_output +4450,5169540,"TERMINAL",0,0,"8334",,terminal_output +4451,5170204,"TERMINAL",0,0,"9445",,terminal_output +4452,5171536,"TERMINAL",0,0,"20667",,terminal_output +4453,5172882,"TERMINAL",0,0,"2778",,terminal_output +4454,5173488,"TERMINAL",0,0,"3889",,terminal_output +4455,5174523,"TERMINAL",0,0,"4999:00",,terminal_output +4456,5175544,"TERMINAL",0,0,"530301",,terminal_output +4457,5176589,"TERMINAL",0,0,"6112",,terminal_output +4458,5177656,"TERMINAL",0,0,"7223",,terminal_output +4459,5178643,"TERMINAL",0,0,"8334",,terminal_output +4460,5179651,"TERMINAL",0,0,"9445",,terminal_output +4461,5180776,"TERMINAL",0,0,"30556",,terminal_output +4462,5181977,"TERMINAL",0,0,"1667",,terminal_output +4463,5182939,"TERMINAL",0,0,"2778",,terminal_output +4464,5183813,"TERMINAL",0,0,"3889",,terminal_output +4465,5184845,"TERMINAL",0,0,"49910",,terminal_output +4466,5185939,"TERMINAL",0,0,"540401",,terminal_output +4467,5187019,"TERMINAL",0,0,"6112",,terminal_output 
+4468,5188039,"TERMINAL",0,0,"7223",,terminal_output +4469,5189156,"TERMINAL",0,0,"8334",,terminal_output +4470,5190060,"TERMINAL",0,0,"9445",,terminal_output +4471,5191306,"TERMINAL",0,0,"40556",,terminal_output +4472,5192181,"TERMINAL",0,0,"1667",,terminal_output +4473,5193221,"TERMINAL",0,0,"2778",,terminal_output +4474,5194247,"TERMINAL",0,0,"3889",,terminal_output +4475,5195406,"TERMINAL",0,0,"49920",,terminal_output +4476,5196422,"TERMINAL",0,0,"551512",,terminal_output +4477,5197442,"TERMINAL",0,0,"7223",,terminal_output +4478,5198475,"TERMINAL",0,0,"8334",,terminal_output +4479,5199488,"TERMINAL",0,0,"9445",,terminal_output +4480,5200580,"TERMINAL",0,0,"50556",,terminal_output +4481,5201564,"TERMINAL",0,0,"1667",,terminal_output +4482,5202689,"TERMINAL",0,0,"2778",,terminal_output +4483,5203689,"TERMINAL",0,0,"3889",,terminal_output +4484,5204723,"TERMINAL",0,0,"49930",,terminal_output +4485,5205720,"TERMINAL",0,0,"57:007:001",,terminal_output +4486,5206871,"TERMINAL",0,0,"6112",,terminal_output +4487,5207999,"TERMINAL",0,0,"7223",,terminal_output +4488,5209221,"TERMINAL",0,0,"8334",,terminal_output +4489,5210143,"TERMINAL",0,0,"9445",,terminal_output +4490,5211270,"TERMINAL",0,0,"3:00556",,terminal_output +4491,5212272,"TERMINAL",0,0,"1667",,terminal_output +4492,5213314,"TERMINAL",0,0,"2778",,terminal_output +4493,5214236,"TERMINAL",0,0,"3889",,terminal_output +4494,5215122,"TERMINAL",0,0,"49940",,terminal_output +4495,5216161,"TERMINAL",0,0,"510101",,terminal_output +4496,5217206,"TERMINAL",0,0,"6112",,terminal_output +4497,5218195,"TERMINAL",0,0,"7223",,terminal_output +4498,5219257,"TERMINAL",0,0,"8334",,terminal_output +4499,5220265,"TERMINAL",0,0,"9445",,terminal_output +4500,5221322,"TERMINAL",0,0,"10667",,terminal_output +4501,5222354,"TERMINAL",0,0,"2778",,terminal_output +4502,5223456,"TERMINAL",0,0,"3889",,terminal_output +4503,5224477,"TERMINAL",0,0,"49950",,terminal_output +4504,5225522,"TERMINAL",0,0,"520201",,terminal_output +4505,5226483,"TERMINAL",0,0,"6112",,terminal_output +4506,5227524,"TERMINAL",0,0,"7223",,terminal_output +4507,5228563,"TERMINAL",0,0,"8334",,terminal_output +4508,5229605,"TERMINAL",0,0,"9445",,terminal_output +4509,5230723,"TERMINAL",0,0,"20556",,terminal_output +4510,5231745,"TERMINAL",0,0,"1667",,terminal_output +4511,5232769,"TERMINAL",0,0,"2778",,terminal_output +4512,5233784,"TERMINAL",0,0,"3889",,terminal_output +4513,5234818,"TERMINAL",0,0,"49950:00",,terminal_output +4514,5235886,"TERMINAL",0,0,"530301",,terminal_output +4515,5236886,"TERMINAL",0,0,"6112",,terminal_output +4516,5237910,"TERMINAL",0,0,"7223",,terminal_output +4517,5239121,"TERMINAL",0,0,"8334",,terminal_output +4518,5240053,"TERMINAL",0,0,"9445",,terminal_output +4519,5241066,"TERMINAL",0,0,"30556",,terminal_output +4520,5242205,"TERMINAL",0,0,"1667",,terminal_output +4521,5243285,"TERMINAL",0,0,"2778",,terminal_output +4522,5244289,"TERMINAL",0,0,"3889",,terminal_output +4523,5245067,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +4524,5245297,"TERMINAL",0,0,"49910",,terminal_output +4525,5246315,"TERMINAL",0,0,"540401",,terminal_output +4526,5246365,"TERMINAL",0,0,"g",,terminal_output +4527,5246505,"TERMINAL",0,0,"ot",,terminal_output +4528,5246589,"TERMINAL",0,0," ",,terminal_output +4529,5246772,"TERMINAL",0,0,"c",,terminal_output +4530,5246872,"TERMINAL",0,0,"h",,terminal_output +4531,5247005,"TERMINAL",0,0,"e",,terminal_output 
+4532,5247072,"TERMINAL",0,0,"c",,terminal_output +4533,5247396,"TERMINAL",0,0,"6112",,terminal_output +4534,5247530,"TERMINAL",0,0,"",,terminal_output +4535,5247857,"TERMINAL",0,0,"",,terminal_output +4536,5248016,"TERMINAL",0,0,"",,terminal_output +4537,5248355,"TERMINAL",0,0,"7334",,terminal_output +4538,5248567,"TERMINAL",0,0,"g",,terminal_output +4539,5248732,"TERMINAL",0,0,"i",,terminal_output +4540,5249509,"TERMINAL",0,0,"9445",,terminal_output +4541,5249754,"TERMINAL",0,0,"t",,terminal_output +4542,5249866,"TERMINAL",0,0," ",,terminal_output +4543,5249989,"TERMINAL",0,0,"c",,terminal_output +4544,5250150,"TERMINAL",0,0,"h",,terminal_output +4545,5250305,"TERMINAL",0,0,"e",,terminal_output +4546,5250450,"TERMINAL",0,0,"c",,terminal_output +4547,5250598,"TERMINAL",0,0,"k",,terminal_output +4548,5250654,"TERMINAL",0,0,"40556",,terminal_output +4549,5250722,"TERMINAL",0,0,"o",,terminal_output +4550,5250855,"TERMINAL",0,0,"u",,terminal_output +4551,5250967,"TERMINAL",0,0,"t ",,terminal_output +4552,5251109,"TERMINAL",0,0,"m",,terminal_output +4553,5251205,"TERMINAL",0,0,"a",,terminal_output +4554,5251340,"TERMINAL",0,0,"in",,terminal_output +4555,5251570,"TERMINAL",0,0,"\r\n[?2004l\rSwitched to branch 'main'\r\nYour branch is up to date with 'origin/main'.\r\n]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +4556,5251610,"TERMINAL",0,0,"1667",,terminal_output +4557,5252140,"TERMINAL",0,0,"g",,terminal_output +4558,5252289,"TERMINAL",0,0,"i",,terminal_output +4559,5252439,"TERMINAL",0,0,"t",,terminal_output +4560,5252448,"TERMINAL",0,0," ",,terminal_output +4561,5252546,"TERMINAL",0,0,"p",,terminal_output +4562,5252674,"TERMINAL",0,0,"2778",,terminal_output +4563,5252680,"TERMINAL",0,0,"u",,terminal_output +4564,5252819,"TERMINAL",0,0,"l",,terminal_output +4565,5252956,"TERMINAL",0,0,"l",,terminal_output +4566,5253138,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4567,5253566,"TERMINAL",0,0,"3889",,terminal_output +4568,5254655,"TERMINAL",0,0,"49920",,terminal_output +4569,5255307,"TERMINAL",0,0,"remote: Enumerating objects: 4, done.\r\nremote: Counting objects: 25% (1/4)\rremote: Counting objects: 50% (2/4)\rremote: Counting objects: 75% (3/4)\rremote: Counting objects: 100% (4/4)\rremote: Counting objects: 100% (4/4), done.\r\nremote: Total 4 (delta 3), reused 4 (delta 3), pack-reused 0 (from 0)\r\nUnpacking objects: 25% (1/4)\rUnpacking objects: 50% (2/4)\rUnpacking objects: 75% (3/4)\rUnpacking objects: 100% (4/4)\rUnpacking objects: 100% (4/4), 387 bytes | 20.00 KiB/s, done.\r\n",,terminal_output +4570,5255444,"TERMINAL",0,0,"From github.com:p-doom/jasmine\r\n * [new branch] unnormalized-emb-in-q-loss -> origin/unnormalized-emb-in-q-loss\r\n",,terminal_output +4571,5255523,"TERMINAL",0,0,"Already up to date.\r\n]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +4572,5255641,"",0,0,"Switched from branch 'train_lam_coinrun_ablation_wsd_3e-6_28747' to 'main'",,git_branch_checkout +4573,5255783,"TERMINAL",0,0,"550501",,terminal_output +4574,5256837,"TERMINAL",0,0,"6112",,terminal_output +4575,5258165,"TERMINAL",0,0,"7223",,terminal_output +4576,5258551,"TERMINAL",0,0,"git pull",,terminal_output +4577,5258796,"TERMINAL",0,0,"checkout main",,terminal_output +4578,5258960,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",,terminal_output +4579,5259057,"TERMINAL",0,0,"8334",,terminal_output +4580,5260297,"TERMINAL",0,0,"9445",,terminal_output 
+4581,5260298,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --job-name=train_lam_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_lam.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=1000 \\r\n --log \\r\n --name=coinrun-lam-dev-$slurm_job_id \\r\n --tags lam coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 200 \\r\n --data_dir $array_records_dir_train\r\n\r\nSLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3453268\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0403\r\nSLURM_JOB_START_TIME=1757073804\r\nSLURM_STEP_NODELIST=hkn0403\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757077404\r\nSLURM_PMI2_SRUN_PORT=36059\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3468617\r\nSLURM_PTY_PORT=33607\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=36\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0403\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=196\r\nSLURM_NODELIST=hkn0403\r\nSLURM_SRUN_COMM_PORT=43381\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3468617\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0403\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=43381\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0403\r\n",,terminal_output +4582,5260425,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +4583,5261242,"TERMINAL",0,0,"50556",,terminal_output 
+4584,5262164,"TERMINAL",0,0,"1667",,terminal_output +4585,5263288,"TERMINAL",0,0,"2778",,terminal_output +4586,5264337,"TERMINAL",0,0,"3889",,terminal_output +4587,5265437,"TERMINAL",0,0,"49930",,terminal_output +4588,5266464,"TERMINAL",0,0,"58:008:001",,terminal_output +4589,5267487,"TERMINAL",0,0,"6112",,terminal_output +4590,5267905,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +4591,5268452,"TERMINAL",0,0,"7223",,terminal_output +4592,5268588,"TERMINAL",0,0,"wandb: creating run\r\n",,terminal_output +4593,5268645,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_145357-z5tvs2a7\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-lam-dev-3468617\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/z5tvs2a7\r\n",,terminal_output +4594,5269517,"TERMINAL",0,0,"9445",,terminal_output +4595,5270684,"TERMINAL",0,0,"4:00556",,terminal_output +4596,5271531,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['action_in', 'action_up', 'decoder', 'encoder', 'patch_up', 'vq']\r\nParameter counts:\r\n{'action_in': 768, 'action_up': 16896, 'decoder': 17474816, 'encoder': 17228832, 'patch_up': 393728, 'vq': 192, 'total': 35115232}\r\n",,terminal_output +4597,5271532,"TERMINAL",0,0,"1667",,terminal_output +4598,5272615,"TERMINAL",0,0,"2778",,terminal_output +4599,5273616,"TERMINAL",0,0,"3889",,terminal_output +4600,5274772,"TERMINAL",0,0,"49940",,terminal_output +4601,5275847,"TERMINAL",0,0,"510101",,terminal_output +4602,5276923,"TERMINAL",0,0,"6112",,terminal_output +4603,5277952,"TERMINAL",0,0,"7223",,terminal_output +4604,5279030,"TERMINAL",0,0,"8334",,terminal_output +4605,5280087,"TERMINAL",0,0,"9445",,terminal_output +4606,5280322,"TERMINAL",0,0,"2025-09-05 14:54:09.659486: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 14:54:09.660661: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 14:54:09.660689: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 14:54:09.661166: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4607,5281099,"TERMINAL",0,0,"10556",,terminal_output +4608,5282239,"TERMINAL",0,0,"1667",,terminal_output +4609,5283187,"TERMINAL",0,0,"2778",,terminal_output +4610,5284074,"TERMINAL",0,0,"3889",,terminal_output +4611,5285302,"TERMINAL",0,0,"49950",,terminal_output +4612,5286254,"TERMINAL",0,0,"520201",,terminal_output +4613,5287463,"TERMINAL",0,0,"6112",,terminal_output +4614,5288284,"TERMINAL",0,0,"7223",,terminal_output +4615,5289201,"TERMINAL",0,0,"8334",,terminal_output +4616,5290320,"TERMINAL",0,0,"9445",,terminal_output +4617,5291445,"TERMINAL",0,0,"20556",,terminal_output +4618,5292480,"TERMINAL",0,0,"1667",,terminal_output +4619,5293492,"TERMINAL",0,0,"2889",,terminal_output +4620,5294573,"TERMINAL",0,0,"4991:00",,terminal_output +4621,5295541,"TERMINAL",0,0,"530301",,terminal_output +4622,5296667,"TERMINAL",0,0,"6112",,terminal_output +4623,5297673,"TERMINAL",0,0,"7223",,terminal_output +4624,5298805,"TERMINAL",0,0,"8334",,terminal_output +4625,5299901,"TERMINAL",0,0,"9445",,terminal_output +4626,5300991,"TERMINAL",0,0,"30556",,terminal_output +4627,5301853,"TERMINAL",0,0,"1667",,terminal_output +4628,5302830,"TERMINAL",0,0,"2778",,terminal_output +4629,5303840,"TERMINAL",0,0,"3889",,terminal_output +4630,5304874,"TERMINAL",0,0,"49910",,terminal_output +4631,5305898,"TERMINAL",0,0,"540401",,terminal_output +4632,5307013,"TERMINAL",0,0,"6112",,terminal_output +4633,5307950,"TERMINAL",0,0,"7223",,terminal_output +4634,5309163,"TERMINAL",0,0,"8334",,terminal_output +4635,5310189,"TERMINAL",0,0,"9445",,terminal_output +4636,5311315,"TERMINAL",0,0,"40556",,terminal_output +4637,5312221,"TERMINAL",0,0,"1667",,terminal_output +4638,5313154,"TERMINAL",0,0,"2778",,terminal_output +4639,5314280,"TERMINAL",0,0,"3889",,terminal_output +4640,5315201,"TERMINAL",0,0,"49920",,terminal_output +4641,5316229,"TERMINAL",0,0,"550501",,terminal_output +4642,5317465,"TERMINAL",0,0,"6112",,terminal_output +4643,5318515,"TERMINAL",0,0,"7223",,terminal_output +4644,5319423,"TERMINAL",0,0,"8445",,terminal_output +4645,5320543,"TERMINAL",0,0,"50556",,terminal_output +4646,5321554,"TERMINAL",0,0,"1667",,terminal_output +4647,5322977,"TERMINAL",0,0,"2778",,terminal_output +4648,5323972,"TERMINAL",0,0,"3889",,terminal_output +4649,5325080,"TERMINAL",0,0,"49930",,terminal_output +4650,5326057,"TERMINAL",0,0,"59:009:001",,terminal_output +4651,5327256,"TERMINAL",0,0,"6112",,terminal_output +4652,5327915,"TERMINAL",0,0,"7223",,terminal_output +4653,5329190,"TERMINAL",0,0,"8334",,terminal_output +4654,5330376,"TERMINAL",0,0,"9445",,terminal_output +4655,5331494,"TERMINAL",0,0,"5:00556",,terminal_output +4656,5332613,"TERMINAL",0,0,"1667",,terminal_output +4657,5333644,"TERMINAL",0,0,"2778",,terminal_output +4658,5334155,"TERMINAL",0,0,"3889",,terminal_output +4659,5335679,"TERMINAL",0,0,"49940",,terminal_output +4660,5336650,"TERMINAL",0,0,"510101",,terminal_output +4661,5337678,"TERMINAL",0,0,"6112",,terminal_output +4662,5338619,"TERMINAL",0,0,"7223",,terminal_output +4663,5339232,"TERMINAL",0,0,"8334",,terminal_output +4664,5340455,"TERMINAL",0,0,"9445",,terminal_output +4665,5341530,"TERMINAL",0,0,"10556",,terminal_output +4666,5342369,"TERMINAL",0,0,"1667",,terminal_output +4667,5343475,"TERMINAL",0,0,"2889",,terminal_output +4668,5344590,"TERMINAL",0,0,"49950",,terminal_output +4669,5345727,"TERMINAL",0,0,"520201",,terminal_output 
+4670,5346553,"TERMINAL",0,0,"6112",,terminal_output +4671,5347571,"TERMINAL",0,0,"7223",,terminal_output +4672,5348527,"TERMINAL",0,0,"8334",,terminal_output +4673,5349607,"TERMINAL",0,0,"9445",,terminal_output +4674,5350630,"TERMINAL",0,0,"20556",,terminal_output +4675,5351703,"TERMINAL",0,0,"1667",,terminal_output +4676,5352768,"TERMINAL",0,0,"2778",,terminal_output +4677,5353798,"TERMINAL",0,0,"3889",,terminal_output +4678,5354887,"TERMINAL",0,0,"4992:00",,terminal_output +4679,5355897,"TERMINAL",0,0,"530301",,terminal_output +4680,5356997,"TERMINAL",0,0,"6112",,terminal_output +4681,5358091,"TERMINAL",0,0,"7223",,terminal_output +4682,5359219,"TERMINAL",0,0,"8334",,terminal_output +4683,5360317,"TERMINAL",0,0,"9445",,terminal_output +4684,5361414,"TERMINAL",0,0,"30556",,terminal_output +4685,5362479,"TERMINAL",0,0,"1667",,terminal_output +4686,5363716,"TERMINAL",0,0,"2778",,terminal_output +4687,5364559,"TERMINAL",0,0,"3889",,terminal_output +4688,5364952,"TERMINAL",0,0,"Total memory size: 0.6 GB, Output size: 0.4 GB, Temp size: 0.2 GB, Argument size: 0.4 GB, Host temp size: 0.0 GB.\r\nFLOPs: 2.609e+11, Bytes: 7.327e+09 (6.8 GB), Intensity: 35.6 FLOPs/byte\r\nStarting training from step 0...\r\n\r\nMemstats: After params initialized:\r\n\tUsing (GB) 0.43 / 38.7 (1.111111%) on cuda:0\r\nStep 0, loss: 0.2907809615135193\r\nStep 1, loss: 0.20487669110298157\r\nStep 2, loss: 0.19224461913108826\r\nStep 3, loss: 0.20198248326778412\r\nStep 4, loss: 0.16347838938236237\r\nStep 5, loss: 0.16230832040309906\r\nStep 6, loss: 0.14663851261138916\r\nStep 7, loss: 0.13977152109146118\r\nStep 8, loss: 0.13386794924736023\r\nStep 9, loss: 0.13691286742687225\r\nStep 10, loss: 0.10192667692899704\r\nStep 11, loss: 0.1044677272439003\r\nStep 12, loss: 0.09416791796684265\r\nStep 13, loss: 0.09780669957399368\r\nStep 14, loss: 0.09559058398008347\r\nStep 15, loss: 0.09634988009929657\r\nStep 16, loss: 0.08359295129776001\r\nStep 17, loss: 0.09576983749866486\r\nStep 18, loss: 0.07302078604698181\r\nStep 19, loss: 0.07869090139865875\r\nStep 20, loss: 0.07822202146053314\r\nStep 21, loss: 0.06648452579975128\r\nStep 22, loss: 0.06101835519075394\r\nStep 23, loss: 0.06660005450248718\r\nStep 24, loss: 0.05323264002799988\r\nStep 25, loss: 0.050303805619478226\r\nStep 26, loss: 0.046183954924345016\r\nStep 27, loss: 0.05877380445599556\r\nStep 28, loss: 0.056686244904994965\r\nStep 29, loss: 0.04908159747719765\r\nStep 30, loss: 0.05466654896736145\r\nStep 31, loss: 0.03886961191892624\r\nStep 32, loss: 0.04366278648376465\r\nStep 33, loss: 0.04702628403902054\r\nStep 34, loss: 0.03860776498913765\r\nStep 35, loss: 0.047496497631073\r\nStep 36, loss: 0.03617597743868828\r\nStep 37, loss: 0.03713085502386093\r\nStep 38, loss: 0.03411027416586876\r\nStep 39, loss: 0.03038857690989971\r\nStep 40, loss: 0.04024888947606087\r\nStep 41, loss: 0.03780907392501831\r\nStep 42, loss: 0.03603516146540642\r\nStep 43, loss: 0.042284950613975525\r\nStep 44, loss: 0.04029197618365288\r\nStep 45, loss: 0.036865606904029846\r\nStep 46, loss: 0.029029667377471924\r\nStep 47, loss: 0.03425827622413635\r\nStep 48, loss: 0.029121899977326393\r\nStep 49, loss: 0.029270753264427185\r\nStep 50, loss: 0.038236405700445175\r\nStep 51, loss: 0.025467678904533386\r\nStep 52, loss: 0.02552272006869316\r\nStep 53, loss: 0.03097713552415371\r\nStep 54, loss: 0.030593199655413628\r\nStep 55, loss: 0.03032303974032402\r\nStep 56, loss: 0.03519182279706001\r\nStep 57, loss: 0.026803400367498398\r\nStep 58, loss: 
0.024078769609332085\r\nStep 59, loss: 0.025129396468400955\r\nStep 60, loss: 0.02600712515413761\r\nStep 61, loss: 0.024028316140174866\r\nStep 62, loss: 0.018966276198625565\r\nStep 63, loss: 0.029535476118326187\r\nStep 64, loss: 0.018996495753526688\r\nStep 65, loss: 0.028345173224806786\r\nStep 66, loss: 0.024827852845191956\r\nStep 67, loss: 0.026059312745928764\r\nStep 68, loss: 0.023865530267357826\r\nStep 69, loss: 0.02699657529592514\r\nStep 70, loss: 0.022345898672938347\r\nStep 71, loss: 0.025571178644895554\r\nStep 72, loss: 0.020160339772701263\r\nStep 73, loss: 0.02731441892683506\r\nStep 74, loss: 0.022451432421803474\r\nStep 75, loss: 0.02534486912190914\r\nStep 76, loss: 0.02371443249285221\r\nStep 77, loss: 0.017474329099059105\r\nStep 78, loss: 0.021796364337205887\r\nStep 79, loss: 0.019692249596118927\r\nStep 80, loss: 0.020774418488144875\r\nStep 81, loss: 0.02372140809893608\r\nStep 82, loss: 0.026611795648932457\r\nStep 83, loss: 0.025882748886942863\r\nStep 84, loss: 0.015336929820477962\r\nStep 85, loss: 0.018499184399843216\r\nStep 86, loss: 0.019874298945069313\r\nStep 87, loss: 0.022342341020703316\r\nStep 88, loss: 0.021215401589870453\r\nStep 89, loss: 0.02302802912890911\r\nStep 90, loss: 0.02140907756984234\r\nStep 91, loss: 0.017064211890101433\r\nStep 92, loss: 0.019580373540520668\r\nStep 93, loss: 0.02397313341498375\r\nStep 94, loss: 0.017014777287840843\r\nStep 95, loss: 0.020339153707027435\r\nStep 96, loss: 0.024906886741518974\r\nStep 97, loss: 0.012998129241168499\r\nStep 98, loss: 0.020880380645394325\r\nStep 99, loss: 0.017134740948677063\r\nStep 100, loss: 0.014638373628258705\r\nStep 101, loss: 0.015965502709150314\r\nStep 102, loss: 0.021969303488731384\r\nStep 103, loss: 0.01862594671547413\r\nStep 104, loss: 0.015478555113077164\r\nStep 105, loss: 0.022591855376958847\r\nStep 106, loss: 0.017852338030934334\r\nStep 107, loss: 0.017364224418997765\r\nStep 108, loss: 0.016759617254137993\r\nStep 109, loss: 0.01675388403236866\r\nStep 110, loss: 0.016562556847929955\r\nStep 111, loss: 0.02093818224966526\r\nStep 112, loss: 0.017574887722730637\r\nStep 113, loss: 0.01672026328742504\r\nStep 114, loss: 0.01640930213034153\r\nStep 115, loss: 0.018300682306289673\r\nStep 116, loss: 0.020358584821224213\r\nStep 117, loss: 0.014358889311552048\r\nStep 118, loss: 0.019138429313898087\r\nStep 119, loss: 0.018208257853984833\r\nStep 120, loss: 0.016183095052838326\r\nStep 121, loss: 0.01939987950026989\r\nStep 122, loss: 0.021649548783898354\r\nStep 123, loss: 0.016916872933506966\r\nStep 124, loss: 0.016733385622501373\r\nStep 125, loss: 0.013533731922507286\r\nStep 126, loss: 0.011031046509742737\r\nStep 127, loss: 0.016036519780755043\r\nStep 128, loss: 0.022017469629645348\r\nStep 129, loss: 0.01474419329315424\r\nStep 130, loss: 0.018538694828748703\r\nStep 131, loss: 0.016506386920809746\r\nStep 132, loss: 0.01556385587900877\r\nStep 133, loss: 0.016722580417990685\r\nStep 134, loss: 0.015681210905313492\r\nStep 135, loss: 0.01627577655017376\r\nStep 136, loss: 0.015696680173277855\r\nStep 137, loss: 0.018018420785665512\r\nStep 138, loss: 0.01589091494679451\r\nStep 139, loss: 0.014008085243403912\r\nStep 140, loss: 0.014178372919559479\r\nStep 141, loss: 0.012927031144499779\r\nStep 142, loss: 0.01551508903503418\r\nStep 143, loss: 0.01637556217610836\r\nStep 144, loss: 0.013582299463450909\r\nStep 145, loss: 0.01626475900411606\r\nStep 146, loss: 0.013650564476847649\r\nStep 147, loss: 0.01560058631002903\r\nStep 148, loss: 
0.013296963647007942\r\nStep 149, loss: 0.015666494145989418\r\nStep 150, loss: 0.015066364780068398\r\nStep 151, loss: 0.01513235829770565\r\nStep 152, loss: 0.011986869387328625\r\nStep 153, loss: 0.015301206149160862\r\nStep 154, loss: 0.015149109065532684\r\nStep 155, loss: 0.01557920966297388\r\nStep 156, loss: 0.020196640864014626\r\nStep 157, loss: 0.015859330072999\r\nStep 158, loss: 0.012658276595175266\r\nStep 159, loss: 0.015439973212778568\r\nStep 160, loss: 0.014881087467074394\r\nStep 161, loss: 0.014422734268009663\r\nStep 162, loss: 0.015950776636600494\r\nStep 163, loss: 0.015684356912970543\r\nStep 164, loss: 0.011976590380072594\r\nStep 165, loss: 0.016381021589040756\r\nStep 166, loss: 0.014731398783624172\r\nStep 167, loss: 0.014019900932908058\r\nStep 168, loss: 0.015730246901512146\r\nStep 169, loss: 0.012211603112518787\r\nStep 170, loss: 0.013266247697174549\r\nStep 171, loss: 0.013944870792329311\r\nStep 172, loss: 0.014486017636954784\r\nStep 173, loss: 0.017141714692115784\r\nStep 174, loss: 0.012485145591199398\r\nStep 175, loss: 0.015482233837246895\r\nStep 176, loss: 0.013258597813546658\r\nStep 177, loss: 0.014898712746798992\r\nStep 178, loss: 0.01732616126537323\r\nStep 179, loss: 0.013337929733097553\r\nStep 180, loss: 0.01967885158956051\r\nStep 181, loss: 0.015483411960303783\r\nStep 182, loss: 0.014572303742170334\r\nStep 183, loss: 0.012711314484477043\r\nStep 184, loss: 0.015482379123568535\r\nStep 185, loss: 0.014362416230142117\r\nStep 186, loss: 0.015327204950153828\r\nStep 187, loss: 0.01304993499070406\r\nStep 188, loss: 0.01532430574297905\r\nStep 189, loss: 0.014505302533507347\r\nStep 190, loss: 0.018333744257688522\r\nStep 191, loss: 0.01269789319485426\r\nStep 192, loss: 0.012017099186778069\r\nStep 193, loss: 0.011694572865962982\r\nStep 194, loss: 0.01201622560620308\r\nStep 195, loss: 0.01776398904621601\r\nStep 196, loss: 0.011220911517739296\r\nStep 197, loss: 0.014040167443454266\r\nStep 198, loss: 0.01155409961938858\r\nStep 199, loss: 0.012815393507480621\r\n",,terminal_output +4689,5365347,"TERMINAL",0,0,"49910",,terminal_output +4690,5365361,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-lam-dev-3468617 at: https://wandb.ai/instant-uv/jafar/runs/z5tvs2a7\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_145357-z5tvs2a7/logs\r\n",,terminal_output +4691,5366277,"TERMINAL",0,0,"540401",,terminal_output +4692,5366387,"TERMINAL",0,0,"]0;tum_cte0515@hkn0403:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0403 jasmine]$ ",,terminal_output +4693,5367333,"TERMINAL",0,0,"6112",,terminal_output +4694,5368358,"TERMINAL",0,0,"7334",,terminal_output +4695,5369355,"TERMINAL",0,0,"9445",,terminal_output +4696,5370386,"TERMINAL",0,0,"40556",,terminal_output +4697,5371483,"TERMINAL",0,0,"1667",,terminal_output +4698,5372567,"TERMINAL",0,0,"2778",,terminal_output +4699,5373591,"TERMINAL",0,0,"3889",,terminal_output +4700,5374665,"TERMINAL",0,0,"49920",,terminal_output +4701,5375739,"TERMINAL",0,0,"550501",,terminal_output +4702,5376647,"TERMINAL",0,0,"6112",,terminal_output +4703,5377815,"TERMINAL",0,0,"7223",,terminal_output +4704,5378810,"TERMINAL",0,0,"8334",,terminal_output +4705,5379729,"TERMINAL",0,0,"9445",,terminal_output +4706,5380846,"TERMINAL",0,0,"50556",,terminal_output +4707,5381840,"TERMINAL",0,0,"1667",,terminal_output +4708,5382939,"TERMINAL",0,0,"2778",,terminal_output +4709,5384036,"TERMINAL",0,0,"3889",,terminal_output 
+4710,5385002,"TERMINAL",0,0,"49930",,terminal_output +4711,5386006,"TERMINAL",0,0,"540:0040:001",,terminal_output +4712,5387086,"TERMINAL",0,0,"6112",,terminal_output +4713,5388114,"TERMINAL",0,0,"7223",,terminal_output +4714,5389163,"TERMINAL",0,0,"8334",,terminal_output +4715,5390263,"TERMINAL",0,0,"9445",,terminal_output +4716,5391296,"TERMINAL",0,0,"6:00556",,terminal_output +4717,5392313,"TERMINAL",0,0,"1667",,terminal_output +4718,5394187,"TERMINAL",0,0,"2889",,terminal_output +4719,5395253,"TERMINAL",0,0,"49940",,terminal_output +4720,5396400,"TERMINAL",0,0,"510101",,terminal_output +4721,5399153,"TERMINAL",0,0,"611272238334",,terminal_output +4722,5400707,"TERMINAL",0,0,"9445",,terminal_output +4723,5401740,"TERMINAL",0,0,"10556",,terminal_output +4724,5403798,"TERMINAL",0,0,"16672778",,terminal_output +4725,5404290,"TERMINAL",0,0,"3889",,terminal_output +4726,5405275,"TERMINAL",0,0,"49950",,terminal_output +4727,5406522,"TERMINAL",0,0,"520201",,terminal_output +4728,5408688,"TERMINAL",0,0,"61127223",,terminal_output +4729,5409293,"TERMINAL",0,0,"8334",,terminal_output +4730,5410841,"TERMINAL",0,0,"9445",,terminal_output +4731,5411256,"TERMINAL",0,0,"20556",,terminal_output +4732,5412273,"TERMINAL",0,0,"1667",,terminal_output +4733,5413389,"TERMINAL",0,0,"2778",,terminal_output +4734,5414482,"TERMINAL",0,0,"3889",,terminal_output +4735,5415259,"TERMINAL",0,0,"4993:00",,terminal_output +4736,5416336,"TERMINAL",0,0,"530301",,terminal_output +4737,5417305,"TERMINAL",0,0,"6223",,terminal_output +4738,5418347,"TERMINAL",0,0,"8334",,terminal_output +4739,5419594,"TERMINAL",0,0,"9445",,terminal_output +4740,5420571,"TERMINAL",0,0,"30556",,terminal_output +4741,5421776,"TERMINAL",0,0,"1667",,terminal_output +4742,5422828,"TERMINAL",0,0,"2778",,terminal_output +4743,5423979,"TERMINAL",0,0,"3889",,terminal_output +4744,5425088,"TERMINAL",0,0,"49910",,terminal_output +4745,5426317,"TERMINAL",0,0,"540401",,terminal_output +4746,5427435,"TERMINAL",0,0,"6112",,terminal_output +4747,5428472,"TERMINAL",0,0,"7223",,terminal_output +4748,5429392,"TERMINAL",0,0,"8334",,terminal_output +4749,5430114,"TERMINAL",0,0,"9445",,terminal_output +4750,5431428,"TERMINAL",0,0,"40556",,terminal_output +4751,5432360,"TERMINAL",0,0,"1667",,terminal_output +4752,5433476,"TERMINAL",0,0,"2778",,terminal_output +4753,5434603,"TERMINAL",0,0,"3889",,terminal_output +4754,5435629,"TERMINAL",0,0,"49920",,terminal_output +4755,5436240,"TERMINAL",0,0,"550501",,terminal_output +4756,5437233,"TERMINAL",0,0,"6112",,terminal_output +4757,5438435,"TERMINAL",0,0,"7223",,terminal_output +4758,5439596,"TERMINAL",0,0,"8334",,terminal_output +4759,5440848,"TERMINAL",0,0,"9445",,terminal_output +4760,5441768,"TERMINAL",0,0,"50556",,terminal_output +4761,5442533,"TERMINAL",0,0,"1778",,terminal_output +4762,5443514,"TERMINAL",0,0,"3889",,terminal_output +4763,5444538,"TERMINAL",0,0,"49930",,terminal_output +4764,5445509,"TERMINAL",0,0,"51:001:001",,terminal_output +4765,5446584,"TERMINAL",0,0,"6112",,terminal_output +4766,5447608,"TERMINAL",0,0,"7223",,terminal_output +4767,5448738,"TERMINAL",0,0,"8334",,terminal_output +4768,5449743,"TERMINAL",0,0,"9445",,terminal_output +4769,5450791,"TERMINAL",0,0,"7:00556",,terminal_output +4770,5451850,"TERMINAL",0,0,"1667",,terminal_output +4771,5452784,"TERMINAL",0,0,"2778",,terminal_output +4772,5453891,"TERMINAL",0,0,"3889",,terminal_output +4773,5454938,"TERMINAL",0,0,"49940",,terminal_output +4774,5455921,"TERMINAL",0,0,"510101",,terminal_output 
+4775,5456995,"TERMINAL",0,0,"6112",,terminal_output +4776,5458121,"TERMINAL",0,0,"7223",,terminal_output +4777,5459281,"TERMINAL",0,0,"8334",,terminal_output +4778,5460203,"TERMINAL",0,0,"9445",,terminal_output +4779,5461329,"TERMINAL",0,0,"10556",,terminal_output +4780,5462334,"TERMINAL",0,0,"1667",,terminal_output +4781,5463221,"TERMINAL",0,0,"2778",,terminal_output +4782,5464336,"TERMINAL",0,0,"3889",,terminal_output +4783,5465442,"TERMINAL",0,0,"49950",,terminal_output +4784,5466391,"TERMINAL",0,0,"521212",,terminal_output +4785,5467438,"TERMINAL",0,0,"7223",,terminal_output +4786,5468505,"TERMINAL",0,0,"8334",,terminal_output +4787,5469529,"TERMINAL",0,0,"9445",,terminal_output +4788,5470647,"TERMINAL",0,0,"20556",,terminal_output +4789,5471608,"TERMINAL",0,0,"1667",,terminal_output +4790,5472615,"TERMINAL",0,0,"2778",,terminal_output +4791,5473734,"TERMINAL",0,0,"3889",,terminal_output +4792,5474847,"TERMINAL",0,0,"4994:00",,terminal_output +4793,5475834,"TERMINAL",0,0,"530301",,terminal_output +4794,5476902,"TERMINAL",0,0,"6112",,terminal_output +4795,5477886,"TERMINAL",0,0,"7223",,terminal_output +4796,5478868,"TERMINAL",0,0,"8334",,terminal_output +4797,5479880,"TERMINAL",0,0,"9445",,terminal_output +4798,5480990,"TERMINAL",0,0,"30556",,terminal_output +4799,5482023,"TERMINAL",0,0,"1667",,terminal_output +4800,5483005,"TERMINAL",0,0,"2778",,terminal_output +4801,5484066,"TERMINAL",0,0,"3889",,terminal_output +4802,5485189,"TERMINAL",0,0,"49910",,terminal_output +4803,5486214,"TERMINAL",0,0,"540401",,terminal_output +4804,5487238,"TERMINAL",0,0,"6112",,terminal_output +4805,5488218,"TERMINAL",0,0,"7223",,terminal_output +4806,5489248,"TERMINAL",0,0,"8334",,terminal_output +4807,5490317,"TERMINAL",0,0,"9445",,terminal_output +4808,5491284,"TERMINAL",0,0,"40667",,terminal_output +4809,5492360,"TERMINAL",0,0,"2778",,terminal_output +4810,5493484,"TERMINAL",0,0,"3889",,terminal_output +4811,5494509,"TERMINAL",0,0,"49920",,terminal_output +4812,5495468,"TERMINAL",0,0,"550501",,terminal_output +4813,5496555,"TERMINAL",0,0,"6112",,terminal_output +4814,5497579,"TERMINAL",0,0,"7223",,terminal_output +4815,5498604,"TERMINAL",0,0,"8334",,terminal_output +4816,5499655,"TERMINAL",0,0,"9445",,terminal_output +4817,5500666,"TERMINAL",0,0,"50556",,terminal_output +4818,5501704,"TERMINAL",0,0,"1667",,terminal_output +4819,5502905,"TERMINAL",0,0,"2778",,terminal_output +4820,5503781,"TERMINAL",0,0,"3889",,terminal_output +4821,5504817,"TERMINAL",0,0,"49930",,terminal_output +4822,5505871,"TERMINAL",0,0,"52:002:001",,terminal_output +4823,5507014,"TERMINAL",0,0,"6112",,terminal_output +4824,5507972,"TERMINAL",0,0,"7223",,terminal_output +4825,5509066,"TERMINAL",0,0,"8334",,terminal_output +4826,5510022,"TERMINAL",0,0,"9445",,terminal_output +4827,5511098,"TERMINAL",0,0,"8:00556",,terminal_output +4828,5512106,"TERMINAL",0,0,"1667",,terminal_output +4829,5513102,"TERMINAL",0,0,"2778",,terminal_output +4830,5514139,"TERMINAL",0,0,"3889",,terminal_output +4831,5515201,"TERMINAL",0,0,"49940",,terminal_output +4832,5516238,"TERMINAL",0,0,"510101",,terminal_output +4833,5517372,"TERMINAL",0,0,"6112",,terminal_output +4834,5518345,"TERMINAL",0,0,"7334",,terminal_output +4835,5519512,"TERMINAL",0,0,"9445",,terminal_output +4836,5520525,"TERMINAL",0,0,"10556",,terminal_output +4837,5521421,"TERMINAL",0,0,"1667",,terminal_output +4838,5522573,"TERMINAL",0,0,"2778",,terminal_output +4839,5523538,"TERMINAL",0,0,"3889",,terminal_output +4840,5524638,"TERMINAL",0,0,"49950",,terminal_output 
+4841,5525669,"TERMINAL",0,0,"520201",,terminal_output +4842,5526613,"TERMINAL",0,0,"6112",,terminal_output +4843,5527671,"TERMINAL",0,0,"7223",,terminal_output +4844,5528706,"TERMINAL",0,0,"8334",,terminal_output +4845,5529777,"TERMINAL",0,0,"9445",,terminal_output +4846,5530858,"TERMINAL",0,0,"20556",,terminal_output +4847,5531882,"TERMINAL",0,0,"1667",,terminal_output +4848,5532878,"TERMINAL",0,0,"2778",,terminal_output +4849,5533948,"TERMINAL",0,0,"3889",,terminal_output +4850,5535056,"TERMINAL",0,0,"4995:00",,terminal_output +4851,5535982,"TERMINAL",0,0,"530301",,terminal_output +4852,5537104,"TERMINAL",0,0,"6112",,terminal_output +4853,5538055,"TERMINAL",0,0,"7223",,terminal_output +4854,5539078,"TERMINAL",0,0,"8334",,terminal_output +4855,5540290,"TERMINAL",0,0,"9445",,terminal_output +4856,5541219,"TERMINAL",0,0,"30556",,terminal_output +4857,5542246,"TERMINAL",0,0,"1667",,terminal_output +4858,5543248,"TERMINAL",0,0,"2778",,terminal_output +4859,5544272,"TERMINAL",0,0,"3889",,terminal_output +4860,5545350,"TERMINAL",0,0,"4404011",,terminal_output +4861,5546335,"TERMINAL",0,0,"6112",,terminal_output +4862,5547370,"TERMINAL",0,0,"7223",,terminal_output +4863,5548420,"TERMINAL",0,0,"8334",,terminal_output +4864,5549504,"TERMINAL",0,0,"9445",,terminal_output +4865,5550523,"TERMINAL",0,0,"40556",,terminal_output +4866,5551552,"TERMINAL",0,0,"1667",,terminal_output +4867,5552584,"TERMINAL",0,0,"2778",,terminal_output +4868,5553617,"TERMINAL",0,0,"3889",,terminal_output +4869,5554733,"TERMINAL",0,0,"49920",,terminal_output +4870,5555755,"TERMINAL",0,0,"550501",,terminal_output +4871,5556837,"TERMINAL",0,0,"6112",,terminal_output +4872,5557850,"TERMINAL",0,0,"7223",,terminal_output +4873,5558823,"TERMINAL",0,0,"8334",,terminal_output +4874,5559882,"TERMINAL",0,0,"9445",,terminal_output +4875,5561372,"TERMINAL",0,0,"50556",,terminal_output +4876,5562396,"TERMINAL",0,0,"1667",,terminal_output +4877,5563514,"TERMINAL",0,0,"2778",,terminal_output +4878,5564558,"TERMINAL",0,0,"39930",,terminal_output +4879,5565624,"TERMINAL",0,0,"53:003:001",,terminal_output +4880,5566504,"TERMINAL",0,0,"6112",,terminal_output +4881,5567522,"TERMINAL",0,0,"7223",,terminal_output +4882,5568546,"TERMINAL",0,0,"8334",,terminal_output +4883,5569628,"TERMINAL",0,0,"9445",,terminal_output +4884,5570599,"TERMINAL",0,0,"9:00556",,terminal_output +4885,5571616,"TERMINAL",0,0,"1667",,terminal_output +4886,5572639,"TERMINAL",0,0,"2778",,terminal_output +4887,5573670,"TERMINAL",0,0,"3889",,terminal_output +4888,5574710,"TERMINAL",0,0,"49940",,terminal_output +4889,5576027,"TERMINAL",0,0,"510101",,terminal_output +4890,5576799,"TERMINAL",0,0,"6112",,terminal_output +4891,5577862,"TERMINAL",0,0,"7223",,terminal_output +4892,5578986,"TERMINAL",0,0,"8334",,terminal_output +4893,5580011,"TERMINAL",0,0,"9445",,terminal_output +4894,5581034,"TERMINAL",0,0,"10556",,terminal_output +4895,5582170,"TERMINAL",0,0,"1667",,terminal_output +4896,5583057,"TERMINAL",0,0,"2778",,terminal_output +4897,5584209,"TERMINAL",0,0,"3889",,terminal_output +4898,5585234,"TERMINAL",0,0,"49950",,terminal_output +4899,5586257,"TERMINAL",0,0,"520201",,terminal_output +4900,5587281,"TERMINAL",0,0,"6112",,terminal_output +4901,5588269,"TERMINAL",0,0,"7223",,terminal_output +4902,5589327,"TERMINAL",0,0,"8445",,terminal_output +4903,5590455,"TERMINAL",0,0,"20556",,terminal_output +4904,5591480,"TERMINAL",0,0,"1667",,terminal_output +4905,5592560,"TERMINAL",0,0,"2778",,terminal_output +4906,5593630,"TERMINAL",0,0,"3889",,terminal_output 
+4907,5594554,"TERMINAL",0,0,"4996:00",,terminal_output +4908,5595677,"TERMINAL",0,0,"530301",,terminal_output +4909,5596717,"TERMINAL",0,0,"6112",,terminal_output +4910,5597725,"TERMINAL",0,0,"7223",,terminal_output +4911,5598754,"TERMINAL",0,0,"8334",,terminal_output +4912,5599893,"TERMINAL",0,0,"9445",,terminal_output +4913,5600799,"TERMINAL",0,0,"30556",,terminal_output +4914,5601937,"TERMINAL",0,0,"1667",,terminal_output +4915,5603000,"TERMINAL",0,0,"2778",,terminal_output +4916,5604075,"TERMINAL",0,0,"3889",,terminal_output +4917,5605153,"TERMINAL",0,0,"49910",,terminal_output +4918,5606136,"TERMINAL",0,0,"540401",,terminal_output +4919,5607222,"TERMINAL",0,0,"6112",,terminal_output +4920,5608427,"TERMINAL",0,0,"7223",,terminal_output +4921,5609364,"TERMINAL",0,0,"8334",,terminal_output +4922,5610730,"TERMINAL",0,0,"9445",,terminal_output +4923,5611754,"TERMINAL",0,0,"40556",,terminal_output +4924,5612676,"TERMINAL",0,0,"1667",,terminal_output +4925,5613794,"TERMINAL",0,0,"2778",,terminal_output +4926,5614999,"TERMINAL",0,0,"3889",,terminal_output +4927,5615945,"TERMINAL",0,0,"49920",,terminal_output +4928,5617191,"TERMINAL",0,0,"551512",,terminal_output +4929,5618181,"TERMINAL",0,0,"7223",,terminal_output +4930,5619163,"TERMINAL",0,0,"8334",,terminal_output +4931,5622018,"TERMINAL",0,0,"9445505561667",,terminal_output +4932,5623328,"TERMINAL",0,0,"2778",,terminal_output +4933,5624659,"TERMINAL",0,0,"3889",,terminal_output +4934,5625685,"TERMINAL",0,0,"49930",,terminal_output +4935,5626593,"TERMINAL",0,0,"54:004:001",,terminal_output +4936,5627611,"TERMINAL",0,0,"6112",,terminal_output +4937,5628470,"TERMINAL",0,0,"7223",,terminal_output +4938,5628855,"TERMINAL",0,0,"8334",,terminal_output +4939,5630190,"TERMINAL",0,0,"9445",,terminal_output +4940,5631417,"TERMINAL",0,0,"5:00:00556",,terminal_output +4941,5632543,"TERMINAL",0,0,"1667",,terminal_output +4942,5633566,"TERMINAL",0,0,"2778",,terminal_output +4943,5634584,"TERMINAL",0,0,"3889",,terminal_output +4944,5635249,"TERMINAL",0,0,"49940",,terminal_output +4945,5636384,"TERMINAL",0,0,"510101",,terminal_output +4946,5637458,"TERMINAL",0,0,"6112",,terminal_output +4947,5638445,"TERMINAL",0,0,"7223",,terminal_output +4948,5639713,"TERMINAL",0,0,"8334",,terminal_output +4949,5640737,"TERMINAL",0,0,"9445",,terminal_output +4950,5641370,"TERMINAL",0,0,"10556",,terminal_output +4951,5642680,"TERMINAL",0,0,"1778",,terminal_output +4952,5643755,"TERMINAL",0,0,"3889",,terminal_output +4953,5644728,"TERMINAL",0,0,"49950",,terminal_output +4954,5645752,"TERMINAL",0,0,"520201",,terminal_output +4955,5646738,"TERMINAL",0,0,"6112",,terminal_output +4956,5647799,"TERMINAL",0,0,"7223",,terminal_output +4957,5648871,"TERMINAL",0,0,"8334",,terminal_output +4958,5649893,"TERMINAL",0,0,"9445",,terminal_output +4959,5650976,"TERMINAL",0,0,"20556",,terminal_output +4960,5651897,"TERMINAL",0,0,"1667",,terminal_output +4961,5652865,"TERMINAL",0,0,"2778",,terminal_output +4962,5654938,"TERMINAL",0,0,"3997:00",,terminal_output +4963,5656049,"TERMINAL",0,0,"530301",,terminal_output +4964,5657128,"TERMINAL",0,0,"6112",,terminal_output +4965,5658038,"TERMINAL",0,0,"7223",,terminal_output +4966,5659213,"TERMINAL",0,0,"8334",,terminal_output +4967,5660341,"TERMINAL",0,0,"9445",,terminal_output +4968,5661164,"TERMINAL",0,0,"30556",,terminal_output +4969,5662470,"TERMINAL",0,0,"1667",,terminal_output +4970,5663307,"TERMINAL",0,0,"2778",,terminal_output +4971,5664331,"TERMINAL",0,0,"3889",,terminal_output +4972,5665511,"TERMINAL",0,0,"49910",,terminal_output 
+4973,5666538,"TERMINAL",0,0,"540401",,terminal_output +4974,5667531,"TERMINAL",0,0,"6112",,terminal_output +4975,5668468,"TERMINAL",0,0,"7223",,terminal_output +4976,5669508,"TERMINAL",0,0,"8334",,terminal_output +4977,5670634,"TERMINAL",0,0,"9556",,terminal_output +4978,5671719,"TERMINAL",0,0,"41667",,terminal_output +4979,5672687,"TERMINAL",0,0,"2778",,terminal_output +4980,5673706,"TERMINAL",0,0,"3889",,terminal_output +4981,5674702,"TERMINAL",0,0,"49920",,terminal_output +4982,5676170,"TERMINAL",0,0,"550501",,terminal_output +4983,5677198,"TERMINAL",0,0,"6112",,terminal_output +4984,5678174,"TERMINAL",0,0,"7223",,terminal_output +4985,5679014,"TERMINAL",0,0,"8334",,terminal_output +4986,5679747,"TERMINAL",0,0,"9445",,terminal_output +4987,5680859,"TERMINAL",0,0,"50556",,terminal_output +4988,5681908,"TERMINAL",0,0,"1667",,terminal_output +4989,5683012,"TERMINAL",0,0,"2778",,terminal_output +4990,5683947,"TERMINAL",0,0,"3889",,terminal_output +4991,5685036,"TERMINAL",0,0,"49930",,terminal_output +4992,5685995,"TERMINAL",0,0,"55:005:001",,terminal_output +4993,5687022,"TERMINAL",0,0,"6112",,terminal_output +4994,5688036,"TERMINAL",0,0,"7223",,terminal_output +4995,5689153,"TERMINAL",0,0,"8334",,terminal_output +4996,5690297,"TERMINAL",0,0,"9445",,terminal_output +4997,5691370,"TERMINAL",0,0,"1:00556",,terminal_output +4998,5692547,"TERMINAL",0,0,"1667",,terminal_output +4999,5693251,"TERMINAL",0,0,"2778",,terminal_output +5000,5694291,"TERMINAL",0,0,"3889",,terminal_output +5001,5695314,"TERMINAL",0,0,"49940",,terminal_output +5002,5696413,"TERMINAL",0,0,"611112",,terminal_output +5003,5697476,"TERMINAL",0,0,"7223",,terminal_output +5004,5698489,"TERMINAL",0,0,"8334",,terminal_output +5005,5699668,"TERMINAL",0,0,"9445",,terminal_output +5006,5700750,"TERMINAL",0,0,"10556",,terminal_output +5007,5701713,"TERMINAL",0,0,"1667",,terminal_output +5008,5702901,"TERMINAL",0,0,"2778",,terminal_output +5009,5703836,"TERMINAL",0,0,"3889",,terminal_output +5010,5705055,"TERMINAL",0,0,"49950",,terminal_output +5011,5706163,"TERMINAL",0,0,"520201",,terminal_output +5012,5707324,"TERMINAL",0,0,"6112",,terminal_output +5013,5708396,"TERMINAL",0,0,"7223",,terminal_output +5014,5709269,"TERMINAL",0,0,"8334",,terminal_output +5015,5710616,"TERMINAL",0,0,"9445",,terminal_output +5016,5711288,"TERMINAL",0,0,"20556",,terminal_output +5017,5712628,"TERMINAL",0,0,"1667",,terminal_output +5018,5713569,"TERMINAL",0,0,"2778",,terminal_output +5019,5714492,"TERMINAL",0,0,"3889",,terminal_output +5020,5715793,"TERMINAL",0,0,"4998:00",,terminal_output +5021,5716919,"TERMINAL",0,0,"530301",,terminal_output +5022,5719893,"TERMINAL",0,0,"61127223",,terminal_output +5023,5719965,"TERMINAL",0,0,"8334",,terminal_output +5024,5720783,"TERMINAL",0,0,"9445",,terminal_output +5025,5721529,"TERMINAL",0,0,"30667",,terminal_output +5026,5722869,"TERMINAL",0,0,"2778",,terminal_output +5027,5723884,"TERMINAL",0,0,"3889",,terminal_output +5028,5724755,"TERMINAL",0,0,"49910",,terminal_output +5029,5725727,"TERMINAL",0,0,"540401",,terminal_output +5030,5727169,"TERMINAL",0,0,"6112",,terminal_output +5031,5728024,"TERMINAL",0,0,"7223",,terminal_output +5032,5729107,"TERMINAL",0,0,"8334",,terminal_output +5033,5730029,"TERMINAL",0,0,"9445",,terminal_output +5034,5730791,"TERMINAL",0,0,"40556",,terminal_output +5035,5732000,"TERMINAL",0,0,"1667",,terminal_output +5036,5732961,"TERMINAL",0,0,"2778",,terminal_output +5037,5734067,"TERMINAL",0,0,"3889",,terminal_output +5038,5735097,"TERMINAL",0,0,"49920",,terminal_output 
+5039,5736177,"TERMINAL",0,0,"550501",,terminal_output +5040,5737196,"TERMINAL",0,0,"6112",,terminal_output +5041,5738321,"TERMINAL",0,0,"7223",,terminal_output +5042,5739346,"TERMINAL",0,0,"8334",,terminal_output +5043,5740473,"TERMINAL",0,0,"9445",,terminal_output +5044,5741301,"TERMINAL",0,0,"50556",,terminal_output +5045,5742316,"TERMINAL",0,0,"1667",,terminal_output +5046,5743340,"TERMINAL",0,0,"2778",,terminal_output +5047,5744329,"TERMINAL",0,0,"3889",,terminal_output +5048,5745296,"TERMINAL",0,0,"49930",,terminal_output +5049,5746366,"TERMINAL",0,0,"56:006:001",,terminal_output +5050,5747435,"TERMINAL",0,0,"6112",,terminal_output +5051,5748482,"TERMINAL",0,0,"7334",,terminal_output +5052,5749586,"TERMINAL",0,0,"9445",,terminal_output +5053,5750540,"TERMINAL",0,0,"2:00556",,terminal_output +5054,5751531,"TERMINAL",0,0,"1667",,terminal_output +5055,5752528,"TERMINAL",0,0,"2778",,terminal_output +5056,5753681,"TERMINAL",0,0,"3889",,terminal_output +5057,5754626,"TERMINAL",0,0,"49940",,terminal_output +5058,5755656,"TERMINAL",0,0,"510101",,terminal_output +5059,5756755,"TERMINAL",0,0,"6112",,terminal_output +5060,5757694,"TERMINAL",0,0,"7223",,terminal_output +5061,5758742,"TERMINAL",0,0,"8334",,terminal_output +5062,5759744,"TERMINAL",0,0,"9445",,terminal_output +5063,5760829,"TERMINAL",0,0,"10556",,terminal_output +5064,5761858,"TERMINAL",0,0,"1667",,terminal_output +5065,5762918,"TERMINAL",0,0,"2778",,terminal_output +5066,5764024,"TERMINAL",0,0,"3889",,terminal_output +5067,5764933,"TERMINAL",0,0,"49950",,terminal_output +5068,5766022,"TERMINAL",0,0,"520201",,terminal_output +5069,5767199,"TERMINAL",0,0,"6112",,terminal_output +5070,5768151,"TERMINAL",0,0,"7223",,terminal_output +5071,5769333,"TERMINAL",0,0,"8334",,terminal_output +5072,5770271,"TERMINAL",0,0,"9445",,terminal_output +5073,5771231,"TERMINAL",0,0,"20556",,terminal_output +5074,5772321,"TERMINAL",0,0,"1667",,terminal_output +5075,5773420,"TERMINAL",0,0,"2778",,terminal_output +5076,5774674,"TERMINAL",0,0,"3999:00",,terminal_output +5077,5775597,"TERMINAL",0,0,"530301",,terminal_output +5078,5776664,"TERMINAL",0,0,"6112",,terminal_output +5079,5777643,"TERMINAL",0,0,"7223",,terminal_output +5080,5778668,"TERMINAL",0,0,"8334",,terminal_output +5081,5779758,"TERMINAL",0,0,"9445",,terminal_output +5082,5780818,"TERMINAL",0,0,"30556",,terminal_output +5083,5781821,"TERMINAL",0,0,"1667",,terminal_output +5084,5782852,"TERMINAL",0,0,"2778",,terminal_output +5085,5783816,"TERMINAL",0,0,"3889",,terminal_output +5086,5784867,"TERMINAL",0,0,"49910",,terminal_output +5087,5786041,"TERMINAL",0,0,"540401",,terminal_output +5088,5787035,"TERMINAL",0,0,"6112",,terminal_output +5089,5788054,"TERMINAL",0,0,"7223",,terminal_output +5090,5789123,"TERMINAL",0,0,"8334",,terminal_output +5091,5790109,"TERMINAL",0,0,"9445",,terminal_output +5092,5791078,"TERMINAL",0,0,"40556",,terminal_output +5093,5792286,"TERMINAL",0,0,"1667",,terminal_output +5094,5793295,"TERMINAL",0,0,"2778",,terminal_output +5095,5794204,"TERMINAL",0,0,"3889",,terminal_output +5096,5795573,"TERMINAL",0,0,"4505021",,terminal_output +5097,5796588,"TERMINAL",0,0,"6112",,terminal_output +5098,5797626,"TERMINAL",0,0,"7223",,terminal_output +5099,5798704,"TERMINAL",0,0,"8334",,terminal_output +5100,5799605,"TERMINAL",0,0,"9445",,terminal_output +5101,5800686,"TERMINAL",0,0,"50556",,terminal_output +5102,5801661,"TERMINAL",0,0,"1667",,terminal_output +5103,5802849,"TERMINAL",0,0,"2778",,terminal_output +5104,5803811,"TERMINAL",0,0,"3889",,terminal_output 
+5105,5804986,"TERMINAL",0,0,"49930",,terminal_output +5106,5806214,"TERMINAL",0,0,"57:007:001",,terminal_output +5107,5807252,"TERMINAL",0,0,"6112",,terminal_output +5108,5808275,"TERMINAL",0,0,"7223",,terminal_output +5109,5809308,"TERMINAL",0,0,"8334",,terminal_output +5110,5810272,"TERMINAL",0,0,"9445",,terminal_output +5111,5811436,"TERMINAL",0,0,"3:00556",,terminal_output +5112,5812470,"TERMINAL",0,0,"1667",,terminal_output +5113,5813434,"TERMINAL",0,0,"2778",,terminal_output +5114,5814652,"TERMINAL",0,0,"3889",,terminal_output +5115,5815646,"TERMINAL",0,0,"49940",,terminal_output +5116,5816669,"TERMINAL",0,0,"510101",,terminal_output +5117,5817786,"TERMINAL",0,0,"6112",,terminal_output +5118,5818629,"TERMINAL",0,0,"7223",,terminal_output +5119,5819518,"TERMINAL",0,0,"8445",,terminal_output +5120,5820670,"TERMINAL",0,0,"10556",,terminal_output +5121,5823319,"TERMINAL",0,0,"16672778",,terminal_output +5122,5824125,"TERMINAL",0,0,"3889",,terminal_output +5123,5825168,"TERMINAL",0,0,"49950",,terminal_output +5124,5826355,"TERMINAL",0,0,"520201",,terminal_output +5125,5827410,"TERMINAL",0,0,"6112",,terminal_output +5126,5828493,"TERMINAL",0,0,"7223",,terminal_output +5127,5829320,"TERMINAL",0,0,"8334",,terminal_output +5128,5830689,"TERMINAL",0,0,"9445",,terminal_output +5129,5833418,"TERMINAL",0,0,"2055616672778",,terminal_output +5130,5835002,"TERMINAL",0,0,"3889",,terminal_output +5131,5835148,"TERMINAL",0,0,"4991:00:00",,terminal_output +5132,5835341,"TERMINAL",0,0,"salloc: Job 3468617 has exceeded its time limit and its allocation has been revoked.\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3468617.interactive ON hkn0403 CANCELLED AT 2025-09-05T15:03:25 DUE TO TIME LIMIT ***\r\n",,terminal_output +5133,5836013,"TERMINAL",0,0,"53030CG1",,terminal_output +5134,5837161,"TERMINAL",0,0,"611",,terminal_output +5135,5838045,"TERMINAL",0,0,"722",,terminal_output +5136,5839187,"TERMINAL",0,0,"833",,terminal_output +5137,5840314,"TERMINAL",0,0,"944",,terminal_output +5138,5841338,"TERMINAL",0,0,"3055",,terminal_output +5139,5842362,"TERMINAL",0,0,"166",,terminal_output +5140,5843204,"TERMINAL",0,0,"277",,terminal_output +5141,5844308,"TERMINAL",0,0,"388",,terminal_output +5142,5845331,"TERMINAL",0,0,"44040",,terminal_output +5143,5846337,"TERMINAL",0,0,"611",,terminal_output +5144,5847483,"TERMINAL",0,0,"722",,terminal_output +5145,5848514,"TERMINAL",0,0,"833",,terminal_output +5146,5849472,"TERMINAL",0,0,"944",,terminal_output +5147,5850555,"TERMINAL",0,0,"4055",,terminal_output +5148,5851580,"TERMINAL",0,0,"166",,terminal_output +5149,5852704,"TERMINAL",0,0,"277",,terminal_output +5150,5853727,"TERMINAL",0,0,"388",,terminal_output +5151,5854738,"TERMINAL",0,0,"499",,terminal_output +5152,5855720,"TERMINAL",0,0,"55050",,terminal_output +5153,5856757,"TERMINAL",0,0,"611",,terminal_output +5154,5857812,"TERMINAL",0,0,"722",,terminal_output +5155,5859165,"TERMINAL",0,0,"833",,terminal_output +5156,5859994,"TERMINAL",0,0,"944",,terminal_output +5157,5861000,"TERMINAL",0,0,"5055",,terminal_output +5158,5862022,"TERMINAL",0,0,"166",,terminal_output +5159,5863031,"TERMINAL",0,0,"277",,terminal_output +5160,5864124,"TERMINAL",0,0,"388",,terminal_output +5161,5865104,"TERMINAL",0,0,"srun: error: hkn0403: task 0: Killed\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +5162,5865166,"TERMINAL",0,0,"499",,terminal_output +5163,5866144,"TERMINAL",0,0,"58:008:00",,terminal_output +5164,5867392,"TERMINAL",0,0,"611",,terminal_output 
+5165,5868290,"TERMINAL",0,0,"722",,terminal_output +5166,5869390,"TERMINAL",0,0,"844",,terminal_output +5167,5870376,"TERMINAL",0,0,"4:0055",,terminal_output +5168,5871391,"TERMINAL",0,0,"166",,terminal_output +5169,5872496,"TERMINAL",0,0,"277",,terminal_output +5170,5873594,"TERMINAL",0,0,"388",,terminal_output +5171,5874748,"TERMINAL",0,0,"499",,terminal_output +5172,5875898,"TERMINAL",0,0,"51010",,terminal_output +5173,5876854,"TERMINAL",0,0,"\r611",,terminal_output +5174,5877782,"TERMINAL",0,0,"722",,terminal_output +5175,5878852,"TERMINAL",0,0,"833",,terminal_output +5176,5879896,"TERMINAL",0,0,"944",,terminal_output +5177,5880915,"TERMINAL",0,0,"1055",,terminal_output +5178,5881867,"TERMINAL",0,0,"166",,terminal_output +5179,5882981,"TERMINAL",0,0,"277",,terminal_output +5180,5883915,"TERMINAL",0,0,"388",,terminal_output +5181,5884953,"TERMINAL",0,0,"499",,terminal_output +5182,5885995,"TERMINAL",0,0,"52020",,terminal_output +5183,5887318,"TERMINAL",0,0,"611",,terminal_output +5184,5888088,"TERMINAL",0,0,"722",,terminal_output +5185,5889204,"TERMINAL",0,0,"833",,terminal_output +5186,5890169,"TERMINAL",0,0,"944",,terminal_output +5187,5891312,"TERMINAL",0,0,"2055",,terminal_output +5188,5892791,"TERMINAL",0,0,"177",,terminal_output +5189,5893834,"TERMINAL",0,0,"388",,terminal_output +5190,5894893,"TERMINAL",0,0,"499",,terminal_output +5191,5896392,"TERMINAL",0,0,"53131",,terminal_output +5192,5897450,"TERMINAL",0,0,"722",,terminal_output +5193,5898477,"TERMINAL",0,0,"833",,terminal_output +5194,5899501,"TERMINAL",0,0,"944",,terminal_output +5195,5900636,"TERMINAL",0,0,"3055",,terminal_output +5196,5901560,"TERMINAL",0,0,"166",,terminal_output +5197,5902675,"TERMINAL",0,0,"277",,terminal_output +5198,5903682,"TERMINAL",0,0,"388",,terminal_output +5199,5904727,"TERMINAL",0,0,"499",,terminal_output +5200,5905712,"TERMINAL",0,0,"54040",,terminal_output +5201,5906771,"TERMINAL",0,0,"611",,terminal_output +5202,5907799,"TERMINAL",0,0,"722",,terminal_output +5203,5908840,"TERMINAL",0,0,"833",,terminal_output +5204,5909830,"TERMINAL",0,0,"944",,terminal_output +5205,5910866,"TERMINAL",0,0,"4055",,terminal_output +5206,5911994,"TERMINAL",0,0,"166",,terminal_output +5207,5913019,"TERMINAL",0,0,"277",,terminal_output +5208,5914041,"TERMINAL",0,0,"388",,terminal_output +5209,5915066,"TERMINAL",0,0,"499",,terminal_output +5210,5916201,"TERMINAL",0,0,"55050",,terminal_output +5211,5917127,"TERMINAL",0,0,"611",,terminal_output +5212,5918162,"TERMINAL",0,0,"722",,terminal_output +5213,5919264,"TERMINAL",0,0,"833",,terminal_output +5214,5920287,"TERMINAL",0,0,"944",,terminal_output +5215,5921313,"TERMINAL",0,0,"5066",,terminal_output +5216,5922441,"TERMINAL",0,0,"277",,terminal_output +5217,5923418,"TERMINAL",0,0,"388",,terminal_output +5218,5924486,"TERMINAL",0,0,"499",,terminal_output +5219,5925511,"TERMINAL",0,0,"59:009:00",,terminal_output +5220,5926500,"TERMINAL",0,0,"611",,terminal_output +5221,5927660,"TERMINAL",0,0,"722",,terminal_output +5222,5928597,"TERMINAL",0,0,"833",,terminal_output +5223,5929675,"TERMINAL",0,0,"944",,terminal_output +5224,5930732,"TERMINAL",0,0,"5:0055",,terminal_output +5225,5931804,"TERMINAL",0,0,"166",,terminal_output +5226,5932782,"TERMINAL",0,0,"277",,terminal_output +5227,5933814,"TERMINAL",0,0,"388",,terminal_output +5228,5934905,"TERMINAL",0,0,"499",,terminal_output +5229,5935958,"TERMINAL",0,0,"51010",,terminal_output +5230,5936980,"TERMINAL",0,0,"611",,terminal_output +5231,5938051,"TERMINAL",0,0,"722",,terminal_output 
+5232,5939067,"TERMINAL",0,0,"833",,terminal_output +5233,5940164,"TERMINAL",0,0,"944",,terminal_output +5234,5941187,"TERMINAL",0,0,"1055",,terminal_output +5235,5942201,"TERMINAL",0,0,"166",,terminal_output +5236,5943203,"TERMINAL",0,0,"277",,terminal_output +5237,5944354,"TERMINAL",0,0,"388",,terminal_output +5238,5945375,"TERMINAL",0,0,"42020",,terminal_output +5239,5946341,"TERMINAL",0,0,"611",,terminal_output +5240,5947436,"TERMINAL",0,0,"722",,terminal_output +5241,5948479,"TERMINAL",0,0,"833",,terminal_output +5242,5949470,"TERMINAL",0,0,"944",,terminal_output +5243,5950602,"TERMINAL",0,0,"2055",,terminal_output +5244,5951662,"TERMINAL",0,0,"166",,terminal_output +5245,5952689,"TERMINAL",0,0,"277",,terminal_output +5246,5953725,"TERMINAL",0,0,"388",,terminal_output +5247,5954705,"TERMINAL",0,0,"499",,terminal_output +5248,5955733,"TERMINAL",0,0,"53030",,terminal_output +5249,5956780,"TERMINAL",0,0,"611",,terminal_output +5250,5957835,"TERMINAL",0,0,"722",,terminal_output +5251,5958861,"TERMINAL",0,0,"833",,terminal_output +5252,5959918,"TERMINAL",0,0,"944",,terminal_output +5253,5960952,"TERMINAL",0,0,"3055",,terminal_output +5254,5962068,"TERMINAL",0,0,"166",,terminal_output +5255,5963091,"TERMINAL",0,0,"277",,terminal_output +5256,5964078,"TERMINAL",0,0,"388",,terminal_output +5257,5965243,"TERMINAL",0,0,"499",,terminal_output +5258,5966276,"TERMINAL",0,0,"54040",,terminal_output +5259,5967291,"TERMINAL",0,0,"611",,terminal_output +5260,5968245,"TERMINAL",0,0,"722",,terminal_output +5261,5969338,"TERMINAL",0,0,"844",,terminal_output +5262,5970362,"TERMINAL",0,0,"4055",,terminal_output +5263,5971364,"TERMINAL",0,0,"166",,terminal_output +5264,5972513,"TERMINAL",0,0,"277",,terminal_output +5265,5973537,"TERMINAL",0,0,"388",,terminal_output +5266,5974562,"TERMINAL",0,0,"499",,terminal_output +5267,5975531,"TERMINAL",0,0,"55050",,terminal_output +5268,5976571,"TERMINAL",0,0,"611",,terminal_output +5269,5977838,"TERMINAL",0,0,"722",,terminal_output +5270,5978760,"TERMINAL",0,0,"833",,terminal_output +5271,5979742,"TERMINAL",0,0,"944",,terminal_output +5272,5980740,"TERMINAL",0,0,"5055",,terminal_output +5273,5981903,"TERMINAL",0,0,"166",,terminal_output +5274,5982889,"TERMINAL",0,0,"277",,terminal_output +5275,5983908,"TERMINAL",0,0,"388",,terminal_output +5276,5984990,"TERMINAL",0,0,"499",,terminal_output +5277,5986030,"TERMINAL",0,0,"550:0050:00",,terminal_output +5278,5987053,"TERMINAL",0,0,"611",,terminal_output +5279,5988118,"TERMINAL",0,0,"722",,terminal_output +5280,5989111,"TERMINAL",0,0,"833",,terminal_output +5281,5990248,"TERMINAL",0,0,"944",,terminal_output +5282,5991253,"TERMINAL",0,0,"6:0055",,terminal_output +5283,5992244,"TERMINAL",0,0,"166",,terminal_output +5284,5993243,"TERMINAL",0,0,"277",,terminal_output +5285,5994313,"TERMINAL",0,0,"399",,terminal_output +5286,5995349,"TERMINAL",0,0,"51010",,terminal_output +5287,5996375,"TERMINAL",0,0,"611",,terminal_output +5288,5997575,"TERMINAL",0,0,"722",,terminal_output +5289,5998689,"TERMINAL",0,0,"833",,terminal_output +5290,5999502,"TERMINAL",0,0,"944",,terminal_output +5291,6000555,"TERMINAL",0,0,"1055",,terminal_output +5292,6001597,"TERMINAL",0,0,"166",,terminal_output +5293,6002721,"TERMINAL",0,0,"277",,terminal_output +5294,6003688,"TERMINAL",0,0,"388",,terminal_output +5295,6004768,"TERMINAL",0,0,"499",,terminal_output +5296,6005794,"TERMINAL",0,0,"52020",,terminal_output +5297,6006799,"TERMINAL",0,0,"611",,terminal_output +5298,6007838,"TERMINAL",0,0,"722",,terminal_output 
+5299,6008886,"TERMINAL",0,0,"833",,terminal_output +5300,6009919,"TERMINAL",0,0,"944",,terminal_output +5301,6010961,"TERMINAL",0,0,"2055",,terminal_output +5302,6012154,"TERMINAL",0,0,"166",,terminal_output +5303,6013063,"TERMINAL",0,0,"277",,terminal_output +5304,6014199,"TERMINAL",0,0,"388",,terminal_output +5305,6015130,"TERMINAL",0,0,"499",,terminal_output +5306,6016238,"TERMINAL",0,0,"53030",,terminal_output +5307,6017262,"TERMINAL",0,0,"611",,terminal_output +5308,6018252,"TERMINAL",0,0,"722",,terminal_output +5309,6019298,"TERMINAL",0,0,"844",,terminal_output +5310,6020353,"TERMINAL",0,0,"3055",,terminal_output +5311,6021384,"TERMINAL",0,0,"166",,terminal_output +5312,6022484,"TERMINAL",0,0,"277",,terminal_output +5313,6023508,"TERMINAL",0,0,"388",,terminal_output +5314,6024532,"TERMINAL",0,0,"499",,terminal_output +5315,6025568,"TERMINAL",0,0,"54040",,terminal_output +5316,6026698,"TERMINAL",0,0,"611",,terminal_output +5317,6027708,"TERMINAL",0,0,"722",,terminal_output +5318,6028693,"TERMINAL",0,0,"833",,terminal_output +5319,6029751,"TERMINAL",0,0,"944",,terminal_output +5320,6030778,"TERMINAL",0,0,"4055",,terminal_output +5321,6031820,"TERMINAL",0,0,"166",,terminal_output +5322,6032864,"TERMINAL",0,0,"277",,terminal_output +5323,6033963,"TERMINAL",0,0,"388",,terminal_output +5324,6034990,"TERMINAL",0,0,"499",,terminal_output +5325,6036113,"TERMINAL",0,0,"55050",,terminal_output +5326,6037137,"TERMINAL",0,0,"611",,terminal_output +5327,6038175,"TERMINAL",0,0,"722",,terminal_output +5328,6039123,"TERMINAL",0,0,"833",,terminal_output +5329,6040230,"TERMINAL",0,0,"944",,terminal_output +5330,6041227,"TERMINAL",0,0,"5055",,terminal_output +5331,6042268,"TERMINAL",0,0,"166",,terminal_output +5332,6043300,"TERMINAL",0,0,"288",,terminal_output +5333,6044407,"TERMINAL",0,0,"499",,terminal_output +5334,6045425,"TERMINAL",0,0,"51:001:00",,terminal_output +5335,6046457,"TERMINAL",0,0,"611",,terminal_output +5336,6047573,"TERMINAL",0,0,"722",,terminal_output +5337,6048596,"TERMINAL",0,0,"833",,terminal_output +5338,6049588,"TERMINAL",0,0,"944",,terminal_output +5339,6050756,"TERMINAL",0,0,"7:0055",,terminal_output +5340,6051726,"TERMINAL",0,0,"166",,terminal_output +5341,6052795,"TERMINAL",0,0,"277",,terminal_output +5342,6053820,"TERMINAL",0,0,"388",,terminal_output +5343,6054801,"TERMINAL",0,0,"499",,terminal_output +5344,6055830,"TERMINAL",0,0,"51010",,terminal_output +5345,6056846,"TERMINAL",0,0,"611",,terminal_output +5346,6057915,"TERMINAL",0,0,"722",,terminal_output +5347,6059041,"TERMINAL",0,0,"833",,terminal_output +5348,6060064,"TERMINAL",0,0,"944",,terminal_output +5349,6061088,"TERMINAL",0,0,"1055",,terminal_output +5350,6062122,"TERMINAL",0,0,"166",,terminal_output +5351,6063142,"TERMINAL",0,0,"277",,terminal_output +5352,6064134,"TERMINAL",0,0,"388",,terminal_output +5353,6065290,"TERMINAL",0,0,"499",,terminal_output +5354,6066311,"TERMINAL",0,0,"52020",,terminal_output +5355,6067263,"TERMINAL",0,0,"611",,terminal_output +5356,6068289,"TERMINAL",0,0,"733",,terminal_output +5357,6069341,"TERMINAL",0,0,"944",,terminal_output +5358,6070408,"TERMINAL",0,0,"2055",,terminal_output +5359,6071433,"TERMINAL",0,0,"166",,terminal_output +5360,6072660,"TERMINAL",0,0,"277",,terminal_output +5361,6073489,"TERMINAL",0,0,"388",,terminal_output +5362,6074522,"TERMINAL",0,0,"499",,terminal_output +5363,6075647,"TERMINAL",0,0,"53030",,terminal_output +5364,6076653,"TERMINAL",0,0,"611",,terminal_output +5365,6077642,"TERMINAL",0,0,"722",,terminal_output 
+5366,6078720,"TERMINAL",0,0,"833",,terminal_output +5367,6079794,"TERMINAL",0,0,"944",,terminal_output +5368,6080763,"TERMINAL",0,0,"3055",,terminal_output +5369,6081797,"TERMINAL",0,0,"166",,terminal_output +5370,6082834,"TERMINAL",0,0,"277",,terminal_output +5371,6083869,"TERMINAL",0,0,"388",,terminal_output +5372,6084905,"TERMINAL",0,0,"499",,terminal_output +5373,6085972,"TERMINAL",0,0,"54040",,terminal_output +5374,6087104,"TERMINAL",0,0,"611",,terminal_output +5375,6088025,"TERMINAL",0,0,"722",,terminal_output +5376,6089157,"TERMINAL",0,0,"833",,terminal_output +5377,6090180,"TERMINAL",0,0,"944",,terminal_output +5378,6091139,"TERMINAL",0,0,"4055",,terminal_output +5379,6092228,"TERMINAL",0,0,"166",,terminal_output +5380,6093255,"TERMINAL",0,0,"277",,terminal_output +5381,6094380,"TERMINAL",0,0,"399",,terminal_output +5382,6095394,"TERMINAL",0,0,"55050",,terminal_output +5383,6096357,"TERMINAL",0,0,"611",,terminal_output +5384,6097445,"TERMINAL",0,0,"722",,terminal_output +5385,6098475,"TERMINAL",0,0,"833",,terminal_output +5386,6099499,"TERMINAL",0,0,"944",,terminal_output +5387,6100619,"TERMINAL",0,0,"5055",,terminal_output +5388,6101574,"TERMINAL",0,0,"166",,terminal_output +5389,6102721,"TERMINAL",0,0,"277",,terminal_output +5390,6103642,"TERMINAL",0,0,"388",,terminal_output +5391,6104761,"TERMINAL",0,0,"499",,terminal_output +5392,6105737,"TERMINAL",0,0,"52:002:00",,terminal_output +5393,6106764,"TERMINAL",0,0,"611",,terminal_output +5394,6107787,"TERMINAL",0,0,"722",,terminal_output +5395,6108836,"TERMINAL",0,0,"833",,terminal_output +5396,6109863,"TERMINAL",0,0,"944",,terminal_output +5397,6110899,"TERMINAL",0,0,"8:0055",,terminal_output +5398,6111983,"TERMINAL",0,0,"166",,terminal_output +5399,6112971,"TERMINAL",0,0,"277",,terminal_output +5400,6114033,"TERMINAL",0,0,"388",,terminal_output +5401,6115066,"TERMINAL",0,0,"499",,terminal_output +5402,6116098,"TERMINAL",0,0,"51010",,terminal_output +5403,6117135,"TERMINAL",0,0,"611",,terminal_output +5404,6118157,"TERMINAL",0,0,"722",,terminal_output +5405,6119258,"TERMINAL",0,0,"833",,terminal_output +5406,6120287,"TERMINAL",0,0,"944",,terminal_output +5407,6121305,"TERMINAL",0,0,"1066",,terminal_output +5408,6122428,"TERMINAL",0,0,"277",,terminal_output +5409,6123461,"TERMINAL",0,0,"388",,terminal_output +5410,6124448,"TERMINAL",0,0,"499",,terminal_output +5411,6125509,"TERMINAL",0,0,"52020",,terminal_output +5412,6126627,"TERMINAL",0,0,"611",,terminal_output +5413,6127560,"TERMINAL",0,0,"722",,terminal_output +5414,6128559,"TERMINAL",0,0,"833",,terminal_output +5415,6129737,"TERMINAL",0,0,"944",,terminal_output +5416,6130756,"TERMINAL",0,0,"2055",,terminal_output +5417,6131685,"TERMINAL",0,0,"166",,terminal_output +5418,6132769,"TERMINAL",0,0,"277",,terminal_output +5419,6133806,"TERMINAL",0,0,"388",,terminal_output +5420,6134827,"TERMINAL",0,0,"499",,terminal_output +5421,6135860,"TERMINAL",0,0,"53030",,terminal_output +5422,6136896,"TERMINAL",0,0,"611",,terminal_output +5423,6137931,"TERMINAL",0,0,"722",,terminal_output +5424,6139026,"TERMINAL",0,0,"833",,terminal_output +5425,6140045,"TERMINAL",0,0,"944",,terminal_output +5426,6141144,"TERMINAL",0,0,"3055",,terminal_output +5427,6142080,"TERMINAL",0,0,"166",,terminal_output +5428,6143215,"TERMINAL",0,0,"277",,terminal_output +5429,6144239,"TERMINAL",0,0,"388",,terminal_output +5430,6145230,"TERMINAL",0,0,"499",,terminal_output +5431,6146286,"TERMINAL",0,0,"54040",,terminal_output +5432,6147414,"TERMINAL",0,0,"622",,terminal_output 
+5433,6148340,"TERMINAL",0,0,"833",,terminal_output +5434,6149344,"TERMINAL",0,0,"944",,terminal_output +5435,6150486,"TERMINAL",0,0,"4055",,terminal_output +5436,6151430,"TERMINAL",0,0,"166",,terminal_output +5437,6152534,"TERMINAL",0,0,"277",,terminal_output +5438,6153558,"TERMINAL",0,0,"388",,terminal_output +5439,6154590,"TERMINAL",0,0,"499",,terminal_output +5440,6155751,"TERMINAL",0,0,"55050",,terminal_output +5441,6156760,"TERMINAL",0,0,"611",,terminal_output +5442,6157726,"TERMINAL",0,0,"722",,terminal_output +5443,6158781,"TERMINAL",0,0,"833",,terminal_output +5444,6159804,"TERMINAL",0,0,"944",,terminal_output +5445,6160832,"TERMINAL",0,0,"5055",,terminal_output +5446,6161853,"TERMINAL",0,0,"166",,terminal_output +5447,6162854,"TERMINAL",0,0,"277",,terminal_output +5448,6163897,"TERMINAL",0,0,"388",,terminal_output +5449,6165116,"TERMINAL",0,0,"499",,terminal_output +5450,6165987,"TERMINAL",0,0,"53:003:00",,terminal_output +5451,6167021,"TERMINAL",0,0,"611",,terminal_output +5452,6168063,"TERMINAL",0,0,"722",,terminal_output +5453,6169122,"TERMINAL",0,0,"833",,terminal_output +5454,6170248,"TERMINAL",0,0,"944",,terminal_output +5455,6171273,"TERMINAL",0,0,"9:0055",,terminal_output +5456,6172297,"TERMINAL",0,0,"166",,terminal_output +5457,6173322,"TERMINAL",0,0,"277",,terminal_output +5458,6174344,"TERMINAL",0,0,"399",,terminal_output +5459,6175556,"TERMINAL",0,0,"51010",,terminal_output +5460,6176350,"TERMINAL",0,0,"611",,terminal_output +5461,6177418,"TERMINAL",0,0,"722",,terminal_output +5462,6178557,"TERMINAL",0,0,"833",,terminal_output +5463,6179459,"TERMINAL",0,0,"944",,terminal_output +5464,6180592,"TERMINAL",0,0,"1055",,terminal_output +5465,6181615,"TERMINAL",0,0,"166",,terminal_output +5466,6182639,"TERMINAL",0,0,"277",,terminal_output +5467,6183663,"TERMINAL",0,0,"388",,terminal_output +5468,6184692,"TERMINAL",0,0,"499",,terminal_output +5469,6185813,"TERMINAL",0,0,"52020",,terminal_output +5470,6186837,"TERMINAL",0,0,"611",,terminal_output +5471,6187763,"TERMINAL",0,0,"722",,terminal_output +5472,6188808,"TERMINAL",0,0,"833",,terminal_output +5473,6189848,"TERMINAL",0,0,"944",,terminal_output +5474,6190956,"TERMINAL",0,0,"2055",,terminal_output +5475,6191934,"TERMINAL",0,0,"166",,terminal_output +5476,6193014,"TERMINAL",0,0,"277",,terminal_output +5477,6194211,"TERMINAL",0,0,"388",,terminal_output +5478,6195143,"TERMINAL",0,0,"499",,terminal_output +5479,6196073,"TERMINAL",0,0,"53030",,terminal_output +5480,6197111,"TERMINAL",0,0,"611",,terminal_output +5481,6198148,"TERMINAL",0,0,"722",,terminal_output +5482,6199232,"TERMINAL",0,0,"833",,terminal_output +5483,6200261,"TERMINAL",0,0,"944",,terminal_output +5484,6201278,"TERMINAL",0,0,"3055",,terminal_output +5485,6202301,"TERMINAL",0,0,"177",,terminal_output +5486,6203429,"TERMINAL",0,0,"388",,terminal_output +5487,6204371,"TERMINAL",0,0,"499",,terminal_output +5488,6205403,"TERMINAL",0,0,"54040",,terminal_output +5489,6206499,"TERMINAL",0,0,"611",,terminal_output +5490,6207524,"TERMINAL",0,0,"722",,terminal_output +5491,6208547,"TERMINAL",0,0,"833",,terminal_output +5492,6209673,"TERMINAL",0,0,"944",,terminal_output +5493,6210698,"TERMINAL",0,0,"4055",,terminal_output +5494,6211731,"TERMINAL",0,0,"166",,terminal_output +5495,6212782,"TERMINAL",0,0,"277",,terminal_output +5496,6213729,"TERMINAL",0,0,"388",,terminal_output +5497,6214793,"TERMINAL",0,0,"499",,terminal_output +5498,6215821,"TERMINAL",0,0,"55050",,terminal_output +5499,6216857,"TERMINAL",0,0,"611",,terminal_output 
+5500,6217874,"TERMINAL",0,0,"722",,terminal_output +5501,6218912,"TERMINAL",0,0,"833",,terminal_output +5502,6219952,"TERMINAL",0,0,"944",,terminal_output +5503,6221024,"TERMINAL",0,0,"5055",,terminal_output +5504,6222088,"TERMINAL",0,0,"166",,terminal_output +5505,6223199,"TERMINAL",0,0,"277",,terminal_output +5506,6224213,"TERMINAL",0,0,"388",,terminal_output +5507,6225157,"TERMINAL",0,0,"499",,terminal_output +5508,6226196,"TERMINAL",0,0,"54:004:00",,terminal_output +5509,6227300,"TERMINAL",0,0,"611",,terminal_output +5510,6228271,"TERMINAL",0,0,"733",,terminal_output +5511,6229323,"TERMINAL",0,0,"944",,terminal_output +5512,6230344,"TERMINAL",0,0,"10:0055",,terminal_output +5513,6231392,"TERMINAL",0,0,"166",,terminal_output +5514,6232447,"TERMINAL",0,0,"277",,terminal_output +5515,6233481,"TERMINAL",0,0,"388",,terminal_output +5516,6234518,"TERMINAL",0,0,"499",,terminal_output +5517,6235590,"TERMINAL",0,0,"51010",,terminal_output +5518,6236723,"TERMINAL",0,0,"611",,terminal_output +5519,6237667,"TERMINAL",0,0,"722",,terminal_output +5520,6238806,"TERMINAL",0,0,"833",,terminal_output +5521,6239780,"TERMINAL",0,0,"944",,terminal_output +5522,6240906,"TERMINAL",0,0,"1055",,terminal_output +5523,6241828,"TERMINAL",0,0,"166",,terminal_output +5524,6242848,"TERMINAL",0,0,"277",,terminal_output +5525,6243885,"TERMINAL",0,0,"388",,terminal_output +5526,6244984,"TERMINAL",0,0,"499",,terminal_output +5527,6246028,"TERMINAL",0,0,"52020",,terminal_output +5528,6246971,"TERMINAL",0,0,"611",,terminal_output +5529,6248083,"TERMINAL",0,0,"722",,terminal_output +5530,6249047,"TERMINAL",0,0,"833",,terminal_output +5531,6250089,"TERMINAL",0,0,"944",,terminal_output +5532,6251164,"TERMINAL",0,0,"2055",,terminal_output +5533,6252273,"TERMINAL",0,0,"166",,terminal_output +5534,6253203,"TERMINAL",0,0,"277",,terminal_output +5535,6254329,"TERMINAL",0,0,"388",,terminal_output +5536,6255392,"TERMINAL",0,0,"43030",,terminal_output +5537,6256368,"TERMINAL",0,0,"611",,terminal_output +5538,6257353,"TERMINAL",0,0,"722",,terminal_output +5539,6258425,"TERMINAL",0,0,"833",,terminal_output +5540,6259439,"TERMINAL",0,0,"944",,terminal_output +5541,6260478,"TERMINAL",0,0,"3055",,terminal_output +5542,6261601,"TERMINAL",0,0,"166",,terminal_output +5543,6262614,"TERMINAL",0,0,"277",,terminal_output +5544,6263761,"TERMINAL",0,0,"388",,terminal_output +5545,6264811,"TERMINAL",0,0,"499",,terminal_output +5546,6265714,"TERMINAL",0,0,"54040",,terminal_output +5547,6266829,"TERMINAL",0,0,"611",,terminal_output +5548,6267804,"TERMINAL",0,0,"722",,terminal_output +5549,6268870,"TERMINAL",0,0,"833",,terminal_output +5550,6269900,"TERMINAL",0,0,"944",,terminal_output +5551,6270870,"TERMINAL",0,0,"4055",,terminal_output +5552,6271973,"TERMINAL",0,0,"166",,terminal_output +5553,6272945,"TERMINAL",0,0,"277",,terminal_output +5554,6274033,"TERMINAL",0,0,"388",,terminal_output +5555,6275116,"TERMINAL",0,0,"499",,terminal_output +5556,6276162,"TERMINAL",0,0,"55050",,terminal_output +5557,6277259,"TERMINAL",0,0,"611",,terminal_output +5558,6278189,"TERMINAL",0,0,"722",,terminal_output +5559,6279308,"TERMINAL",0,0,"833",,terminal_output +5560,6280283,"TERMINAL",0,0,"944",,terminal_output +5561,6281346,"TERMINAL",0,0,"5055",,terminal_output +5562,6282377,"TERMINAL",0,0,"177",,terminal_output +5563,6283384,"TERMINAL",0,0,"388",,terminal_output +5564,6284434,"TERMINAL",0,0,"499",,terminal_output +5565,6285472,"TERMINAL",0,0,"55:005:00",,terminal_output +5566,6286462,"TERMINAL",0,0,"611",,terminal_output 
+5567,6287553,"TERMINAL",0,0,"722",,terminal_output +5568,6288531,"TERMINAL",0,0,"833",,terminal_output +5569,6289612,"TERMINAL",0,0,"944",,terminal_output +5570,6290664,"TERMINAL",0,0,"1:0055",,terminal_output +5571,6291800,"TERMINAL",0,0,"166",,terminal_output +5572,6292765,"TERMINAL",0,0,"277",,terminal_output +5573,6293846,"TERMINAL",0,0,"388",,terminal_output +5574,6294880,"TERMINAL",0,0,"499",,terminal_output +5575,6295898,"TERMINAL",0,0,"51010",,terminal_output +5576,6296877,"TERMINAL",0,0,"611",,terminal_output +5577,6297951,"TERMINAL",0,0,"722",,terminal_output +5578,6298949,"TERMINAL",0,0,"833",,terminal_output +5579,6300102,"TERMINAL",0,0,"944",,terminal_output +5580,6301126,"TERMINAL",0,0,"1055",,terminal_output +5581,6302150,"TERMINAL",0,0,"166",,terminal_output +5582,6303134,"TERMINAL",0,0,"277",,terminal_output +5583,6304154,"TERMINAL",0,0,"388",,terminal_output +5584,6305214,"TERMINAL",0,0,"499",,terminal_output +5585,6306205,"TERMINAL",0,0,"52020",,terminal_output +5586,6307364,"TERMINAL",0,0,"611",,terminal_output +5587,6308289,"TERMINAL",0,0,"733",,terminal_output +5588,6309318,"TERMINAL",0,0,"944",,terminal_output +5589,6310445,"TERMINAL",0,0,"2055",,terminal_output +5590,6311432,"TERMINAL",0,0,"166",,terminal_output +5591,6312440,"TERMINAL",0,0,"277",,terminal_output +5592,6313610,"TERMINAL",0,0,"388",,terminal_output +5593,6314547,"TERMINAL",0,0,"499",,terminal_output +5594,6315771,"TERMINAL",0,0,"53030",,terminal_output +5595,6316629,"TERMINAL",0,0,"611",,terminal_output +5596,6317739,"TERMINAL",0,0,"722",,terminal_output +5597,6318796,"TERMINAL",0,0,"833",,terminal_output +5598,6319754,"TERMINAL",0,0,"944",,terminal_output +5599,6320828,"TERMINAL",0,0,"3055",,terminal_output +5600,6321927,"TERMINAL",0,0,"166",,terminal_output +5601,6322843,"TERMINAL",0,0,"277",,terminal_output +5602,6323884,"TERMINAL",0,0,"388",,terminal_output +5603,6324937,"TERMINAL",0,0,"499",,terminal_output +5604,6326022,"TERMINAL",0,0,"54040",,terminal_output +5605,6327005,"TERMINAL",0,0,"611",,terminal_output +5606,6328116,"TERMINAL",0,0,"722",,terminal_output +5607,6329148,"TERMINAL",0,0,"833",,terminal_output +5608,6330131,"TERMINAL",0,0,"944",,terminal_output +5609,6331198,"TERMINAL",0,0,"4055",,terminal_output +5610,6332256,"TERMINAL",0,0,"166",,terminal_output +5611,6333252,"TERMINAL",0,0,"277",,terminal_output +5612,6334401,"TERMINAL",0,0,"399",,terminal_output +5613,6335330,"TERMINAL",0,0,"55050",,terminal_output +5614,6336365,"TERMINAL",0,0,"611",,terminal_output +5615,6337482,"TERMINAL",0,0,"722",,terminal_output +5616,6338510,"TERMINAL",0,0,"833",,terminal_output +5617,6339499,"TERMINAL",0,0,"944",,terminal_output +5618,6340801,"TERMINAL",0,0,"5055",,terminal_output +5619,6341681,"TERMINAL",0,0,"166",,terminal_output +5620,6342702,"TERMINAL",0,0,"277",,terminal_output +5621,6343715,"TERMINAL",0,0,"388",,terminal_output +5622,6344742,"TERMINAL",0,0,"499",,terminal_output +5623,6345778,"TERMINAL",0,0,"56:006:00",,terminal_output +5624,6346899,"TERMINAL",0,0,"611",,terminal_output +5625,6347832,"TERMINAL",0,0,"722",,terminal_output +5626,6348875,"TERMINAL",0,0,"833",,terminal_output +5627,6349910,"TERMINAL",0,0,"944",,terminal_output +5628,6350951,"TERMINAL",0,0,"2:0055",,terminal_output +5629,6351992,"TERMINAL",0,0,"166",,terminal_output +5630,6353137,"TERMINAL",0,0,"277",,terminal_output +5631,6354084,"TERMINAL",0,0,"388",,terminal_output +5632,6355187,"TERMINAL",0,0,"499",,terminal_output +5633,6356173,"TERMINAL",0,0,"51010",,terminal_output 
+5634,6357206,"TERMINAL",0,0,"611",,terminal_output +5635,6358379,"TERMINAL",0,0,"722",,terminal_output +5636,6359382,"TERMINAL",0,0,"844",,terminal_output +5637,6360520,"TERMINAL",0,0,"1055",,terminal_output +5638,6361414,"TERMINAL",0,0,"166",,terminal_output +5639,6362560,"TERMINAL",0,0,"277",,terminal_output +5640,6363519,"TERMINAL",0,0,"388",,terminal_output +5641,6364567,"TERMINAL",0,0,"499",,terminal_output +5642,6365588,"TERMINAL",0,0,"52020",,terminal_output +5643,6365994,"TERMINAL",0,0,"salloc_node",,terminal_command +5644,6366049,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 3468835\r\n",,terminal_output +5645,6366175,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +5646,6366670,"TERMINAL",0,0,"6113468835 dev_accel interact tum_cte0 R\t0:01\t 1 hkn0402",,terminal_output +5647,6367674,"TERMINAL",0,0,"7222",,terminal_output +5648,6368814,"TERMINAL",0,0,"8333",,terminal_output +5649,6369849,"TERMINAL",0,0,"9444",,terminal_output +5650,6370560,"TERMINAL",0,0,"s",,terminal_output +5651,6370744,"TERMINAL",0,0,"ou",,terminal_output +5652,6370799,"TERMINAL",0,0,"20555",,terminal_output +5653,6370849,"TERMINAL",0,0,"r",,terminal_output +5654,6371022,"TERMINAL",0,0,"c",,terminal_output +5655,6371283,"TERMINAL",0,0,"e",,terminal_output +5656,6371471,"TERMINAL",0,0," ",,terminal_output +5657,6371798,"TERMINAL",0,0,".",,terminal_output +5658,6371852,"TERMINAL",0,0,"1666",,terminal_output +5659,6371909,"TERMINAL",0,0,"v",,terminal_output +5660,6372531,"TERMINAL",0,0,"e",,terminal_output +5661,6372632,"TERMINAL",0,0,"n",,terminal_output +5662,6372883,"TERMINAL",0,0,"2777",,terminal_output +5663,6373075,"TERMINAL",0,0,"v",,terminal_output +5664,6373946,"TERMINAL",0,0,"3888",,terminal_output +5665,6374433,"TERMINAL",0,0,"/",,terminal_output +5666,6374686,"TERMINAL",0,0,"b",,terminal_output +5667,6374929,"TERMINAL",0,0,"i",,terminal_output +5668,6374979,"TERMINAL",0,0,"4999",,terminal_output +5669,6375066,"TERMINAL",0,0,"n",,terminal_output +5670,6375710,"TERMINAL",0,0,"/",,terminal_output +5671,6375898,"TERMINAL",0,0,"a",,terminal_output +5672,6376048,"TERMINAL",0,0,"c",,terminal_output +5673,6376048,"TERMINAL",0,0,"5303010",,terminal_output +5674,6376106,"TERMINAL",0,0,"t",,terminal_output +5675,6376273,"TERMINAL",0,0,"i",,terminal_output +5676,6376466,"TERMINAL",0,0,"v",,terminal_output +5677,6376516,"TERMINAL",0,0,"a",,terminal_output +5678,6376610,"TERMINAL",0,0,"t",,terminal_output +5679,6376779,"TERMINAL",0,0,"e",,terminal_output +5680,6377049,"TERMINAL",0,0,"6111",,terminal_output +5681,6377119,"TERMINAL",0,0,"\r\n",,terminal_output +5682,6378208,"TERMINAL",0,0,"7222",,terminal_output +5683,6379156,"TERMINAL",0,0,"8333",,terminal_output +5684,6379861,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=00:20:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=10 \\n --log_checkpoint_interval=1000 \\n --log \\n --name=coinrun-lam-dev-$slurm_job_id \\n --tags lam coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 200 \\n --data_dir $array_records_dir_train\n\n",shellscript,tab +5685,6380251,"TERMINAL",0,0,"9444",,terminal_output +5686,6381220,"TERMINAL",0,0,"30555",,terminal_output +5687,6382259,"TERMINAL",0,0,"1666",,terminal_output +5688,6382491,"TERMINAL",0,0,"watch",,terminal_focus +5689,6383099,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +5690,6386966,"TERMINAL",0,0,"dev",,terminal_command +5691,6389850,"TERMINAL",0,0,"git branch",,terminal_command +5692,6389908,"TERMINAL",0,0,"]633;C[?1h=\r add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n:",,terminal_output +5693,6393180,"TERMINAL",0,0,"salloc: Nodes hkn0402 are ready for job\r\n",,terminal_output +5694,6393236,"TERMINAL",0,0,"\r dont-let-tf-see-gpu\r\n:",,terminal_output +5695,6393352,"TERMINAL",0,0,"source .venv/bin/activate\r\n",,terminal_output +5696,6393698,"TERMINAL",0,0,"\r feat/darkness-filter\r\n:",,terminal_output +5697,6393835,"TERMINAL",0,0,"\r feat/explicit-image-dims\r\n:",,terminal_output +5698,6394044,"TERMINAL",0,0,"\r fix-action-padding-lam-future-information-access\r\n:",,terminal_output +5699,6394044,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h[tum_cte0515@hkn0402 jasmine]$ source .venv/bin/activate\r\n[?2004l\r]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +5700,6394166,"TERMINAL",0,0,"\r fix-sampling\r\n:",,terminal_output +5701,6394281,"TERMINAL",0,0,"\r fix-transformer-forwardpass\r\n:",,terminal_output +5702,6394414,"TERMINAL",0,0,"\r fix/spatiotemporal-pe-once-in-STTransformer\r\n:",,terminal_output +5703,6394558,"TERMINAL",0,0,"\r grad-norm-log-and-clip\r\n:",,terminal_output +5704,6394697,"TERMINAL",0,0,"\r grain-dataloader\r\n:",,terminal_output +5705,6394882,"TERMINAL",0,0,"\r input_pipeline/add-npy2array_record\r\n:",,terminal_output +5706,6394990,"TERMINAL",0,0,"\r logging-variants\r\n:",,terminal_output +5707,6395135,"TERMINAL",0,0,"\r lr-schedules\r\n:",,terminal_output +5708,6395288,"TERMINAL",0,0,"\r* main\r\n:",,terminal_output +5709,6395427,"TERMINAL",0,0,"\r maskgit-different-maskprob-per-sample\r\n:",,terminal_output +5710,6395540,"TERMINAL",0,0,"\r maskgit-sampling-iterative-unmasking-fix\r\n:",,terminal_output +5711,6395684,"TERMINAL",0,0,"\r metrics-logging-for-dynamics-model\r\n:",,terminal_output 
+5712,6396382,"TERMINAL",0,0,"\r monkey-patch\r\n:",,terminal_output +5713,6397117,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +5714,6402017,"TERMINAL",0,0,"git checkout validation-loss",,terminal_command +5715,6402080,"TERMINAL",0,0,"]633;C",,terminal_output +5716,6402334,"TERMINAL",0,0,"Switched to branch 'validation-loss'\r\nYour branch is ahead of 'origin/validation-loss' by 1 commit.\r\n (use ""git push"" to publish your local commits)\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +5717,6403710,"TERMINAL",0,0,"srun",,terminal_focus +5718,6405673,"",0,0,"Switched from branch 'main' to 'validation-loss'",,git_branch_checkout +5719,6411412,"TERMINAL",0,0,"g",,terminal_output +5720,6411471,"TERMINAL",0,0,"i",,terminal_output +5721,6411515,"TERMINAL",0,0,"t",,terminal_output +5722,6411680,"TERMINAL",0,0," ",,terminal_output +5723,6411822,"TERMINAL",0,0,"p",,terminal_output +5724,6411960,"TERMINAL",0,0,"u",,terminal_output +5725,6412058,"TERMINAL",0,0,"s",,terminal_output +5726,6412115,"TERMINAL",0,0,"h",,terminal_output +5727,6413406,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +5728,6415666,"TERMINAL",0,0,"Enumerating objects: 11, done.\r\nCounting objects: 9% (1/11)\rCounting objects: 18% (2/11)\rCounting objects: 27% (3/11)\rCounting objects: 36% (4/11)\r",,terminal_output +5729,6415732,"TERMINAL",0,0,"Counting objects: 45% (5/11)\rCounting objects: 54% (6/11)\rCounting objects: 63% (7/11)\rCounting objects: 72% (8/11)\rCounting objects: 81% (9/11)\rCounting objects: 90% (10/11)\rCounting objects: 100% (11/11)\rCounting objects: 100% (11/11), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 16% (1/6)\rCompressing objects: 33% (2/6)\rCompressing objects: 50% (3/6)\rCompressing objects: 66% (4/6)\rCompressing objects: 83% (5/6)\rCompressing objects: 100% (6/6)\rCompressing objects: 100% (6/6), done.\r\nWriting objects: 16% (1/6)\rWriting objects: 33% (2/6)\rWriting objects: 66% (4/6)\rWriting objects: 83% (5/6)\rWriting objects: 100% (6/6)\rWriting objects: 100% (6/6), 2.54 KiB | 2.54 MiB/s, done.\r\nTotal 6 (delta 5), reused 0 (delta 0), pack-reused 0\r\nremote: Resolving deltas: 0% (0/5)\rremote: Resolving deltas: 20% (1/5)\rremote: Resolving deltas: 40% (2/5)\rremote: Resolving deltas: 60% (3/5)\rremote: Resolving deltas: 80% (4/5)\rremote: Resolving deltas: 100% (5/5)\rremote: Resolving deltas: 100% (5/5), completed with 4 local objects.\r\n",,terminal_output +5730,6416274,"TERMINAL",0,0,"To github.com:p-doom/jasmine.git\r\n 3bb22df..d53848c validation-loss -> validation-loss\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +5731,6587091,"models/lam.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass LatentActionModel(nnx.Module):\n """"""Latent Action ST-ViVit VQ-VAE\n \n Dimension keys:\n B: batch size\n T: sequence length\n N: number of patches per frame\n M: model dimension\n L: latent dimension\n E: B * (T - 1)\n H: height\n W: width\n C: number of channels (n_dim)\n P: patch token dimension (patch_size^2 * C)\n\n Tm1: T - 1\n Np1: N + 1\n """"""\n\n def __init__(\n self,\n in_dim: int,\n model_dim: int,\n ffn_dim: int,\n latent_dim: int,\n num_latents: int,\n patch_size: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n codebook_dropout: float,\n param_dtype: 
jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.in_dim = in_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.patch_size = patch_size\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.codebook_dropout = codebook_dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.patch_token_dim = self.in_dim * self.patch_size**2\n self.encoder = STTransformer(\n self.patch_token_dim,\n self.model_dim,\n self.ffn_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.action_in = nnx.Param(\n nnx.initializers.lecun_uniform()(\n rngs.params(), (1, 1, 1, self.patch_token_dim)\n )\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n rngs=rngs,\n )\n self.patch_up = nnx.Linear(\n self.patch_token_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.action_up = nnx.Linear(\n self.latent_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.decoder = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.patch_token_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n\n def __call__(\n self, batch: Dict[str, jax.Array], training: bool = True\n ) -> Dict[str, jax.Array]:\n # --- Encode + VQ ---\n H, W = batch[""videos""].shape[2:4]\n videos_BTHWC = batch[""videos""]\n outputs = self.vq_encode(videos_BTHWC, training)\n patch_BTNP = outputs[""patches""]\n z_q_BTm11L = outputs[""z_q""]\n action_BTm11M = self.action_up(z_q_BTm11L)\n patch_BTm1NM = self.patch_up(patch_BTNP[:, :-1])\n action_BTm1NM = jnp.broadcast_to(action_BTm11M, patch_BTm1NM.shape)\n video_action_patches_BTm1NM = action_BTm1NM + patch_BTm1NM\n del outputs[""patches""], patch_BTNP, patch_BTm1NM\n\n # --- Decode ---\n video_recon_BTm1P = self.decoder(video_action_patches_BTm1NM)\n video_recon_BTm1P = video_recon_BTm1P.astype(jnp.float32)\n video_recon_BTm1P = nnx.sigmoid(video_recon_BTm1P)\n video_recon_BTm1P = video_recon_BTm1P.astype(self.dtype)\n video_recon_BTHWC = unpatchify(video_recon_BTm1P, self.patch_size, H, W)\n outputs[""recon""] = video_recon_BTHWC\n return outputs\n\n def vq_encode(\n self, videos_BTHWC: jax.Array, training: bool = True\n ) -> Dict[str, jax.Array]:\n # --- Preprocess videos ---\n B, T = videos_BTHWC.shape[:2]\n patch_BTNP = patchify(videos_BTHWC, self.patch_size)\n action_pad_BT1P = jnp.broadcast_to(\n self.action_in.value, (B, T, 1, self.patch_token_dim)\n )\n padded_patch_BTNp1P = jnp.concatenate((action_pad_BT1P, patch_BTNP), axis=2)\n\n # --- Encode ---\n z_BTNp1L = self.encoder(padded_patch_BTNp1P)\n # Get latent action for all future frames\n z_BTm1L = z_BTNp1L[:, 1:, 0]\n\n # --- Vector quantize ---\n z_EL = z_BTm1L.reshape(B * (T - 1), self.latent_dim)\n z_q_EL, z_EL, emb_EL, indices_E = self.vq(z_EL, training)\n z_q_BTm11L = z_q_EL.reshape(B, T - 1, 1, self.latent_dim)\n return dict(patches=patch_BTNP, z_q=z_q_BTm11L, z=z_EL, emb=emb_EL, indices=indices_E)\n",python,tab +5732,6589274,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",0,0,"from dataclasses import dataclass, 
field\nimport os\nfrom typing import cast\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.lam import LatentActionModel\nfrom utils.dataloader import create_dataloader_iterator\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n vq_beta: float = 0.25\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n vq_reset_thresh: int = 50\n # LAM\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 6\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_lam""\n tags: list[str] = field(default_factory=lambda: [""lam""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_checkpoint_keep_period: int = 20000\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n wandb_id: str = """"\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\n\ndef lam_loss_fn(\n model: LatentActionModel, inputs: dict, training: bool = True\n) -> tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n # --- Compute loss ---\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training=training)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n gt_future_frames = gt[:, 1:]\n mse = jnp.square(gt_future_frames - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = gt_future_frames.clip(0, 1).reshape(-1, *gt_future_frames.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt, recon)).mean()\n count_fn = jax.vmap(lambda i: (outputs[""indices""] == i).sum())\n index_counts = count_fn(jnp.arange(args.num_latents))\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),\n )\n return loss, (outputs[""recon""], index_counts, metrics)\n\n\n@nnx.jit\ndef train_step(\n lam: LatentActionModel,\n optimizer: nnx.Optimizer,\n inputs: dict,\n action_last_active: jax.Array,\n rng: jax.Array,\n) -> tuple[jax.Array, jax.Array, jax.Array, dict]:\n def loss_fn(\n 
model: LatentActionModel,\n ) -> tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n model.train()\n return lam_loss_fn(model, inputs, training=True)\n\n # --- Update model ---\n (loss, (recon, idx_counts, metrics)), grads = nnx.value_and_grad(\n loss_fn, has_aux=True\n )(lam)\n optimizer.update(grads)\n\n # --- Reset inactive latent actions ---\n codebook = lam.vq.codebook\n num_codes = len(codebook)\n active_codes = idx_counts != 0.0\n action_last_active = jnp.where(active_codes, 0, action_last_active + 1)\n p_code = active_codes / active_codes.sum()\n reset_idxs = jax.random.choice(rng, num_codes, shape=(num_codes,), p=p_code)\n do_reset = action_last_active >= args.vq_reset_thresh\n new_codebook = jnp.where(\n jnp.expand_dims(do_reset, -1), codebook[reset_idxs], codebook.value\n )\n lam.vq.codebook.value = new_codebook\n action_last_active = jnp.where(do_reset, 0, action_last_active)\n return loss, recon, action_last_active, metrics\n\n@nnx.jit\ndef val_step(lam: LatentActionModel, inputs: dict) -> tuple[jax.Array, jax.Array, dict]:\n lam.eval()\n (loss, (recon, _, metrics)) = lam_loss_fn(lam, inputs, training=False)\n return loss, recon, metrics\n\ndef calculate_validation_metrics(val_dataloader):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n print(f""Calculating validation metrics..."")\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(lam, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}"")\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n\n # Count parameters\n _, params, _ = nnx.split(lam, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n 
print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n optimizer = nnx.Optimizer(lam, tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n image_shape = (args.image_height, args.image_width, args.image_channels)\n train_iterator = create_dataloader_iterator(args.data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n if args.val_data_dir:\n val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if args.val_data_dir:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: 
ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if args.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in train_iterator\n )\n\n if args.val_data_dir:\n dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )\n print(f""Starting training from step {step}..."")\n action_last_active = jnp.zeros(args.num_latents, dtype=jnp.int32)\n while step < args.num_steps:\n for videos in dataloader_train:\n # --- Train step ---\n rng, _rng = jax.random.split(rng)\n\n inputs = dict(videos=videos, rng=_rng)\n rng, _rng = jax.random.split(rng)\n loss, recon, action_last_active, metrics = train_step(\n lam, optimizer, inputs, action_last_active, _rng\n )\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n print(recon.shape)\n step += 1\n\n # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n if args.val_data_dir:\n ckpt_manager_args = ocp.args.Composite(\n 
model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )\n else: \n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +5733,6591773,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",13996,0,"",python,selection_mouse +5734,6601044,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14377,0,"",python,selection_mouse +5735,6602891,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14377,0,",",python,content +5736,6602893,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14378,0,"",python,selection_keyboard +5737,6602982,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14378,0," ",python,content +5738,6602983,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14379,0,"",python,selection_keyboard +5739,6603445,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14379,0,"1",python,content +5740,6603446,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14380,0,"",python,selection_keyboard +5741,6603572,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14380,0,":",python,content +5742,6603573,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14381,0,"",python,selection_keyboard +5743,6607098,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14828,0,"",python,selection_mouse +5744,6607886,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14828,0,",",python,content +5745,6607887,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14829,0,"",python,selection_keyboard +5746,6607927,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14829,0," ",python,content +5747,6607927,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14830,0,"",python,selection_keyboard +5748,6608175,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14830,0,"1",python,content +5749,6608176,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14831,0,"",python,selection_keyboard +5750,6609524,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14831,0,":",python,content +5751,6609525,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",14832,0,"",python,selection_keyboard +5752,6620360,"train_lam.py",0,0,"from dataclasses import dataclass, field\nimport os\nfrom typing import cast\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport 
tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.lam import LatentActionModel\nfrom utils.dataloader import create_dataloader_iterator\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n vq_beta: float = 0.25\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n vq_reset_thresh: int = 50\n # LAM\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 6\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_lam""\n tags: list[str] = field(default_factory=lambda: [""lam""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_checkpoint_keep_period: int = 20000\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n wandb_id: str = """"\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\n\ndef lam_loss_fn(\n model: LatentActionModel, inputs: dict, training: bool = True\n) -> tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n # --- Compute loss ---\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training=training)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n gt_future_frames = gt[:, 1:]\n mse = jnp.square(gt_future_frames - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = gt_future_frames.clip(0, 1).reshape(-1, *gt_future_frames.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt, recon)).mean()\n count_fn = jax.vmap(lambda i: (outputs[""indices""] == i).sum())\n index_counts = count_fn(jnp.arange(args.num_latents))\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),\n )\n return loss, (outputs[""recon""], index_counts, metrics)\n\n\n@nnx.jit\ndef train_step(\n lam: LatentActionModel,\n optimizer: nnx.Optimizer,\n inputs: dict,\n action_last_active: jax.Array,\n rng: jax.Array,\n) -> tuple[jax.Array, jax.Array, jax.Array, dict]:\n def loss_fn(\n model: LatentActionModel,\n ) -> tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n model.train()\n return lam_loss_fn(model, inputs, training=True)\n\n # --- Update model ---\n (loss, (recon, idx_counts, metrics)), grads = nnx.value_and_grad(\n loss_fn, has_aux=True\n )(lam)\n optimizer.update(grads)\n\n # 
--- Reset inactive latent actions ---\n codebook = lam.vq.codebook\n num_codes = len(codebook)\n active_codes = idx_counts != 0.0\n action_last_active = jnp.where(active_codes, 0, action_last_active + 1)\n p_code = active_codes / active_codes.sum()\n reset_idxs = jax.random.choice(rng, num_codes, shape=(num_codes,), p=p_code)\n do_reset = action_last_active >= args.vq_reset_thresh\n new_codebook = jnp.where(\n jnp.expand_dims(do_reset, -1), codebook[reset_idxs], codebook.value\n )\n lam.vq.codebook.value = new_codebook\n action_last_active = jnp.where(do_reset, 0, action_last_active)\n return loss, recon, action_last_active, metrics\n\n@nnx.jit\ndef val_step(lam: LatentActionModel, inputs: dict) -> tuple[jax.Array, jax.Array, dict]:\n lam.eval()\n (loss, (recon, _, metrics)) = lam_loss_fn(lam, inputs, training=False)\n return loss, recon, metrics\n\ndef calculate_validation_metrics(val_dataloader):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n print(f""Calculating validation metrics..."")\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(lam, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}"")\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n\n # Count parameters\n _, params, _ = nnx.split(lam, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n 
mu_dtype=args.dtype,\n )\n optimizer = nnx.Optimizer(lam, tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n image_shape = (args.image_height, args.image_width, args.image_channels)\n train_iterator = create_dataloader_iterator(args.data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n if args.val_data_dir:\n val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if args.val_data_dir:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n 
train_iterator = restored[""train_dataloader_state""]\n if args.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in train_iterator\n )\n\n if args.val_data_dir:\n dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )\n print(f""Starting training from step {step}..."")\n action_last_active = jnp.zeros(args.num_latents, dtype=jnp.int32)\n while step < args.num_steps:\n for videos in dataloader_train:\n # --- Train step ---\n rng, _rng = jax.random.split(rng)\n\n inputs = dict(videos=videos, rng=_rng)\n rng, _rng = jax.random.split(rng)\n loss, recon, action_last_active, metrics = train_step(\n lam, optimizer, inputs, action_last_active, _rng\n )\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n print(recon.shape)\n step += 1\n\n # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n if args.val_data_dir:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )\n else: \n ckpt_manager_args = 
ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +5753,6624531,"train_tokenizer.py",0,0,"from dataclasses import dataclass, field\nimport os\nfrom typing import cast\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import create_dataloader_iterator\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n init_lr: float = 0.0\n max_lr: float = 3e-4\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 4\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n wandb_id: str = """"\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\n\ndef tokenizer_loss_fn(\n model: TokenizerVQVAE, inputs: dict\n) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n # --- Compute loss ---\n # FIXME (f.srambical): Can we even do native int8 training without casting the video at all?\n # FIXME (f.srambical): If the tokenizer is the reason for the dynamics model being memory-bound,\n # should we at least train the tokenizer natively in int8?\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training=True)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n mse = jnp.square(gt - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = 
jnp.asarray(pix.psnr(gt, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt, recon)).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n\n@nnx.jit\ndef train_step(\n tokenizer: TokenizerVQVAE, optimizer: nnx.Optimizer, inputs: dict\n) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n tokenizer\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return loss, recon, metrics\n\n@nnx.jit\ndef val_step(tokenizer: TokenizerVQVAE, inputs: dict) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n\ndef calculate_validation_metrics(val_dataloader):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(tokenizer, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}"")\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n\n _, params, _ = nnx.split(tokenizer, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n optimizer = nnx.Optimizer(tokenizer, tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n 
""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n image_shape = (args.image_height, args.image_width, args.image_channels)\n train_iterator = create_dataloader_iterator(args.data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n\n \n # --- TRAIN LOOP ---\n dataloader_train = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in train_iterator\n )\n dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n for videos in dataloader_train:\n # --- Train step ---\n inputs = dict(videos=videos)\n loss, recon, metrics = train_step(tokenizer, optimizer, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h 
w c -> h (t w) c""\n )\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +5754,6628766,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_lam.py",0,0,"",python,tab +5755,6629576,"train_tokenizer.py",0,0,"",python,tab +5756,6630284,"train_lam.py",0,0,"",python,tab +5757,6630677,"train_lam.py",3809,0,"",python,selection_mouse +5758,6630698,"train_lam.py",3808,0,"",python,selection_command +5759,6633893,"train_lam.py",5072,0,"",python,selection_command +5760,6634199,"train_lam.py",5425,0,"",python,selection_command +5761,6634831,"train_lam.py",5958,0,"",python,selection_command +5762,6635127,"train_lam.py",7651,0,"",python,selection_command +5763,6636080,"train_lam.py",7682,0,"",python,selection_command +5764,6636343,"train_lam.py",12284,0,"",python,selection_command +5765,6636907,"train_lam.py",12688,0,"",python,selection_command +5766,6637536,"train_lam.py",13260,0,"",python,selection_command +5767,6638526,"train_lam.py",13308,0,"",python,selection_command +5768,6639552,"train_lam.py",13296,31,"",python,content +5769,6639561,"train_lam.py",13308,0,"",python,selection_command +5770,6645286,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=00:20:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=10 \\n --log_checkpoint_interval=1000 \\n --log \\n --name=coinrun-lam-dev-$slurm_job_id \\n --tags lam coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 200 \\n --data_dir $array_records_dir_train\n\n",shellscript,tab +5771,6648132,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1510,0,"",shellscript,selection_mouse +5772,6650002,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=00:20:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=10 \\n --log_checkpoint_interval=1000 \\n --log \\n --name=coinrun-tokenizer-dev-$slurm_job_id \\n --tags tokenizer coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 200 \\n --val_data_dir $array_records_dir_val \\n --val_interval 10 \\n --val_steps 50 \\n --data_dir $array_records_dir_train\n",shellscript,tab +5773,6650866,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1687,0,"",shellscript,selection_mouse +5774,6650876,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1686,0,"",shellscript,selection_command +5775,6651645,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1667,20," --val_steps 50 \",shellscript,selection_command +5776,6651833,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1643,44," --val_interval 10 \\n --val_steps 50 \",shellscript,selection_command +5777,6651970,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1599,88," 
--val_data_dir $array_records_dir_val \\n --val_interval 10 \\n --val_steps 50 \",shellscript,selection_command +5778,6652624,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1599,0,"",shellscript,selection_command +5779,6653789,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +5780,6654743,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1553,0,"",shellscript,selection_mouse +5781,6655672,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1560,0,"\n --val_data_dir $array_records_dir_val \\n --val_interval 10 \\n --val_steps 50 \",shellscript,content +5782,6655700,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1565,0,"",shellscript,selection_command +5783,6658460,"TERMINAL",0,0,"s",,terminal_output +5784,6658504,"TERMINAL",0,0,"h",,terminal_output +5785,6658637,"TERMINAL",0,0," ",,terminal_output +5786,6660866,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +5787,6666312,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",,terminal_output +5788,6666784,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --job-name=train_lam_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_lam.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=1000 \\r\n --log \\r\n --name=coinrun-lam-dev-$slurm_job_id \\r\n --tags lam coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 200 \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 10 \\r\n --val_steps 50 \\r\n --data_dir $array_records_dir_train\r\n\r\n",,terminal_output 
+5789,6666944,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1362839\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1757077935\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757081535\r\nSLURM_PMI2_SRUN_PORT=37571\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3468835\r\nSLURM_PTY_PORT=43005\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=33\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=196\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=34789\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3468835\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=34789\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +5790,6667096,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +5791,6687695,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +5792,6688564,"TERMINAL",0,0,"wandb: creating run\r\n",,terminal_output +5793,6688668,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_151737-0aixh8ju\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-lam-dev-3468835\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/0aixh8ju\r\n",,terminal_output +5794,6693661,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['action_in', 'action_up', 'decoder', 'encoder', 'patch_up', 'vq']\r\nParameter counts:\r\n{'action_in': 768, 'action_up': 16896, 'decoder': 17474816, 'encoder': 17228832, 'patch_up': 393728, 'vq': 192, 'total': 35115232}\r\nStarting training from step 0...\r\n",,terminal_output +5795,6702333,"TERMINAL",0,0,"2025-09-05 15:17:51.963231: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:17:51.964404: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:17:51.964425: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:17:51.964903: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5796,6749032,"TERMINAL",0,0,"Step 0, loss: 0.29082542657852173\r\nStep 1, loss: 0.2048691064119339\r\nStep 2, loss: 0.19221149384975433\r\nStep 3, loss: 0.20193631947040558\r\nStep 4, loss: 0.1634262055158615\r\nStep 5, loss: 0.16224892437458038\r\nStep 6, loss: 0.1465519517660141\r\nStep 7, loss: 0.13969092071056366\r\nStep 8, loss: 0.13379880785942078\r\nStep 9, loss: 0.13682898879051208\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\n",,terminal_output +5797,6790654,"train_tokenizer.py",0,0,"",python,tab +5798,6791582,"train_tokenizer.py",14024,0,"",python,selection_mouse +5799,6791593,"train_tokenizer.py",14023,0,"",python,selection_command +5800,6793251,"models/lam.py",0,0,"",python,tab +5801,6794219,"models/lam.py",0,0,"",python,tab +5802,6797228,"train_tokenizer.py",0,0,"",python,tab +5803,6797228,"train_tokenizer.py",1643,0,"",python,selection_command +5804,6797998,"train_tokenizer.py",1937,0,"",python,selection_command +5805,6800974,"train_tokenizer.py",2010,0,"",python,selection_mouse +5806,6801538,"train_tokenizer.py",2015,0,"",python,selection_mouse +5807,6801579,"train_tokenizer.py",2014,0,"",python,selection_command +5808,6803212,"train_tokenizer.py",2122,1,"V",python,selection_command +5809,6803322,"train_tokenizer.py",2124,2,"VA",python,selection_command +5810,6803430,"train_tokenizer.py",3011,3,"val",python,selection_command +5811,6806079,"train_tokenizer.py",1973,3,"val",python,selection_command +5812,6806191,"train_tokenizer.py",1951,3,"val",python,selection_command +5813,6806332,"train_tokenizer.py",1942,3,"val",python,selection_command +5814,6806857,"train_tokenizer.py",1915,3,"val",python,selection_command +5815,6807362,"train_tokenizer.py",1818,3,"val",python,selection_command +5816,6807861,"train_tokenizer.py",1756,3,"val",python,selection_command +5817,6808284,"train_tokenizer.py",1724,3,"val",python,selection_command +5818,6808698,"train_tokenizer.py",15731,3,"val",python,selection_command +5819,6810379,"train_tokenizer.py",1724,3,"val",python,selection_command +5820,6816370,"models/lam.py",0,0,"",python,tab +5821,6816371,"models/lam.py",1125,0,"",python,selection_mouse +5822,6816413,"models/lam.py",1124,0,"",python,selection_command +5823,6818040,"models/lam.py",2142,1,"v",python,selection_command +5824,6818211,"models/lam.py",4495,2,"va",python,selection_command +5825,6818266,"models/lam.py",4495,3,"val",python,selection_command +5826,6821478,"train_tokenizer.py",0,0,"",python,tab +5827,6823011,"train_lam.py",0,0,"",python,tab +5828,6824391,"train_lam.py",0,0,"",python,tab +5829,6824981,"train_lam.py",12558,0,"",python,selection_mouse +5830,6826519,"train_lam.py",12598,1,"c",python,selection_command 
+5831,6826636,"train_lam.py",12605,2,"ca",python,selection_command +5832,6826751,"train_lam.py",12605,3,"cal",python,selection_command +5833,6827398,"train_lam.py",12605,2,"ca",python,selection_command +5834,6827553,"train_lam.py",12598,1,"c",python,selection_command +5835,6827774,"train_lam.py",12614,1,"v",python,selection_command +5836,6827935,"train_lam.py",12661,2,"va",python,selection_command +5837,6827936,"train_lam.py",12661,3,"val",python,selection_command +5838,6830159,"train_tokenizer.py",0,0,"",python,tab +5839,6830159,"train_tokenizer.py",15731,3,"val",python,selection_command +5840,6830915,"train_tokenizer.py",1724,3,"val",python,selection_command +5841,6832480,"train_lam.py",0,0,"",python,tab +5842,6832481,"train_lam.py",12555,3,"val",python,selection_command +5843,6832964,"train_lam.py",12522,3,"val",python,selection_command +5844,6833083,"train_lam.py",12200,3,"val",python,selection_command +5845,6833201,"train_lam.py",12175,3,"val",python,selection_command +5846,6833346,"train_lam.py",12149,3,"val",python,selection_command +5847,6833514,"train_lam.py",11493,3,"val",python,selection_command +5848,6833613,"train_lam.py",11437,3,"val",python,selection_command +5849,6833750,"train_lam.py",11153,3,"val",python,selection_command +5850,6833916,"train_lam.py",11043,3,"val",python,selection_command +5851,6834096,"train_lam.py",10880,3,"val",python,selection_command +5852,6834154,"train_lam.py",10833,3,"val",python,selection_command +5853,6834311,"train_lam.py",10811,3,"val",python,selection_command +5854,6834466,"train_lam.py",10229,3,"val",python,selection_command +5855,6834574,"train_lam.py",10194,3,"val",python,selection_command +5856,6834693,"train_lam.py",9953,3,"val",python,selection_command +5857,6834841,"train_lam.py",9746,3,"val",python,selection_command +5858,6834979,"train_lam.py",9689,3,"val",python,selection_command +5859,6835157,"train_lam.py",6060,3,"Val",python,selection_command +5860,6835264,"train_lam.py",5918,3,"Val",python,selection_command +5861,6835407,"train_lam.py",5754,3,"val",python,selection_command +5862,6835579,"train_lam.py",5744,3,"val",python,selection_command +5863,6835727,"train_lam.py",5625,3,"val",python,selection_command +5864,6835984,"train_lam.py",5599,3,"val",python,selection_command +5865,6836142,"train_lam.py",5561,3,"val",python,selection_command +5866,6836426,"train_lam.py",5543,3,"val",python,selection_command +5867,6836576,"train_lam.py",5487,3,"val",python,selection_command +5868,6836696,"train_lam.py",5447,3,"val",python,selection_command +5869,6836859,"train_lam.py",5406,3,"val",python,selection_command +5870,6837015,"train_lam.py",5357,3,"val",python,selection_command +5871,6837113,"train_lam.py",5218,3,"val",python,selection_command +5872,6837280,"train_lam.py",5134,3,"val",python,selection_command +5873,6837390,"train_lam.py",5092,3,"val",python,selection_command +5874,6837533,"train_lam.py",4989,3,"val",python,selection_command +5875,6837679,"train_lam.py",4970,3,"val",python,selection_command +5876,6837815,"train_lam.py",4842,3,"val",python,selection_command +5877,6837970,"train_lam.py",4748,3,"val",python,selection_command +5878,6838257,"train_lam.py",4593,3,"val",python,selection_command +5879,6838374,"train_lam.py",4561,3,"val",python,selection_command +5880,6838532,"train_lam.py",3971,3,"val",python,selection_command +5881,6838697,"train_lam.py",2805,3,"val",python,selection_command +5882,6838833,"train_lam.py",1946,3,"val",python,selection_command +5883,6838972,"train_lam.py",1924,3,"val",python,selection_command 
+5884,6839086,"train_lam.py",1915,3,"val",python,selection_command +5885,6839628,"train_lam.py",1888,3,"val",python,selection_command +5886,6839882,"train_lam.py",1823,3,"val",python,selection_command +5887,6842682,"train_lam.py",1761,3,"val",python,selection_command +5888,6842827,"train_lam.py",1729,3,"val",python,selection_command +5889,6844249,"train_lam.py",1761,3,"val",python,selection_command +5890,6844394,"train_lam.py",1823,3,"val",python,selection_command +5891,6844527,"train_lam.py",1888,3,"val",python,selection_command +5892,6844916,"train_lam.py",1915,3,"val",python,selection_command +5893,6845058,"train_lam.py",1924,3,"val",python,selection_command +5894,6845235,"train_lam.py",1946,3,"val",python,selection_command +5895,6845642,"train_lam.py",2805,3,"val",python,selection_command +5896,6846742,"train_lam.py",3971,3,"val",python,selection_command +5897,6848061,"train_lam.py",4561,3,"val",python,selection_command +5898,6848132,"TERMINAL",0,0,"Step 10, validation loss: 0.11593025177717209\r\nStep 10, loss: 0.10182199627161026\r\nStep 11, loss: 0.10439193993806839\r\nStep 12, loss: 0.09406972676515579\r\nStep 13, loss: 0.09772490710020065\r\nStep 14, loss: 0.09554266184568405\r\nStep 15, loss: 0.09632503986358643\r\nStep 16, loss: 0.0835835412144661\r\nStep 17, loss: 0.09580384939908981\r\nStep 18, loss: 0.07298149913549423\r\nStep 19, loss: 0.07869017869234085\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\nStep 20, validation loss: 0.0711536630988121\r\nStep 20, loss: 0.07823637127876282\r\nStep 21, loss: 0.06644692271947861\r\nStep 22, loss: 0.061057426035404205\r\nStep 23, loss: 0.06661047786474228\r\nStep 24, loss: 0.05326160788536072\r\nStep 25, loss: 0.05032860487699509\r\nStep 26, loss: 0.04621792584657669\r\nStep 27, loss: 0.058809492737054825\r\nStep 28, loss: 0.05668637156486511\r\nStep 29, loss: 0.049171317368745804\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\nStep 30, validation loss: 0.049885015934705734\r\nStep 30, loss: 0.054714083671569824\r\nStep 31, loss: 0.03886327147483826\r\nStep 32, loss: 0.04377226158976555\r\nStep 33, loss: 0.04706248268485069\r\nStep 34, loss: 0.03870921954512596\r\nStep 35, loss: 0.04759908840060234\r\nStep 36, loss: 0.03624310344457626\r\nStep 37, loss: 0.03723645582795143\r\nStep 38, loss: 0.03415235877037048\r\nStep 39, loss: 0.03045961633324623\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\nStep 40, validation loss: 0.03741709515452385\r\nStep 40, loss: 0.04029426723718643\r\nStep 41, loss: 0.037842318415641785\r\nStep 42, loss: 0.036140669137239456\r\nStep 43, loss: 0.042215485125780106\r\nStep 44, loss: 0.04040057957172394\r\nStep 45, loss: 0.03682219609618187\r\nStep 46, loss: 0.02909393236041069\r\nStep 47, loss: 0.03433133289217949\r\nStep 48, loss: 0.02926039509475231\r\nStep 49, loss: 0.029214780777692795\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\nStep 50, validation loss: 0.030060337856411934\r\nStep 50, loss: 0.03843312710523605\r\nStep 51, loss: 0.025400172919034958\r\nStep 52, loss: 0.0252959243953228\r\nStep 53, loss: 0.03099111281335354\r\nStep 54, loss: 0.030360329896211624\r\nStep 55, loss: 0.030434750020503998\r\nStep 56, loss: 0.03504445031285286\r\nStep 57, loss: 0.026979556307196617\r\nStep 58, loss: 0.023744165897369385\r\nStep 59, loss: 0.025236567482352257\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\nStep 60, validation loss: 0.026440540328621864\r\nStep 60, 
loss: 0.025721365585923195\r\nStep 61, loss: 0.023945394903421402\r\nStep 62, loss: 0.019217371940612793\r\nStep 63, loss: 0.030162263661623\r\nStep 64, loss: 0.019047735258936882\r\nStep 65, loss: 0.028320234268903732\r\nStep 66, loss: 0.025090985000133514\r\nStep 67, loss: 0.026432154700160027\r\nStep 68, loss: 0.023692764341831207\r\nStep 69, loss: 0.026204457506537437\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\nStep 70, validation loss: 0.023999271914362907\r\nStep 70, loss: 0.022219838574528694\r\nStep 71, loss: 0.02456802874803543\r\nStep 72, loss: 0.020066484808921814\r\nStep 73, loss: 0.026353834196925163\r\nStep 74, loss: 0.023041803389787674\r\nStep 75, loss: 0.025038868188858032\r\nStep 76, loss: 0.02404751628637314\r\nStep 77, loss: 0.017286183312535286\r\nStep 78, loss: 0.021259160712361336\r\nStep 79, loss: 0.019932588562369347\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\nStep 80, validation loss: 0.02309889905154705\r\nStep 80, loss: 0.02013363130390644\r\nStep 81, loss: 0.023084068670868874\r\nStep 82, loss: 0.024414854124188423\r\nStep 83, loss: 0.026525352150201797\r\nStep 84, loss: 0.01507299579679966\r\nStep 85, loss: 0.01850030943751335\r\nStep 86, loss: 0.018153125420212746\r\nStep 87, loss: 0.021576229482889175\r\nStep 88, loss: 0.0203157477080822\r\nStep 89, loss: 0.022011112421751022\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\nStep 90, validation loss: 0.02036336436867714\r\nStep 90, loss: 0.022103428840637207\r\nStep 91, loss: 0.017125708982348442\r\nStep 92, loss: 0.018620772287249565\r\nStep 93, loss: 0.023652445524930954\r\nStep 94, loss: 0.016419129446148872\r\nStep 95, loss: 0.019779052585363388\r\nStep 96, loss: 0.02462187595665455\r\nStep 97, loss: 0.013301949948072433\r\nStep 98, loss: 0.020797014236450195\r\nStep 99, loss: 0.016959479078650475\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\nStep 100, validation loss: 0.018018418923020363\r\nStep 100, loss: 0.014280087314546108\r\nStep 101, loss: 0.015398509800434113\r\nStep 102, loss: 0.021299662068486214\r\nStep 103, loss: 0.01845368556678295\r\nStep 104, loss: 0.014964827336370945\r\nStep 105, loss: 0.021878650411963463\r\nStep 106, loss: 0.01783316768705845\r\nStep 107, loss: 0.01999477483332157\r\nStep 108, loss: 0.020448556169867516\r\nStep 109, loss: 0.017638100311160088\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\nStep 110, validation loss: 0.02219230681657791\r\nStep 110, loss: 0.021010704338550568\r\nStep 111, loss: 0.021663282066583633\r\nStep 112, loss: 0.02006286010146141\r\nStep 113, loss: 0.018050743266940117\r\nStep 114, loss: 0.017534755170345306\r\nStep 115, loss: 0.01856238953769207\r\nStep 116, loss: 0.02143116109073162\r\nStep 117, loss: 0.014868938364088535\r\nStep 118, loss: 0.01818583719432354\r\nStep 119, loss: 0.02024712599813938\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\nStep 120, validation loss: 0.017954012379050255\r\nStep 120, loss: 0.015612361021339893\r\nStep 121, loss: 0.020772771909832954\r\nStep 122, loss: 0.02284320816397667\r\nStep 123, loss: 0.017088936641812325\r\nStep 124, loss: 0.01777857542037964\r\nStep 125, loss: 0.015260602347552776\r\nStep 126, loss: 0.012052659876644611\r\nStep 127, loss: 0.017362497746944427\r\nStep 128, loss: 0.02183559536933899\r\nStep 129, loss: 0.015694834291934967\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\nStep 
130, validation loss: 0.01715499721467495\r\nStep 130, loss: 0.01950552873313427\r\nStep 131, loss: 0.017693782225251198\r\nStep 132, loss: 0.016871673986315727\r\nStep 133, loss: 0.017513863742351532\r\nStep 134, loss: 0.01661726087331772\r\nStep 135, loss: 0.01714961975812912\r\nStep 136, loss: 0.015073807910084724\r\nStep 137, loss: 0.018432624638080597\r\nStep 138, loss: 0.016114436089992523\r\nStep 139, loss: 0.01654084585607052\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\nStep 140, validation loss: 0.016955509781837463\r\nStep 140, loss: 0.01476135104894638\r\nStep 141, loss: 0.014641030691564083\r\nStep 142, loss: 0.016761157661676407\r\nStep 143, loss: 0.016827577725052834\r\nStep 144, loss: 0.014376374892890453\r\nStep 145, loss: 0.01538116205483675\r\nStep 146, loss: 0.014515054412186146\r\nStep 147, loss: 0.014944409020245075\r\nStep 148, loss: 0.013572963885962963\r\nStep 149, loss: 0.015522069297730923\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\nStep 150, validation loss: 0.01587880216538906\r\nStep 150, loss: 0.014929411001503468\r\nStep 151, loss: 0.015367308631539345\r\nStep 152, loss: 0.012317831628024578\r\nStep 153, loss: 0.015392826870083809\r\nStep 154, loss: 0.015150872990489006\r\nStep 155, loss: 0.015777956694364548\r\nStep 156, loss: 0.020430907607078552\r\nStep 157, loss: 0.015841173008084297\r\nStep 158, loss: 0.012503002770245075\r\nStep 159, loss: 0.015705564990639687\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\nStep 160, validation loss: 0.014538419432938099\r\nStep 160, loss: 0.014772671274840832\r\nStep 161, loss: 0.014240352436900139\r\nStep 162, loss: 0.01603005640208721\r\nStep 163, loss: 0.015789544209837914\r\nStep 164, loss: 0.01176241971552372\r\nStep 165, loss: 0.016945183277130127\r\nStep 166, loss: 0.01582084223628044\r\nStep 167, loss: 0.014537770301103592\r\nStep 168, loss: 0.016208745539188385\r\nStep 169, loss: 0.01326863095164299\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\nStep 170, validation loss: 0.014496937394142151\r\nStep 170, loss: 0.013539574109017849\r\nStep 171, loss: 0.014650494791567326\r\nStep 172, loss: 0.01496818009763956\r\nStep 173, loss: 0.017783446237444878\r\nStep 174, loss: 0.013445344753563404\r\nStep 175, loss: 0.015878424048423767\r\nStep 176, loss: 0.014815215952694416\r\nStep 177, loss: 0.015086258761584759\r\nStep 178, loss: 0.012993844226002693\r\nStep 179, loss: 0.013619251549243927\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\nStep 180, validation loss: 0.014749165624380112\r\nStep 180, loss: 0.016952170059084892\r\n",,terminal_output +5899,6849357,"train_lam.py",4593,3,"val",python,selection_command +5900,6849893,"train_lam.py",4748,3,"val",python,selection_command +5901,6851215,"train_lam.py",4842,3,"val",python,selection_command +5902,6851634,"train_lam.py",4970,3,"val",python,selection_command +5903,6851815,"train_lam.py",4989,3,"val",python,selection_command +5904,6852523,"train_lam.py",5092,3,"val",python,selection_command +5905,6853991,"train_lam.py",5134,3,"val",python,selection_command +5906,6855730,"TERMINAL",0,0,"Step 181, loss: 0.015445985831320286\r\nStep 182, loss: 0.011947507970035076\r\nStep 183, loss: 0.012612231075763702\r\nStep 184, loss: 0.013378221541643143\r\nStep 185, loss: 0.014329500496387482\r\nStep 186, loss: 0.012977870181202888\r\nStep 187, loss: 0.01297012809664011\r\nStep 188, loss: 0.013462279923260212\r\nStep 189, loss: 
0.01424240879714489\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\nStep 190, validation loss: 0.013132420368492603\r\nStep 190, loss: 0.01636599749326706\r\nStep 191, loss: 0.012012002058327198\r\nStep 192, loss: 0.011318069882690907\r\nStep 193, loss: 0.011374292895197868\r\nStep 194, loss: 0.011988263577222824\r\nStep 195, loss: 0.017992639914155006\r\nStep 196, loss: 0.011089115403592587\r\nStep 197, loss: 0.013801159337162971\r\nStep 198, loss: 0.01140244398266077\r\nStep 199, loss: 0.012956082820892334\r\nCalculating validation metrics...\r\nCalculating validation metrics...\r\nStep 200, validation loss: 0.014105962589383125\r\n",,terminal_output +5907,6856346,"train_lam.py",5110,0,"",python,selection_mouse +5908,6857676,"train_lam.py",5068,48,"",python,content +5909,6857707,"train_lam.py",5072,0,"",python,selection_command +5910,6857708,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-lam-dev-3468835 at: https://wandb.ai/instant-uv/jafar/runs/0aixh8ju\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_151737-0aixh8ju/logs\r\n",,terminal_output +5911,6859090,"TERMINAL",0,0,"/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 20 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +5912,6859351,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +5913,6868532,"train_lam.py",13401,0,"",python,selection_command +5914,6868931,"train_lam.py",13461,0,"",python,selection_command +5915,6876463,"train_lam.py",9653,0,"",python,selection_mouse +5916,6883199,"train_lam.py",9628,0,"",python,selection_mouse +5917,6883210,"train_lam.py",9627,0,"",python,selection_command +5918,6883727,"train_lam.py",9651,0,"",python,selection_mouse +5919,6885347,"train_lam.py",9698,4,"val_",python,selection_command +5920,6886014,"train_lam.py",9698,5,"val_d",python,selection_command +5921,6886210,"train_lam.py",9698,6,"val_da",python,selection_command +5922,6886324,"train_lam.py",9698,7,"val_dat",python,selection_command +5923,6886455,"train_lam.py",9698,8,"val_data",python,selection_command +5924,6886892,"train_lam.py",10763,9,"val_data_",python,selection_command +5925,6887020,"train_lam.py",10763,10,"val_data_d",python,selection_command +5926,6887336,"train_lam.py",10763,11,"val_data_di",python,selection_command +5927,6887451,"train_lam.py",10763,12,"val_data_dir",python,selection_command +5928,6889523,"train_lam.py",9641,12,"val_data_dir",python,selection_command +5929,6890125,"train_lam.py",1888,12,"val_data_dir",python,selection_command +5930,6891383,"train_lam.py",9641,12,"val_data_dir",python,selection_command +5931,6892425,"train_lam.py",9648,0,"",python,selection_mouse +5932,6894899,"train_tokenizer.py",0,0,"",python,tab +5933,6894900,"train_tokenizer.py",1736,0,"",python,selection_mouse +5934,6896965,"train_tokenizer.py",1803,1,"h",python,selection_command +5935,6897087,"train_tokenizer.py",3060,2,"ha",python,selection_command +5936,6897965,"train_tokenizer.py",6227,3,"han",python,selection_command +5937,6898161,"train_tokenizer.py",8755,4,"hand",python,selection_command +5938,6898162,"train_tokenizer.py",8755,5,"handl",python,selection_command 
+5939,6898295,"train_tokenizer.py",8755,6,"handle",python,selection_command +5940,6898409,"train_tokenizer.py",8755,7,"handler",python,selection_command +5941,6898575,"train_tokenizer.py",8755,8,"handler_",python,selection_command +5942,6899193,"train_tokenizer.py",8826,8,"handler_",python,selection_command +5943,6900770,"train_tokenizer.py",9363,0,"",python,selection_mouse +5944,6901248,"train_tokenizer.py",9431,0,"",python,selection_mouse +5945,6901250,"train_tokenizer.py",9430,0,"",python,selection_command +5946,6902016,"train_tokenizer.py",9431,0,"\n if args.val_data_dir:",python,content +5947,6902040,"train_tokenizer.py",9436,0,"",python,selection_command +5948,6903907,"train_tokenizer.py",9462,0,"",python,selection_command +5949,6904198,"train_tokenizer.py",9458,25," handler_registry.add(",python,selection_command +5950,6904370,"train_tokenizer.py",9458,57," handler_registry.add(\n ""val_dataloader_state"",",python,selection_command +5951,6904495,"train_tokenizer.py",9458,98," handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,",python,selection_command +5952,6904659,"train_tokenizer.py",9458,180," handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),",python,selection_command +5953,6904798,"train_tokenizer.py",9458,186," handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_command +5954,6904955,"train_tokenizer.py",9458,212," handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(",python,selection_command +5955,6905081,"train_tokenizer.py",9458,244," handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",",python,selection_command +5956,6905230,"train_tokenizer.py",9458,288," handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,",python,selection_command +5957,6905355,"train_tokenizer.py",9458,370," handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),",python,selection_command +5958,6905670,"train_tokenizer.py",9458,376," handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_command +5959,6905948,"train_tokenizer.py",9462,0,"",python,selection_command +5960,6906128,"train_tokenizer.py",9833,0," ",python,content +5961,6906128,"train_tokenizer.py",9755,0," ",python,content +5962,6906128,"train_tokenizer.py",9711,0," ",python,content 
+5963,6906128,"train_tokenizer.py",9679,0," ",python,content +5964,6906128,"train_tokenizer.py",9649,0," ",python,content +5965,6906128,"train_tokenizer.py",9643,0," ",python,content +5966,6906128,"train_tokenizer.py",9565,0," ",python,content +5967,6906128,"train_tokenizer.py",9524,0," ",python,content +5968,6906129,"train_tokenizer.py",9492,0," ",python,content +5969,6906129,"train_tokenizer.py",9462,0," ",python,content +5970,6906653,"train_tokenizer.py",9465,0,"",python,selection_command +5971,6909130,"train_lam.py",0,0,"",python,tab +5972,6909299,"train_lam.py",10763,12,"val_data_dir",python,selection_command +5973,6911549,"train_tokenizer.py",0,0,"",python,tab +5974,6912000,"train_tokenizer.py",9501,1,"v",python,selection_command +5975,6912249,"train_tokenizer.py",9501,2,"va",python,selection_command +5976,6912249,"train_tokenizer.py",9501,3,"val",python,selection_command +5977,6912573,"train_tokenizer.py",9501,4,"val_",python,selection_command +5978,6912890,"train_tokenizer.py",10558,5,"val_i",python,selection_command +5979,6914066,"train_tokenizer.py",11265,5,"val_i",python,selection_command +5980,6915474,"train_tokenizer.py",10464,0,"",python,selection_mouse +5981,6915998,"train_tokenizer.py",10553,0,"\n if args.val_data_dir:",python,content +5982,6916044,"train_tokenizer.py",10558,0,"",python,selection_command +5983,6916930,"train_tokenizer.py",10584,0,"",python,selection_command +5984,6917998,"train_tokenizer.py",10584,0," ",python,content +5985,6918298,"train_tokenizer.py",10587,0,"",python,selection_command +5986,6922401,"train_lam.py",0,0,"",python,tab +5987,6922401,"train_lam.py",11776,0,"",python,selection_mouse +5988,6922560,"train_lam.py",11775,0,"",python,selection_command +5989,6922561,"train_lam.py",11775,1,")",python,selection_mouse +5990,6922561,"train_lam.py",11669,106," dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n ",python,selection_mouse +5991,6922562,"train_lam.py",11666,109," dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n ",python,selection_mouse +5992,6922562,"train_lam.py",11665,110," dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n ",python,selection_mouse +5993,6922707,"train_lam.py",11776,0,"",python,selection_command +5994,6922708,"train_lam.py",11566,210," model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )",python,selection_mouse +5995,6922708,"train_lam.py",11565,211," model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )",python,selection_mouse +5996,6922708,"train_lam.py",11517,259," restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )",python,selection_mouse +5997,6922709,"train_lam.py",11501,275," else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )",python,selection_mouse +5998,6922709,"train_lam.py",11482,294," )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n 
dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )",python,selection_mouse +5999,6922709,"train_lam.py",11374,402," val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )",python,selection_mouse +6000,6922742,"train_lam.py",11373,403," val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )",python,selection_mouse +6001,6922789,"train_lam.py",11267,509," dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )",python,selection_mouse +6002,6922802,"train_lam.py",11168,608," model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )",python,selection_mouse +6003,6922846,"train_lam.py",11121,655," restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )",python,selection_mouse +6004,6922884,"train_lam.py",11120,656," restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )",python,selection_mouse +6005,6922929,"train_lam.py",11119,657," restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n 
)",python,selection_mouse +6006,6922945,"train_lam.py",11089,687," if args.val_data_dir:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )",python,selection_mouse +6007,6934851,"train_lam.py",11909,0,"",python,selection_mouse +6008,6934885,"train_lam.py",11908,0,"",python,selection_command +6009,6935051,"train_lam.py",11908,1,")",python,selection_mouse +6010,6935261,"train_lam.py",11909,0,"",python,selection_command +6011,6935261,"train_lam.py",11906,3," )",python,selection_mouse +6012,6935262,"train_lam.py",11828,81," checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6013,6935262,"train_lam.py",11655,254," dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6014,6935262,"train_lam.py",11494,415," else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6015,6935357,"train_lam.py",11167,742," model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6016,6935444,"train_lam.py",11120,789," restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6017,6935489,"train_lam.py",11119,790," restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n 
)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6018,6936098,"train_lam.py",11089,820," if args.val_data_dir:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6019,6941063,"train_tokenizer.py",0,0,"",python,tab +6020,6941063,"train_tokenizer.py",11350,0,"",python,selection_mouse +6021,6941063,"train_tokenizer.py",11337,13," ),\n )",python,selection_mouse +6022,6941063,"train_tokenizer.py",11228,122," val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),\n )",python,selection_mouse +6023,6941063,"train_tokenizer.py",11117,233," train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),\n )",python,selection_mouse +6024,6941063,"train_tokenizer.py",11116,234," train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),\n )",python,selection_mouse +6025,6941137,"train_tokenizer.py",11022,328," model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),\n )",python,selection_mouse +6026,6941137,"train_tokenizer.py",11349,0,"",python,selection_command +6027,6941137,"train_tokenizer.py",11022,327," model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),\n ",python,selection_mouse +6028,6941137,"train_tokenizer.py",11337,13," ),\n )",python,selection_command +6029,6941164,"train_tokenizer.py",11022,328," model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),\n )",python,selection_mouse +6030,6941186,"train_tokenizer.py",10985,365," args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),\n )",python,selection_mouse +6031,6941357,"train_tokenizer.py",10939,411," checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n 
val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),\n )",python,selection_mouse +6032,6942968,"train_tokenizer.py",10892,458," restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n ),\n )",python,selection_mouse +6033,6944091,"train_tokenizer.py",10892,458,"",python,content +6034,6945264,"train_tokenizer.py",10892,0," if args.val_data_dir:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,content +6035,6947428,"train_tokenizer.py",11702,0,"",python,selection_mouse +6036,6951721,"train_lam.py",0,0,"",python,tab +6037,6951722,"train_lam.py",12174,0,"",python,selection_mouse +6038,6951722,"train_lam.py",12173,1,"]",python,selection_mouse +6039,6951722,"train_lam.py",12164,10,"er_state""]",python,selection_mouse +6040,6951723,"train_lam.py",12160,14,"loader_state""]",python,selection_mouse +6041,6951723,"train_lam.py",12148,26,"ed[""val_dataloader_state""]",python,selection_mouse +6042,6951723,"train_lam.py",12142,32,"restored[""val_dataloader_state""]",python,selection_mouse +6043,6951723,"train_lam.py",12139,35," = restored[""val_dataloader_state""]",python,selection_mouse +6044,6951723,"train_lam.py",12136,38,"tor = restored[""val_dataloader_state""]",python,selection_mouse +6045,6951724,"train_lam.py",12132,42,"terator = restored[""val_dataloader_state""]",python,selection_mouse +6046,6951724,"train_lam.py",12131,43,"iterator = restored[""val_dataloader_state""]",python,selection_mouse +6047,6951724,"train_lam.py",12129,45,"l_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6048,6951790,"train_lam.py",12127,47,"val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6049,6951791,"train_lam.py",12173,0,"",python,selection_command +6050,6951792,"train_lam.py",12173,1,"]",python,selection_command +6051,6951841,"train_lam.py",12123,51," val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6052,6951871,"train_lam.py",12121,53," val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6053,6951890,"train_lam.py",12119,55," val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6054,6951921,"train_lam.py",12118,56," val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6055,6951947,"train_lam.py",12117,57," val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6056,6951987,"train_lam.py",12116,58," val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6057,6952010,"train_lam.py",12115,59," val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6058,6952034,"train_lam.py",12085,89," if args.val_data_dir:\n val_iterator 
= restored[""val_dataloader_state""]",python,selection_mouse +6059,6953967,"train_tokenizer.py",0,0,"",python,tab +6060,6953967,"train_tokenizer.py",11712,0,"",python,selection_mouse +6061,6954897,"train_lam.py",0,0,"",python,tab +6062,6954898,"train_lam.py",12095,0,"",python,selection_mouse +6063,6957591,"train_tokenizer.py",0,0,"",python,tab +6064,6957592,"train_tokenizer.py",11856,0,"",python,selection_mouse +6065,6958697,"train_tokenizer.py",11855,0,"",python,selection_command +6066,6959014,"train_tokenizer.py",11887,0,"\n if args.val_data_dir:",python,content +6067,6959053,"train_tokenizer.py",11896,0,"",python,selection_command +6068,6959600,"train_tokenizer.py",11926,0,"",python,selection_command +6069,6960019,"train_tokenizer.py",11926,0," ",python,content +6070,6960246,"train_tokenizer.py",11929,0,"",python,selection_command +6071,6963880,"train_lam.py",0,0,"",python,tab +6072,6963881,"train_lam.py",12513,0,"",python,selection_mouse +6073,6964422,"train_lam.py",12656,0,"",python,selection_mouse +6074,6965196,"train_lam.py",12461,0,"",python,selection_mouse +6075,6966293,"train_tokenizer.py",0,0,"",python,tab +6076,6966293,"train_tokenizer.py",12268,0,"",python,selection_mouse +6077,6966322,"train_tokenizer.py",12267,0,"",python,selection_command +6078,6967105,"train_tokenizer.py",12268,0,"\n if args.val_data_dir:",python,content +6079,6967154,"train_tokenizer.py",12277,0,"",python,selection_command +6080,6967951,"train_tokenizer.py",12273,4,"",python,content +6081,6968249,"train_tokenizer.py",12272,0,"",python,selection_command +6082,6968414,"train_tokenizer.py",12298,0,"",python,selection_command +6083,6969142,"train_tokenizer.py",12295,22," dataloader_val = (",python,selection_command +6084,6969236,"train_tokenizer.py",12295,92," dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)",python,selection_command +6085,6969382,"train_tokenizer.py",12295,125," dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator",python,selection_command +6086,6969509,"train_tokenizer.py",12295,131," dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )",python,selection_command +6087,6969852,"train_tokenizer.py",12299,0,"",python,selection_command +6088,6970419,"train_tokenizer.py",12425,0," ",python,content +6089,6970419,"train_tokenizer.py",12396,0," ",python,content +6090,6970419,"train_tokenizer.py",12326,0," ",python,content +6091,6970419,"train_tokenizer.py",12299,0," ",python,content +6092,6970740,"train_tokenizer.py",12302,0,"",python,selection_command +6093,6991712,"train_lam.py",0,0,"",python,tab +6094,6991713,"train_lam.py",14640,0,"",python,selection_mouse +6095,6994675,"train_tokenizer.py",0,0,"",python,tab +6096,6994676,"train_tokenizer.py",14171,0,"",python,selection_mouse +6097,6994753,"train_tokenizer.py",14170,0,"",python,selection_command +6098,6998223,"train_tokenizer.py",14171,0,"\n if args.val_data_dir and step % args.val_interval == 0:",python,content +6099,6998279,"train_tokenizer.py",14192,0,"",python,selection_command +6100,6999416,"train_tokenizer.py",14191,0,"",python,selection_command +6101,7001155,"train_tokenizer.py",14267,0,"",python,selection_command +6102,7002098,"train_tokenizer.py",14248,86," gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0",python,selection_command +6103,7002338,"train_tokenizer.py",14248,146," gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = 
val_recon[0].clip(0, 1)",python,selection_command +6104,7002481,"train_tokenizer.py",14248,232," gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)",python,selection_command +6105,7002638,"train_tokenizer.py",14248,291," gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(",python,selection_command +6106,7002770,"train_tokenizer.py",14248,364," gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""",python,selection_command +6107,7002888,"train_tokenizer.py",14248,386," gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )",python,selection_command +6108,7003223,"train_tokenizer.py",14268,0,"",python,selection_command +6109,7003422,"train_tokenizer.py",14633,0," ",python,content +6110,7003422,"train_tokenizer.py",14564,0," ",python,content +6111,7003423,"train_tokenizer.py",14501,0," ",python,content +6112,7003423,"train_tokenizer.py",14415,0," ",python,content +6113,7003423,"train_tokenizer.py",14355,0," ",python,content +6114,7003423,"train_tokenizer.py",14268,0," ",python,content +6115,7003747,"train_tokenizer.py",14271,0,"",python,selection_command +6116,7074451,"train_lam.py",0,0,"",python,tab +6117,7074452,"train_lam.py",15357,0,"",python,selection_mouse +6118,7074452,"train_lam.py",15338,20," ",python,selection_mouse +6119,7074634,"train_lam.py",15338,69," if jax.process_index() == 0:\n ",python,selection_mouse +6120,7074635,"train_lam.py",15338,113," if jax.process_index() == 0:\n log_images = dict(\n ",python,selection_mouse +6121,7074635,"train_lam.py",15338,184," if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n ",python,selection_mouse +6122,7074635,"train_lam.py",15338,258," if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n ",python,selection_mouse +6123,7074636,"train_lam.py",15338,314," if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n ",python,selection_mouse +6124,7074686,"train_lam.py",15338,390," if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ",python,selection_mouse +6125,7074769,"train_lam.py",15338,422," if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )",python,selection_mouse +6126,7074857,"train_lam.py",15338,577," if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n 
recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n ",python,selection_mouse +6127,7074965,"train_lam.py",15338,949," if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ",python,selection_mouse +6128,7075010,"train_lam.py",15338,989," if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )",python,selection_mouse +6129,7075078,"train_lam.py",15338,1064," if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n )\n wandb.log(log_images",python,selection_mouse +6130,7075300,"train_lam.py",15338,1065," if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n )\n wandb.log(log_images)",python,selection_mouse +6131,7081619,"train_tokenizer.py",0,0,"",python,tab +6132,7081620,"train_tokenizer.py",15691,0,"",python,selection_mouse +6133,7081620,"train_tokenizer.py",15619,72,"\n )\n wandb.log(log_images)",python,selection_mouse +6134,7081620,"train_tokenizer.py",15530,161," np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +6135,7081621,"train_tokenizer.py",15466,225," val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +6136,7081621,"train_tokenizer.py",15382,309," val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +6137,7081621,"train_tokenizer.py",15299,392," val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n 
val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +6138,7081621,"train_tokenizer.py",15297,394," val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +6139,7081622,"train_tokenizer.py",15295,396," val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +6140,7081622,"train_tokenizer.py",15263,428," ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +6141,7081674,"train_tokenizer.py",15260,431," ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +6142,7081675,"train_tokenizer.py",15690,0,"",python,selection_command +6143,7081675,"train_tokenizer.py",15619,72,"\n )\n wandb.log(log_images)",python,selection_command +6144,7081711,"train_tokenizer.py",15129,562," true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +6145,7081793,"train_tokenizer.py",15056,635," recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +6146,7081888,"train_tokenizer.py",14986,705," image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +6147,7081981,"train_tokenizer.py",14943,748," log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +6148,7082107,"train_tokenizer.py",14894,797," if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n 
val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +6149,7083389,"train_tokenizer.py",14894,797,"",python,content +6150,7084084,"train_tokenizer.py",14894,0," if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n )\n wandb.log(log_images)",python,content +6151,7084821,"train_tokenizer.py",15958,0,"",python,selection_command +6152,7100802,"train_lam.py",0,0,"",python,tab +6153,7100802,"train_lam.py",16444,0,"",python,selection_mouse +6154,7100940,"train_lam.py",16443,1," ",python,selection_mouse +6155,7101817,"train_lam.py",16443,0,"",python,selection_mouse +6156,7105204,"train_lam.py",17648,0,"",python,selection_mouse +6157,7105210,"train_lam.py",17647,0,"",python,selection_command +6158,7105359,"train_lam.py",17647,1,",",python,selection_mouse +6159,7105371,"train_lam.py",17648,0,"",python,selection_command +6160,7105448,"train_lam.py",17647,1,",",python,selection_mouse +6161,7105449,"train_lam.py",17601,47," args=ckpt_manager_args\n ),",python,selection_mouse +6162,7105449,"train_lam.py",17596,52," args=ckpt_manager_args\n ),",python,selection_mouse +6163,7105495,"train_lam.py",17568,80," step,\n args=ckpt_manager_args\n ),",python,selection_mouse +6164,7105516,"train_lam.py",17566,82," step,\n args=ckpt_manager_args\n ),",python,selection_mouse +6165,7105552,"train_lam.py",17522,126," checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +6166,7105574,"train_lam.py",17497,151," )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +6167,7105605,"train_lam.py",17409,239," train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +6168,7105629,"train_lam.py",17163,485," ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +6169,7105717,"train_lam.py",17118,530," )\n else: \n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +6170,7105797,"train_lam.py",16914,734," ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )\n else: \n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +6171,7105888,"train_lam.py",16759,889," 
train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )\n else: \n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +6172,7106138,"train_lam.py",16669,979," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )\n else: \n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +6173,7106164,"train_lam.py",16609,1039," ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )\n else: \n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +6174,7106227,"train_lam.py",16571,1077," if args.val_data_dir:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )\n else: \n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +6175,7107020,"train_lam.py",16516,1132," optimizer_state = nnx.state(optimizer)\n if args.val_data_dir:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )\n else: \n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +6176,7114659,"train_tokenizer.py",0,0,"",python,tab +6177,7114659,"train_tokenizer.py",16729,0,"",python,selection_mouse +6178,7114659,"train_tokenizer.py",16711,18,"\n )",python,selection_mouse 
+6179,7114659,"train_tokenizer.py",16709,20,"),\n )",python,selection_mouse +6180,7114659,"train_tokenizer.py",16680,49," ),\n ),\n )",python,selection_mouse +6181,7114659,"train_tokenizer.py",16677,52," ),\n ),\n )",python,selection_mouse +6182,7114660,"train_tokenizer.py",16615,114," val_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +6183,7114660,"train_tokenizer.py",16518,211," val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +6184,7114660,"train_tokenizer.py",16489,240," ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +6185,7114660,"train_tokenizer.py",16487,242," ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +6186,7114708,"train_tokenizer.py",16426,303," train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +6187,7114709,"train_tokenizer.py",16728,0,"",python,selection_command +6188,7114709,"train_tokenizer.py",16711,18,"\n )",python,selection_command +6189,7114750,"train_tokenizer.py",16329,400," train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +6190,7114772,"train_tokenizer.py",16239,490," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +6191,7114814,"train_tokenizer.py",16194,535," args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +6192,7114847,"train_tokenizer.py",16168,561," step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +6193,7114868,"train_tokenizer.py",16127,602," checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +6194,7114953,"train_tokenizer.py",16072,657," optimizer_state = nnx.state(optimizer)\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n 
val_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +6195,7116120,"train_tokenizer.py",16072,657,"",python,content +6196,7116787,"train_tokenizer.py",16072,0," optimizer_state = nnx.state(optimizer)\n if args.val_data_dir:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )\n else: \n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,content +6197,7124180,"TERMINAL",0,0,"\r(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +6198,7135593,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",,terminal_output +6199,7138549,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +6200,7140214,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab +6201,7141369,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1666,0,"",shellscript,selection_mouse +6202,7141370,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1665,0,"",shellscript,selection_command +6203,7141986,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1687,0,"",shellscript,selection_mouse +6204,7142020,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1686,0,"",shellscript,selection_command +6205,7147164,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1667,20," --val_steps 50 \",shellscript,selection_command +6206,7147380,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1643,44," --val_interval 10 \\n --val_steps 50 \",shellscript,selection_command +6207,7147458,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1599,88," --val_data_dir $array_records_dir_val \\n --val_interval 10 \\n --val_steps 50 \",shellscript,selection_command +6208,7148026,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1599,89,"",shellscript,content +6209,7148054,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1603,0,"",shellscript,selection_command +6210,7151948,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --job-name=train_lam_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource 
.venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_lam.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=1000 \\r\n --log \\r\n --name=coinrun-lam-dev-$slurm_job_id \\r\n --tags lam coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 200 \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 10 \\r\n --val_steps 50 \\r\n --data_dir $array_records_dir_train\r\n\r\n",,terminal_output +6211,7152095,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1362839\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1757077935\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757081535\r\nSLURM_PMI2_SRUN_PORT=37571\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3468835\r\nSLURM_PTY_PORT=43005\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=33\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=196\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=34789\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3468835\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=34789\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +6212,7152223,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +6213,7154310,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab +6214,7155139,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1590,0,"",shellscript,selection_mouse +6215,7159939,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +6216,7160748,"TERMINAL",0,0,"wandb: creating run\r\nwandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_152529-89219jx6\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-lam-dev-3468835\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/89219jx6\r\n",,terminal_output +6217,7164451,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['action_in', 'action_up', 'decoder', 'encoder', 'patch_up', 'vq']\r\nParameter counts:\r\n{'action_in': 768, 'action_up': 16896, 'decoder': 17474816, 'encoder': 17228832, 'patch_up': 393728, 'vq': 192, 'total': 35115232}\r\nStarting training from step 0...\r\n",,terminal_output +6218,7172885,"TERMINAL",0,0,"2025-09-05 15:25:42.612902: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:25:42.614057: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:25:42.614075: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:25:42.614541: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6219,7218834,"TERMINAL",0,0,"Step 0, loss: 0.29082542657852173\r\nStep 1, loss: 0.2048691064119339\r\nStep 2, loss: 0.19221149384975433\r\nStep 3, loss: 0.20193631947040558\r\nStep 4, loss: 0.1634262055158615\r\nStep 5, loss: 0.16224892437458038\r\nStep 6, loss: 0.1465519517660141\r\nStep 7, loss: 0.13969092071056366\r\nStep 8, loss: 0.13379880785942078\r\nStep 9, loss: 0.13682898879051208\r\nCalculating validation metrics...\r\n",,terminal_output +6220,7322063,"TERMINAL",0,0,"Step 10, validation loss: 0.11593025177717209\r\nStep 10, loss: 0.10182199627161026\r\nStep 11, loss: 0.10439193993806839\r\nStep 12, loss: 0.09406972676515579\r\nStep 13, loss: 0.09772490710020065\r\nStep 14, loss: 0.09554266184568405\r\nStep 15, loss: 0.09632503986358643\r\nStep 16, loss: 0.0835835412144661\r\nStep 17, loss: 0.09580384939908981\r\nStep 18, loss: 0.07298149913549423\r\nStep 19, loss: 0.07869017869234085\r\nCalculating validation metrics...\r\nStep 20, validation loss: 0.0711536630988121\r\nStep 20, loss: 0.07823637127876282\r\nStep 21, loss: 0.06644692271947861\r\nStep 22, loss: 0.061057426035404205\r\nStep 23, loss: 0.06661047786474228\r\nStep 24, loss: 0.05326160788536072\r\nStep 25, loss: 0.05032860487699509\r\nStep 26, loss: 0.04621792584657669\r\nStep 27, loss: 0.058809492737054825\r\nStep 28, loss: 0.05668637156486511\r\nStep 29, loss: 0.049171317368745804\r\nCalculating validation metrics...\r\nStep 30, validation loss: 0.049885015934705734\r\nStep 30, loss: 0.054714083671569824\r\nStep 31, loss: 0.03886327147483826\r\nStep 32, loss: 0.04377226158976555\r\nStep 33, loss: 0.04706248268485069\r\nStep 34, loss: 0.03870921954512596\r\nStep 35, loss: 0.04759908840060234\r\nStep 36, loss: 0.03624310344457626\r\nStep 37, loss: 0.03723645582795143\r\nStep 38, loss: 0.03415235877037048\r\nStep 39, loss: 0.03045961633324623\r\nCalculating validation metrics...\r\nStep 40, validation loss: 0.03741709515452385\r\nStep 40, loss: 0.04029426723718643\r\nStep 41, loss: 0.037842318415641785\r\nStep 42, loss: 0.036140669137239456\r\nStep 43, loss: 0.042215485125780106\r\nStep 44, loss: 0.04040057957172394\r\nStep 45, loss: 0.03682219609618187\r\nStep 46, loss: 0.02909393236041069\r\nStep 47, loss: 0.03433133289217949\r\nStep 48, loss: 0.02926039509475231\r\nStep 49, loss: 0.029214782640337944\r\nCalculating validation metrics...\r\nStep 50, validation loss: 0.030060311779379845\r\nStep 50, loss: 0.03843321651220322\r\nStep 51, loss: 0.025400329381227493\r\nStep 52, loss: 0.025296125560998917\r\nStep 53, loss: 0.030991123989224434\r\nStep 54, loss: 0.03036041185259819\r\nStep 55, loss: 0.030432844534516335\r\nStep 56, loss: 0.035055261105298996\r\nStep 57, loss: 0.026996567845344543\r\nStep 58, loss: 0.023726535961031914\r\nStep 59, loss: 0.025234583765268326\r\nCalculating validation metrics...\r\nStep 60, validation loss: 0.026439407840371132\r\nStep 60, loss: 0.025718191638588905\r\nStep 61, loss: 0.02394203655421734\r\nStep 62, loss: 0.019223101437091827\r\nStep 63, loss: 0.03018917515873909\r\nStep 64, loss: 0.019092077389359474\r\nStep 65, loss: 0.02832338586449623\r\nStep 66, loss: 0.025073956698179245\r\nStep 67, loss: 0.026441961526870728\r\nStep 68, loss: 0.023691991344094276\r\nStep 69, loss: 0.02619829587638378\r\nCalculating validation metrics...\r\nStep 70, validation loss: 0.02398490160703659\r\nStep 70, loss: 0.022210925817489624\r\nStep 
71, loss: 0.024554163217544556\r\nStep 72, loss: 0.020051466301083565\r\nStep 73, loss: 0.026315253227949142\r\nStep 74, loss: 0.02303328365087509\r\nStep 75, loss: 0.02504853531718254\r\nStep 76, loss: 0.024052560329437256\r\nStep 77, loss: 0.017280658707022667\r\nStep 78, loss: 0.02129630371928215\r\nStep 79, loss: 0.01993608847260475\r\nCalculating validation metrics...\r\nStep 80, validation loss: 0.023131655529141426\r\nStep 80, loss: 0.020155316218733788\r\nStep 81, loss: 0.023086024448275566\r\nStep 82, loss: 0.024431785568594933\r\nStep 83, loss: 0.026739178225398064\r\nStep 84, loss: 0.015072423964738846\r\nStep 85, loss: 0.018576988950371742\r\nStep 86, loss: 0.018114814534783363\r\nStep 87, loss: 0.02167176641523838\r\nStep 88, loss: 0.020324597135186195\r\nStep 89, loss: 0.022067170590162277\r\nCalculating validation metrics...\r\nStep 90, validation loss: 0.020371397957205772\r\nStep 90, loss: 0.022112559527158737\r\nStep 91, loss: 0.017119891941547394\r\nStep 92, loss: 0.018622132018208504\r\nStep 93, loss: 0.02364751696586609\r\nStep 94, loss: 0.016421979293227196\r\nStep 95, loss: 0.01979394629597664\r\nStep 96, loss: 0.02461426332592964\r\nStep 97, loss: 0.013342678546905518\r\nStep 98, loss: 0.02081390470266342\r\nStep 99, loss: 0.01698136515915394\r\nCalculating validation metrics...\r\nStep 100, validation loss: 0.017996439710259438\r\nStep 100, loss: 0.014271106570959091\r\nStep 101, loss: 0.015460925176739693\r\nStep 102, loss: 0.02133229561150074\r\nStep 103, loss: 0.01839534379541874\r\nStep 104, loss: 0.014909209683537483\r\nStep 105, loss: 0.0216820165514946\r\nStep 106, loss: 0.017482278868556023\r\nStep 107, loss: 0.019355399534106255\r\nStep 108, loss: 0.021315664052963257\r\nStep 109, loss: 0.017344119027256966\r\nCalculating validation metrics...\r\nStep 110, validation loss: 0.022626372054219246\r\nStep 110, loss: 0.021562399342656136\r\nStep 111, loss: 0.023074287921190262\r\nStep 112, loss: 0.02012011967599392\r\nStep 113, loss: 0.018549803644418716\r\nStep 114, loss: 0.017611799761652946\r\nStep 115, loss: 0.018848247826099396\r\nStep 116, loss: 0.021569330245256424\r\nStep 117, loss: 0.01499425433576107\r\nStep 118, loss: 0.018216868862509727\r\nStep 119, loss: 0.020395347848534584\r\nCalculating validation metrics...\r\nStep 120, validation loss: 0.017758255824446678\r\nStep 120, loss: 0.01547687966376543\r\nStep 121, loss: 0.02094418928027153\r\nStep 122, loss: 0.022697914391756058\r\nStep 123, loss: 0.01736081950366497\r\nStep 124, loss: 0.017741763964295387\r\nStep 125, loss: 0.015631554648280144\r\nStep 126, loss: 0.012124142609536648\r\nStep 127, loss: 0.01789083704352379\r\nStep 128, loss: 0.021962514147162437\r\nStep 129, loss: 0.016020454466342926\r\nCalculating validation metrics...\r\nStep 130, validation loss: 0.01725502498447895\r\nStep 130, loss: 0.019696876406669617\r\nStep 131, loss: 0.01857110857963562\r\nStep 132, loss: 0.01647316850721836\r\nStep 133, loss: 0.01836518943309784\r\nStep 134, loss: 0.016473202034831047\r\nStep 135, loss: 0.016738222911953926\r\nStep 136, loss: 0.01536999549716711\r\nStep 137, loss: 0.01699412614107132\r\nStep 138, loss: 0.016084039583802223\r\nStep 139, loss: 0.01579427719116211\r\nCalculating validation metrics...\r\nStep 140, validation loss: 0.016312312334775925\r\nStep 140, loss: 0.014250640757381916\r\nStep 141, loss: 0.014106394723057747\r\nStep 142, loss: 0.016424495726823807\r\nStep 143, loss: 0.01616048812866211\r\nStep 144, loss: 0.014203733764588833\r\nStep 145, loss: 
0.015149195678532124\r\nStep 146, loss: 0.014280597679316998\r\nStep 147, loss: 0.014177494682371616\r\nStep 148, loss: 0.013405081816017628\r\nStep 149, loss: 0.015345013700425625\r\nCalculating validation metrics...\r\nStep 150, validation loss: 0.015228742733597755\r\nStep 150, loss: 0.014193126931786537\r\nStep 151, loss: 0.01628248207271099\r\nStep 152, loss: 0.013444878160953522\r\nStep 153, loss: 0.015320662409067154\r\nStep 154, loss: 0.01651136949658394\r\nStep 155, loss: 0.016237348318099976\r\nStep 156, loss: 0.021084539592266083\r\nStep 157, loss: 0.016573647037148476\r\nStep 158, loss: 0.01246537547558546\r\nStep 159, loss: 0.016983801499009132\r\nCalculating validation metrics...\r\nStep 160, validation loss: 0.01473038550466299\r\nStep 160, loss: 0.014932197518646717\r\nStep 161, loss: 0.015716800466179848\r\nStep 162, loss: 0.016133969649672508\r\nStep 163, loss: 0.01650039292871952\r\nStep 164, loss: 0.011889860965311527\r\nStep 165, loss: 0.01749376580119133\r\nStep 166, loss: 0.015083805657923222\r\nStep 167, loss: 0.014838323928415775\r\nStep 168, loss: 0.016809701919555664\r\nStep 169, loss: 0.01259633433073759\r\nCalculating validation metrics...\r\nStep 170, validation loss: 0.014309453777968884\r\nStep 170, loss: 0.013360106386244297\r\nStep 171, loss: 0.014926302246749401\r\nStep 172, loss: 0.014792452566325665\r\nStep 173, loss: 0.01827901042997837\r\nStep 174, loss: 0.012912094593048096\r\nStep 175, loss: 0.01655402034521103\r\nStep 176, loss: 0.015054124407470226\r\nStep 177, loss: 0.012808599509298801\r\nStep 178, loss: 0.012069622054696083\r\nStep 179, loss: 0.013416714034974575\r\nCalculating validation metrics...\r\nStep 180, validation loss: 0.01410871185362339\r\nStep 180, loss: 0.01643146388232708\r\nStep 181, loss: 0.015925981104373932\r\nStep 182, loss: 0.011302188970148563\r\nStep 183, loss: 0.012242967262864113\r\nStep 184, loss: 0.013118250295519829\r\nStep 185, loss: 0.013792001642286777\r\nStep 186, loss: 0.012728353030979633\r\nStep 187, loss: 0.012617716565728188\r\nStep 188, loss: 0.013227613642811775\r\nStep 189, loss: 0.014567919075489044\r\nCalculating validation metrics...\r\nStep 190, validation loss: 0.014551262371242046\r\nStep 190, loss: 0.017684191465377808\r\nStep 191, loss: 0.012563856318593025\r\nStep 192, loss: 0.01123756356537342\r\nStep 193, loss: 0.011305016465485096\r\nStep 194, loss: 0.013284462504088879\r\n",,terminal_output +6221,7325528,"TERMINAL",0,0,"Step 195, loss: 0.019599024206399918\r\nStep 196, loss: 0.01167325209826231\r\nStep 197, loss: 0.016840780153870583\r\nStep 198, loss: 0.011455724015831947\r\nStep 199, loss: 0.014831987209618092\r\nCalculating validation metrics...\r\nStep 200, validation loss: 0.013660582713782787\r\n",,terminal_output +6222,7327547,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-lam-dev-3468835 at: https://wandb.ai/instant-uv/jafar/runs/89219jx6\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_152529-89219jx6/logs\r\n",,terminal_output +6223,7328551,"TERMINAL",0,0,"/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 20 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +6224,7328931,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 
jasmine]$ ",,terminal_output +6225,7355599,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",,terminal_output +6226,7356472,"TERMINAL",0,0,"",,terminal_output +6227,7357308,"TERMINAL",0,0,"\r\r\n\r",,terminal_output +6228,7357468,"TERMINAL",0,0,"",,terminal_output +6229,7357528,"TERMINAL",0,0,"",,terminal_output +6230,7357668,"TERMINAL",0,0,"",,terminal_output +6231,7357792,"TERMINAL",0,0,"",,terminal_output +6232,7358939,"TERMINAL",0,0,"n",,terminal_output +6233,7359380,"TERMINAL",0,0,"_",,terminal_output +6234,7359645,"TERMINAL",0,0,"t",,terminal_output +6235,7359702,"TERMINAL",0,0,"o",,terminal_output +6236,7359830,"TERMINAL",0,0,"k",,terminal_output +6237,7359972,"TERMINAL",0,0,"enizer_single_gpu.sh ",,terminal_output +6238,7365987,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\r\n#SBATCH --job-name=train_tokenizer_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_tokenizer.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=1000 \\r\n --log \\r\n --name=coinrun-tokenizer-dev-$slurm_job_id \\r\n --tags tokenizer coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 200 \\r\n --data_dir $array_records_dir_train\r\n",,terminal_output 
+6239,7366134,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1362839\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1757077935\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757081535\r\nSLURM_PMI2_SRUN_PORT=37571\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3468835\r\nSLURM_PTY_PORT=43005\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=33\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=196\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=34789\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3468835\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=34789\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +6240,7366270,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +6241,7367764,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab +6242,7373731,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +6243,7374430,"TERMINAL",0,0,"wandb: creating run\r\n",,terminal_output +6244,7374536,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_152903-7t38f3nh\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-tokenizer-dev-3468835\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/7t38f3nh\r\n",,terminal_output +6245,7377483,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab +6246,7377836,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['decoder', 'encoder', 'vq']\r\nParameter counts:\r\n{'decoder': 16858736, 'encoder': 16858752, 'vq': 32768, 'total': 33750256}\r\nStarting training from step 0...\r\n",,terminal_output +6247,7379386,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1598,0,"\n --val_data_dir $array_records_dir_val \\n --val_interval 10 \\n --val_steps 50 \",shellscript,content +6248,7379421,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1603,0,"",shellscript,selection_command +6249,7386287,"TERMINAL",0,0,"2025-09-05 15:29:16.015559: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:29:16.015997: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:29:16.016031: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:29:16.016163: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:29:16.017719: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6250,7394152,"TERMINAL",0,0,"^[[A",,terminal_output +6251,7394564,"TERMINAL",0,0,"\r\n",,terminal_output +6252,7400533,"TERMINAL",0,0,"bash",,terminal_focus +6253,7401714,"TERMINAL",0,0,"queue",,terminal_command +6254,7401725,"TERMINAL",0,0,"]633;C",,terminal_output +6255,7401802,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 15:29:31 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3466286 accelerat train_to tum_cte0 R 18:13:36\t 1 hkn07363466287 accelerat train_la tum_cte0 R 18:13:36\t 1 hkn07363468835 dev_accel interact tum_cte0 R17:16\t 1 hkn0402",,terminal_output +6256,7402813,"TERMINAL",0,0,"2777",,terminal_output +6257,7403880,"TERMINAL",0,0,"3888",,terminal_output +6258,7404896,"TERMINAL",0,0,"4999",,terminal_output +6259,7406036,"TERMINAL",0,0,"5404020",,terminal_output +6260,7407046,"TERMINAL",0,0,"6111",,terminal_output +6261,7408071,"TERMINAL",0,0,"7222",,terminal_output +6262,7409075,"TERMINAL",0,0,"8333",,terminal_output +6263,7410149,"TERMINAL",0,0,"9444",,terminal_output +6264,7411188,"TERMINAL",0,0,"40555",,terminal_output +6265,7412204,"TERMINAL",0,0,"1666",,terminal_output +6266,7413247,"TERMINAL",0,0,"2777",,terminal_output +6267,7414389,"TERMINAL",0,0,"3999",,terminal_output +6268,7415447,"TERMINAL",0,0,"5505030",,terminal_output +6269,7416448,"TERMINAL",0,0,"6111",,terminal_output +6270,7417492,"TERMINAL",0,0,"7222",,terminal_output +6271,7418515,"TERMINAL",0,0,"8333",,terminal_output +6272,7419654,"TERMINAL",0,0,"9444",,terminal_output +6273,7420666,"TERMINAL",0,0,"50555",,terminal_output +6274,7421654,"TERMINAL",0,0,"1666",,terminal_output +6275,7422721,"TERMINAL",0,0,"2777",,terminal_output +6276,7423746,"TERMINAL",0,0,"3888",,terminal_output +6277,7424865,"TERMINAL",0,0,"4999",,terminal_output +6278,7425886,"TERMINAL",0,0,"54:004:0040",,terminal_output +6279,7426828,"TERMINAL",0,0,"6111",,terminal_output +6280,7427869,"TERMINAL",0,0,"7222",,terminal_output +6281,7428960,"TERMINAL",0,0,"8333",,terminal_output +6282,7429953,"TERMINAL",0,0,"9444",,terminal_output +6283,7430996,"TERMINAL",0,0,"30:00555",,terminal_output 
+6284,7432093,"TERMINAL",0,0,"1666",,terminal_output +6285,7433079,"TERMINAL",0,0,"2777",,terminal_output +6286,7434193,"TERMINAL",0,0,"3888",,terminal_output +6287,7435210,"TERMINAL",0,0,"4999",,terminal_output +6288,7436210,"TERMINAL",0,0,"5101050",,terminal_output +6289,7437263,"TERMINAL",0,0,"6111",,terminal_output +6290,7438320,"TERMINAL",0,0,"7333",,terminal_output +6291,7439344,"TERMINAL",0,0,"9444",,terminal_output +6292,7440390,"TERMINAL",0,0,"10555",,terminal_output +6293,7441430,"TERMINAL",0,0,"1666",,terminal_output +6294,7442587,"TERMINAL",0,0,"2777",,terminal_output +6295,7443613,"TERMINAL",0,0,"3888",,terminal_output +6296,7444566,"TERMINAL",0,0,"4999",,terminal_output +6297,7445595,"TERMINAL",0,0,"520208:00",,terminal_output +6298,7446636,"TERMINAL",0,0,"6111",,terminal_output +6299,7447679,"TERMINAL",0,0,"7222",,terminal_output +6300,7448829,"TERMINAL",0,0,"8333",,terminal_output +6301,7449850,"TERMINAL",0,0,"9444",,terminal_output +6302,7450876,"TERMINAL",0,0,"20555",,terminal_output +6303,7451906,"TERMINAL",0,0,"1666",,terminal_output +6304,7452937,"TERMINAL",0,0,"2777",,terminal_output +6305,7453954,"TERMINAL",0,0,"3888",,terminal_output +6306,7454979,"TERMINAL",0,0,"4999",,terminal_output +6307,7456058,"TERMINAL",0,0,"5303010",,terminal_output +6308,7457102,"TERMINAL",0,0,"6111",,terminal_output +6309,7458153,"TERMINAL",0,0,"7222",,terminal_output +6310,7459178,"TERMINAL",0,0,"8333",,terminal_output +6311,7460203,"TERMINAL",0,0,"9444",,terminal_output +6312,7461245,"TERMINAL",0,0,"30555",,terminal_output +6313,7462288,"TERMINAL",0,0,"1777",,terminal_output +6314,7463326,"TERMINAL",0,0,"3888",,terminal_output +6315,7464374,"TERMINAL",0,0,"4999",,terminal_output +6316,7465423,"TERMINAL",0,0,"5404020",,terminal_output +6317,7467279,"TERMINAL",0,0,"6222",,terminal_output +6318,7468384,"TERMINAL",0,0,"8333",,terminal_output +6319,7469363,"TERMINAL",0,0,"9444",,terminal_output +6320,7469724,"TERMINAL",0,0,"Step 0, loss: 0.2621576189994812\r\nStep 1, loss: 0.19697336852550507\r\nStep 2, loss: 0.2387561947107315\r\nStep 3, loss: 0.2858649492263794\r\nStep 4, loss: 0.23166227340698242\r\nStep 5, loss: 0.23567236959934235\r\nStep 6, loss: 0.1995115876197815\r\nStep 7, loss: 0.20674186944961548\r\nStep 8, loss: 0.20116974413394928\r\nStep 9, loss: 0.19741825759410858\r\nStep 10, loss: 0.1460346132516861\r\nStep 11, loss: 0.17125190794467926\r\nStep 12, loss: 0.14480994641780853\r\nStep 13, loss: 0.17529481649398804\r\nStep 14, loss: 0.17897336184978485\r\nStep 15, loss: 0.19770151376724243\r\nStep 16, loss: 0.16904665529727936\r\nStep 17, loss: 0.19001226127147675\r\nStep 18, loss: 0.15586881339550018\r\nStep 19, loss: 0.15351799130439758\r\nStep 20, loss: 0.1758403778076172\r\nStep 21, loss: 0.14021089673042297\r\nStep 22, loss: 0.1402209848165512\r\nStep 23, loss: 0.14610913395881653\r\nStep 24, loss: 0.13572874665260315\r\nStep 25, loss: 0.13391976058483124\r\nStep 26, loss: 0.11312685161828995\r\nStep 27, loss: 0.15238571166992188\r\nStep 28, loss: 0.15280021727085114\r\nStep 29, loss: 0.13717208802700043\r\nStep 30, loss: 0.1706196814775467\r\nStep 31, loss: 0.12276384979486465\r\nStep 32, loss: 0.11968272924423218\r\nStep 33, loss: 0.14826332032680511\r\nStep 34, loss: 0.12294314801692963\r\nStep 35, loss: 0.16594377160072327\r\nStep 36, loss: 0.11887625604867935\r\nStep 37, loss: 0.10697607696056366\r\nStep 38, loss: 0.098540760576725\r\nStep 39, loss: 0.09651784598827362\r\nStep 40, loss: 0.13246378302574158\r\nStep 41, loss: 0.11524302512407303\r\nStep 42, loss: 
0.11768466979265213\r\nStep 43, loss: 0.1307675987482071\r\nStep 44, loss: 0.1357353925704956\r\nStep 45, loss: 0.11262912303209305\r\nStep 46, loss: 0.10638497024774551\r\nStep 47, loss: 0.10752815008163452\r\nStep 48, loss: 0.09907734394073486\r\nStep 49, loss: 0.10351433604955673\r\nStep 50, loss: 0.12397469580173492\r\nStep 51, loss: 0.10056990385055542\r\nStep 52, loss: 0.0990840494632721\r\nStep 53, loss: 0.14063651859760284\r\nStep 54, loss: 0.10178665816783905\r\nStep 55, loss: 0.14543376863002777\r\nStep 56, loss: 0.1541532427072525\r\nStep 57, loss: 0.11101311445236206\r\nStep 58, loss: 0.1389668732881546\r\nStep 59, loss: 0.1237134113907814\r\nStep 60, loss: 0.10969188809394836\r\nStep 61, loss: 0.13702338933944702\r\nStep 62, loss: 0.09639759361743927\r\nStep 63, loss: 0.1115933507680893\r\nStep 64, loss: 0.1032077968120575\r\nStep 65, loss: 0.1341322660446167\r\nStep 66, loss: 0.10452044010162354\r\nStep 67, loss: 0.10430781543254852\r\nStep 68, loss: 0.11932600289583206\r\nStep 69, loss: 0.11143650114536285\r\nStep 70, loss: 0.09934670478105545\r\nStep 71, loss: 0.09278514236211777\r\nStep 72, loss: 0.07228729873895645\r\nStep 73, loss: 0.11781609058380127\r\nStep 74, loss: 0.0841342881321907\r\nStep 75, loss: 0.12020515650510788\r\nStep 76, loss: 0.09674785286188126\r\nStep 77, loss: 0.09416897594928741\r\nStep 78, loss: 0.09638963639736176\r\nStep 79, loss: 0.09490116685628891\r\nStep 80, loss: 0.11033648997545242\r\nStep 81, loss: 0.10906785726547241\r\nStep 82, loss: 0.08715588599443436\r\nStep 83, loss: 0.1075439304113388\r\nStep 84, loss: 0.08672358840703964\r\nStep 85, loss: 0.07847541570663452\r\nStep 86, loss: 0.09122798591852188\r\nStep 87, loss: 0.10623936355113983\r\nStep 88, loss: 0.12319765239953995\r\nStep 89, loss: 0.11143064498901367\r\nStep 90, loss: 0.09347511827945709\r\nStep 91, loss: 0.09237879514694214\r\nStep 92, loss: 0.10201961547136307\r\nStep 93, loss: 0.10298234969377518\r\nStep 94, loss: 0.08434539288282394\r\nStep 95, loss: 0.08807066082954407\r\nStep 96, loss: 0.09430982172489166\r\nStep 97, loss: 0.08078678697347641\r\nStep 98, loss: 0.08928526937961578\r\nStep 99, loss: 0.08123796433210373\r\nStep 100, loss: 0.07916299253702164\r\nStep 101, loss: 0.08548562973737717\r\nStep 102, loss: 0.10400892049074173\r\nStep 103, loss: 0.08785359561443329\r\nStep 104, loss: 0.07245521247386932\r\nStep 105, loss: 0.10487835109233856\r\nStep 106, loss: 0.08806225657463074\r\nStep 107, loss: 0.09895077347755432\r\nStep 108, loss: 0.11084534972906113\r\nStep 109, loss: 0.08474700152873993\r\nStep 110, loss: 0.10105938464403152\r\nStep 111, loss: 0.09515724331140518\r\nStep 112, loss: 0.07362188398838043\r\nStep 113, loss: 0.07982225716114044\r\nStep 114, loss: 0.08722911775112152\r\nStep 115, loss: 0.08471964299678802\r\nStep 116, loss: 0.08886875212192535\r\nStep 117, loss: 0.07489152252674103\r\nStep 118, loss: 0.10803071409463882\r\nStep 119, loss: 0.07747205346822739\r\nStep 120, loss: 0.07314997911453247\r\nStep 121, loss: 0.09985804557800293\r\nStep 122, loss: 0.08617926388978958\r\nStep 123, loss: 0.09267939627170563\r\nStep 124, loss: 0.07889409363269806\r\nStep 125, loss: 0.07506301999092102\r\nStep 126, loss: 0.07115086913108826\r\nStep 127, loss: 0.07707677036523819\r\nStep 128, loss: 0.08562266826629639\r\nStep 129, loss: 0.0851345956325531\r\nStep 130, loss: 0.09328888356685638\r\nStep 131, loss: 0.07815632969141006\r\nStep 132, loss: 0.08658241480588913\r\nStep 133, loss: 0.0755985677242279\r\nStep 134, loss: 0.08588843047618866\r\nStep 135, 
loss: 0.07334265112876892\r\nStep 136, loss: 0.09098932147026062\r\nStep 137, loss: 0.08716198056936264\r\nStep 138, loss: 0.08042892068624496\r\nStep 139, loss: 0.08268754929304123\r\nStep 140, loss: 0.0901811271905899\r\nStep 141, loss: 0.06975813955068588\r\nStep 142, loss: 0.06910686939954758\r\nStep 143, loss: 0.0700777992606163\r\nStep 144, loss: 0.07262977212667465\r\nStep 145, loss: 0.0635688453912735\r\nStep 146, loss: 0.07048730552196503\r\nStep 147, loss: 0.0691511332988739\r\nStep 148, loss: 0.06615249067544937\r\nStep 149, loss: 0.07267475128173828\r\nStep 150, loss: 0.06305628269910812\r\nStep 151, loss: 0.06658092886209488\r\nStep 152, loss: 0.062002032995224\r\nStep 153, loss: 0.07644370198249817\r\nStep 154, loss: 0.07342284917831421\r\nStep 155, loss: 0.07589100301265717\r\nStep 156, loss: 0.11294777691364288\r\nStep 157, loss: 0.0828903317451477\r\nStep 158, loss: 0.06909303367137909\r\nStep 159, loss: 0.07406385987997055\r\nStep 160, loss: 0.07374496757984161\r\nStep 161, loss: 0.07997973263263702\r\nStep 162, loss: 0.07747697830200195\r\nStep 163, loss: 0.07599590718746185\r\nStep 164, loss: 0.0641099363565445\r\nStep 165, loss: 0.07929695397615433\r\nStep 166, loss: 0.06717640906572342\r\nStep 167, loss: 0.07343209534883499\r\nStep 168, loss: 0.06544764339923859\r\nStep 169, loss: 0.06154217571020126\r\nStep 170, loss: 0.07112019509077072\r\nStep 171, loss: 0.080157071352005\r\nStep 172, loss: 0.07761450111865997\r\nStep 173, loss: 0.08031099289655685\r\nStep 174, loss: 0.07048558443784714\r\nStep 175, loss: 0.07333800941705704\r\nStep 176, loss: 0.07939965277910233\r\nStep 177, loss: 0.06236756592988968\r\nStep 178, loss: 0.06534676253795624\r\nStep 179, loss: 0.06965811550617218\r\nStep 180, loss: 0.07333913445472717\r\nStep 181, loss: 0.07045414298772812\r\nStep 182, loss: 0.0668000876903534\r\nStep 183, loss: 0.068470798432827\r\nStep 184, loss: 0.0684795156121254\r\nStep 185, loss: 0.07266470789909363\r\nStep 186, loss: 0.06425268203020096\r\nStep 187, loss: 0.0736430436372757\r\nStep 188, loss: 0.07285036146640778\r\nStep 189, loss: 0.07335030287504196\r\nStep 190, loss: 0.07419675588607788\r\nStep 191, loss: 0.057193297892808914\r\nStep 192, loss: 0.07628758996725082\r\nStep 193, loss: 0.06322365254163742\r\nStep 194, loss: 0.07289388030767441\r\nStep 195, loss: 0.08022410422563553\r\nStep 196, loss: 0.06848118454217911\r\nStep 197, loss: 0.06844446808099747\r\nStep 198, loss: 0.05541624128818512\r\nStep 199, loss: 0.06497132033109665\r\n",,terminal_output +6321,7470436,"TERMINAL",0,0,"40555",,terminal_output +6322,7471346,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-tokenizer-dev-3468835 at: https://wandb.ai/instant-uv/jafar/runs/7t38f3nh\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_152903-7t38f3nh/logs\r\n",,terminal_output +6323,7471484,"TERMINAL",0,0,"1666",,terminal_output +6324,7472173,"TERMINAL",0,0,"/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 10 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +6325,7472347,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh: line 53: l: command not found\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output 
+6326,7472489,"TERMINAL",0,0,"2777",,terminal_output +6327,7473524,"TERMINAL",0,0,"3888",,terminal_output +6328,7474634,"TERMINAL",0,0,"4999",,terminal_output +6329,7475776,"TERMINAL",0,0,"srun",,terminal_focus +6330,7475838,"TERMINAL",0,0,"5505030",,terminal_output +6331,7476524,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh ",,terminal_output +6332,7476663,"TERMINAL",0,0,"6111",,terminal_output +6333,7477364,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\r\n#SBATCH --job-name=train_tokenizer_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_tokenizer.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=1000 \\r\n --log \\r\n --name=coinrun-tokenizer-dev-$slurm_job_id \\r\n --tags tokenizer coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 200 \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 10 \\r\n --val_steps 50 \\r\n --data_dir $array_records_dir_train\r\n",,terminal_output 
+6334,7477507,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1362839\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1757077935\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757081535\r\nSLURM_PMI2_SRUN_PORT=37571\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3468835\r\nSLURM_PTY_PORT=43005\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=33\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=196\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=34789\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3468835\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=34789\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +6335,7477596,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +6336,7477734,"TERMINAL",0,0,"7222",,terminal_output +6337,7478852,"TERMINAL",0,0,"8333",,terminal_output +6338,7479796,"TERMINAL",0,0,"9444",,terminal_output +6339,7480877,"TERMINAL",0,0,"50555",,terminal_output +6340,7481931,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab +6341,7482347,"TERMINAL",0,0,"1666",,terminal_output +6342,7482942,"TERMINAL",0,0,"2777",,terminal_output +6343,7484012,"TERMINAL",0,0,"3888",,terminal_output +6344,7484703,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +6345,7485030,"TERMINAL",0,0,"4999",,terminal_output +6346,7485479,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_153054-q2ok55si\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-tokenizer-dev-3468835\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/q2ok55si\r\n",,terminal_output +6347,7486085,"TERMINAL",0,0,"55:005:0040",,terminal_output +6348,7487138,"TERMINAL",0,0,"6111",,terminal_output +6349,7488193,"TERMINAL",0,0,"7222",,terminal_output +6350,7488722,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['decoder', 'encoder', 'vq']\r\nParameter counts:\r\n{'decoder': 16858736, 'encoder': 16858752, 'vq': 32768, 'total': 33750256}\r\nStarting training from step 0...\r\n",,terminal_output +6351,7489219,"TERMINAL",0,0,"8333",,terminal_output +6352,7490258,"TERMINAL",0,0,"9444",,terminal_output +6353,7491297,"TERMINAL",0,0,"1:00666",,terminal_output +6354,7492337,"TERMINAL",0,0,"2777",,terminal_output +6355,7493380,"TERMINAL",0,0,"3888",,terminal_output +6356,7494428,"TERMINAL",0,0,"4999",,terminal_output +6357,7495470,"TERMINAL",0,0,"5101050",,terminal_output +6358,7496510,"TERMINAL",0,0,"6111",,terminal_output +6359,7497059,"TERMINAL",0,0,"2025-09-05 15:31:06.736422: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:31:06.736824: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:31:06.736846: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:31:06.736962: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:31:06.738523: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6360,7497578,"TERMINAL",0,0,"7222",,terminal_output +6361,7498643,"TERMINAL",0,0,"8333",,terminal_output +6362,7499648,"TERMINAL",0,0,"9444",,terminal_output +6363,7500754,"TERMINAL",0,0,"10555",,terminal_output +6364,7501737,"TERMINAL",0,0,"1666",,terminal_output +6365,7502784,"TERMINAL",0,0,"2777",,terminal_output +6366,7503922,"TERMINAL",0,0,"3888",,terminal_output +6367,7504951,"TERMINAL",0,0,"4999",,terminal_output +6368,7505918,"TERMINAL",0,0,"520209:00",,terminal_output +6369,7507008,"TERMINAL",0,0,"6111",,terminal_output +6370,7508125,"TERMINAL",0,0,"7222",,terminal_output +6371,7509193,"TERMINAL",0,0,"8333",,terminal_output +6372,7510094,"TERMINAL",0,0,"9444",,terminal_output +6373,7511223,"TERMINAL",0,0,"20555",,terminal_output +6374,7512220,"TERMINAL",0,0,"1666",,terminal_output +6375,7513293,"TERMINAL",0,0,"2777",,terminal_output +6376,7514318,"TERMINAL",0,0,"3999",,terminal_output +6377,7515322,"TERMINAL",0,0,"5303010",,terminal_output +6378,7516367,"TERMINAL",0,0,"6111",,terminal_output +6379,7517443,"TERMINAL",0,0,"7222",,terminal_output +6380,7518575,"TERMINAL",0,0,"8333",,terminal_output +6381,7519501,"TERMINAL",0,0,"9444",,terminal_output +6382,7520537,"TERMINAL",0,0,"30555",,terminal_output +6383,7521650,"TERMINAL",0,0,"1666",,terminal_output +6384,7522666,"TERMINAL",0,0,"2777",,terminal_output +6385,7523711,"TERMINAL",0,0,"3888",,terminal_output +6386,7524768,"TERMINAL",0,0,"4999",,terminal_output +6387,7525737,"TERMINAL",0,0,"5404020",,terminal_output +6388,7526865,"TERMINAL",0,0,"6111",,terminal_output +6389,7527857,"TERMINAL",0,0,"7222",,terminal_output +6390,7528904,"TERMINAL",0,0,"8333",,terminal_output +6391,7529912,"TERMINAL",0,0,"9444",,terminal_output +6392,7531068,"TERMINAL",0,0,"40555",,terminal_output +6393,7531996,"TERMINAL",0,0,"1666",,terminal_output +6394,7533110,"TERMINAL",0,0,"2777",,terminal_output +6395,7534141,"TERMINAL",0,0,"3888",,terminal_output +6396,7535199,"TERMINAL",0,0,"Step 0, loss: 0.2621576189994812\r\nStep 1, loss: 0.19697336852550507\r\nStep 2, loss: 0.2387561947107315\r\nStep 3, loss: 0.2858649492263794\r\nStep 4, loss: 0.23164957761764526\r\nStep 5, loss: 0.2356664389371872\r\nStep 6, loss: 0.1995270997285843\r\nStep 7, loss: 0.20673780143260956\r\nStep 8, loss: 0.20121027529239655\r\nStep 9, loss: 0.19745968282222748\r\nCalculating validation metrics...\r\n",,terminal_output +6397,7535200,"TERMINAL",0,0,"4999",,terminal_output +6398,7536190,"TERMINAL",0,0,"5505030",,terminal_output +6399,7537282,"TERMINAL",0,0,"6111",,terminal_output +6400,7538225,"TERMINAL",0,0,"7222",,terminal_output +6401,7539265,"TERMINAL",0,0,"8333",,terminal_output +6402,7539818,"TERMINAL",0,0,"2025-09-05 15:31:49.550050: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6403,7540302,"TERMINAL",0,0,"9555",,terminal_output +6404,7541562,"TERMINAL",0,0,"51666",,terminal_output +6405,7542611,"TERMINAL",0,0,"2777",,terminal_output +6406,7543653,"TERMINAL",0,0,"3888",,terminal_output +6407,7544704,"TERMINAL",0,0,"4999",,terminal_output +6408,7545725,"TERMINAL",0,0,"56:006:0040",,terminal_output +6409,7546767,"TERMINAL",0,0,"6111",,terminal_output +6410,7547827,"TERMINAL",0,0,"7222",,terminal_output +6411,7548850,"TERMINAL",0,0,"8333",,terminal_output +6412,7549897,"TERMINAL",0,0,"9444",,terminal_output +6413,7550928,"TERMINAL",0,0,"2:00555",,terminal_output +6414,7551977,"TERMINAL",0,0,"1666",,terminal_output +6415,7553095,"TERMINAL",0,0,"2777",,terminal_output +6416,7554059,"TERMINAL",0,0,"3888",,terminal_output +6417,7555120,"TERMINAL",0,0,"4999",,terminal_output +6418,7556151,"TERMINAL",0,0,"5101050",,terminal_output +6419,7557192,"TERMINAL",0,0,"6111",,terminal_output +6420,7558262,"TERMINAL",0,0,"7222",,terminal_output +6421,7559269,"TERMINAL",0,0,"8444",,terminal_output +6422,7560313,"TERMINAL",0,0,"10555",,terminal_output +6423,7561363,"TERMINAL",0,0,"1666",,terminal_output +6424,7562490,"TERMINAL",0,0,"2777",,terminal_output +6425,7563445,"TERMINAL",0,0,"3888",,terminal_output +6426,7564488,"TERMINAL",0,0,"4999",,terminal_output +6427,7565571,"TERMINAL",0,0,"5202020:00",,terminal_output +6428,7566590,"TERMINAL",0,0,"6111",,terminal_output +6429,7567620,"TERMINAL",0,0,"7222",,terminal_output +6430,7568676,"TERMINAL",0,0,"8333",,terminal_output +6431,7569694,"TERMINAL",0,0,"9444",,terminal_output +6432,7570790,"TERMINAL",0,0,"20555",,terminal_output +6433,7571787,"TERMINAL",0,0,"1666",,terminal_output +6434,7572831,"TERMINAL",0,0,"2777",,terminal_output +6435,7573897,"TERMINAL",0,0,"3888",,terminal_output +6436,7574923,"TERMINAL",0,0,"4999",,terminal_output +6437,7575964,"TERMINAL",0,0,"5303010",,terminal_output +6438,7577033,"TERMINAL",0,0,"6111",,terminal_output +6439,7578074,"TERMINAL",0,0,"7222",,terminal_output +6440,7579118,"TERMINAL",0,0,"8333",,terminal_output +6441,7580220,"TERMINAL",0,0,"9444",,terminal_output +6442,7581197,"TERMINAL",0,0,"30555",,terminal_output +6443,7582268,"TERMINAL",0,0,"1666",,terminal_output +6444,7583293,"TERMINAL",0,0,"2888",,terminal_output +6445,7584317,"TERMINAL",0,0,"4999",,terminal_output +6446,7585429,"TERMINAL",0,0,"5404020",,terminal_output +6447,7586396,"TERMINAL",0,0,"6111",,terminal_output +6448,7588630,"TERMINAL",0,0,"7333",,terminal_output +6449,7589729,"TERMINAL",0,0,"9444",,terminal_output +6450,7590755,"TERMINAL",0,0,"40555",,terminal_output +6451,7591735,"TERMINAL",0,0,"1666",,terminal_output +6452,7592774,"TERMINAL",0,0,"2777",,terminal_output +6453,7593866,"TERMINAL",0,0,"3888",,terminal_output +6454,7594955,"TERMINAL",0,0,"4999",,terminal_output +6455,7595906,"TERMINAL",0,0,"5505030",,terminal_output +6456,7596957,"TERMINAL",0,0,"6111",,terminal_output +6457,7597970,"TERMINAL",0,0,"7222",,terminal_output +6458,7599064,"TERMINAL",0,0,"8333",,terminal_output +6459,7600046,"TERMINAL",0,0,"9444",,terminal_output +6460,7601156,"TERMINAL",0,0,"50555",,terminal_output +6461,7602268,"TERMINAL",0,0,"1666",,terminal_output +6462,7603265,"TERMINAL",0,0,"2777",,terminal_output +6463,7604213,"TERMINAL",0,0,"3888",,terminal_output +6464,7605584,"models/lam.py",0,0,"",python,tab +6465,7605825,"TERMINAL",0,0,"4999",,terminal_output 
+6466,7606307,"TERMINAL",0,0,"57:017:0141",,terminal_output +6467,7607347,"TERMINAL",0,0,"7222",,terminal_output +6468,7608375,"TERMINAL",0,0,"8333",,terminal_output +6469,7609419,"TERMINAL",0,0,"9444",,terminal_output +6470,7610534,"TERMINAL",0,0,"3:00555",,terminal_output +6471,7611595,"TERMINAL",0,0,"1666",,terminal_output +6472,7612581,"TERMINAL",0,0,"2777",,terminal_output +6473,7613630,"TERMINAL",0,0,"3888",,terminal_output +6474,7614185,"models/lam.py",4130,0,"",python,selection_mouse +6475,7614305,"models/lam.py",4126,5,"recon",python,selection_mouse +6476,7614475,"models/lam.py",4109,45," outputs[""recon""] = video_recon_BTHWC\n",python,selection_mouse +6477,7614779,"TERMINAL",0,0,"4999",,terminal_output +6478,7614991,"models/lam.py",4046,0,"",python,selection_mouse +6479,7615064,"models/lam.py",4036,17,"video_recon_BTHWC",python,selection_mouse +6480,7615208,"models/lam.py",4028,81," video_recon_BTHWC = unpatchify(video_recon_BTm1P, self.patch_size, H, W)\n",python,selection_mouse +6481,7615798,"TERMINAL",0,0,"5101050",,terminal_output +6482,7616771,"TERMINAL",0,0,"6111",,terminal_output +6483,7617713,"TERMINAL",0,0,"7222",,terminal_output +6484,7618813,"TERMINAL",0,0,"8333",,terminal_output +6485,7619869,"TERMINAL",0,0,"9444",,terminal_output +6486,7620825,"TERMINAL",0,0,"10555",,terminal_output +6487,7621869,"TERMINAL",0,0,"1666",,terminal_output +6488,7622917,"TERMINAL",0,0,"2777",,terminal_output +6489,7623966,"TERMINAL",0,0,"3888",,terminal_output +6490,7625009,"TERMINAL",0,0,"4999",,terminal_output +6491,7626049,"TERMINAL",0,0,"520201:00",,terminal_output +6492,7627126,"TERMINAL",0,0,"6111",,terminal_output +6493,7628139,"TERMINAL",0,0,"7222",,terminal_output +6494,7629157,"TERMINAL",0,0,"8333",,terminal_output +6495,7630246,"TERMINAL",0,0,"9444",,terminal_output +6496,7631318,"TERMINAL",0,0,"20555",,terminal_output +6497,7632321,"TERMINAL",0,0,"1777",,terminal_output +6498,7633310,"TERMINAL",0,0,"3888",,terminal_output +6499,7634351,"TERMINAL",0,0,"4999",,terminal_output +6500,7635391,"TERMINAL",0,0,"5303010",,terminal_output +6501,7636423,"TERMINAL",0,0,"6111",,terminal_output +6502,7637462,"TERMINAL",0,0,"7222",,terminal_output +6503,7638585,"TERMINAL",0,0,"8333",,terminal_output +6504,7639538,"TERMINAL",0,0,"9444",,terminal_output +6505,7640572,"TERMINAL",0,0,"30555",,terminal_output +6506,7641647,"TERMINAL",0,0,"1666",,terminal_output +6507,7642674,"TERMINAL",0,0,"2777",,terminal_output +6508,7643703,"models/lam.py",4176,0,"",python,selection_mouse +6509,7643776,"models/lam.py",4175,0,"",python,selection_command +6510,7643777,"models/lam.py",4169,7,"outputs",python,selection_mouse +6511,7643815,"models/lam.py",4170,6,"utputs",python,selection_command +6512,7643860,"TERMINAL",0,0,"3888",,terminal_output +6513,7644294,"models/lam.py",4144,0,"",python,selection_mouse +6514,7644452,"models/lam.py",4136,17,"video_recon_BTHWC",python,selection_mouse +6515,7644585,"models/lam.py",4109,45," outputs[""recon""] = video_recon_BTHWC\n",python,selection_mouse +6516,7644764,"TERMINAL",0,0,"4999",,terminal_output +6517,7646048,"TERMINAL",0,0,"5404020",,terminal_output +6518,7646870,"TERMINAL",0,0,"6111",,terminal_output +6519,7647870,"TERMINAL",0,0,"7222",,terminal_output +6520,7648948,"TERMINAL",0,0,"8333",,terminal_output +6521,7649961,"TERMINAL",0,0,"9444",,terminal_output +6522,7651080,"TERMINAL",0,0,"40555",,terminal_output +6523,7652101,"TERMINAL",0,0,"1666",,terminal_output +6524,7653047,"TERMINAL",0,0,"2777",,terminal_output 
+6525,7654087,"TERMINAL",0,0,"3888",,terminal_output +6526,7655217,"TERMINAL",0,0,"4999",,terminal_output +6527,7656171,"TERMINAL",0,0,"5505030",,terminal_output +6528,7657221,"TERMINAL",0,0,"6111",,terminal_output +6529,7658360,"TERMINAL",0,0,"7222",,terminal_output +6530,7659299,"TERMINAL",0,0,"8444",,terminal_output +6531,7660349,"TERMINAL",0,0,"50555",,terminal_output +6532,7661434,"TERMINAL",0,0,"1666",,terminal_output +6533,7662420,"TERMINAL",0,0,"2777",,terminal_output +6534,7663444,"TERMINAL",0,0,"3888",,terminal_output +6535,7664491,"TERMINAL",0,0,"4999",,terminal_output +6536,7665522,"TERMINAL",0,0,"58:008:0040",,terminal_output +6537,7665823,"TERMINAL",0,0,"Step 10, validation loss: 0.18186911940574646\r\nStep 10, loss: 0.14603623747825623\r\nStep 11, loss: 0.17129400372505188\r\nStep 12, loss: 0.1448291689157486\r\nStep 13, loss: 0.1753568947315216\r\nStep 14, loss: 0.1789560467004776\r\nStep 15, loss: 0.1977154165506363\r\nStep 16, loss: 0.16903680562973022\r\nStep 17, loss: 0.18998360633850098\r\nStep 18, loss: 0.15581057965755463\r\nStep 19, loss: 0.1535489410161972\r\nCalculating validation metrics...\r\nStep 20, validation loss: 0.1487566977739334\r\nStep 20, loss: 0.17582687735557556\r\nStep 21, loss: 0.14025507867336273\r\nStep 22, loss: 0.14018729329109192\r\nStep 23, loss: 0.14608745276927948\r\nStep 24, loss: 0.13574212789535522\r\nStep 25, loss: 0.13397984206676483\r\nStep 26, loss: 0.11320656538009644\r\nStep 27, loss: 0.15227316319942474\r\nStep 28, loss: 0.15284864604473114\r\nStep 29, loss: 0.13722454011440277\r\nCalculating validation metrics...\r\nStep 30, validation loss: 0.13762938976287842\r\nStep 30, loss: 0.17057912051677704\r\nStep 31, loss: 0.12271925806999207\r\nStep 32, loss: 0.11971545964479446\r\nStep 33, loss: 0.14823856949806213\r\nStep 34, loss: 0.12285562604665756\r\nStep 35, loss: 0.1659739911556244\r\nStep 36, loss: 0.11899988353252411\r\nStep 37, loss: 0.10695432871580124\r\nStep 38, loss: 0.09854669123888016\r\nStep 39, loss: 0.09656848758459091\r\nCalculating validation metrics...\r\nStep 40, validation loss: 0.1154126301407814\r\nStep 40, loss: 0.13244307041168213\r\nStep 41, loss: 0.11523383855819702\r\nStep 42, loss: 0.11766320466995239\r\nStep 43, loss: 0.13090427219867706\r\nStep 44, loss: 0.13575927913188934\r\nStep 45, loss: 0.112705297768116\r\nStep 46, loss: 0.10634584724903107\r\nStep 47, loss: 0.1075650304555893\r\nStep 48, loss: 0.09899072349071503\r\nStep 49, loss: 0.1033715158700943\r\nCalculating validation metrics...\r\nStep 50, validation loss: 0.11282989382743835\r\nStep 50, loss: 0.1239476203918457\r\nStep 51, loss: 0.10058023035526276\r\nStep 52, loss: 0.09900233149528503\r\nStep 53, loss: 0.14080223441123962\r\nStep 54, loss: 0.10177990794181824\r\nStep 55, loss: 0.14537139236927032\r\nStep 56, loss: 0.15391919016838074\r\nStep 57, loss: 0.11081042140722275\r\nStep 58, loss: 0.13884615898132324\r\nStep 59, loss: 0.12367606163024902\r\nCalculating validation metrics...\r\nStep 60, validation loss: 0.12434732913970947\r\nStep 60, loss: 0.10970782488584518\r\nStep 61, loss: 0.13693813979625702\r\nStep 62, loss: 0.09650307148694992\r\nStep 63, loss: 0.11191411316394806\r\nStep 64, loss: 0.10351762920618057\r\nStep 65, loss: 0.13442571461200714\r\nStep 66, loss: 0.10416898876428604\r\nStep 67, loss: 0.10431097447872162\r\nStep 68, loss: 0.12018392980098724\r\nStep 69, loss: 0.11104366183280945\r\nCalculating validation metrics...\r\nStep 70, validation loss: 0.09921552985906601\r\nStep 70, loss: 0.09943544864654541\r\nStep 
71, loss: 0.09274129569530487\r\nStep 72, loss: 0.07251519709825516\r\nStep 73, loss: 0.11852119863033295\r\nStep 74, loss: 0.08458246290683746\r\nStep 75, loss: 0.12040414661169052\r\nStep 76, loss: 0.09674044698476791\r\nStep 77, loss: 0.09429728984832764\r\nStep 78, loss: 0.09694889932870865\r\nStep 79, loss: 0.09546640515327454\r\nCalculating validation metrics...\r\nStep 80, validation loss: 0.10861951112747192\r\nStep 80, loss: 0.11059125512838364\r\nStep 81, loss: 0.10890159010887146\r\nStep 82, loss: 0.08699482679367065\r\nStep 83, loss: 0.10750582069158554\r\nStep 84, loss: 0.08621765673160553\r\nStep 85, loss: 0.07805921882390976\r\nStep 86, loss: 0.09097910672426224\r\nStep 87, loss: 0.10617780685424805\r\nStep 88, loss: 0.12378572672605515\r\nStep 89, loss: 0.11185530573129654\r\nCalculating validation metrics...\r\nStep 90, validation loss: 0.09612264484167099\r\nStep 90, loss: 0.093210369348526\r\nStep 91, loss: 0.09250490367412567\r\nStep 92, loss: 0.1020175889134407\r\nStep 93, loss: 0.10305141657590866\r\nStep 94, loss: 0.08422510325908661\r\nStep 95, loss: 0.08876614272594452\r\nStep 96, loss: 0.09503351897001266\r\nStep 97, loss: 0.07995656877756119\r\nStep 98, loss: 0.08946827799081802\r\nStep 99, loss: 0.08148126304149628\r\nCalculating validation metrics...\r\nStep 100, validation loss: 0.09151852130889893\r\nStep 100, loss: 0.07913738489151001\r\nStep 101, loss: 0.08529503643512726\r\nStep 102, loss: 0.10413426905870438\r\nStep 103, loss: 0.08793904632329941\r\nStep 104, loss: 0.07357306033372879\r\nStep 105, loss: 0.1059909537434578\r\nStep 106, loss: 0.08757221698760986\r\nStep 107, loss: 0.09803812205791473\r\nStep 108, loss: 0.11270152777433395\r\nStep 109, loss: 0.08623924106359482\r\nCalculating validation metrics...\r\nStep 110, validation loss: 0.09086646884679794\r\nStep 110, loss: 0.10063786804676056\r\nStep 111, loss: 0.09479731321334839\r\nStep 112, loss: 0.07501718401908875\r\nStep 113, loss: 0.0813295841217041\r\nStep 114, loss: 0.08792933076620102\r\nStep 115, loss: 0.08428329229354858\r\nStep 116, loss: 0.08875951170921326\r\nStep 117, loss: 0.07537969201803207\r\nStep 118, loss: 0.10844677686691284\r\nStep 119, loss: 0.0768902599811554\r\nCalculating validation metrics...\r\nStep 120, validation loss: 0.07628843188285828\r\nStep 120, loss: 0.07305249571800232\r\nStep 121, loss: 0.1004968211054802\r\nStep 122, loss: 0.08694268763065338\r\nStep 123, loss: 0.09250162541866302\r\nStep 124, loss: 0.07813321053981781\r\nStep 125, loss: 0.07460576295852661\r\nStep 126, loss: 0.07095856964588165\r\nStep 127, loss: 0.07615485787391663\r\nStep 128, loss: 0.08630219101905823\r\nStep 129, loss: 0.08435630798339844\r\nCalculating validation metrics...\r\nStep 130, validation loss: 0.08231014013290405\r\nStep 130, loss: 0.09173770248889923\r\nStep 131, loss: 0.07654808461666107\r\nStep 132, loss: 0.08638434112071991\r\nStep 133, loss: 0.07651061564683914\r\nStep 134, loss: 0.08546781539916992\r\nStep 135, loss: 0.07340903580188751\r\nStep 136, loss: 0.09120877087116241\r\nStep 137, loss: 0.08909863978624344\r\nStep 138, loss: 0.0816253200173378\r\nStep 139, loss: 0.08258403092622757\r\nCalculating validation metrics...\r\nStep 140, validation loss: 0.07701151818037033\r\nStep 140, loss: 0.08892424404621124\r\nStep 141, loss: 0.06986069679260254\r\nStep 142, loss: 0.0682133212685585\r\nStep 143, loss: 0.06985481083393097\r\nStep 144, loss: 0.07204529643058777\r\nStep 145, loss: 0.0630284771323204\r\nStep 146, loss: 0.07048811763525009\r\nStep 147, loss: 
0.06919355690479279\r\nStep 148, loss: 0.06424468010663986\r\nStep 149, loss: 0.07035860419273376\r\nCalculating validation metrics...\r\nStep 150, validation loss: 0.07235091179609299\r\nStep 150, loss: 0.062165480107069016\r\nStep 151, loss: 0.06533610075712204\r\nStep 152, loss: 0.0618521086871624\r\nStep 153, loss: 0.07403673976659775\r\nStep 154, loss: 0.06945805251598358\r\nStep 155, loss: 0.07378161698579788\r\nStep 156, loss: 0.11113869398832321\r\nStep 157, loss: 0.07923005521297455\r\nStep 158, loss: 0.06844563037157059\r\nStep 159, loss: 0.07577453553676605\r\nCalculating validation metrics...\r\nStep 160, validation loss: 0.07211273908615112\r\nStep 160, loss: 0.07330204546451569\r\nStep 161, loss: 0.08049419522285461\r\nStep 162, loss: 0.07887805253267288\r\nStep 163, loss: 0.07563560456037521\r\nStep 164, loss: 0.06409566849470139\r\nStep 165, loss: 0.08025914430618286\r\nStep 166, loss: 0.06702690571546555\r\nStep 167, loss: 0.0725436583161354\r\nStep 168, loss: 0.06685611605644226\r\nStep 169, loss: 0.06177767738699913\r\nCalculating validation metrics...\r\nStep 170, validation loss: 0.07327161729335785\r\nStep 170, loss: 0.06944683194160461\r\nStep 171, loss: 0.08201400190591812\r\nStep 172, loss: 0.0841384306550026\r\nStep 173, loss: 0.08257897943258286\r\nStep 174, loss: 0.06839806586503983\r\nStep 175, loss: 0.07117056846618652\r\nStep 176, loss: 0.08025733381509781\r\nStep 177, loss: 0.06429417431354523\r\nStep 178, loss: 0.06636917591094971\r\nStep 179, loss: 0.06842794269323349\r\nCalculating validation metrics...\r\nStep 180, validation loss: 0.06809297204017639\r\nStep 180, loss: 0.0743337869644165\r\nStep 181, loss: 0.07242957502603531\r\nStep 182, loss: 0.06643103063106537\r\nStep 183, loss: 0.07068014144897461\r\nStep 184, loss: 0.06838323175907135\r\nStep 185, loss: 0.07246718555688858\r\nStep 186, loss: 0.06453365832567215\r\nStep 187, loss: 0.07257916778326035\r\nStep 188, loss: 0.07510711997747421\r\nStep 189, loss: 0.0729839950799942\r\nCalculating validation metrics...\r\nStep 190, validation loss: 0.07002780586481094\r\nStep 190, loss: 0.07588020712137222\r\nStep 191, loss: 0.057884331792593\r\nStep 192, loss: 0.07354746758937836\r\nStep 193, loss: 0.06308203935623169\r\nStep 194, loss: 0.07464630156755447\r\nStep 195, loss: 0.0830283910036087\r\nStep 196, loss: 0.06903381645679474\r\nStep 197, loss: 0.0682685375213623\r\nStep 198, loss: 0.0570366233587265\r\n",,terminal_output +6538,7666641,"TERMINAL",0,0,"6111",,terminal_output +6539,7667610,"TERMINAL",0,0,"7222",,terminal_output +6540,7668689,"TERMINAL",0,0,"8333",,terminal_output +6541,7668801,"TERMINAL",0,0,"Step 199, loss: 0.06748887151479721\r\nCalculating validation metrics...\r\nStep 200, validation loss: 0.06881790608167648\r\n",,terminal_output +6542,7669713,"TERMINAL",0,0,"9444",,terminal_output +6543,7670697,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-tokenizer-dev-3468835 at: https://wandb.ai/instant-uv/jafar/runs/q2ok55si\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_153054-q2ok55si/logs\r\n",,terminal_output +6544,7670731,"TERMINAL",0,0,"4:00555",,terminal_output +6545,7671696,"TERMINAL",0,0,"/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 20 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d 
'\r\n",,terminal_output +6546,7671809,"TERMINAL",0,0,"1666",,terminal_output +6547,7671958,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +6548,7672811,"TERMINAL",0,0,"2777",,terminal_output +6549,7673851,"TERMINAL",0,0,"3888",,terminal_output +6550,7674935,"TERMINAL",0,0,"4999",,terminal_output +6551,7675930,"TERMINAL",0,0,"5101050",,terminal_output +6552,7676978,"TERMINAL",0,0,"6111",,terminal_output +6553,7678049,"TERMINAL",0,0,"7222",,terminal_output +6554,7679061,"TERMINAL",0,0,"8333",,terminal_output +6555,7680099,"TERMINAL",0,0,"9444",,terminal_output +6556,7681190,"TERMINAL",0,0,"10555",,terminal_output +6557,7682266,"TERMINAL",0,0,"1666",,terminal_output +6558,7683261,"TERMINAL",0,0,"2777",,terminal_output +6559,7684260,"TERMINAL",0,0,"3888",,terminal_output +6560,7685292,"TERMINAL",0,0,"420202:00",,terminal_output +6561,7686365,"TERMINAL",0,0,"6111",,terminal_output +6562,7687370,"TERMINAL",0,0,"7222",,terminal_output +6563,7688409,"TERMINAL",0,0,"8333",,terminal_output +6564,7689460,"TERMINAL",0,0,"9444",,terminal_output +6565,7690597,"TERMINAL",0,0,"20555",,terminal_output +6566,7691631,"TERMINAL",0,0,"1666",,terminal_output +6567,7692564,"TERMINAL",0,0,"2777",,terminal_output +6568,7693667,"TERMINAL",0,0,"3888",,terminal_output +6569,7694699,"TERMINAL",0,0,"4999",,terminal_output +6570,7695714,"TERMINAL",0,0,"5303010",,terminal_output +6571,7696707,"TERMINAL",0,0,"6111",,terminal_output +6572,7697741,"TERMINAL",0,0,"7222",,terminal_output +6573,7698776,"TERMINAL",0,0,"8333",,terminal_output +6574,7699820,"TERMINAL",0,0,"9444",,terminal_output +6575,7700993,"TERMINAL",0,0,"30555",,terminal_output +6576,7702027,"TERMINAL",0,0,"1666",,terminal_output +6577,7702951,"TERMINAL",0,0,"2777",,terminal_output +6578,7703972,"TERMINAL",0,0,"3888",,terminal_output +6579,7705016,"TERMINAL",0,0,"4999",,terminal_output +6580,7706054,"TERMINAL",0,0,"5404020",,terminal_output +6581,7707192,"TERMINAL",0,0,"6111",,terminal_output +6582,7708222,"TERMINAL",0,0,"7222",,terminal_output +6583,7709917,"TERMINAL",0,0,"8444",,terminal_output +6584,7710929,"TERMINAL",0,0,"40555",,terminal_output +6585,7711997,"TERMINAL",0,0,"1666",,terminal_output +6586,7712999,"TERMINAL",0,0,"2777",,terminal_output +6587,7714045,"TERMINAL",0,0,"3888",,terminal_output +6588,7715076,"TERMINAL",0,0,"4999",,terminal_output +6589,7716120,"TERMINAL",0,0,"5505030",,terminal_output +6590,7717167,"TERMINAL",0,0,"6111",,terminal_output +6591,7718239,"TERMINAL",0,0,"7222",,terminal_output +6592,7719241,"TERMINAL",0,0,"8333",,terminal_output +6593,7720299,"TERMINAL",0,0,"9555",,terminal_output +6594,7721358,"TERMINAL",0,0,"51666",,terminal_output +6595,7722411,"TERMINAL",0,0,"2777",,terminal_output +6596,7723435,"TERMINAL",0,0,"3888",,terminal_output +6597,7724475,"TERMINAL",0,0,"4999",,terminal_output +6598,7725513,"TERMINAL",0,0,"59:009:0040",,terminal_output +6599,7726546,"TERMINAL",0,0,"6111",,terminal_output +6600,7727582,"TERMINAL",0,0,"7222",,terminal_output +6601,7728690,"TERMINAL",0,0,"8333",,terminal_output +6602,7729697,"TERMINAL",0,0,"9444",,terminal_output +6603,7730790,"TERMINAL",0,0,"5:00555",,terminal_output +6604,7731746,"TERMINAL",0,0,"1666",,terminal_output +6605,7732780,"TERMINAL",0,0,"2777",,terminal_output +6606,7733843,"TERMINAL",0,0,"3888",,terminal_output +6607,7734947,"TERMINAL",0,0,"4999",,terminal_output +6608,7735913,"TERMINAL",0,0,"5101050",,terminal_output +6609,7736984,"TERMINAL",0,0,"6111",,terminal_output 
+6610,7738122,"TERMINAL",0,0,"7222",,terminal_output +6611,7739141,"TERMINAL",0,0,"8333",,terminal_output +6612,7740062,"TERMINAL",0,0,"9444",,terminal_output +6613,7741188,"TERMINAL",0,0,"10555",,terminal_output +6614,7742268,"TERMINAL",0,0,"1666",,terminal_output +6615,7743247,"TERMINAL",0,0,"2777",,terminal_output +6616,7744279,"TERMINAL",0,0,"3888",,terminal_output +6617,7745284,"TERMINAL",0,0,"420203:00",,terminal_output +6618,7746313,"TERMINAL",0,0,"6111",,terminal_output +6619,7747351,"TERMINAL",0,0,"7222",,terminal_output +6620,7748395,"TERMINAL",0,0,"8333",,terminal_output +6621,7748870,"train_lam.py",0,0,"",python,tab +6622,7748871,"train_lam.py",17184,0,"",python,selection_mouse +6623,7749296,"train_lam.py",17239,0,"",python,selection_mouse +6624,7749429,"TERMINAL",0,0,"9444",,terminal_output +6625,7749696,"train_lam.py",17108,0,"",python,selection_mouse +6626,7750380,"train_lam.py",17132,0,"",python,selection_mouse +6627,7750490,"TERMINAL",0,0,"20555",,terminal_output +6628,7750989,"train_lam.py",17144,0,"",python,selection_mouse +6629,7751548,"TERMINAL",0,0,"1666",,terminal_output +6630,7751580,"train_lam.py",17127,0,"",python,selection_mouse +6631,7751797,"train_lam.py",17118,20," ",python,selection_mouse +6632,7752540,"TERMINAL",0,0,"2777",,terminal_output +6633,7752599,"train_lam.py",17121,0,"",python,selection_mouse +6634,7752809,"train_lam.py",17118,20," ",python,selection_mouse +6635,7753392,"train_lam.py",17043,0,"",python,selection_mouse +6636,7753525,"train_lam.py",17035,28," ",python,selection_mouse +6637,7753627,"TERMINAL",0,0,"3888",,terminal_output +6638,7754202,"train_lam.py",17099,0,"",python,selection_mouse +6639,7754627,"TERMINAL",0,0,"4999",,terminal_output +6640,7755667,"TERMINAL",0,0,"5303010",,terminal_output +6641,7756793,"TERMINAL",0,0,"6111",,terminal_output +6642,7757759,"TERMINAL",0,0,"7222",,terminal_output +6643,7758802,"TERMINAL",0,0,"8333",,terminal_output +6644,7759845,"TERMINAL",0,0,"9444",,terminal_output +6645,7760887,"TERMINAL",0,0,"30555",,terminal_output +6646,7761976,"TERMINAL",0,0,"1666",,terminal_output +6647,7762953,"TERMINAL",0,0,"2777",,terminal_output +6648,7763990,"TERMINAL",0,0,"3888",,terminal_output +6649,7765114,"TERMINAL",0,0,"4999",,terminal_output +6650,7766080,"TERMINAL",0,0,"5404020",,terminal_output +6651,7767191,"TERMINAL",0,0,"6111",,terminal_output +6652,7768157,"TERMINAL",0,0,"7222",,terminal_output +6653,7769196,"TERMINAL",0,0,"8333",,terminal_output +6654,7770286,"TERMINAL",0,0,"9444",,terminal_output +6655,7771291,"TERMINAL",0,0,"40666",,terminal_output +6656,7771555,"train_tokenizer.py",0,0,"",python,tab +6657,7772916,"TERMINAL",0,0,"2777",,terminal_output +6658,7773227,"train_tokenizer.py",0,0,"",python,tab +6659,7773451,"TERMINAL",0,0,"3888",,terminal_output +6660,7774413,"TERMINAL",0,0,"4999",,terminal_output +6661,7774854,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +6662,7775652,"TERMINAL",0,0,"5505030",,terminal_output +6663,7776530,"TERMINAL",0,0,"6111",,terminal_output +6664,7777557,"TERMINAL",0,0,"7222",,terminal_output +6665,7778710,"train_tokenizer.py",0,0,"",python,tab +6666,7778982,"TERMINAL",0,0,"8333",,terminal_output +6667,7779744,"TERMINAL",0,0,"9444",,terminal_output +6668,7780779,"TERMINAL",0,0,"50555",,terminal_output +6669,7781836,"TERMINAL",0,0,"1666",,terminal_output +6670,7782880,"TERMINAL",0,0,"2777",,terminal_output +6671,7783917,"TERMINAL",0,0,"3888",,terminal_output +6672,7784954,"TERMINAL",0,0,"4999",,terminal_output 
+6673,7786047,"TERMINAL",0,0,"520:0020:0040",,terminal_output +6674,7786816,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +6675,7787160,"TERMINAL",0,0,"6111",,terminal_output +6676,7788112,"TERMINAL",0,0,"7222",,terminal_output +6677,7789003,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\nfrom typing import cast\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(\n model: Genie, inputs: dict\n) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n """"""Compute masked dynamics loss""""""\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n model.train()\n outputs = model(inputs, training=True)\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, 
*outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt, recon)).mean()\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@nnx.jit\ndef train_step(\n model: Genie, optimizer: nnx.Optimizer, inputs: dict\n) -> tuple[jax.Array, jax.Array, dict]:\n """"""Update state and compute metrics""""""\n\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(model)\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n 
wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n optimizer = nnx.Optimizer(genie, tx)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, 
restored_optimizer_state)\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n optimizer = restore_genie_components(optimizer, replicated_sharding, rng, args)\n # NOTE: We have to remove the (unused) tokenizer vq dropout due flax.nnx lazily initializing modules.\n # Specifically, the first dynamics model checkpoint will contain the vq dropout module,\n # but the first full restore will fail due to nnx not initializing the module when\n # dropout is set to 0.0.\n del optimizer.model.tokenizer.vq.drop\n\n # --- TRAIN LOOP ---\n dataloader = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in grain_iterator\n )\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n inputs = dict(videos=videos, mask_rng=_rng_mask)\n loss, recon, metrics = train_step(optimizer.model, optimizer, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +6678,7789187,"TERMINAL",0,0,"8333",,terminal_output +6679,7790188,"TERMINAL",0,0,"9444",,terminal_output +6680,7791238,"TERMINAL",0,0,"6:00555",,terminal_output +6681,7792296,"TERMINAL",0,0,"1777",,terminal_output +6682,7793390,"TERMINAL",0,0,"3888",,terminal_output +6683,7794455,"TERMINAL",0,0,"4999",,terminal_output +6684,7795404,"TERMINAL",0,0,"5101050",,terminal_output +6685,7796146,"train_tokenizer.py",0,0,"",python,tab +6686,7796446,"TERMINAL",0,0,"6111",,terminal_output +6687,7796796,"train_tokenizer.py",16151,12,"val_data_dir",python,selection_command +6688,7797488,"train_tokenizer.py",1915,12,"val_data_dir",python,selection_command +6689,7797514,"TERMINAL",0,0,"7222",,terminal_output +6690,7798550,"TERMINAL",0,0,"8333",,terminal_output +6691,7798787,"train_tokenizer.py",2015,0,"",python,selection_mouse +6692,7799362,"train_tokenizer.py",1992,0,"",python,selection_mouse +6693,7799530,"train_tokenizer.py",1991,0,"",python,selection_command +6694,7799618,"TERMINAL",0,0,"9444",,terminal_output +6695,7800300,"train_tokenizer.py",1969,23," val_steps: int = 
50",python,selection_command +6696,7800499,"train_tokenizer.py",1938,54," val_interval: int = 20_000\n val_steps: int = 50",python,selection_command +6697,7800634,"train_tokenizer.py",1911,81," val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50",python,selection_command +6698,7800634,"TERMINAL",0,0,"10555",,terminal_output +6699,7800956,"train_tokenizer.py",1911,0,"",python,selection_command +6700,7801669,"TERMINAL",0,0,"1666",,terminal_output +6701,7802764,"TERMINAL",0,0,"2777",,terminal_output +6702,7804008,"train_dynamics.py",0,0,"",python,tab +6703,7804009,"train_dynamics.py",2401,0,"",python,selection_mouse +6704,7804022,"TERMINAL",0,0,"3888",,terminal_output +6705,7804807,"TERMINAL",0,0,"4999",,terminal_output +6706,7805713,"train_dynamics.py",2418,0,"\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50",python,content +6707,7805745,"train_dynamics.py",2423,0,"",python,selection_command +6708,7805879,"TERMINAL",0,0,"520204:00",,terminal_output +6709,7806905,"TERMINAL",0,0,"6111",,terminal_output +6710,7807947,"TERMINAL",0,0,"7222",,terminal_output +6711,7808996,"TERMINAL",0,0,"8333",,terminal_output +6712,7810015,"TERMINAL",0,0,"9444",,terminal_output +6713,7811059,"TERMINAL",0,0,"20555",,terminal_output +6714,7812102,"TERMINAL",0,0,"1666",,terminal_output +6715,7813216,"train_tokenizer.py",0,0,"",python,tab +6716,7813216,"train_tokenizer.py",2426,0,"",python,selection_mouse +6717,7813297,"TERMINAL",0,0,"2777",,terminal_output +6718,7814239,"TERMINAL",0,0,"3888",,terminal_output +6719,7814757,"train_tokenizer.py",9444,12,"val_data_dir",python,selection_command +6720,7815214,"TERMINAL",0,0,"4999",,terminal_output +6721,7815877,"train_tokenizer.py",9442,0,"",python,selection_mouse +6722,7816239,"TERMINAL",0,0,"5303010",,terminal_output +6723,7816819,"train_dynamics.py",0,0,"",python,tab +6724,7816819,"train_dynamics.py",2727,0,"",python,selection_mouse +6725,7817291,"TERMINAL",0,0,"6222",,terminal_output +6726,7818375,"TERMINAL",0,0,"8333",,terminal_output +6727,7819502,"TERMINAL",0,0,"9444",,terminal_output +6728,7820406,"TERMINAL",0,0,"30555",,terminal_output +6729,7821457,"TERMINAL",0,0,"1666",,terminal_output +6730,7822540,"TERMINAL",0,0,"2777",,terminal_output +6731,7823539,"TERMINAL",0,0,"3888",,terminal_output +6732,7824600,"TERMINAL",0,0,"4999",,terminal_output +6733,7825672,"TERMINAL",0,0,"5404020",,terminal_output +6734,7826829,"TERMINAL",0,0,"6111",,terminal_output +6735,7827718,"TERMINAL",0,0,"7222",,terminal_output +6736,7828773,"TERMINAL",0,0,"8333",,terminal_output +6737,7829859,"TERMINAL",0,0,"9444",,terminal_output +6738,7831096,"TERMINAL",0,0,"40555",,terminal_output +6739,7831726,"train_tokenizer.py",0,0,"",python,tab +6740,7831727,"train_tokenizer.py",9875,0,"",python,selection_mouse +6741,7832051,"train_tokenizer.py",9874,1,"\n",python,selection_mouse +6742,7832052,"train_tokenizer.py",9739,136," grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n",python,selection_mouse +6743,7832052,"train_tokenizer.py",9702,173," ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n",python,selection_mouse +6744,7832053,"train_tokenizer.py",9671,204," handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n",python,selection_mouse 
+6745,7832053,"train_tokenizer.py",9572,303," cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n",python,selection_mouse +6746,7832143,"train_tokenizer.py",9874,1,"\n",python,selection_command +6747,7832144,"train_tokenizer.py",9490,385," ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n",python,selection_mouse +6748,7832144,"train_tokenizer.py",9427,448," )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n",python,selection_mouse +6749,7832144,"train_tokenizer.py",9266,609," ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n",python,selection_mouse +6750,7832145,"train_tokenizer.py",9234,641," )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n",python,selection_mouse +6751,7832146,"train_tokenizer.py",9152,723," cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n",python,selection_mouse +6752,7832190,"train_tokenizer.py",9111,764," grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n 
cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n",python,selection_mouse +6753,7832231,"train_tokenizer.py",9077,798," ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n",python,selection_mouse +6754,7832276,"TERMINAL",0,0,"1666",,terminal_output +6755,7832326,"train_tokenizer.py",9051,824," handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n",python,selection_mouse +6756,7833113,"TERMINAL",0,0,"2777",,terminal_output +6757,7834250,"TERMINAL",0,0,"3888",,terminal_output +6758,7835271,"TERMINAL",0,0,"4999",,terminal_output +6759,7836313,"TERMINAL",0,0,"5505030",,terminal_output +6760,7837343,"TERMINAL",0,0,"6111",,terminal_output +6761,7838367,"TERMINAL",0,0,"7333",,terminal_output +6762,7839370,"TERMINAL",0,0,"9444",,terminal_output +6763,7840496,"TERMINAL",0,0,"50555",,terminal_output +6764,7841415,"TERMINAL",0,0,"1666",,terminal_output +6765,7842080,"train_dynamics.py",0,0,"",python,tab +6766,7842081,"train_dynamics.py",9563,0,"",python,selection_mouse +6767,7842121,"train_dynamics.py",9562,0,"",python,selection_command +6768,7842143,"train_dynamics.py",9562,1,")",python,selection_mouse +6769,7842171,"train_dynamics.py",9563,0,"",python,selection_command +6770,7842196,"train_dynamics.py",9480,83," cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6771,7842259,"train_dynamics.py",9479,84," cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6772,7842260,"train_dynamics.py",9478,85," cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6773,7842260,"train_dynamics.py",9434,129," grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6774,7842308,"train_dynamics.py",9433,130," grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6775,7842349,"train_dynamics.py",9432,131," grain.checkpoint.CheckpointRestore,\n 
cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6776,7842362,"train_dynamics.py",9404,159," ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6777,7842446,"train_dynamics.py",9378,185," handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6778,7842489,"TERMINAL",0,0,"2777",,terminal_output +6779,7842531,"train_dynamics.py",9372,191," )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6780,7842600,"train_dynamics.py",9290,273," cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6781,7842652,"train_dynamics.py",9249,314," grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6782,7842663,"train_dynamics.py",9221,342," ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6783,7842724,"train_dynamics.py",9195,368," handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6784,7842816,"train_dynamics.py",9189,374," )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6785,7842987,"train_dynamics.py",9105,458," ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6786,7843036,"train_dynamics.py",9079,484," handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n 
cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6787,7843479,"TERMINAL",0,0,"3888",,terminal_output +6788,7843607,"train_dynamics.py",9105,458," ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6789,7843903,"train_dynamics.py",9189,374," )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6790,7844514,"TERMINAL",0,0,"4999",,terminal_output +6791,7845555,"TERMINAL",0,0,"51:001:0040",,terminal_output +6792,7846466,"train_dynamics.py",9261,0,"",python,selection_mouse +6793,7846631,"TERMINAL",0,0,"6111",,terminal_output +6794,7847367,"train_dynamics.py",9563,0,"",python,selection_mouse +6795,7847369,"train_dynamics.py",9562,0,"",python,selection_command +6796,7847591,"train_dynamics.py",9562,1,")",python,selection_mouse +6797,7847592,"train_dynamics.py",9563,0,"",python,selection_command +6798,7847786,"train_dynamics.py",9484,79,"cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6799,7847787,"train_dynamics.py",9438,125," grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6800,7847787,"train_dynamics.py",9409,154," ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6801,7847787,"train_dynamics.py",9408,155," ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6802,7847788,"train_dynamics.py",9382,181,"handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6803,7847788,"train_dynamics.py",9381,182," handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6804,7847788,"train_dynamics.py",9375,188," )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6805,7847788,"train_dynamics.py",9374,189," )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6806,7847789,"train_dynamics.py",9292,271," cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse 
+6807,7847829,"train_dynamics.py",9291,272," cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6808,7847857,"train_dynamics.py",9250,313," grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6809,7847886,"train_dynamics.py",9249,314," grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6810,7847912,"TERMINAL",0,0,"7222",,terminal_output +6811,7847963,"train_dynamics.py",9221,342," ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6812,7848014,"train_dynamics.py",9195,368," handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )",python,selection_mouse +6813,7848518,"train_dynamics.py",9195,368,"",python,content +6814,7848738,"TERMINAL",0,0,"8333",,terminal_output +6815,7849725,"train_dynamics.py",9195,0," handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n",python,content +6816,7849748,"TERMINAL",0,0,"9444",,terminal_output +6817,7850781,"TERMINAL",0,0,"7:00555",,terminal_output +6818,7851877,"TERMINAL",0,0,"1666",,terminal_output +6819,7852453,"train_dynamics.py",9667,0,"",python,selection_mouse +6820,7852850,"TERMINAL",0,0,"2777",,terminal_output +6821,7853926,"TERMINAL",0,0,"3888",,terminal_output +6822,7854956,"TERMINAL",0,0,"4999",,terminal_output +6823,7855986,"TERMINAL",0,0,"5101050",,terminal_output +6824,7856789,"train_tokenizer.py",0,0,"",python,tab +6825,7856789,"train_tokenizer.py",10144,0,"",python,selection_mouse +6826,7856829,"train_tokenizer.py",10143,0,"",python,selection_command +6827,7857063,"TERMINAL",0,0,"6111",,terminal_output +6828,7858353,"train_tokenizer.py",10566,12,"val_data_dir",python,selection_command +6829,7858394,"TERMINAL",0,0,"7333",,terminal_output 
+6830,7859415,"TERMINAL",0,0,"9444",,terminal_output +6831,7860473,"TERMINAL",0,0,"10555",,terminal_output +6832,7861479,"TERMINAL",0,0,"1666",,terminal_output +6833,7862512,"TERMINAL",0,0,"2777",,terminal_output +6834,7863656,"TERMINAL",0,0,"3888",,terminal_output +6835,7864586,"TERMINAL",0,0,"4999",,terminal_output +6836,7864763,"train_tokenizer.py",10593,0,"",python,selection_mouse +6837,7864927,"train_tokenizer.py",10588,12,"val_iterator",python,selection_mouse +6838,7865574,"train_tokenizer.py",10704,0,"",python,selection_mouse +6839,7865663,"TERMINAL",0,0,"520205:00",,terminal_output +6840,7865860,"train_tokenizer.py",10584,120," val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n",python,selection_mouse +6841,7865921,"train_tokenizer.py",10583,121," val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n",python,selection_mouse +6842,7865922,"train_tokenizer.py",10582,122," val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n",python,selection_mouse +6843,7865922,"train_tokenizer.py",10555,149," if args.val_data_dir:\n val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n",python,selection_mouse +6844,7865964,"train_tokenizer.py",10554,150," if args.val_data_dir:\n val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n",python,selection_mouse +6845,7866104,"train_tokenizer.py",10436,268," train_iterator = create_dataloader_iterator(args.data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n if args.val_data_dir:\n val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n",python,selection_mouse +6846,7866712,"TERMINAL",0,0,"6111",,terminal_output +6847,7867736,"TERMINAL",0,0,"7222",,terminal_output +6848,7868888,"TERMINAL",0,0,"8333",,terminal_output +6849,7869845,"TERMINAL",0,0,"9444",,terminal_output +6850,7871074,"train_dynamics.py",0,0,"",python,tab +6851,7871074,"train_dynamics.py",11220,0,"",python,selection_mouse +6852,7871075,"train_dynamics.py",11147,73,"ain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +6853,7871075,"train_dynamics.py",11085,135,"nitial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +6854,7871155,"train_dynamics.py",11053,167," seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +6855,7871156,"train_dynamics.py",11019,201," prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +6856,7871156,"train_dynamics.py",10995,225," num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +6857,7871157,"TERMINAL",0,0,"20555",,terminal_output +6858,7871197,"train_dynamics.py",10973,247," *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = 
grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +6859,7871203,"train_dynamics.py",10948,272," args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +6860,7871223,"train_dynamics.py",10883,337," # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +6861,7871285,"train_dynamics.py",10824,396," # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +6862,7871286,"train_dynamics.py",10802,418," args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +6863,7871320,"train_dynamics.py",10774,446," array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +6864,7871335,"train_dynamics.py",10735,485," grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +6865,7871385,"train_dynamics.py",10729,491," ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +6866,7871406,"train_dynamics.py",10690,530," if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n 
grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +6867,7871438,"train_dynamics.py",10647,573," for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +6868,7871463,"train_dynamics.py",10608,612," os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +6869,7871825,"train_dynamics.py",10581,639," array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n",python,selection_mouse +6870,7871956,"TERMINAL",0,0,"1666",,terminal_output +6871,7872418,"train_dynamics.py",10581,639,"´",python,content +6872,7872954,"TERMINAL",0,0,"2777",,terminal_output +6873,7873003,"train_dynamics.py",10581,1,"",python,content +6874,7873005,"train_dynamics.py",10581,0,"",python,selection_keyboard +6875,7873603,"train_dynamics.py",10581,0," train_iterator = create_dataloader_iterator(args.data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n if args.val_data_dir:\n val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n",python,content +6876,7874024,"TERMINAL",0,0,"3888",,terminal_output +6877,7875065,"TERMINAL",0,0,"4999",,terminal_output +6878,7876103,"TERMINAL",0,0,"5303010",,terminal_output +6879,7877115,"TERMINAL",0,0,"6111",,terminal_output +6880,7878147,"TERMINAL",0,0,"7222",,terminal_output +6881,7879239,"TERMINAL",0,0,"8333",,terminal_output +6882,7880286,"TERMINAL",0,0,"9444",,terminal_output +6883,7880811,"train_tokenizer.py",0,0,"",python,tab +6884,7880811,"train_tokenizer.py",11712,0,"",python,selection_mouse +6885,7880944,"train_tokenizer.py",11711,0,"",python,selection_command +6886,7880945,"train_tokenizer.py",11711,1,")",python,selection_mouse +6887,7880945,"train_tokenizer.py",11685,26,"args=restore_args\n ",python,selection_mouse +6888,7880945,"train_tokenizer.py",11683,28," args=restore_args\n ",python,selection_mouse +6889,7880945,"train_tokenizer.py",11682,29," args=restore_args\n ",python,selection_mouse +6890,7880946,"train_tokenizer.py",11635,76," checkpoint_manager.latest_step(),\n args=restore_args\n ",python,selection_mouse 
+6891,7881051,"train_tokenizer.py",11712,0,"",python,selection_command +6892,7881052,"train_tokenizer.py",11634,78," checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6893,7881052,"train_tokenizer.py",11633,79," checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6894,7881053,"train_tokenizer.py",11585,127," restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6895,7881090,"train_tokenizer.py",11584,128," restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6896,7881223,"train_tokenizer.py",11583,129," restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6897,7881224,"train_tokenizer.py",11565,147," )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6898,7881224,"train_tokenizer.py",11459,253," dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6899,7881224,"train_tokenizer.py",11360,352," model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6900,7881224,"train_tokenizer.py",11279,433," )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6901,7881225,"train_tokenizer.py",11172,540," val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6902,7881225,"train_tokenizer.py",11067,645," dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6903,7881286,"train_tokenizer.py",10969,743," model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n 
)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6904,7881377,"train_tokenizer.py",10922,790," restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6905,7881465,"train_tokenizer.py",10892,820," if args.val_data_dir:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6906,7881517,"train_tokenizer.py",10827,885," abstract_optimizer_state = nnx.state(abstract_optimizer)\n if args.val_data_dir:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6907,7881519,"TERMINAL",0,0,"30555",,terminal_output +6908,7881930,"train_tokenizer.py",10892,820," if args.val_data_dir:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,selection_mouse +6909,7882339,"TERMINAL",0,0,"1777",,terminal_output +6910,7883597,"TERMINAL",0,0,"3888",,terminal_output +6911,7884372,"TERMINAL",0,0,"4999",,terminal_output +6912,7885433,"TERMINAL",0,0,"5404020",,terminal_output +6913,7886475,"TERMINAL",0,0,"6111",,terminal_output +6914,7887596,"TERMINAL",0,0,"7222",,terminal_output +6915,7888646,"TERMINAL",0,0,"8333",,terminal_output +6916,7889389,"train_dynamics.py",0,0,"",python,tab +6917,7889389,"train_dynamics.py",11386,0,"",python,selection_mouse 
+6918,7889539,"train_dynamics.py",11385,1,")",python,selection_mouse +6919,7889540,"train_dynamics.py",11384,2," )",python,selection_mouse +6920,7889540,"train_dynamics.py",11383,3," )",python,selection_mouse +6921,7889580,"train_dynamics.py",11382,4," )",python,selection_mouse +6922,7889618,"train_dynamics.py",11367,19," ),\n )",python,selection_mouse +6923,7889619,"train_dynamics.py",11366,20," ),\n )",python,selection_mouse +6924,7889624,"TERMINAL",0,0,"9444",,terminal_output +6925,7889663,"train_dynamics.py",11365,21," ),\n )",python,selection_mouse +6926,7889744,"train_dynamics.py",11263,123," dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator), # type: ignore\n ),\n )",python,selection_mouse +6927,7889745,"train_dynamics.py",11168,218," model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator), # type: ignore\n ),\n )",python,selection_mouse +6928,7889746,"train_dynamics.py",11167,219," model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator), # type: ignore\n ),\n )",python,selection_mouse +6929,7889787,"train_dynamics.py",11130,256," args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator), # type: ignore\n ),\n )",python,selection_mouse +6930,7889787,"train_dynamics.py",11084,302," checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator), # type: ignore\n ),\n )",python,selection_mouse +6931,7889834,"train_dynamics.py",11037,349," restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator), # type: ignore\n ),\n )",python,selection_mouse +6932,7889866,"train_dynamics.py",10972,414," abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator), # type: ignore\n ),\n )",python,selection_mouse +6933,7889955,"train_dynamics.py",10909,477," abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator), # type: ignore\n ),\n )",python,selection_mouse +6934,7890593,"TERMINAL",0,0,"40555",,terminal_output +6935,7891330,"train_dynamics.py",10972,414," abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator), # type: ignore\n ),\n )",python,selection_mouse +6936,7891653,"TERMINAL",0,0,"1666",,terminal_output +6937,7892230,"train_dynamics.py",11037,349," 
restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator), # type: ignore\n ),\n )",python,selection_mouse +6938,7892705,"TERMINAL",0,0,"2777",,terminal_output +6939,7893163,"train_dynamics.py",11037,349,"",python,content +6940,7893723,"TERMINAL",0,0,"3888",,terminal_output +6941,7893785,"train_dynamics.py",11037,0," if args.val_data_dir:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )",python,content +6942,7894770,"TERMINAL",0,0,"4999",,terminal_output +6943,7895930,"TERMINAL",0,0,"5505030",,terminal_output +6944,7897291,"TERMINAL",0,0,"6111",,terminal_output +6945,7898461,"TERMINAL",0,0,"7222",,terminal_output +6946,7899429,"train_tokenizer.py",0,0,"",python,tab +6947,7899429,"train_tokenizer.py",11977,0,"",python,selection_mouse +6948,7899619,"train_tokenizer.py",11976,0,"",python,selection_command +6949,7899619,"train_tokenizer.py",11974,2,"e""",python,selection_mouse +6950,7899620,"train_tokenizer.py",11971,5,"tate""",python,selection_mouse +6951,7899620,"train_tokenizer.py",11969,7,"_state""",python,selection_mouse +6952,7899620,"train_tokenizer.py",11965,11,"ader_state""",python,selection_mouse +6953,7899620,"train_tokenizer.py",11963,13,"loader_state""",python,selection_mouse +6954,7899620,"train_tokenizer.py",11961,15,"taloader_state""",python,selection_mouse +6955,7899621,"train_tokenizer.py",11959,17,"dataloader_state""",python,selection_mouse +6956,7899621,"TERMINAL",0,0,"8333",,terminal_output +6957,7899676,"train_tokenizer.py",11974,3,"e""]",python,selection_command +6958,7899677,"train_tokenizer.py",11917,60,"\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6959,7899754,"train_tokenizer.py",11916,61,":\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6960,7899841,"train_tokenizer.py",11912,65,"_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6961,7899880,"train_tokenizer.py",11909,68,"ata_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6962,7899880,"train_tokenizer.py",11907,70,"_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6963,7899881,"train_tokenizer.py",11905,72,"al_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6964,7899882,"train_tokenizer.py",11903,74,".val_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6965,7899919,"train_tokenizer.py",11901,76,"gs.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6966,7899962,"train_tokenizer.py",11899,78,"args.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6967,7899963,"train_tokenizer.py",11898,79," args.val_data_dir:\n val_iterator = 
restored[""val_dataloader_state""]",python,selection_mouse +6968,7900005,"train_tokenizer.py",11897,80,"f args.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6969,7900012,"train_tokenizer.py",11895,82," if args.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6970,7900020,"train_tokenizer.py",11894,83," if args.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6971,7900046,"train_tokenizer.py",11892,85," if args.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6972,7900047,"TERMINAL",0,0,"9444",,terminal_output +6973,7900088,"train_tokenizer.py",11890,87," if args.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6974,7900123,"train_tokenizer.py",11889,88," if args.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6975,7900130,"train_tokenizer.py",11888,89," if args.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6976,7901200,"TERMINAL",0,0,"50555",,terminal_output +6977,7902030,"TERMINAL",0,0,"1666",,terminal_output +6978,7903051,"TERMINAL",0,0,"2777",,terminal_output +6979,7904142,"TERMINAL",0,0,"3888",,terminal_output +6980,7905231,"TERMINAL",0,0,"4999",,terminal_output +6981,7905703,"train_tokenizer.py",12024,0,"",python,selection_mouse +6982,7906169,"TERMINAL",0,0,"52:002:0040",,terminal_output +6983,7906478,"train_tokenizer.py",11977,0,"",python,selection_mouse +6984,7906493,"train_tokenizer.py",11976,0,"",python,selection_command +6985,7906653,"train_tokenizer.py",11976,1,"]",python,selection_mouse +6986,7906654,"train_tokenizer.py",11977,0,"",python,selection_command +6987,7906880,"train_tokenizer.py",11971,6,"tate""]",python,selection_mouse +6988,7906880,"train_tokenizer.py",11959,18,"dataloader_state""]",python,selection_mouse +6989,7906881,"train_tokenizer.py",11951,26,"ed[""val_dataloader_state""]",python,selection_mouse +6990,7906881,"train_tokenizer.py",11917,60,"\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6991,7906881,"train_tokenizer.py",11908,69,"data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6992,7906881,"train_tokenizer.py",11906,71,"l_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6993,7906882,"train_tokenizer.py",11904,73,"val_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6994,7906934,"train_tokenizer.py",11902,75,"s.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6995,7906935,"train_tokenizer.py",11901,76,"gs.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6996,7906935,"train_tokenizer.py",11899,78,"args.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6997,7906970,"train_tokenizer.py",11897,80,"f args.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6998,7906996,"train_tokenizer.py",11893,84," if args.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +6999,7907029,"train_tokenizer.py",11891,86," if args.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +7000,7907081,"train_tokenizer.py",11830,147," train_iterator = restored[""train_dataloader_state""]\n if args.val_data_dir:\n 
val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +7001,7907111,"train_tokenizer.py",11828,149," train_iterator = restored[""train_dataloader_state""]\n if args.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,selection_mouse +7002,7907205,"TERMINAL",0,0,"6111",,terminal_output +7003,7908401,"TERMINAL",0,0,"7222",,terminal_output +7004,7909331,"TERMINAL",0,0,"8444",,terminal_output +7005,7910328,"TERMINAL",0,0,"8:00555",,terminal_output +7006,7911374,"TERMINAL",0,0,"1666",,terminal_output +7007,7911732,"train_dynamics.py",0,0,"",python,tab +7008,7911733,"train_dynamics.py",12026,0,"",python,selection_mouse +7009,7912382,"TERMINAL",0,0,"2777",,terminal_output +7010,7913280,"train_dynamics.py",12026,0,"o",python,content +7011,7913281,"train_dynamics.py",12027,0,"",python,selection_keyboard +7012,7913504,"TERMINAL",0,0,"3888",,terminal_output +7013,7913967,"train_dynamics.py",12026,1,"",python,content +7014,7914412,"train_dynamics.py",12024,2,"",python,content +7015,7914631,"TERMINAL",0,0,"4999",,terminal_output +7016,7914634,"train_dynamics.py",12008,16,"",python,content +7017,7914809,"train_dynamics.py",12006,2,"",python,content +7018,7915061,"train_dynamics.py",11998,8,"",python,content +7019,7915226,"train_dynamics.py",11996,2,"",python,content +7020,7915499,"train_dynamics.py",11981,15,"",python,content +7021,7915627,"TERMINAL",0,0,"5101050",,terminal_output +7022,7916044,"train_dynamics.py",11977,4,"",python,content +7023,7916349,"train_dynamics.py",11973,4,"",python,content +7024,7916552,"TERMINAL",0,0,"6111",,terminal_output +7025,7916812,"train_dynamics.py",11973,0," train_iterator = restored[""train_dataloader_state""]\n if args.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]",python,content +7026,7917601,"TERMINAL",0,0,"7222",,terminal_output +7027,7918717,"TERMINAL",0,0,"8333",,terminal_output +7028,7919819,"TERMINAL",0,0,"9444",,terminal_output +7029,7920110,"train_dynamics.py",11992,0,"",python,selection_mouse +7030,7921106,"TERMINAL",0,0,"10555",,terminal_output +7031,7921823,"TERMINAL",0,0,"1666",,terminal_output +7032,7922862,"TERMINAL",0,0,"2777",,terminal_output +7033,7923852,"TERMINAL",0,0,"3888",,terminal_output +7034,7925050,"TERMINAL",0,0,"4999",,terminal_output +7035,7925919,"TERMINAL",0,0,"520206:00",,terminal_output +7036,7926349,"train_tokenizer.py",0,0,"",python,tab +7037,7926350,"train_tokenizer.py",12442,0,"",python,selection_mouse +7038,7926524,"train_tokenizer.py",12441,1,")",python,selection_mouse +7039,7926524,"train_tokenizer.py",12440,2," )",python,selection_mouse +7040,7926524,"train_tokenizer.py",12439,3," )",python,selection_mouse +7041,7926525,"train_tokenizer.py",12441,0,"",python,selection_command +7042,7926525,"train_tokenizer.py",12439,2," ",python,selection_mouse +7043,7926525,"train_tokenizer.py",12438,3," ",python,selection_mouse +7044,7926572,"train_tokenizer.py",12441,1,")",python,selection_command +7045,7926573,"train_tokenizer.py",12437,5," )",python,selection_mouse +7046,7926614,"train_tokenizer.py",12399,43," for elem in val_iterator\n )",python,selection_mouse +7047,7926636,"train_tokenizer.py",12398,44," for elem in val_iterator\n )",python,selection_mouse +7048,7926654,"train_tokenizer.py",12323,119," jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )",python,selection_mouse +7049,7926679,"train_tokenizer.py",12295,147," dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n 
)",python,selection_mouse +7050,7926703,"train_tokenizer.py",12269,173," if args.val_data_dir:\n dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )",python,selection_mouse +7051,7926729,"train_tokenizer.py",12263,179," )\n if args.val_data_dir:\n dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )",python,selection_mouse +7052,7927002,"train_tokenizer.py",12228,214," for elem in train_iterator\n )\n if args.val_data_dir:\n dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )",python,selection_mouse +7053,7927003,"train_tokenizer.py",12158,284," jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in train_iterator\n )\n if args.val_data_dir:\n dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )",python,selection_mouse +7054,7927003,"train_tokenizer.py",12133,309," dataloader_train = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in train_iterator\n )\n if args.val_data_dir:\n dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )",python,selection_mouse +7055,7927261,"TERMINAL",0,0,"6111",,terminal_output +7056,7928162,"TERMINAL",0,0,"7222",,terminal_output +7057,7929030,"TERMINAL",0,0,"8333",,terminal_output +7058,7930088,"TERMINAL",0,0,"9444",,terminal_output +7059,7931238,"TERMINAL",0,0,"20555",,terminal_output +7060,7932172,"TERMINAL",0,0,"1666",,terminal_output +7061,7933212,"TERMINAL",0,0,"2777",,terminal_output +7062,7933870,"train_dynamics.py",0,0,"",python,tab +7063,7933870,"train_dynamics.py",12931,0,"",python,selection_mouse +7064,7933871,"train_dynamics.py",12895,36," for elem in grain_iterator\n )",python,selection_mouse +7065,7933871,"train_dynamics.py",12892,39," for elem in grain_iterator\n )",python,selection_mouse +7066,7933871,"train_dynamics.py",12891,40," for elem in grain_iterator\n )",python,selection_mouse +7067,7933910,"train_dynamics.py",12821,110," jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in grain_iterator\n )",python,selection_mouse +7068,7934287,"TERMINAL",0,0,"3888",,terminal_output +7069,7934359,"train_dynamics.py",12802,129," dataloader = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in grain_iterator\n )",python,selection_mouse +7070,7934912,"train_dynamics.py",12802,129,"",python,content +7071,7935272,"train_dynamics.py",12802,0," dataloader_train = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in train_iterator\n )\n if args.val_data_dir:\n dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )",python,content +7072,7935352,"TERMINAL",0,0,"4303010",,terminal_output +7073,7936366,"TERMINAL",0,0,"6111",,terminal_output +7074,7937431,"TERMINAL",0,0,"7222",,terminal_output +7075,7938440,"TERMINAL",0,0,"8333",,terminal_output +7076,7939443,"TERMINAL",0,0,"9444",,terminal_output +7077,7939579,"train_dynamics.py",13229,0,"",python,selection_mouse +7078,7940438,"train_dynamics.py",13229,0,"_",python,content +7079,7940439,"train_dynamics.py",13230,0,"",python,selection_keyboard +7080,7940530,"TERMINAL",0,0,"30555",,terminal_output +7081,7940714,"train_dynamics.py",13230,0,"t",python,content +7082,7940716,"train_dynamics.py",13231,0,"",python,selection_keyboard 
+7083,7940877,"train_dynamics.py",13231,0,"r",python,content +7084,7940878,"train_dynamics.py",13232,0,"",python,selection_keyboard +7085,7941084,"train_dynamics.py",13232,0,"a",python,content +7086,7941085,"train_dynamics.py",13233,0,"",python,selection_keyboard +7087,7941129,"train_dynamics.py",13233,0,"i",python,content +7088,7941130,"train_dynamics.py",13234,0,"",python,selection_keyboard +7089,7941180,"train_dynamics.py",13234,0,"n",python,content +7090,7941181,"train_dynamics.py",13235,0,"",python,selection_keyboard +7091,7941513,"TERMINAL",0,0,"1666",,terminal_output +7092,7942574,"TERMINAL",0,0,"2777",,terminal_output +7093,7943643,"TERMINAL",0,0,"3888",,terminal_output +7094,7944635,"TERMINAL",0,0,"4999",,terminal_output +7095,7945745,"TERMINAL",0,0,"5404020",,terminal_output +7096,7946750,"TERMINAL",0,0,"6111",,terminal_output +7097,7947764,"TERMINAL",0,0,"7222",,terminal_output +7098,7948787,"TERMINAL",0,0,"8333",,terminal_output +7099,7949430,"train_dynamics.py",13613,0,"",python,selection_mouse +7100,7949875,"TERMINAL",0,0,"9444",,terminal_output +7101,7950907,"TERMINAL",0,0,"40555",,terminal_output +7102,7952480,"TERMINAL",0,0,"1777",,terminal_output +7103,7953506,"TERMINAL",0,0,"3888",,terminal_output +7104,7954548,"TERMINAL",0,0,"4999",,terminal_output +7105,7955532,"TERMINAL",0,0,"5505030",,terminal_output +7106,7956569,"TERMINAL",0,0,"6111",,terminal_output +7107,7957599,"TERMINAL",0,0,"7222",,terminal_output +7108,7958639,"TERMINAL",0,0,"8333",,terminal_output +7109,7959689,"TERMINAL",0,0,"9444",,terminal_output +7110,7960764,"TERMINAL",0,0,"50555",,terminal_output +7111,7961797,"TERMINAL",0,0,"1666",,terminal_output +7112,7962799,"TERMINAL",0,0,"2777",,terminal_output +7113,7963940,"TERMINAL",0,0,"3888",,terminal_output +7114,7964896,"TERMINAL",0,0,"4999",,terminal_output +7115,7965950,"TERMINAL",0,0,"53:003:0040",,terminal_output +7116,7966364,"train_tokenizer.py",0,0,"",python,tab +7117,7966365,"train_tokenizer.py",13190,0,"",python,selection_mouse +7118,7966663,"train_tokenizer.py",13190,30," # --- Logging ---\n ",python,selection_mouse +7119,7966812,"train_tokenizer.py",13190,29," # --- Logging ---\n ",python,selection_mouse +7120,7966870,"train_tokenizer.py",13190,54," # --- Logging ---\n if args.log:\n ",python,selection_mouse +7121,7967000,"TERMINAL",0,0,"6111",,terminal_output +7122,7967995,"TERMINAL",0,0,"7222",,terminal_output +7123,7968399,"train_tokenizer.py",12841,0,"",python,selection_mouse +7124,7968569,"train_tokenizer.py",12835,12," ",python,selection_mouse +7125,7968771,"train_tokenizer.py",12835,44," # --- Validation loss ---\n ",python,selection_mouse +7126,7968771,"train_tokenizer.py",12835,112," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n ",python,selection_mouse +7127,7968841,"train_tokenizer.py",12835,172," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n ",python,selection_mouse +7128,7968841,"train_tokenizer.py",12835,283," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n ",python,selection_mouse +7129,7968881,"train_tokenizer.py",12835,343," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = 
calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n",python,selection_mouse +7130,7968930,"train_tokenizer.py",12835,382," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n ",python,selection_mouse +7131,7968955,"train_tokenizer.py",12835,407," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n ",python,selection_mouse +7132,7969058,"TERMINAL",0,0,"8333",,terminal_output +7133,7969290,"train_tokenizer.py",12835,382," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n ",python,selection_mouse +7134,7969291,"train_tokenizer.py",12835,381," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n ",python,selection_mouse +7135,7969291,"train_tokenizer.py",12835,351," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n ",python,selection_mouse +7136,7969291,"train_tokenizer.py",12835,343," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n",python,selection_mouse +7137,7969292,"train_tokenizer.py",12835,170," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n ",python,selection_mouse +7138,7969292,"train_tokenizer.py",12835,110," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n ",python,selection_mouse +7139,7969339,"train_tokenizer.py",12835,41," # --- Validation loss ---\n ",python,selection_mouse +7140,7969340,"train_tokenizer.py",12835,40," # --- Validation loss ---\n ",python,selection_mouse +7141,7969380,"train_tokenizer.py",12835,12," ",python,selection_mouse +7142,7969523,"train_tokenizer.py",12835,169," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n ",python,selection_mouse +7143,7969569,"train_tokenizer.py",12835,593," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step 
{step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n ",python,selection_mouse +7144,7969603,"train_tokenizer.py",12835,933," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n ",python,selection_mouse +7145,7969649,"train_tokenizer.py",12835,1119," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n ",python,selection_mouse +7146,7969703,"train_tokenizer.py",12835,1201," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n ",python,selection_mouse +7147,7969911,"train_tokenizer.py",12835,1200," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n ",python,selection_mouse +7148,7969957,"train_tokenizer.py",12835,1255," # --- Validation loss ---\n if args.val_data_dir and 
step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n ",python,selection_mouse +7149,7970004,"train_tokenizer.py",12835,1324," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n ",python,selection_mouse +7150,7970040,"train_tokenizer.py",12835,1347," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n ",python,selection_mouse +7151,7970093,"train_tokenizer.py",12835,1423," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = 
jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n ",python,selection_mouse +7152,7970279,"TERMINAL",0,0,"9444",,terminal_output +7153,7970280,"train_tokenizer.py",12835,1567," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)",python,selection_mouse +7154,7970369,"train_tokenizer.py",12835,1514," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n ",python,selection_mouse +7155,7970369,"train_tokenizer.py",12835,1578," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = 
val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n ",python,selection_mouse +7156,7970430,"train_tokenizer.py",12835,1823," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )",python,selection_mouse +7157,7970430,"train_tokenizer.py",12835,2058," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.",python,selection_mouse +7158,7970496,"train_tokenizer.py",12835,2293," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n 
if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),",python,selection_mouse +7159,7970558,"train_tokenizer.py",12835,2561," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:",python,selection_mouse +7160,7970644,"train_tokenizer.py",12835,2888," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = 
jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(",python,selection_mouse +7161,7970700,"train_tokenizer.py",12835,3059," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n ",python,selection_mouse +7162,7970743,"train_tokenizer.py",12835,3329," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() 
== 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n if args.val_data_dir:",python,selection_mouse +7163,7970804,"train_tokenizer.py",12835,3661," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n 
val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n if args.val_data_dir:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),",python,selection_mouse +7164,7970834,"train_tokenizer.py",12835,3823," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n if args.val_data_dir:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ",python,selection_mouse +7165,7970929,"train_tokenizer.py",12835,3491," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 
0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n if args.val_data_dir:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n ",python,selection_mouse +7166,7971014,"train_tokenizer.py",12835,3138," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if 
args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n )\n wandb.log(log_images)\n #",python,selection_mouse +7167,7971074,"train_tokenizer.py",12835,2992," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ",python,selection_mouse +7168,7971107,"train_tokenizer.py",12835,2664," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n 
val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n ",python,selection_mouse +7169,7971176,"train_tokenizer.py",12835,2626," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n ",python,selection_mouse +7170,7971257,"train_tokenizer.py",12835,2579," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n 
val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n ",python,selection_mouse +7171,7971284,"TERMINAL",0,0,"9:00555",,terminal_output +7172,7971410,"train_tokenizer.py",12835,2626," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n ",python,selection_mouse +7173,7971446,"train_tokenizer.py",12835,2625," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n 
val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n ",python,selection_mouse +7174,7971479,"train_tokenizer.py",12835,2663," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n ",python,selection_mouse +7175,7971514,"train_tokenizer.py",12835,2749," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n 
recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n ",python,selection_mouse +7176,7971569,"train_tokenizer.py",12835,2838," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n ",python,selection_mouse +7177,7971938,"train_tokenizer.py",12835,2905," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = 
einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n ",python,selection_mouse +7178,7971939,"train_tokenizer.py",12835,2904," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n ",python,selection_mouse +7179,7972030,"train_tokenizer.py",12835,2992," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % 
args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ",python,selection_mouse +7180,7972072,"train_tokenizer.py",12835,2991," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n ",python,selection_mouse +7181,7972118,"train_tokenizer.py",12835,3028," # --- Validation loss ---\n if args.val_data_dir and step % 
args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n ",python,selection_mouse +7182,7972161,"train_tokenizer.py",12835,3027," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n 
np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n ",python,selection_mouse +7183,7972204,"train_tokenizer.py",12835,3026," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n ",python,selection_mouse +7184,7972246,"train_tokenizer.py",12835,3060," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = 
einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n ",python,selection_mouse +7185,7972256,"TERMINAL",0,0,"1666",,terminal_output +7186,7972395,"train_tokenizer.py",12835,3061," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n ",python,selection_mouse +7187,7972425,"train_tokenizer.py",12835,3062," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = 
inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n ",python,selection_mouse +7188,7972457,"train_tokenizer.py",12835,3064," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n ",python,selection_mouse +7189,7972473,"train_tokenizer.py",12835,3066," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = 
calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n ",python,selection_mouse +7190,7972530,"train_tokenizer.py",12835,3102," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n 
val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n )\n ",python,selection_mouse +7191,7972552,"train_tokenizer.py",12835,3108," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n )\n wandb",python,selection_mouse +7192,7972589,"train_tokenizer.py",12835,3123," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow 
deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n )\n wandb.log(log_images",python,selection_mouse +7193,7972695,"train_tokenizer.py",12835,3124," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n )\n wandb.log(log_images)",python,selection_mouse +7194,7973228,"TERMINAL",0,0,"2777",,terminal_output +7195,7974383,"TERMINAL",0,0,"3888",,terminal_output +7196,7975277,"TERMINAL",0,0,"4999",,terminal_output +7197,7976302,"TERMINAL",0,0,"5111151",,terminal_output +7198,7977341,"TERMINAL",0,0,"7222",,terminal_output +7199,7978395,"TERMINAL",0,0,"8333",,terminal_output +7200,7979808,"train_dynamics.py",0,0,"",python,tab +7201,7979808,"train_dynamics.py",14857,0,"",python,selection_mouse +7202,7979808,"train_dynamics.py",14713,144,".asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7203,7979808,"train_dynamics.py",14653,204,"rue_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7204,7979808,"train_dynamics.py",14562,295," 
recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7205,7979808,"train_dynamics.py",14473,384," image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7206,7979808,"train_dynamics.py",14428,429," log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7207,7979808,"train_dynamics.py",14378,479," if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7208,7979809,"train_dynamics.py",14283,574," comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7209,7979928,"train_dynamics.py",13957,900," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7210,7979929,"TERMINAL",0,0,"9444",,terminal_output +7211,7980054,"train_dynamics.py",13907,950," }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7212,7980097,"train_dynamics.py",13759,1098," {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n 
wandb.log(log_images)",python,selection_mouse +7213,7980140,"train_dynamics.py",13785,1072," ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7214,7980186,"train_dynamics.py",13869,988," **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7215,7980204,"train_dynamics.py",13908,949," }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7216,7980245,"train_dynamics.py",13956,901," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7217,7980286,"train_dynamics.py",14091,766," recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7218,7980304,"train_dynamics.py",14226,631," comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n 
true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7219,7980329,"train_dynamics.py",14282,575," comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7220,7980382,"train_dynamics.py",14351,506," )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7221,7980454,"train_dynamics.py",14373,484," if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7222,7980480,"train_dynamics.py",14225,632," comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7223,7980513,"train_dynamics.py",13866,991," **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7224,7980546,"train_dynamics.py",13722,1135," wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7225,7980579,"train_dynamics.py",13642,1215," if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 
0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7226,7980580,"TERMINAL",0,0,"10555",,terminal_output +7227,7980746,"train_dynamics.py",13641,1216," if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7228,7980804,"train_dynamics.py",13721,1136," wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7229,7980804,"train_dynamics.py",13752,1105," {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7230,7981015,"train_dynamics.py",13721,1136," wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7231,7981015,"train_dynamics.py",13642,1215," if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = 
recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7232,7981016,"train_dynamics.py",13616,1241," if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7233,7981444,"train_dynamics.py",13615,1242," if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7234,7981664,"train_dynamics.py",13614,1243," if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7235,7981665,"train_dynamics.py",13584,1273," # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n 
true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7236,7981665,"train_dynamics.py",13467,1390," metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7237,7981666,"train_dynamics.py",13324,1533," inputs = dict(videos=videos, mask_rng=_rng_mask)\n loss, recon, metrics = train_step(optimizer.model, optimizer, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7238,7981666,"train_dynamics.py",13237,1620," # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n inputs = dict(videos=videos, mask_rng=_rng_mask)\n loss, recon, metrics = train_step(optimizer.model, optimizer, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7239,7981746,"train_dynamics.py",13164,1693," while step < args.num_steps:\n for videos in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n inputs = dict(videos=videos, mask_rng=_rng_mask)\n loss, recon, metrics = train_step(optimizer.model, optimizer, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 
1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7240,7981747,"TERMINAL",0,0,"1666",,terminal_output +7241,7981816,"train_dynamics.py",12964,1893," dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n for videos in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n inputs = dict(videos=videos, mask_rng=_rng_mask)\n loss, recon, metrics = train_step(optimizer.model, optimizer, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7242,7981863,"train_dynamics.py",12827,2030," jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in train_iterator\n )\n if args.val_data_dir:\n dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n for videos in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n inputs = dict(videos=videos, mask_rng=_rng_mask)\n loss, recon, metrics = train_step(optimizer.model, optimizer, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n 
),\n )\n wandb.log(log_images)",python,selection_mouse +7243,7981905,"train_dynamics.py",12897,1960," for elem in train_iterator\n )\n if args.val_data_dir:\n dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n for videos in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n inputs = dict(videos=videos, mask_rng=_rng_mask)\n loss, recon, metrics = train_step(optimizer.model, optimizer, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7244,7981950,"train_dynamics.py",12964,1893," dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n for videos in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n inputs = dict(videos=videos, mask_rng=_rng_mask)\n loss, recon, metrics = train_step(optimizer.model, optimizer, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7245,7981972,"train_dynamics.py",13065,1792," for elem in val_iterator\n )\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n for videos in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n inputs = dict(videos=videos, mask_rng=_rng_mask)\n loss, recon, metrics = train_step(optimizer.model, optimizer, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 
1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7246,7981990,"train_dynamics.py",13112,1745," print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n for videos in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n inputs = dict(videos=videos, mask_rng=_rng_mask)\n loss, recon, metrics = train_step(optimizer.model, optimizer, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7247,7982078,"train_dynamics.py",13164,1693," while step < args.num_steps:\n for videos in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n inputs = dict(videos=videos, mask_rng=_rng_mask)\n loss, recon, metrics = train_step(optimizer.model, optimizer, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7248,7982115,"train_dynamics.py",13237,1620," # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n inputs = dict(videos=videos, mask_rng=_rng_mask)\n loss, recon, metrics = train_step(optimizer.model, optimizer, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n 
comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7249,7982166,"train_dynamics.py",13324,1533," inputs = dict(videos=videos, mask_rng=_rng_mask)\n loss, recon, metrics = train_step(optimizer.model, optimizer, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7250,7982327,"train_dynamics.py",13385,1472," loss, recon, metrics = train_step(optimizer.model, optimizer, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7251,7982410,"train_dynamics.py",13467,1390," metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7252,7982411,"train_dynamics.py",13513,1344," print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": 
step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7253,7982463,"train_dynamics.py",13561,1296," step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7254,7982464,"train_dynamics.py",13583,1274,"\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7255,7982573,"train_dynamics.py",13584,1273," # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +7256,7982574,"TERMINAL",0,0,"2777",,terminal_output +7257,7983618,"TERMINAL",0,0,"3888",,terminal_output +7258,7983896,"train_dynamics.py",13584,1273,"",python,content +7259,7984633,"TERMINAL",0,0,"4999",,terminal_output +7260,7984730,"train_dynamics.py",13584,0," # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = 
calculate_validation_metrics(dataloader_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n )\n wandb.log(log_images)",python,content +7261,7985670,"TERMINAL",0,0,"520207:00",,terminal_output +7262,7986704,"TERMINAL",0,0,"6111",,terminal_output +7263,7987757,"TERMINAL",0,0,"7222",,terminal_output +7264,7988891,"TERMINAL",0,0,"8333",,terminal_output +7265,7989834,"TERMINAL",0,0,"9444",,terminal_output +7266,7990880,"TERMINAL",0,0,"20555",,terminal_output +7267,7991923,"TERMINAL",0,0,"1666",,terminal_output +7268,7992924,"TERMINAL",0,0,"2777",,terminal_output +7269,7993968,"TERMINAL",0,0,"3888",,terminal_output +7270,7995036,"TERMINAL",0,0,"4999",,terminal_output +7271,7996167,"TERMINAL",0,0,"5303010",,terminal_output +7272,7997163,"TERMINAL",0,0,"6111",,terminal_output +7273,7998165,"TERMINAL",0,0,"7222",,terminal_output +7274,7999119,"train_tokenizer.py",0,0,"",python,tab +7275,7999120,"train_tokenizer.py",17204,0,"",python,selection_mouse +7276,7999120,"train_tokenizer.py",17159,45,"args=ckpt_manager_args\n ),",python,selection_mouse +7277,7999120,"train_tokenizer.py",17158,46," args=ckpt_manager_args\n ),",python,selection_mouse +7278,7999120,"train_tokenizer.py",17131,73," step,\n args=ckpt_manager_args\n ),",python,selection_mouse +7279,7999120,"train_tokenizer.py",17129,75," step,\n args=ckpt_manager_args\n ),",python,selection_mouse +7280,7999219,"train_tokenizer.py",17128,76," step,\n args=ckpt_manager_args\n ),",python,selection_mouse +7281,7999220,"train_tokenizer.py",17086,118," checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +7282,7999220,"train_tokenizer.py",17203,0,"",python,selection_command +7283,7999220,"train_tokenizer.py",17159,45,"args=ckpt_manager_args\n ),",python,selection_command +7284,7999261,"train_tokenizer.py",17062,142," )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n 
),",python,selection_mouse +7285,7999284,"train_tokenizer.py",16975,229," train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +7286,7999330,"train_tokenizer.py",16726,478," ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +7287,7999371,"train_tokenizer.py",16502,702," val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )\n else: \n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +7288,7999372,"TERMINAL",0,0,"8333",,terminal_output +7289,7999397,"train_tokenizer.py",16319,885," train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )\n else: \n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +7290,7999436,"train_tokenizer.py",16169,1035," ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )\n else: \n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +7291,7999665,"train_tokenizer.py",16168,1036," ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )\n else: \n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +7292,7999754,"train_tokenizer.py",16130,1074," if args.val_data_dir:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )\n else: \n ckpt_manager_args = ocp.args.Composite(\n 
model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +7293,7999755,"train_tokenizer.py",16129,1075," if args.val_data_dir:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )\n else: \n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +7294,7999946,"train_tokenizer.py",16128,1076," if args.val_data_dir:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )\n else: \n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +7295,8000116,"train_tokenizer.py",16073,1131," optimizer_state = nnx.state(optimizer)\n if args.val_data_dir:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )\n else: \n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,selection_mouse +7296,8000250,"TERMINAL",0,0,"9444",,terminal_output +7297,8001404,"TERMINAL",0,0,"30555",,terminal_output +7298,8002317,"TERMINAL",0,0,"1777",,terminal_output +7299,8003378,"TERMINAL",0,0,"3888",,terminal_output +7300,8004346,"train_dynamics.py",0,0,"",python,tab +7301,8004347,"train_dynamics.py",17294,0,"",python,selection_mouse +7302,8004440,"TERMINAL",0,0,"4999",,terminal_output +7303,8005013,"train_dynamics.py",17272,22," ),\n )",python,selection_mouse +7304,8005014,"train_dynamics.py",17270,24," ),\n )",python,selection_mouse +7305,8005014,"train_dynamics.py",17268,26," ),\n )",python,selection_mouse +7306,8005143,"train_dynamics.py",17240,54," ),\n ),\n )",python,selection_mouse +7307,8005144,"train_dynamics.py",17238,56," ),\n ),\n )",python,selection_mouse +7308,8005144,"train_dynamics.py",17237,57," ),\n ),\n )",python,selection_mouse +7309,8005144,"train_dynamics.py",17177,117," grain_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +7310,8005145,"train_dynamics.py",17176,118," grain_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse 
+7311,8005145,"train_dynamics.py",17175,119," grain_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +7312,8005145,"train_dynamics.py",17084,210," dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +7313,8005181,"train_dynamics.py",16993,301," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +7314,8005185,"train_dynamics.py",16991,303," model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +7315,8005209,"train_dynamics.py",16946,348," args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +7316,8005232,"train_dynamics.py",16944,350," args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +7317,8005264,"train_dynamics.py",16918,376," step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +7318,8005283,"train_dynamics.py",16917,377," step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +7319,8005320,"train_dynamics.py",16876,418," checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +7320,8005414,"train_dynamics.py",16821,473," optimizer_state = nnx.state(optimizer)\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +7321,8005507,"TERMINAL",0,0,"5404020",,terminal_output +7322,8005830,"train_dynamics.py",16822,472," optimizer_state = nnx.state(optimizer)\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +7323,8005886,"train_dynamics.py",16823,471," optimizer_state = nnx.state(optimizer)\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +7324,8006456,"TERMINAL",0,0,"6111",,terminal_output +7325,8006695,"train_dynamics.py",16822,472," optimizer_state = nnx.state(optimizer)\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n 
model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )",python,selection_mouse +7326,8007584,"TERMINAL",0,0,"7222",,terminal_output +7327,8008068,"train_dynamics.py",16822,472,"",python,content +7328,8008549,"train_dynamics.py",16822,0," optimizer_state = nnx.state(optimizer)\n if args.val_data_dir:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )\n else: \n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n ),",python,content +7329,8008601,"TERMINAL",0,0,"8333",,terminal_output +7330,8009604,"TERMINAL",0,0,"9444",,terminal_output +7331,8010605,"TERMINAL",0,0,"40555",,terminal_output +7332,8011652,"TERMINAL",0,0,"1666",,terminal_output +7333,8012733,"TERMINAL",0,0,"2777",,terminal_output +7334,8013779,"TERMINAL",0,0,"3888",,terminal_output +7335,8014906,"TERMINAL",0,0,"4999",,terminal_output +7336,8015545,"train_dynamics.py",17820,0,"",python,selection_mouse +7337,8015835,"TERMINAL",0,0,"5505030",,terminal_output +7338,8016939,"TERMINAL",0,0,"6111",,terminal_output +7339,8017920,"TERMINAL",0,0,"7222",,terminal_output +7340,8018995,"TERMINAL",0,0,"8333",,terminal_output +7341,8020000,"TERMINAL",0,0,"9444",,terminal_output +7342,8021156,"TERMINAL",0,0,"50555",,terminal_output +7343,8021805,"train_dynamics.py",17953,0,"",python,selection_mouse +7344,8022187,"TERMINAL",0,0,"1666",,terminal_output +7345,8022462,"train_dynamics.py",17952,1,"",python,content +7346,8023123,"TERMINAL",0,0,"2777",,terminal_output +7347,8024230,"train_tokenizer.py",0,0,"",python,tab +7348,8024230,"train_tokenizer.py",17204,0,"",python,selection_mouse +7349,8024286,"train_tokenizer.py",17203,0,"",python,selection_command +7350,8024368,"TERMINAL",0,0,"3888",,terminal_output +7351,8024586,"train_tokenizer.py",17202,0,"",python,selection_command +7352,8025220,"TERMINAL",0,0,"4999",,terminal_output +7353,8025536,"train_tokenizer.py",17204,0,"",python,selection_command +7354,8025703,"train_tokenizer.py",17203,1,"",python,content +7355,8026268,"TERMINAL",0,0,"54:004:0040",,terminal_output +7356,8027113,"train_lam.py",0,0,"",python,tab +7357,8027396,"TERMINAL",0,0,"6222",,terminal_output +7358,8028437,"TERMINAL",0,0,"8333",,terminal_output +7359,8028802,"train_lam.py",17648,0,"",python,selection_mouse +7360,8028817,"train_lam.py",17647,0,"",python,selection_command +7361,8029159,"train_lam.py",17648,0,"",python,selection_command +7362,8029286,"train_lam.py",17647,1,"",python,content +7363,8029437,"TERMINAL",0,0,"9444",,terminal_output +7364,8029708,"train_lam.py",17646,0,"",python,selection_command +7365,8030463,"TERMINAL",0,0,"40:00555",,terminal_output +7366,8031448,"train_tokenizer.py",0,0,"",python,tab +7367,8031656,"TERMINAL",0,0,"1666",,terminal_output +7368,8032513,"TERMINAL",0,0,"2777",,terminal_output +7369,8033544,"TERMINAL",0,0,"3888",,terminal_output +7370,8034579,"TERMINAL",0,0,"4999",,terminal_output +7371,8035662,"TERMINAL",0,0,"5101050",,terminal_output 
+7372,8036745,"TERMINAL",0,0,"6111",,terminal_output +7373,8037744,"TERMINAL",0,0,"7222",,terminal_output +7374,8038774,"TERMINAL",0,0,"8333",,terminal_output +7375,8039771,"TERMINAL",0,0,"9444",,terminal_output +7376,8040845,"TERMINAL",0,0,"10555",,terminal_output +7377,8041919,"TERMINAL",0,0,"1666",,terminal_output +7378,8042880,"TERMINAL",0,0,"2777",,terminal_output +7379,8043992,"TERMINAL",0,0,"3888",,terminal_output +7380,8044961,"TERMINAL",0,0,"4999",,terminal_output +7381,8045994,"TERMINAL",0,0,"520208:00",,terminal_output +7382,8047118,"TERMINAL",0,0,"6111",,terminal_output +7383,8048138,"TERMINAL",0,0,"7222",,terminal_output +7384,8049121,"TERMINAL",0,0,"8333",,terminal_output +7385,8050161,"TERMINAL",0,0,"9444",,terminal_output +7386,8051210,"TERMINAL",0,0,"20555",,terminal_output +7387,8052353,"TERMINAL",0,0,"1666",,terminal_output +7388,8053277,"TERMINAL",0,0,"2888",,terminal_output +7389,8053819,"train_tokenizer.py",5543,0,"",python,selection_mouse +7390,8053984,"train_tokenizer.py",5541,2,"on",python,selection_mouse +7391,8054040,"train_tokenizer.py",5495,48,"\n return val_loss, val_metrics, inputs, recon",python,selection_mouse +7392,8054041,"train_tokenizer.py",5541,2,"on",python,selection_command +7393,8054041,"train_tokenizer.py",5480,63,"0].keys()\n }\n return val_loss, val_metrics, inputs, recon",python,selection_mouse +7394,8054088,"train_tokenizer.py",5410,133,"[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon",python,selection_mouse +7395,8054089,"train_tokenizer.py",5408,135,"[m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon",python,selection_mouse +7396,8054089,"train_tokenizer.py",5377,166,"\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon",python,selection_mouse +7397,8054135,"train_tokenizer.py",5341,202,"n(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon",python,selection_mouse +7398,8054178,"train_tokenizer.py",5196,347,"Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}"")\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon",python,selection_mouse +7399,8054184,"train_tokenizer.py",5162,381,"< args.val_steps:\n print(f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}"")\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon",python,selection_mouse +7400,8054221,"train_tokenizer.py",5104,439," if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}"")\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon",python,selection_mouse +7401,8054271,"train_tokenizer.py",4909,634," inputs = dict(videos=videos)\n loss, recon, metrics = val_step(tokenizer, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}"")\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon",python,selection_mouse +7402,8054283,"train_tokenizer.py",4875,668," for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(tokenizer, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}"")\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon",python,selection_mouse +7403,8054330,"train_tokenizer.py",4849,694," metrics_per_step = []\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(tokenizer, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}"")\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon",python,selection_mouse +7404,8054369,"TERMINAL",0,0,"4999",,terminal_output +7405,8054418,"train_tokenizer.py",4848,695," metrics_per_step = []\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(tokenizer, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}"")\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon",python,selection_mouse +7406,8054433,"train_tokenizer.py",4825,718," loss_per_step = []\n metrics_per_step = []\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(tokenizer, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}"")\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon",python,selection_mouse +7407,8054485,"train_tokenizer.py",4824,719," loss_per_step = []\n metrics_per_step = []\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(tokenizer, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}"")\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon",python,selection_mouse +7408,8054536,"train_tokenizer.py",4811,732," step = 0\n loss_per_step = []\n metrics_per_step = []\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(tokenizer, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}"")\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon",python,selection_mouse +7409,8054946,"train_tokenizer.py",4761,782,"def calculate_validation_metrics(val_dataloader):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(tokenizer, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}"")\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon",python,selection_mouse +7410,8055374,"TERMINAL",0,0,"5303010",,terminal_output +7411,8056400,"TERMINAL",0,0,"6111",,terminal_output +7412,8057444,"TERMINAL",0,0,"7222",,terminal_output +7413,8058481,"TERMINAL",0,0,"8333",,terminal_output +7414,8059529,"TERMINAL",0,0,"9444",,terminal_output +7415,8060565,"TERMINAL",0,0,"30555",,terminal_output +7416,8061612,"TERMINAL",0,0,"1666",,terminal_output +7417,8062655,"TERMINAL",0,0,"2777",,terminal_output +7418,8063904,"train_dynamics.py",0,0,"",python,tab +7419,8063904,"train_dynamics.py",4998,0,"",python,selection_mouse +7420,8063933,"TERMINAL",0,0,"3888",,terminal_output +7421,8064453,"train_dynamics.py",4998,0,"\n",python,content +7422,8064788,"TERMINAL",0,0,"4999",,terminal_output +7423,8065132,"train_dynamics.py",4998,0,"",python,selection_command +7424,8065828,"TERMINAL",0,0,"5404020",,terminal_output +7425,8066445,"train_dynamics.py",4998,0,"def calculate_validation_metrics(val_dataloader):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(tokenizer, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}"")\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon",python,content +7426,8066856,"TERMINAL",0,0,"6111",,terminal_output +7427,8067914,"TERMINAL",0,0,"7222",,terminal_output +7428,8068945,"TERMINAL",0,0,"8333",,terminal_output +7429,8070028,"TERMINAL",0,0,"9444",,terminal_output +7430,8071051,"TERMINAL",0,0,"40555",,terminal_output +7431,8072119,"TERMINAL",0,0,"1666",,terminal_output +7432,8072159,"train_dynamics.py",5229,0,"",python,selection_mouse +7433,8072283,"train_dynamics.py",5221,9,"tokenizer",python,selection_mouse +7434,8074404,"TERMINAL",0,0,"2999",,terminal_output +7435,8075442,"TERMINAL",0,0,"5505030",,terminal_output +7436,8076486,"TERMINAL",0,0,"6111",,terminal_output +7437,8077527,"TERMINAL",0,0,"7222",,terminal_output +7438,8078574,"TERMINAL",0,0,"8333",,terminal_output +7439,8079734,"TERMINAL",0,0,"9444",,terminal_output +7440,8080745,"TERMINAL",0,0,"50555",,terminal_output +7441,8081779,"TERMINAL",0,0,"1666",,terminal_output +7442,8081818,"train_dynamics.py",5221,9,"d",python,content +7443,8081819,"train_dynamics.py",5222,0,"",python,selection_keyboard +7444,8082523,"train_dynamics.py",5222,0,"y",python,content +7445,8082524,"train_dynamics.py",5223,0,"",python,selection_keyboard +7446,8082553,"train_dynamics.py",5223,0,"n",python,content +7447,8082554,"train_dynamics.py",5224,0,"",python,selection_keyboard +7448,8082772,"TERMINAL",0,0,"2777",,terminal_output +7449,8083799,"TERMINAL",0,0,"3888",,terminal_output +7450,8084988,"TERMINAL",0,0,"4999",,terminal_output +7451,8085935,"TERMINAL",0,0,"55:005:0040",,terminal_output +7452,8086996,"TERMINAL",0,0,"6111",,terminal_output +7453,8087969,"TERMINAL",0,0,"7222",,terminal_output +7454,8089039,"TERMINAL",0,0,"8333",,terminal_output 
+7455,8090192,"TERMINAL",0,0,"9444",,terminal_output +7456,8091139,"TERMINAL",0,0,"1:00555",,terminal_output +7457,8092220,"TERMINAL",0,0,"1666",,terminal_output +7458,8093354,"TERMINAL",0,0,"2777",,terminal_output +7459,8094288,"TERMINAL",0,0,"3999",,terminal_output +7460,8095377,"TERMINAL",0,0,"5101050",,terminal_output +7461,8096436,"TERMINAL",0,0,"6111",,terminal_output +7462,8097411,"TERMINAL",0,0,"7222",,terminal_output +7463,8098488,"TERMINAL",0,0,"8333",,terminal_output +7464,8099511,"TERMINAL",0,0,"9444",,terminal_output +7465,8100557,"TERMINAL",0,0,"10555",,terminal_output +7466,8101633,"TERMINAL",0,0,"1666",,terminal_output +7467,8101693,"train_dynamics.py",5143,0,"",python,selection_mouse +7468,8102665,"TERMINAL",0,0,"2777",,terminal_output +7469,8103098,"train_tokenizer.py",0,0,"",python,tab +7470,8103098,"train_tokenizer.py",4989,0,"",python,selection_mouse +7471,8103098,"train_tokenizer.py",4984,9,"tokenizer",python,selection_mouse +7472,8103702,"TERMINAL",0,0,"3888",,terminal_output +7473,8104747,"TERMINAL",0,0,"4999",,terminal_output +7474,8105829,"TERMINAL",0,0,"520209:00",,terminal_output +7475,8106945,"TERMINAL",0,0,"6111",,terminal_output +7476,8107932,"TERMINAL",0,0,"7222",,terminal_output +7477,8108955,"TERMINAL",0,0,"8333",,terminal_output +7478,8110003,"TERMINAL",0,0,"9444",,terminal_output +7479,8111074,"TERMINAL",0,0,"20555",,terminal_output +7480,8112078,"TERMINAL",0,0,"1666",,terminal_output +7481,8113218,"TERMINAL",0,0,"2777",,terminal_output +7482,8114236,"TERMINAL",0,0,"3888",,terminal_output +7483,8115227,"TERMINAL",0,0,"4999",,terminal_output +7484,8116246,"train_tokenizer.py",4987,0,"",python,selection_mouse +7485,8116336,"train_tokenizer.py",6172,0,"",python,selection_command +7486,8116345,"TERMINAL",0,0,"5313111",,terminal_output +7487,8117331,"TERMINAL",0,0,"7222",,terminal_output +7488,8118508,"TERMINAL",0,0,"8333",,terminal_output +7489,8119447,"TERMINAL",0,0,"9444",,terminal_output +7490,8120459,"TERMINAL",0,0,"30555",,terminal_output +7491,8120746,"train_tokenizer.py",4808,0,"",python,selection_mouse +7492,8121512,"TERMINAL",0,0,"1666",,terminal_output +7493,8122554,"TERMINAL",0,0,"2777",,terminal_output +7494,8123603,"TERMINAL",0,0,"3888",,terminal_output +7495,8124681,"TERMINAL",0,0,"4999",,terminal_output +7496,8125813,"TERMINAL",0,0,"5404020",,terminal_output +7497,8126036,"train_tokenizer.py",4931,0,"",python,selection_mouse +7498,8126260,"train_tokenizer.py",4929,6,"videos",python,selection_mouse +7499,8126728,"TERMINAL",0,0,"6111",,terminal_output +7500,8127772,"TERMINAL",0,0,"7222",,terminal_output +7501,8128281,"train_tokenizer.py",4988,0,"",python,selection_mouse +7502,8128446,"train_tokenizer.py",4984,9,"tokenizer",python,selection_mouse +7503,8128815,"TERMINAL",0,0,"8333",,terminal_output +7504,8129741,"train_tokenizer.py",4808,0,"",python,selection_mouse +7505,8129874,"TERMINAL",0,0,"9444",,terminal_output +7506,8130607,"train_tokenizer.py",4808,0,",",python,content +7507,8130608,"train_tokenizer.py",4809,0,"",python,selection_keyboard +7508,8130657,"train_tokenizer.py",4809,0," ",python,content +7509,8130658,"train_tokenizer.py",4810,0,"",python,selection_keyboard +7510,8130807,"train_tokenizer.py",4810,0,"t",python,content +7511,8130807,"train_tokenizer.py",4811,0,"",python,selection_keyboard +7512,8130933,"train_tokenizer.py",4811,0,"o",python,content +7513,8130934,"train_tokenizer.py",4812,0,"",python,selection_keyboard +7514,8130934,"TERMINAL",0,0,"40555",,terminal_output 
+7515,8130935,"train_tokenizer.py",4812,0,"k",python,content +7516,8130935,"train_tokenizer.py",4813,0,"",python,selection_keyboard +7517,8131084,"train_tokenizer.py",4813,0,"e",python,content +7518,8131085,"train_tokenizer.py",4814,0,"",python,selection_keyboard +7519,8131192,"train_tokenizer.py",4814,0,"n",python,content +7520,8131193,"train_tokenizer.py",4815,0,"",python,selection_keyboard +7521,8131322,"train_tokenizer.py",4815,0,"i",python,content +7522,8131324,"train_tokenizer.py",4816,0,"",python,selection_keyboard +7523,8131510,"train_tokenizer.py",4816,0,"z",python,content +7524,8131511,"train_tokenizer.py",4817,0,"",python,selection_keyboard +7525,8131552,"train_tokenizer.py",4817,0,"e",python,content +7526,8131552,"train_tokenizer.py",4818,0,"",python,selection_keyboard +7527,8131563,"train_tokenizer.py",4818,0,"r",python,content +7528,8131563,"train_tokenizer.py",4819,0,"",python,selection_keyboard +7529,8131959,"TERMINAL",0,0,"1666",,terminal_output +7530,8132984,"TERMINAL",0,0,"2777",,terminal_output +7531,8134101,"TERMINAL",0,0,"3888",,terminal_output +7532,8135288,"train_lam.py",0,0,"",python,tab +7533,8135372,"TERMINAL",0,0,"4999",,terminal_output +7534,8136129,"TERMINAL",0,0,"5505030",,terminal_output +7535,8137178,"TERMINAL",0,0,"6111",,terminal_output +7536,8138272,"TERMINAL",0,0,"7222",,terminal_output +7537,8139310,"TERMINAL",0,0,"8333",,terminal_output +7538,8140312,"TERMINAL",0,0,"9555",,terminal_output +7539,8141396,"TERMINAL",0,0,"51666",,terminal_output +7540,8142407,"TERMINAL",0,0,"2777",,terminal_output +7541,8143495,"train_lam.py",5003,0,"",python,selection_mouse +7542,8143506,"TERMINAL",0,0,"3888",,terminal_output +7543,8144528,"TERMINAL",0,0,"4999",,terminal_output +7544,8144785,"train_lam.py",5003,0,",",python,content +7545,8144785,"train_lam.py",5004,0,"",python,selection_keyboard +7546,8144847,"train_lam.py",5004,0," ",python,content +7547,8144848,"train_lam.py",5005,0,"",python,selection_keyboard +7548,8145047,"train_lam.py",5005,0,"l",python,content +7549,8145048,"train_lam.py",5006,0,"",python,selection_keyboard +7550,8145147,"train_lam.py",5006,0,"a",python,content +7551,8145149,"train_lam.py",5007,0,"",python,selection_keyboard +7552,8145213,"train_lam.py",5007,0,"m",python,content +7553,8145214,"train_lam.py",5008,0,"",python,selection_keyboard +7554,8145559,"TERMINAL",0,0,"56:006:0040",,terminal_output +7555,8146631,"TERMINAL",0,0,"6111",,terminal_output +7556,8147666,"TERMINAL",0,0,"7222",,terminal_output +7557,8148689,"TERMINAL",0,0,"8333",,terminal_output +7558,8149746,"TERMINAL",0,0,"9444",,terminal_output +7559,8150774,"TERMINAL",0,0,"2:00555",,terminal_output +7560,8151861,"TERMINAL",0,0,"1666",,terminal_output +7561,8152895,"TERMINAL",0,0,"2777",,terminal_output +7562,8153865,"train_dynamics.py",0,0,"",python,tab +7563,8153866,"train_dynamics.py",5223,0,"",python,selection_mouse +7564,8153866,"train_dynamics.py",5221,3,"dyn",python,selection_mouse +7565,8153973,"TERMINAL",0,0,"3888",,terminal_output +7566,8154694,"train_dynamics.py",5221,3,"g",python,content +7567,8154696,"train_dynamics.py",5222,0,"",python,selection_keyboard +7568,8154850,"train_dynamics.py",5222,0,"e",python,content +7569,8154851,"train_dynamics.py",5223,0,"",python,selection_keyboard +7570,8154914,"train_dynamics.py",5223,0,"n",python,content +7571,8154915,"train_dynamics.py",5224,0,"",python,selection_keyboard +7572,8154985,"TERMINAL",0,0,"4999",,terminal_output +7573,8155008,"train_dynamics.py",5224,0,"i",python,content 
+7574,8155009,"train_dynamics.py",5225,0,"",python,selection_keyboard +7575,8155179,"train_dynamics.py",5225,0,"e",python,content +7576,8155181,"train_dynamics.py",5226,0,"",python,selection_keyboard +7577,8156068,"TERMINAL",0,0,"5101050",,terminal_output +7578,8157114,"TERMINAL",0,0,"6111",,terminal_output +7579,8158165,"TERMINAL",0,0,"7222",,terminal_output +7580,8159025,"train_dynamics.py",5381,0,"",python,selection_mouse +7581,8159162,"TERMINAL",0,0,"8333",,terminal_output +7582,8160203,"TERMINAL",0,0,"9444",,terminal_output +7583,8161262,"TERMINAL",0,0,"10555",,terminal_output +7584,8162283,"TERMINAL",0,0,"1666",,terminal_output +7585,8163321,"TERMINAL",0,0,"2888",,terminal_output +7586,8164412,"TERMINAL",0,0,"4999",,terminal_output +7587,8164478,"train_dynamics.py",5270,0,"",python,selection_mouse +7588,8165167,"train_dynamics.py",5216,0,"",python,selection_mouse +7589,8165334,"train_dynamics.py",5212,8,"val_step",python,selection_mouse +7590,8165426,"TERMINAL",0,0,"5202030:00",,terminal_output +7591,8166430,"TERMINAL",0,0,"6111",,terminal_output +7592,8166980,"train_dynamics.py",5045,0,"",python,selection_mouse +7593,8167478,"TERMINAL",0,0,"7222",,terminal_output +7594,8167754,"train_dynamics.py",5045,0,",",python,content +7595,8167754,"train_dynamics.py",5046,0,"",python,selection_keyboard +7596,8167852,"train_dynamics.py",5046,0," ",python,content +7597,8167852,"train_dynamics.py",5047,0,"",python,selection_keyboard +7598,8168012,"train_dynamics.py",5047,0,"g",python,content +7599,8168013,"train_dynamics.py",5048,0,"",python,selection_keyboard +7600,8168111,"train_dynamics.py",5048,0,"e",python,content +7601,8168111,"train_dynamics.py",5049,0,"",python,selection_keyboard +7602,8168548,"train_dynamics.py",5049,0,"n",python,content +7603,8168548,"train_dynamics.py",5050,0,"",python,selection_keyboard +7604,8168549,"TERMINAL",0,0,"8333",,terminal_output +7605,8168563,"train_dynamics.py",5050,0,"i",python,content +7606,8168563,"train_dynamics.py",5051,0,"",python,selection_keyboard +7607,8168638,"train_dynamics.py",5051,0,"e",python,content +7608,8168639,"train_dynamics.py",5052,0,"",python,selection_keyboard +7609,8169590,"TERMINAL",0,0,"9444",,terminal_output +7610,8170715,"TERMINAL",0,0,"20555",,terminal_output +7611,8171761,"TERMINAL",0,0,"1666",,terminal_output +7612,8172047,"train_lam.py",0,0,"",python,tab +7613,8172048,"train_lam.py",5138,0,"",python,selection_mouse +7614,8172475,"train_lam.py",4977,0,"",python,selection_mouse +7615,8172743,"TERMINAL",0,0,"2777",,terminal_output +7616,8172846,"train_lam.py",4960,28,"calculate_validation_metrics",python,selection_mouse +7617,8173415,"train_lam.py",4977,0,"",python,selection_mouse +7618,8173774,"TERMINAL",0,0,"3888",,terminal_output +7619,8173782,"train_lam.py",13507,0,"",python,selection_command +7620,8174856,"TERMINAL",0,0,"4999",,terminal_output +7621,8175977,"train_lam.py",13550,0,"",python,selection_mouse +7622,8175994,"TERMINAL",0,0,"5303010",,terminal_output +7623,8176973,"train_lam.py",13550,0,",",python,content +7624,8176974,"train_lam.py",13551,0,"",python,selection_keyboard +7625,8177086,"train_lam.py",13551,0," ",python,content +7626,8177087,"train_lam.py",13552,0,"",python,selection_keyboard +7627,8177113,"TERMINAL",0,0,"6111",,terminal_output +7628,8177755,"train_lam.py",13552,0,"l",python,content +7629,8177756,"train_lam.py",13553,0,"",python,selection_keyboard +7630,8177812,"train_lam.py",13553,0,"a",python,content +7631,8177813,"train_lam.py",13554,0,"",python,selection_keyboard 
+7632,8177900,"train_lam.py",13554,0,"m",python,content +7633,8177901,"train_lam.py",13555,0,"",python,selection_keyboard +7634,8178062,"TERMINAL",0,0,"7222",,terminal_output +7635,8179139,"TERMINAL",0,0,"8333",,terminal_output +7636,8180165,"TERMINAL",0,0,"9444",,terminal_output +7637,8180427,"train_tokenizer.py",0,0,"",python,tab +7638,8181232,"TERMINAL",0,0,"30555",,terminal_output +7639,8181790,"train_tokenizer.py",4783,0,"",python,selection_mouse +7640,8182217,"train_tokenizer.py",13077,0,"",python,selection_command +7641,8182279,"TERMINAL",0,0,"1666",,terminal_output +7642,8183284,"TERMINAL",0,0,"2888",,terminal_output +7643,8183907,"train_tokenizer.py",13120,0,"",python,selection_mouse +7644,8184364,"TERMINAL",0,0,"4999",,terminal_output +7645,8184731,"train_tokenizer.py",13120,0,",",python,content +7646,8184732,"train_tokenizer.py",13121,0,"",python,selection_keyboard +7647,8184771,"train_tokenizer.py",13121,0," ",python,content +7648,8184772,"train_tokenizer.py",13122,0,"",python,selection_keyboard +7649,8184929,"train_tokenizer.py",13122,0,"t",python,content +7650,8184930,"train_tokenizer.py",13123,0,"",python,selection_keyboard +7651,8184989,"train_tokenizer.py",13123,0,"o",python,content +7652,8184990,"train_tokenizer.py",13124,0,"",python,selection_keyboard +7653,8185034,"train_tokenizer.py",13124,0,"k",python,content +7654,8185035,"train_tokenizer.py",13125,0,"",python,selection_keyboard +7655,8185175,"train_tokenizer.py",13125,0,"e",python,content +7656,8185176,"train_tokenizer.py",13126,0,"",python,selection_keyboard +7657,8185238,"train_tokenizer.py",13126,0,"n",python,content +7658,8185239,"train_tokenizer.py",13127,0,"",python,selection_keyboard +7659,8185410,"train_tokenizer.py",13127,0,"i",python,content +7660,8185411,"train_tokenizer.py",13128,0,"",python,selection_keyboard +7661,8185412,"TERMINAL",0,0,"5404020",,terminal_output +7662,8185532,"train_tokenizer.py",13128,0,"z",python,content +7663,8185533,"train_tokenizer.py",13129,0,"",python,selection_keyboard +7664,8185580,"train_tokenizer.py",13129,0,"e",python,content +7665,8185581,"train_tokenizer.py",13130,0,"",python,selection_keyboard +7666,8185730,"train_tokenizer.py",13130,0,"r",python,content +7667,8185731,"train_tokenizer.py",13131,0,"",python,selection_keyboard +7668,8186423,"TERMINAL",0,0,"6111",,terminal_output +7669,8187496,"TERMINAL",0,0,"7222",,terminal_output +7670,8188529,"TERMINAL",0,0,"8333",,terminal_output +7671,8189596,"TERMINAL",0,0,"9444",,terminal_output +7672,8190580,"TERMINAL",0,0,"40555",,terminal_output +7673,8190788,"train_tokenizer.py",13199,0,"",python,selection_mouse +7674,8191616,"TERMINAL",0,0,"1666",,terminal_output +7675,8192726,"TERMINAL",0,0,"2777",,terminal_output +7676,8193464,"train_dynamics.py",0,0,"",python,tab +7677,8193464,"train_dynamics.py",5025,0,"",python,selection_mouse +7678,8193711,"TERMINAL",0,0,"3888",,terminal_output +7679,8194962,"train_dynamics.py",14601,0,"",python,selection_command +7680,8195546,"TERMINAL",0,0,"4505030",,terminal_output +7681,8196416,"train_dynamics.py",14644,0,"",python,selection_mouse +7682,8196576,"TERMINAL",0,0,"6111",,terminal_output +7683,8197273,"train_dynamics.py",14644,0,",",python,content +7684,8197274,"train_dynamics.py",14645,0,"",python,selection_keyboard +7685,8197328,"train_dynamics.py",14645,0," ",python,content +7686,8197329,"train_dynamics.py",14646,0,"",python,selection_keyboard +7687,8197513,"train_dynamics.py",14646,0,"g",python,content +7688,8197514,"train_dynamics.py",14647,0,"",python,selection_keyboard 
+7689,8197610,"train_dynamics.py",14647,0,"e",python,content +7690,8197611,"train_dynamics.py",14648,0,"",python,selection_keyboard +7691,8197629,"TERMINAL",0,0,"7222",,terminal_output +7692,8197711,"train_dynamics.py",14648,0,"n",python,content +7693,8197712,"train_dynamics.py",14649,0,"",python,selection_keyboard +7694,8197797,"train_dynamics.py",14649,0,"i",python,content +7695,8197798,"train_dynamics.py",14650,0,"",python,selection_keyboard +7696,8197912,"train_dynamics.py",14650,0,"e",python,content +7697,8197913,"train_dynamics.py",14651,0,"",python,selection_keyboard +7698,8198641,"TERMINAL",0,0,"8333",,terminal_output +7699,8199684,"TERMINAL",0,0,"9444",,terminal_output +7700,8200763,"TERMINAL",0,0,"50555",,terminal_output +7701,8201783,"TERMINAL",0,0,"1666",,terminal_output +7702,8202828,"TERMINAL",0,0,"2777",,terminal_output +7703,8202920,"train_dynamics.py",14719,0,"",python,selection_mouse +7704,8203912,"train_dynamics.py",14649,0,"",python,selection_mouse +7705,8203938,"TERMINAL",0,0,"3888",,terminal_output +7706,8204080,"train_dynamics.py",14646,5,"genie",python,selection_mouse +7707,8204554,"train_dynamics.py",14646,5,"",python,content +7708,8204859,"train_dynamics.py",14646,0,"o",python,content +7709,8204860,"train_dynamics.py",14647,0,"",python,selection_keyboard +7710,8204942,"TERMINAL",0,0,"4999",,terminal_output +7711,8205019,"train_dynamics.py",14647,0,"m",python,content +7712,8205021,"train_dynamics.py",14648,0,"",python,selection_keyboard +7713,8205578,"train_dynamics.py",14647,1,"",python,content +7714,8205768,"train_dynamics.py",14647,0,"p",python,content +7715,8205769,"train_dynamics.py",14648,0,"",python,selection_keyboard +7716,8205966,"train_dynamics.py",14648,0,"t",python,content +7717,8205967,"train_dynamics.py",14649,0,"",python,selection_keyboard +7718,8206039,"TERMINAL",0,0,"57:007:0040",,terminal_output +7719,8206097,"train_dynamics.py",14649,0,"i",python,content +7720,8206098,"train_dynamics.py",14650,0,"",python,selection_keyboard +7721,8206351,"train_dynamics.py",14650,0,"m",python,content +7722,8206352,"train_dynamics.py",14651,0,"",python,selection_keyboard +7723,8206815,"train_dynamics.py",14651,0,"i",python,content +7724,8206816,"train_dynamics.py",14652,0,"",python,selection_keyboard +7725,8206992,"train_dynamics.py",14652,0,"z",python,content +7726,8206994,"train_dynamics.py",14653,0,"",python,selection_keyboard +7727,8207087,"TERMINAL",0,0,"6111",,terminal_output +7728,8207126,"train_dynamics.py",14653,0,"e",python,content +7729,8207127,"train_dynamics.py",14654,0,"",python,selection_keyboard +7730,8207180,"train_dynamics.py",14654,0,"r",python,content +7731,8207181,"train_dynamics.py",14655,0,"",python,selection_keyboard +7732,8207312,"train_dynamics.py",14655,0,".",python,content +7733,8207314,"train_dynamics.py",14656,0,"",python,selection_keyboard +7734,8207518,"train_dynamics.py",14656,0,"m",python,content +7735,8207519,"train_dynamics.py",14657,0,"",python,selection_keyboard +7736,8207718,"train_dynamics.py",14657,0,"o",python,content +7737,8207720,"train_dynamics.py",14658,0,"",python,selection_keyboard +7738,8207863,"train_dynamics.py",14658,0,"d",python,content +7739,8207864,"train_dynamics.py",14659,0,"",python,selection_keyboard +7740,8207913,"train_dynamics.py",14659,0,"e",python,content +7741,8207914,"train_dynamics.py",14660,0,"",python,selection_keyboard +7742,8207963,"train_dynamics.py",14660,0,"l",python,content +7743,8207963,"train_dynamics.py",14661,0,"",python,selection_keyboard 
+7744,8208102,"TERMINAL",0,0,"7222",,terminal_output +7745,8209128,"TERMINAL",0,0,"8333",,terminal_output +7746,8210184,"TERMINAL",0,0,"9444",,terminal_output +7747,8211215,"TERMINAL",0,0,"3:00555",,terminal_output +7748,8212257,"TERMINAL",0,0,"1666",,terminal_output +7749,8213291,"TERMINAL",0,0,"2888",,terminal_output +7750,8214408,"TERMINAL",0,0,"4999",,terminal_output +7751,8215462,"TERMINAL",0,0,"5101050",,terminal_output +7752,8216422,"TERMINAL",0,0,"6111",,terminal_output +7753,8217475,"TERMINAL",0,0,"7222",,terminal_output +7754,8217898,"train_dynamics.py",14220,0,"",python,selection_mouse +7755,8218023,"train_dynamics.py",14217,9,"optimizer",python,selection_mouse +7756,8218514,"TERMINAL",0,0,"8333",,terminal_output +7757,8218652,"train_dynamics.py",14227,0,"",python,selection_mouse +7758,8218793,"train_dynamics.py",14227,5,"model",python,selection_mouse +7759,8219344,"train_dynamics.py",14221,0,"",python,selection_mouse +7760,8219472,"train_dynamics.py",14217,9,"optimizer",python,selection_mouse +7761,8219605,"TERMINAL",0,0,"9444",,terminal_output +7762,8219999,"train_dynamics.py",14230,0,"",python,selection_mouse +7763,8220127,"train_dynamics.py",14227,5,"model",python,selection_mouse +7764,8220644,"TERMINAL",0,0,"10555",,terminal_output +7765,8220652,"train_dynamics.py",14221,0,"",python,selection_mouse +7766,8220821,"train_dynamics.py",14217,9,"optimizer",python,selection_mouse +7767,8221350,"train_dynamics.py",14228,0,"",python,selection_mouse +7768,8221462,"train_dynamics.py",14227,5,"model",python,selection_mouse +7769,8221711,"TERMINAL",0,0,"1666",,terminal_output +7770,8221983,"train_dynamics.py",14222,0,"",python,selection_mouse +7771,8222117,"train_dynamics.py",14217,9,"optimizer",python,selection_mouse +7772,8222648,"train_dynamics.py",14231,0,"",python,selection_mouse +7773,8222753,"train_dynamics.py",14227,5,"model",python,selection_mouse +7774,8222754,"TERMINAL",0,0,"2777",,terminal_output +7775,8223804,"TERMINAL",0,0,"3888",,terminal_output +7776,8224925,"TERMINAL",0,0,"4999",,terminal_output +7777,8225954,"TERMINAL",0,0,"520201:00",,terminal_output +7778,8226895,"TERMINAL",0,0,"6111",,terminal_output +7779,8227980,"TERMINAL",0,0,"7222",,terminal_output +7780,8229028,"TERMINAL",0,0,"8333",,terminal_output +7781,8230022,"TERMINAL",0,0,"9444",,terminal_output +7782,8231202,"TERMINAL",0,0,"20555",,terminal_output +7783,8232112,"TERMINAL",0,0,"1666",,terminal_output +7784,8233137,"TERMINAL",0,0,"2777",,terminal_output +7785,8234409,"TERMINAL",0,0,"3888",,terminal_output +7786,8235253,"TERMINAL",0,0,"4999",,terminal_output +7787,8236335,"TERMINAL",0,0,"5303010",,terminal_output +7788,8237393,"TERMINAL",0,0,"6222",,terminal_output +7789,8238346,"TERMINAL",0,0,"8333",,terminal_output +7790,8239415,"TERMINAL",0,0,"9444",,terminal_output +7791,8240495,"TERMINAL",0,0,"30555",,terminal_output +7792,8241455,"TERMINAL",0,0,"1666",,terminal_output +7793,8242552,"TERMINAL",0,0,"2777",,terminal_output +7794,8243545,"TERMINAL",0,0,"3888",,terminal_output +7795,8244586,"TERMINAL",0,0,"4999",,terminal_output +7796,8245653,"TERMINAL",0,0,"5404020",,terminal_output +7797,8246693,"TERMINAL",0,0,"6111",,terminal_output +7798,8247712,"TERMINAL",0,0,"7222",,terminal_output +7799,8248759,"TERMINAL",0,0,"8333",,terminal_output +7800,8249840,"TERMINAL",0,0,"9444",,terminal_output +7801,8250848,"TERMINAL",0,0,"40555",,terminal_output +7802,8251960,"TERMINAL",0,0,"1666",,terminal_output +7803,8252937,"TERMINAL",0,0,"2777",,terminal_output +7804,8253979,"TERMINAL",0,0,"3888",,terminal_output 
+7805,8255046,"TERMINAL",0,0,"4999",,terminal_output +7806,8256061,"TERMINAL",0,0,"5505030",,terminal_output +7807,8257103,"TERMINAL",0,0,"6111",,terminal_output +7808,8258228,"TERMINAL",0,0,"7222",,terminal_output +7809,8259212,"TERMINAL",0,0,"8333",,terminal_output +7810,8260253,"TERMINAL",0,0,"9444",,terminal_output +7811,8261426,"TERMINAL",0,0,"50555",,terminal_output +7812,8262349,"TERMINAL",0,0,"1777",,terminal_output +7813,8263523,"TERMINAL",0,0,"3888",,terminal_output +7814,8264429,"TERMINAL",0,0,"4999",,terminal_output +7815,8265640,"TERMINAL",0,0,"58:008:0040",,terminal_output +7816,8266768,"TERMINAL",0,0,"6111",,terminal_output +7817,8267760,"TERMINAL",0,0,"7222",,terminal_output +7818,8268863,"TERMINAL",0,0,"8333",,terminal_output +7819,8269845,"TERMINAL",0,0,"9444",,terminal_output +7820,8270858,"TERMINAL",0,0,"4:00555",,terminal_output +7821,8271932,"TERMINAL",0,0,"1666",,terminal_output +7822,8273000,"TERMINAL",0,0,"2777",,terminal_output +7823,8274121,"TERMINAL",0,0,"3888",,terminal_output +7824,8275023,"TERMINAL",0,0,"4999",,terminal_output +7825,8276148,"TERMINAL",0,0,"5101050",,terminal_output +7826,8277174,"TERMINAL",0,0,"6111",,terminal_output +7827,8278002,"train_tokenizer.py",0,0,"",python,tab +7828,8278002,"train_tokenizer.py",4739,0,"",python,selection_mouse +7829,8278189,"train_tokenizer.py",4738,1," ",python,selection_mouse +7830,8278190,"train_tokenizer.py",4737,2,"n ",python,selection_mouse +7831,8278190,"train_tokenizer.py",4736,3,"rn ",python,selection_mouse +7832,8278190,"train_tokenizer.py",4734,5,"turn ",python,selection_mouse +7833,8278191,"train_tokenizer.py",4733,6,"eturn ",python,selection_mouse +7834,8278191,"train_tokenizer.py",4732,7,"return ",python,selection_mouse +7835,8278234,"train_tokenizer.py",4731,8," return ",python,selection_mouse +7836,8278256,"train_tokenizer.py",4730,9," return ",python,selection_mouse +7837,8278296,"TERMINAL",0,0,"7222",,terminal_output +7838,8279126,"train_tokenizer.py",4760,0,"",python,selection_mouse +7839,8279203,"TERMINAL",0,0,"8333",,terminal_output +7840,8279458,"train_tokenizer.py",4730,30," return loss, recon, metrics\n",python,selection_mouse +7841,8279464,"train_tokenizer.py",4662,98," (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n",python,selection_mouse +7842,8279503,"train_tokenizer.py",4640,120," tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n",python,selection_mouse +7843,8279612,"train_tokenizer.py",4547,213,"def val_step(tokenizer: TokenizerVQVAE, inputs: dict) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n",python,selection_mouse +7844,8279697,"train_tokenizer.py",4539,221,"nnx.jit\ndef val_step(tokenizer: TokenizerVQVAE, inputs: dict) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n",python,selection_mouse +7845,8279787,"train_tokenizer.py",4538,222,"@nnx.jit\ndef val_step(tokenizer: TokenizerVQVAE, inputs: dict) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n",python,selection_mouse +7846,8280246,"TERMINAL",0,0,"9444",,terminal_output +7847,8281309,"TERMINAL",0,0,"10666",,terminal_output +7848,8282351,"TERMINAL",0,0,"2777",,terminal_output 
+7849,8283358,"TERMINAL",0,0,"3888",,terminal_output +7850,8283794,"train_dynamics.py",0,0,"",python,tab +7851,8283795,"train_dynamics.py",4975,0,"",python,selection_mouse +7852,8284149,"train_dynamics.py",4997,0,"",python,selection_mouse +7853,8284467,"TERMINAL",0,0,"4999",,terminal_output +7854,8284521,"train_dynamics.py",4997,0,"\n",python,content +7855,8284722,"train_dynamics.py",4998,0,"\n",python,content +7856,8284925,"train_dynamics.py",4998,0,"",python,selection_command +7857,8285470,"train_dynamics.py",4998,0,"@nnx.jit\ndef val_step(tokenizer: TokenizerVQVAE, inputs: dict) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n",python,content +7858,8285515,"TERMINAL",0,0,"520202:00",,terminal_output +7859,8286479,"TERMINAL",0,0,"6111",,terminal_output +7860,8287562,"TERMINAL",0,0,"7222",,terminal_output +7861,8288571,"TERMINAL",0,0,"8333",,terminal_output +7862,8288903,"train_dynamics.py",5221,0,"",python,selection_mouse +7863,8289137,"train_dynamics.py",5220,1,"\n",python,selection_mouse +7864,8289137,"train_dynamics.py",5219,2,"\n\n",python,selection_mouse +7865,8289137,"train_dynamics.py",5213,8,"etrics\n\n",python,selection_mouse +7866,8289137,"train_dynamics.py",5208,13,"on, metrics\n\n",python,selection_mouse +7867,8289148,"train_dynamics.py",5134,87,"con, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n\n",python,selection_mouse +7868,8289161,"train_dynamics.py",5130,91," (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n\n",python,selection_mouse +7869,8289176,"train_dynamics.py",5128,93,"s, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n\n",python,selection_mouse +7870,8289194,"train_dynamics.py",5126,95,"oss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n\n",python,selection_mouse +7871,8289213,"train_dynamics.py",5104,117,"okenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n\n",python,selection_mouse +7872,8289228,"train_dynamics.py",5103,118,"tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n\n",python,selection_mouse +7873,8289246,"train_dynamics.py",5102,119," tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n\n",python,selection_mouse +7874,8289314,"train_dynamics.py",5101,120," tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n\n",python,selection_mouse +7875,8289356,"train_dynamics.py",5100,121," tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n\n",python,selection_mouse +7876,8289357,"train_dynamics.py",5099,122," tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n\n",python,selection_mouse +7877,8289358,"train_dynamics.py",5007,214,"def val_step(tokenizer: TokenizerVQVAE, inputs: dict) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n\n",python,selection_mouse +7878,8289557,"train_dynamics.py",4998,223,"@nnx.jit\ndef val_step(tokenizer: TokenizerVQVAE, inputs: dict) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, 
(recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs)\n return loss, recon, metrics\n\n",python,selection_mouse +7879,8289636,"TERMINAL",0,0,"9444",,terminal_output +7880,8290074,"train_dynamics.py",4998,223,"",python,content +7881,8290642,"TERMINAL",0,0,"20555",,terminal_output +7882,8291336,"train_dynamics.py",4998,0,"\n",python,content +7883,8291552,"train_dynamics.py",4998,0,"",python,selection_command +7884,8291688,"TERMINAL",0,0,"1666",,terminal_output +7885,8292808,"TERMINAL",0,0,"2777",,terminal_output +7886,8293857,"TERMINAL",0,0,"3888",,terminal_output +7887,8294830,"TERMINAL",0,0,"4999",,terminal_output +7888,8295872,"TERMINAL",0,0,"5303010",,terminal_output +7889,8296930,"TERMINAL",0,0,"6111",,terminal_output +7890,8297946,"TERMINAL",0,0,"7222",,terminal_output +7891,8299004,"TERMINAL",0,0,"8333",,terminal_output +7892,8300170,"TERMINAL",0,0,"9444",,terminal_output +7893,8301077,"TERMINAL",0,0,"30555",,terminal_output +7894,8302121,"TERMINAL",0,0,"1666[12;196H",,terminal_output +7895,8303173,"TERMINAL",0,0,"2777",,terminal_output +7896,8304263,"TERMINAL",0,0,"3888",,terminal_output +7897,8305446,"TERMINAL",0,0,"4999",,terminal_output +7898,8306299,"TERMINAL",0,0,"5414121",,terminal_output +7899,8307412,"TERMINAL",0,0,"7222",,terminal_output +7900,8308382,"TERMINAL",0,0,"8333",,terminal_output +7901,8308700,"train_dynamics.py",4998,0,"@nnx.jit\n\n",python,content +7902,8309002,"train_dynamics.py",5007,0,"def val_step(genie: Genie, inputs: dict) -> tuple[jax.Array, jax.Array, dict]:\n",python,content +7903,8309186,"train_dynamics.py",5086,0," """"""Evaluate model and compute metrics""""""\n",python,content +7904,8309247,"train_dynamics.py",5131,0," genie.eval()\n",python,content +7905,8309429,"TERMINAL",0,0,"9444",,terminal_output +7906,8309504,"train_dynamics.py",5148,0," (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n",python,content +7907,8309718,"train_dynamics.py",5211,0," return loss, recon, metrics\n",python,content +7908,8309722,"train_dynamics.py",5243,1,"",python,content +7909,8310476,"TERMINAL",0,0,"40555",,terminal_output +7910,8311513,"TERMINAL",0,0,"1666",,terminal_output +7911,8312561,"TERMINAL",0,0,"2777",,terminal_output +7912,8313598,"TERMINAL",0,0,"3888",,terminal_output +7913,8314633,"TERMINAL",0,0,"4999",,terminal_output +7914,8316589,"TERMINAL",0,0,"5515131",,terminal_output +7915,8317778,"TERMINAL",0,0,"7222",,terminal_output +7916,8318678,"TERMINAL",0,0,"8333",,terminal_output +7917,8319753,"TERMINAL",0,0,"9444",,terminal_output +7918,8320747,"TERMINAL",0,0,"50555",,terminal_output +7919,8321795,"TERMINAL",0,0,"1666",,terminal_output +7920,8322841,"TERMINAL",0,0,"2777",,terminal_output +7921,8323945,"TERMINAL",0,0,"3888",,terminal_output +7922,8324969,"TERMINAL",0,0,"4999",,terminal_output +7923,8326005,"TERMINAL",0,0,"59:009:0040",,terminal_output +7924,8327259,"TERMINAL",0,0,"6111",,terminal_output +7925,8328156,"TERMINAL",0,0,"7222",,terminal_output +7926,8329174,"TERMINAL",0,0,"8333",,terminal_output +7927,8330171,"TERMINAL",0,0,"9444",,terminal_output +7928,8330985,"train_dynamics.py",4628,0,"",python,selection_mouse +7929,8331229,"TERMINAL",0,0,"5:00555",,terminal_output +7930,8332008,"train_dynamics.py",4627,0,"",python,selection_command +7931,8332354,"TERMINAL",0,0,"1666",,terminal_output +7932,8332406,"train_dynamics.py",4655,0,"\n ",python,content +7933,8332722,"train_dynamics.py",4664,0,"m",python,content +7934,8332723,"train_dynamics.py",4665,0,"",python,selection_keyboard 
+7935,8332892,"train_dynamics.py",4665,0,"o",python,content +7936,8332893,"train_dynamics.py",4666,0,"",python,selection_keyboard +7937,8332979,"train_dynamics.py",4666,0,"d",python,content +7938,8332981,"train_dynamics.py",4667,0,"",python,selection_keyboard +7939,8333097,"train_dynamics.py",4667,0,"e",python,content +7940,8333098,"train_dynamics.py",4668,0,"",python,selection_keyboard +7941,8333168,"train_dynamics.py",4668,0,"l",python,content +7942,8333169,"train_dynamics.py",4669,0,"",python,selection_keyboard +7943,8333325,"TERMINAL",0,0,"2888",,terminal_output +7944,8333378,"train_dynamics.py",4669,0,".",python,content +7945,8333380,"train_dynamics.py",4670,0,"",python,selection_keyboard +7946,8334157,"train_dynamics.py",4670,0,"t",python,content +7947,8334157,"train_dynamics.py",4671,0,"",python,selection_keyboard +7948,8334374,"train_dynamics.py",4671,0,"r",python,content +7949,8334376,"train_dynamics.py",4672,0,"",python,selection_keyboard +7950,8334376,"TERMINAL",0,0,"4999",,terminal_output +7951,8334510,"train_dynamics.py",4672,0,"a",python,content +7952,8334512,"train_dynamics.py",4673,0,"",python,selection_keyboard +7953,8334585,"train_dynamics.py",4673,0,"i",python,content +7954,8334586,"train_dynamics.py",4674,0,"",python,selection_keyboard +7955,8334629,"train_dynamics.py",4674,0,"n",python,content +7956,8334630,"train_dynamics.py",4675,0,"",python,selection_keyboard +7957,8335396,"train_dynamics.py",4675,0,"()",python,content +7958,8335397,"train_dynamics.py",4676,0,"",python,selection_keyboard +7959,8335428,"TERMINAL",0,0,"5101050",,terminal_output +7960,8335446,"train_dynamics.py",4676,1,")",python,content +7961,8335446,"train_dynamics.py",4677,0,"",python,selection_keyboard +7962,8335555,"train_dynamics.py",4676,0,"",python,selection_command +7963,8336526,"TERMINAL",0,0,"6111",,terminal_output +7964,8337455,"TERMINAL",0,0,"7222",,terminal_output +7965,8337959,"train_dynamics.py",2811,0,"",python,selection_mouse +7966,8338429,"train_dynamics.py",2800,18,"",python,content +7967,8338487,"train_dynamics.py",2804,0,"",python,selection_command +7968,8338575,"TERMINAL",0,0,"8333",,terminal_output +7969,8339528,"TERMINAL",0,0,"9444",,terminal_output +7970,8340562,"TERMINAL",0,0,"10555",,terminal_output +7971,8341672,"TERMINAL",0,0,"1666",,terminal_output +7972,8342712,"TERMINAL",0,0,"2777",,terminal_output +7973,8343679,"train_dynamics.py",5431,0,"",python,selection_mouse +7974,8343726,"TERMINAL",0,0,"3888",,terminal_output +7975,8344794,"TERMINAL",0,0,"4999",,terminal_output +7976,8345807,"TERMINAL",0,0,"520203:00",,terminal_output +7977,8346830,"TERMINAL",0,0,"6111",,terminal_output +7978,8347843,"TERMINAL",0,0,"7222",,terminal_output +7979,8348914,"TERMINAL",0,0,"8333",,terminal_output +7980,8349962,"TERMINAL",0,0,"9444",,terminal_output +7981,8351081,"TERMINAL",0,0,"20555",,terminal_output +7982,8352112,"TERMINAL",0,0,"1666",,terminal_output +7983,8353062,"TERMINAL",0,0,"2777",,terminal_output +7984,8354148,"TERMINAL",0,0,"3888",,terminal_output +7985,8355217,"TERMINAL",0,0,"4999",,terminal_output +7986,8356188,"TERMINAL",0,0,"5303010",,terminal_output +7987,8357203,"TERMINAL",0,0,"6111",,terminal_output +7988,8358245,"TERMINAL",0,0,"7222",,terminal_output +7989,8359345,"TERMINAL",0,0,"8444",,terminal_output +7990,8360402,"TERMINAL",0,0,"30555",,terminal_output +7991,8361430,"TERMINAL",0,0,"1666",,terminal_output +7992,8362218,"train_dynamics.py",483,0,"",python,selection_mouse +7993,8362260,"train_dynamics.py",482,0,"",python,selection_command 
+7994,8362476,"TERMINAL",0,0,"2777",,terminal_output +7995,8363151,"train_dynamics.py",483,0,"",python,selection_command +7996,8363525,"train_dynamics.py",469,14,"",python,content +7997,8363537,"TERMINAL",0,0,"3888",,terminal_output +7998,8363797,"train_dynamics.py",469,0,"c",python,content +7999,8363797,"train_dynamics.py",470,0,"",python,selection_keyboard +8000,8364042,"train_dynamics.py",470,0,"r",python,content +8001,8364043,"train_dynamics.py",471,0,"",python,selection_keyboard +8002,8364438,"train_dynamics.py",469,2,"create_dataloader_iterator",python,content +8003,8364595,"TERMINAL",0,0,"4999",,terminal_output +8004,8365580,"TERMINAL",0,0,"5404020",,terminal_output +8005,8366679,"TERMINAL",0,0,"6111",,terminal_output +8006,8367692,"TERMINAL",0,0,"7222",,terminal_output +8007,8368795,"TERMINAL",0,0,"8333",,terminal_output +8008,8369838,"TERMINAL",0,0,"9444",,terminal_output +8009,8370932,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab +8010,8371103,"TERMINAL",0,0,"40555",,terminal_output +8011,8371796,"TERMINAL",0,0,"1666",,terminal_output +8012,8372847,"TERMINAL",0,0,"2777",,terminal_output +8013,8373904,"TERMINAL",0,0,"3888",,terminal_output +8014,8374387,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu copy.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=00:20:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=10 \\n --log_checkpoint_interval=1000 \\n --log \\n --name=coinrun-tokenizer-dev-$slurm_job_id \\n --tags tokenizer coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 200 \\n --val_data_dir $array_records_dir_val \\n --val_interval 10 \\n --val_steps 50 \\n --data_dir $array_records_dir_train\n",shellscript,tab +8015,8374933,"TERMINAL",0,0,"4999",,terminal_output +8016,8375971,"TERMINAL",0,0,"5505030",,terminal_output +8017,8377045,"TERMINAL",0,0,"6111",,terminal_output +8018,8378102,"TERMINAL",0,0,"7222",,terminal_output +8019,8379159,"TERMINAL",0,0,"8333",,terminal_output +8020,8380138,"TERMINAL",0,0,"9444",,terminal_output +8021,8381219,"TERMINAL",0,0,"50555",,terminal_output +8022,8382313,"TERMINAL",0,0,"1666",,terminal_output +8023,8383291,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"#!/usr/bin/env 
bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=00:20:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=10 \\n --log_checkpoint_interval=1000 \\n --log \\n --name=coinrun-tokenizer-dev-$slurm_job_id \\n --tags tokenizer coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 200 \\n --val_data_dir $array_records_dir_val \\n --val_interval 10 \\n --val_steps 50 \\n --data_dir $array_records_dir_train\n",shellscript,tab +8024,8383429,"TERMINAL",0,0,"2777",,terminal_output +8025,8384322,"TERMINAL",0,0,"3999",,terminal_output +8026,8385410,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",490,0,"",shellscript,selection_mouse +8027,8385426,"TERMINAL",0,0,"530:0030:0040",,terminal_output +8028,8386400,"TERMINAL",0,0,"6111",,terminal_output +8029,8386478,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",504,1,"t",shellscript,selection_command +8030,8386564,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1048,2,"to",shellscript,selection_command +8031,8386604,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1048,3,"tok",shellscript,selection_command +8032,8386746,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1048,4,"toke",shellscript,selection_command +8033,8386816,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1048,5,"token",shellscript,selection_command +8034,8387004,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1048,6,"tokeni",shellscript,selection_command +8035,8387119,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1048,7,"tokeniz",shellscript,selection_command +8036,8387363,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1048,8,"tokenize",shellscript,selection_command +8037,8387364,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1048,9,"tokenizer",shellscript,selection_command +8038,8387470,"TERMINAL",0,0,"7222",,terminal_output +8039,8388447,"TERMINAL",0,0,"8333",,terminal_output +8040,8389521,"TERMINAL",0,0,"9444",,terminal_output +8041,8390519,"TERMINAL",0,0,"6:00555",,terminal_output +8042,8391170,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1048,9,"dyn",shellscript,content +8043,8391174,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1138,9,"tokenizer",shellscript,selection_command 
+8044,8391554,"TERMINAL",0,0,"1666",,terminal_output +8045,8391705,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1138,9,"dyn",shellscript,content +8046,8391708,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1403,9,"tokenizer",shellscript,selection_command +8047,8392600,"TERMINAL",0,0,"2777",,terminal_output +8048,8392697,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1403,9,"dyn",shellscript,content +8049,8392700,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1438,9,"tokenizer",shellscript,selection_command +8050,8393181,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1438,9,"dyn",shellscript,content +8051,8393184,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",267,9,"tokenizer",shellscript,selection_command +8052,8393662,"TERMINAL",0,0,"3888",,terminal_output +8053,8393760,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",267,9,"dyn",shellscript,content +8054,8393763,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",377,9,"tokenizer",shellscript,selection_command +8055,8394023,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",377,9,"dyn",shellscript,content +8056,8394026,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",416,9,"tokenizer",shellscript,selection_command +8057,8394416,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",416,9,"dyn",shellscript,content +8058,8394723,"TERMINAL",0,0,"4999",,terminal_output +8059,8395702,"TERMINAL",0,0,"5101050",,terminal_output +8060,8396526,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",423,0,"",shellscript,selection_command +8061,8396754,"TERMINAL",0,0,"6111",,terminal_output +8062,8397791,"TERMINAL",0,0,"7222",,terminal_output +8063,8398134,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1124,0,"",shellscript,selection_mouse +8064,8398909,"TERMINAL",0,0,"8333",,terminal_output +8065,8399439,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1123,0,"",shellscript,selection_mouse +8066,8400104,"TERMINAL",0,0,"9444",,terminal_output +8067,8400957,"TERMINAL",0,0,"10555",,terminal_output +8068,8402011,"TERMINAL",0,0,"1666",,terminal_output +8069,8402653,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1123,0,"a",shellscript,content +8070,8402654,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1124,0,"",shellscript,selection_keyboard +8071,8402861,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1124,0,"m",shellscript,content +8072,8402862,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1125,0,"",shellscript,selection_keyboard +8073,8403006,"TERMINAL",0,0,"2777",,terminal_output +8074,8403588,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1125,0,"i",shellscript,content +8075,8403588,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1126,0,"",shellscript,selection_keyboard +8076,8403716,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1126,0,"c",shellscript,content +8077,8403716,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1127,0,"",shellscript,selection_keyboard +8078,8404062,"TERMINAL",0,0,"3888",,terminal_output +8079,8404097,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1127,0,"s",shellscript,content +8080,8404098,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1128,0,"",shellscript,selection_keyboard +8081,8404829,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1127,0,"",shellscript,selection_command +8082,8405096,"TERMINAL",0,0,"4999",,terminal_output 
+8083,8406117,"TERMINAL",0,0,"520204:00",,terminal_output +8084,8407151,"TERMINAL",0,0,"6111",,terminal_output +8085,8408189,"TERMINAL",0,0,"7222",,terminal_output +8086,8409379,"TERMINAL",0,0,"8333",,terminal_output +8087,8410373,"TERMINAL",0,0,"9555",,terminal_output +8088,8411402,"TERMINAL",0,0,"21666",,terminal_output +8089,8411925,"train_dynamics.py",0,0,"",python,tab +8090,8412408,"TERMINAL",0,0,"2777",,terminal_output +8091,8412833,"train_dynamics.py",0,0,"",python,tab +8092,8413406,"TERMINAL",0,0,"3888",,terminal_output +8093,8414481,"TERMINAL",0,0,"4999",,terminal_output +8094,8415522,"TERMINAL",0,0,"5303010",,terminal_output +8095,8416680,"TERMINAL",0,0,"6111",,terminal_output +8096,8417562,"TERMINAL",0,0,"7222",,terminal_output +8097,8418596,"TERMINAL",0,0,"8333",,terminal_output +8098,8419690,"TERMINAL",0,0,"9444",,terminal_output +8099,8420670,"TERMINAL",0,0,"30555",,terminal_output +8100,8421713,"TERMINAL",0,0,"1666",,terminal_output +8101,8422769,"TERMINAL",0,0,"2777",,terminal_output +8102,8423793,"TERMINAL",0,0,"3888",,terminal_output +8103,8424839,"TERMINAL",0,0,"4999",,terminal_output +8104,8425880,"TERMINAL",0,0,"5404020",,terminal_output +8105,8426961,"TERMINAL",0,0,"6111",,terminal_output +8106,8427962,"TERMINAL",0,0,"7222",,terminal_output +8107,8429110,"TERMINAL",0,0,"8333",,terminal_output +8108,8430091,"TERMINAL",0,0,"9444",,terminal_output +8109,8431166,"TERMINAL",0,0,"40555",,terminal_output +8110,8432194,"TERMINAL",0,0,"1666",,terminal_output +8111,8433213,"TERMINAL",0,0,"2777",,terminal_output +8112,8434339,"TERMINAL",0,0,"3888",,terminal_output +8113,8435363,"TERMINAL",0,0,"4999",,terminal_output +8114,8435747,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +8115,8435747,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1288,0,"",shellscript,selection_mouse +8116,8436309,"TERMINAL",0,0,"5515131",,terminal_output +8117,8436744,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1285,0,"",shellscript,selection_mouse +8118,8437561,"TERMINAL",0,0,"7222",,terminal_output +8119,8438572,"TERMINAL",0,0,"8333",,terminal_output +8120,8439609,"TERMINAL",0,0,"9444",,terminal_output +8121,8440656,"TERMINAL",0,0,"50555",,terminal_output +8122,8441809,"TERMINAL",0,0,"1666",,terminal_output +8123,8442778,"TERMINAL",0,0,"2777",,terminal_output +8124,8443795,"TERMINAL",0,0,"3888",,terminal_output +8125,8444857,"TERMINAL",0,0,"4999",,terminal_output +8126,8445912,"TERMINAL",0,0,"51:001:0040",,terminal_output +8127,8447252,"TERMINAL",0,0,"6111",,terminal_output +8128,8448363,"TERMINAL",0,0,"7333",,terminal_output +8129,8449364,"TERMINAL",0,0,"9444",,terminal_output +8130,8450412,"TERMINAL",0,0,"7:00555",,terminal_output +8131,8451428,"TERMINAL",0,0,"1666",,terminal_output +8132,8452567,"TERMINAL",0,0,"2777",,terminal_output +8133,8453528,"TERMINAL",0,0,"3888",,terminal_output +8134,8454557,"TERMINAL",0,0,"4999",,terminal_output +8135,8455625,"TERMINAL",0,0,"5101050",,terminal_output +8136,8456662,"TERMINAL",0,0,"6111",,terminal_output +8137,8457678,"TERMINAL",0,0,"7222",,terminal_output +8138,8458813,"TERMINAL",0,0,"8333",,terminal_output +8139,8459829,"TERMINAL",0,0,"9444",,terminal_output +8140,8460832,"TERMINAL",0,0,"10555",,terminal_output +8141,8461983,"TERMINAL",0,0,"1666",,terminal_output +8142,8462918,"TERMINAL",0,0,"2777",,terminal_output +8143,8463972,"TERMINAL",0,0,"3888",,terminal_output +8144,8465037,"TERMINAL",0,0,"4999",,terminal_output +8145,8466063,"TERMINAL",0,0,"520205:00",,terminal_output 
+8146,8467213,"TERMINAL",0,0,"6111",,terminal_output +8147,8468161,"TERMINAL",0,0,"7222",,terminal_output +8148,8469321,"TERMINAL",0,0,"8333",,terminal_output +8149,8470317,"TERMINAL",0,0,"9444",,terminal_output +8150,8471415,"TERMINAL",0,0,"20666",,terminal_output +8151,8472433,"TERMINAL",0,0,"2777",,terminal_output +8152,8473409,"TERMINAL",0,0,"3888",,terminal_output +8153,8474448,"TERMINAL",0,0,"4999",,terminal_output +8154,8475620,"TERMINAL",0,0,"5303010",,terminal_output +8155,8476529,"TERMINAL",0,0,"6111",,terminal_output +8156,8477628,"TERMINAL",0,0,"7222",,terminal_output +8157,8478620,"TERMINAL",0,0,"8333",,terminal_output +8158,8479712,"TERMINAL",0,0,"9444",,terminal_output +8159,8480752,"TERMINAL",0,0,"30555",,terminal_output +8160,8481779,"TERMINAL",0,0,"1666",,terminal_output +8161,8482809,"TERMINAL",0,0,"2777",,terminal_output +8162,8483870,"TERMINAL",0,0,"3888",,terminal_output +8163,8484878,"TERMINAL",0,0,"4999",,terminal_output +8164,8486052,"TERMINAL",0,0,"5404020",,terminal_output +8165,8486963,"TERMINAL",0,0,"6111",,terminal_output +8166,8488002,"TERMINAL",0,0,"7222",,terminal_output +8167,8489061,"TERMINAL",0,0,"8333",,terminal_output +8168,8490096,"TERMINAL",0,0,"9444",,terminal_output +8169,8491230,"TERMINAL",0,0,"40555",,terminal_output +8170,8492196,"TERMINAL",0,0,"1666",,terminal_output +8171,8493325,"TERMINAL",0,0,"2777",,terminal_output +8172,8494346,"TERMINAL",0,0,"3999",,terminal_output +8173,8495405,"TERMINAL",0,0,"5505030",,terminal_output +8174,8496497,"TERMINAL",0,0,"6111",,terminal_output +8175,8497451,"TERMINAL",0,0,"7222",,terminal_output +8176,8498546,"TERMINAL",0,0,"8333",,terminal_output +8177,8499516,"TERMINAL",0,0,"9444",,terminal_output +8178,8500613,"TERMINAL",0,0,"50555",,terminal_output +8179,8501655,"TERMINAL",0,0,"1666",,terminal_output +8180,8502673,"TERMINAL",0,0,"2777",,terminal_output +8181,8503657,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1561,0,"",shellscript,selection_mouse +8182,8503658,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1560,0,"",shellscript,selection_command +8183,8503723,"TERMINAL",0,0,"3888",,terminal_output +8184,8504129,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1629,0,"",shellscript,selection_mouse +8185,8504138,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1628,0,"",shellscript,selection_command +8186,8504756,"TERMINAL",0,0,"4999",,terminal_output +8187,8505167,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1650,0,"",shellscript,selection_mouse +8188,8505213,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1649,0,"",shellscript,selection_command +8189,8505691,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1690,0,"",shellscript,selection_mouse +8190,8505732,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1689,0,"",shellscript,selection_command +8191,8505815,"TERMINAL",0,0,"52:002:0040",,terminal_output +8192,8506943,"TERMINAL",0,0,"6111",,terminal_output +8193,8507166,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1690,0,"",shellscript,selection_command +8194,8507424,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1690,0," ",shellscript,content +8195,8507426,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1691,0,"",shellscript,selection_keyboard +8196,8507637,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1691,0,"\",shellscript,content +8197,8507637,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1692,0,"",shellscript,selection_keyboard 
+8198,8507902,"TERMINAL",0,0,"7222",,terminal_output +8199,8508127,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1692,0,"\n ",shellscript,content +8200,8508990,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1697,0,"-",shellscript,content +8201,8508991,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1698,0,"",shellscript,selection_keyboard +8202,8509025,"TERMINAL",0,0,"8333",,terminal_output +8203,8509134,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1698,0,"-",shellscript,content +8204,8509135,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1699,0,"",shellscript,selection_keyboard +8205,8510008,"TERMINAL",0,0,"9444",,terminal_output +8206,8511028,"TERMINAL",0,0,"8:00555",,terminal_output +8207,8512102,"TERMINAL",0,0,"1666",,terminal_output +8208,8513096,"TERMINAL",0,0,"2777",,terminal_output +8209,8513659,"train_dynamics.py",0,0,"",python,tab +8210,8513660,"train_dynamics.py",1698,0,"",python,selection_mouse +8211,8513660,"train_dynamics.py",1689,14,"lam_checkpoint",python,selection_mouse +8212,8514322,"TERMINAL",0,0,"3888",,terminal_output +8213,8515173,"TERMINAL",0,0,"4999",,terminal_output +8214,8516311,"TERMINAL",0,0,"5101050",,terminal_output +8215,8517265,"TERMINAL",0,0,"6111",,terminal_output +8216,8518308,"TERMINAL",0,0,"7333",,terminal_output +8217,8519379,"TERMINAL",0,0,"9444",,terminal_output +8218,8520405,"TERMINAL",0,0,"10555",,terminal_output +8219,8520761,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +8220,8520899,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1699,0,"lam_checkpoint",shellscript,content +8221,8521437,"TERMINAL",0,0,"1666",,terminal_output +8222,8522481,"TERMINAL",0,0,"2777",,terminal_output +8223,8523555,"TERMINAL",0,0,"3888",,terminal_output +8224,8524560,"TERMINAL",0,0,"4999",,terminal_output +8225,8524662,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1713,0," ",shellscript,content +8226,8524663,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1714,0,"",shellscript,selection_keyboard +8227,8525182,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1714,0,"\",shellscript,content +8228,8525183,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1715,0,"",shellscript,selection_keyboard +8229,8525599,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1715,0,"\n ",shellscript,content +8230,8525633,"TERMINAL",0,0,"520206:00",,terminal_output +8231,8526121,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1720,0,".",shellscript,content +8232,8526122,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1721,0,"",shellscript,selection_keyboard +8233,8526257,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1721,0,".",shellscript,content +8234,8526258,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1722,0,"",shellscript,selection_keyboard +8235,8526595,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1721,1,"",shellscript,content +8236,8526682,"TERMINAL",0,0,"6111",,terminal_output +8237,8526737,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1720,1,"",shellscript,content +8238,8527091,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1720,0,"-",shellscript,content +8239,8527091,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1721,0,"",shellscript,selection_keyboard +8240,8527313,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1721,0,"-",shellscript,content 
+8241,8527314,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1722,0,"",shellscript,selection_keyboard +8242,8527679,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1722,0,"t",shellscript,content +8243,8527680,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1723,0,"",shellscript,selection_keyboard +8244,8527729,"TERMINAL",0,0,"7222",,terminal_output +8245,8527833,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1723,0,"o",shellscript,content +8246,8527833,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1724,0,"",shellscript,selection_keyboard +8247,8527909,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1724,0,"k",shellscript,content +8248,8527910,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1725,0,"",shellscript,selection_keyboard +8249,8528018,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1725,0,"e",shellscript,content +8250,8528019,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1726,0,"",shellscript,selection_keyboard +8251,8528079,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1726,0,"n",shellscript,content +8252,8528080,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1727,0,"",shellscript,selection_keyboard +8253,8528234,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1727,0,"i",shellscript,content +8254,8528235,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1728,0,"",shellscript,selection_keyboard +8255,8528408,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1728,0,"z",shellscript,content +8256,8528409,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1729,0,"",shellscript,selection_keyboard +8257,8528475,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1729,0,"e",shellscript,content +8258,8528475,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1730,0,"",shellscript,selection_keyboard +8259,8528549,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1730,0,"r",shellscript,content +8260,8528549,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1731,0,"",shellscript,selection_keyboard +8261,8528722,"TERMINAL",0,0,"8333",,terminal_output +8262,8528816,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1731,0,"_",shellscript,content +8263,8528816,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1732,0,"",shellscript,selection_keyboard +8264,8529486,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1732,0,"c",shellscript,content +8265,8529487,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1733,0,"",shellscript,selection_keyboard +8266,8529662,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1733,0,"h",shellscript,content +8267,8529663,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1734,0,"",shellscript,selection_keyboard +8268,8529779,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1734,0,"e",shellscript,content +8269,8529780,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1735,0,"",shellscript,selection_keyboard +8270,8529809,"TERMINAL",0,0,"9444",,terminal_output +8271,8529894,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1735,0,"c",shellscript,content +8272,8529894,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1736,0,"",shellscript,selection_keyboard +8273,8530015,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1736,0,"j",shellscript,content +8274,8530016,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1737,0,"",shellscript,selection_keyboard 
+8275,8530409,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1737,0,"p",shellscript,content +8276,8530410,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1738,0,"",shellscript,selection_keyboard +8277,8530676,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1738,0,"o",shellscript,content +8278,8530677,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1739,0,"",shellscript,selection_keyboard +8279,8530810,"TERMINAL",0,0,"20555",,terminal_output +8280,8530958,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1738,1,"",shellscript,content +8281,8531091,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1737,1,"",shellscript,content +8282,8531211,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1736,1,"",shellscript,content +8283,8531861,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1736,0,"k",shellscript,content +8284,8531862,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1737,0,"",shellscript,selection_keyboard +8285,8531950,"TERMINAL",0,0,"1666",,terminal_output +8286,8532034,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1737,0,"p",shellscript,content +8287,8532035,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1738,0,"",shellscript,selection_keyboard +8288,8532219,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1738,0,"o",shellscript,content +8289,8532219,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1739,0,"",shellscript,selection_keyboard +8290,8532403,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1739,0,"i",shellscript,content +8291,8532403,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1740,0,"",shellscript,selection_keyboard +8292,8532465,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1740,0,"n",shellscript,content +8293,8532465,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1741,0,"",shellscript,selection_keyboard +8294,8532560,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1741,0,"t",shellscript,content +8295,8532561,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1742,0,"",shellscript,selection_keyboard +8296,8532913,"TERMINAL",0,0,"2777",,terminal_output +8297,8533982,"TERMINAL",0,0,"3888",,terminal_output +8298,8535054,"TERMINAL",0,0,"4999",,terminal_output +8299,8536044,"TERMINAL",0,0,"5303010",,terminal_output +8300,8537078,"TERMINAL",0,0,"6111",,terminal_output +8301,8537529,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1083,0,"",shellscript,selection_mouse +8302,8538116,"TERMINAL",0,0,"7222",,terminal_output +8303,8538178,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1083,0,"\n",shellscript,content +8304,8538357,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1084,0,"\n",shellscript,content +8305,8538645,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1084,0,"",shellscript,selection_command +8306,8539212,"TERMINAL",0,0,"8333",,terminal_output +8307,8539873,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1084,0,"l",shellscript,content +8308,8539874,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1085,0,"",shellscript,selection_keyboard +8309,8539982,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1085,0,"a",shellscript,content +8310,8539983,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1086,0,"",shellscript,selection_keyboard +8311,8540059,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1086,0,"m",shellscript,content 
+8312,8540060,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1087,0,"",shellscript,selection_keyboard +8313,8540208,"TERMINAL",0,0,"9444",,terminal_output +8314,8540308,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1087,0,"_",shellscript,content +8315,8540309,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1088,0,"",shellscript,selection_keyboard +8316,8541190,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1088,0,"c",shellscript,content +8317,8541190,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1089,0,"",shellscript,selection_keyboard +8318,8541266,"TERMINAL",0,0,"30555",,terminal_output +8319,8541337,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1089,0,"p",shellscript,content +8320,8541338,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1090,0,"",shellscript,selection_keyboard +8321,8542366,"TERMINAL",0,0,"1777",,terminal_output +8322,8542567,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1084,6,"lam_checkpoint",shellscript,content +8323,8543396,"TERMINAL",0,0,"3888",,terminal_output +8324,8543865,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1098,0,"=",shellscript,content +8325,8543866,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1099,0,"",shellscript,selection_keyboard +8326,8544398,"TERMINAL",0,0,"4999",,terminal_output +8327,8545235,"TERMINAL",0,0,"watch",,terminal_focus +8328,8545463,"TERMINAL",0,0,"5404020",,terminal_output +8329,8546367,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +8330,8570697,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1099,0,"\n",shellscript,content +8331,8571111,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1100,0,"t",shellscript,content +8332,8571112,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1101,0,"",shellscript,selection_keyboard +8333,8571343,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1101,0,"o",shellscript,content +8334,8571344,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1102,0,"",shellscript,selection_keyboard +8335,8571450,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1102,0,"k",shellscript,content +8336,8571451,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1103,0,"",shellscript,selection_keyboard +8337,8571509,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1103,0,"e",shellscript,content +8338,8571510,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1104,0,"",shellscript,selection_keyboard +8339,8571570,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1104,0,"n",shellscript,content +8340,8571571,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1105,0,"",shellscript,selection_keyboard +8341,8571621,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1105,0,"i",shellscript,content +8342,8571622,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1106,0,"",shellscript,selection_keyboard +8343,8571737,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1106,0,"z",shellscript,content +8344,8571738,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1107,0,"",shellscript,selection_keyboard +8345,8571806,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1107,0,"e",shellscript,content +8346,8571807,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1108,0,"",shellscript,selection_keyboard +8347,8571926,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1108,0,"r",shellscript,content 
+8348,8571926,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1109,0,"",shellscript,selection_keyboard +8349,8572129,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1109,0,"_",shellscript,content +8350,8572130,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1110,0,"",shellscript,selection_keyboard +8351,8572575,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1110,0,"c",shellscript,content +8352,8572576,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1111,0,"",shellscript,selection_keyboard +8353,8573159,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1100,11,"tokenizer_checkpoint",shellscript,content +8354,8573708,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1120,0,"=",shellscript,content +8355,8573709,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1121,0,"",shellscript,selection_keyboard +8356,8573944,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1121,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/interactive/3468835",shellscript,content +8357,8575553,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1099,0,"",shellscript,selection_mouse +8358,8603014,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1083,0,"",shellscript,selection_mouse +8359,8603781,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1099,0,"",shellscript,selection_mouse +8360,8604537,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1099,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/interactive/3468835",shellscript,content +8361,8608695,"TERMINAL",0,0,"srun",,terminal_focus +8362,8609644,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh ",,terminal_output +8363,8613511,"TERMINAL",0,0,"",,terminal_output +8364,8613650,"TERMINAL",0,0,"",,terminal_output +8365,8613842,"TERMINAL",0,0,"",,terminal_output +8366,8614469,"TERMINAL",0,0,"",,terminal_output +8367,8614688,"TERMINAL",0,0,"_single",,terminal_output +8368,8614878,"TERMINAL",0,0,"_single_",,terminal_output +8369,8615021,"TERMINAL",0,0,"_single_g",,terminal_output +8370,8615162,"TERMINAL",0,0,"_single_gp",,terminal_output +8371,8615295,"TERMINAL",0,0,"_single_gpu",,terminal_output +8372,8615430,"TERMINAL",0,0,"_single_gpu.",,terminal_output +8373,8615564,"TERMINAL",0,0,"_single_gpu.s",,terminal_output +8374,8616652,"TERMINAL",0,0,"_single_gpu.sh ",,terminal_output +8375,8617176,"TERMINAL",0,0,"_single_gpu.sh ",,terminal_output +8376,8617361,"TERMINAL",0,0,"d_single_gpu.sh ",,terminal_output +8377,8617580,"TERMINAL",0,0,"y_single_gpu.sh n_single_gpu.[1@s",,terminal_output +8378,8617898,"TERMINAL",0,0,"\r\n\r\r\n[?2004l\r",,terminal_output +8379,8617962,"TERMINAL",0,0,"#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\r\n#SBATCH --job-name=train_dyn_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource 
.venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/interactive/3468835\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/interactive/3468835\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=1000 \\r\n --log \\r\n --name=coinrun-dyn-dev-$slurm_job_id \\r\n --tags dyn coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 200 \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 10 \\r\n --val_steps 50 \\r\n --data_dir $array_records_dir_train \\r\n --lam_checkpoint \\r\n --tokenizer_checkpoint\r\n",,terminal_output +8380,8618153,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1362839\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1757077935\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757081535\r\nSLURM_PMI2_SRUN_PORT=37571\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3468835\r\nSLURM_PTY_PORT=43005\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=33\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=196\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=34789\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3468835\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=34789\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +8381,8618246,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +8382,8620862,"TERMINAL",0,0,"╭─ Parsing error ─────────────────────────────────╮\r\n│ Argument --lam-checkpoint: expected 1 argument │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help 
│\r\n╰─────────────────────────────────────────────────╯\r\n",,terminal_output +8383,8621076,"TERMINAL",0,0,"srun: error: hkn0402: task 0: Exited with exit code 2\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +8384,8624057,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +8385,8626335,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1959,0,"",shellscript,selection_mouse +8386,8627788,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1959,0,"$",shellscript,content +8387,8627790,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1960,0,"",shellscript,selection_keyboard +8388,8628671,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1960,0," ",shellscript,content +8389,8628672,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1961,0,"",shellscript,selection_keyboard +8390,8628919,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1960,0,"",shellscript,selection_command +8391,8629774,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1960,0,"l",shellscript,content +8392,8629774,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1961,0,"",shellscript,selection_keyboard +8393,8629868,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1961,0,"a",shellscript,content +8394,8629868,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1962,0,"",shellscript,selection_keyboard +8395,8630728,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1960,2,"lam_checkpoint",shellscript,content +8396,8631187,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2003,0,"",shellscript,selection_command +8397,8631800,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2003,0," ",shellscript,content +8398,8631801,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2004,0,"",shellscript,selection_keyboard +8399,8632530,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2004,0,"$",shellscript,content +8400,8632531,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2005,0,"",shellscript,selection_keyboard +8401,8633010,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2005,0,"t",shellscript,content +8402,8633010,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2006,0,"",shellscript,selection_keyboard +8403,8633068,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2006,0,"o",shellscript,content +8404,8633069,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2007,0,"",shellscript,selection_keyboard +8405,8633121,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2007,0,"k",shellscript,content +8406,8633122,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2008,0,"",shellscript,selection_keyboard +8407,8634104,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2005,3,"tokenizer_checkpoint",shellscript,content +8408,8637948,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh ",,terminal_output +8409,8638147,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\r\n#SBATCH --job-name=train_dyn_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min 
before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/interactive/3468835\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/interactive/3468835\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=1000 \\r\n --log \\r\n --name=coinrun-dyn-dev-$slurm_job_id \\r\n --tags dyn coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 200 \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 10 \\r\n --val_steps 50 \\r\n --data_dir $array_records_dir_train \\r\n --lam_checkpoint $lam_checkpoint \\r\n --tokenizer_checkpoint $tokenizer_checkpoint\r\n",,terminal_output +8410,8638287,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1362839\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1757077935\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757081535\r\nSLURM_PMI2_SRUN_PORT=37571\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3468835\r\nSLURM_PTY_PORT=43005\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=33\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=196\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=34789\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3468835\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=34789\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +8411,8638380,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +8412,8647831,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +8413,8648644,"TERMINAL",0,0,"wandb: creating run\r\nwandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_155017-05z48f0b\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-dyn-dev-3468835\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/05z48f0b\r\n",,terminal_output +8414,8653472,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35115232, 'tokenizer': 33750256, 'total': 95421392}\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_dynamics.py"", line 387, in \r\n optimizer = restore_genie_components(optimizer, replicated_sharding, rng, args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py"", line 473, in restore_genie_components\r\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/checkpoint_manager.py"", line 1621, in restore\r\n raise FileNotFoundError(f'No steps found in {self.directory}.')\r\nFileNotFoundError: No steps found in /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/interactive/3468835.\r\n",,terminal_output +8415,8654367,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-dyn-dev-3468835 at: https://wandb.ai/instant-uv/jafar/runs/05z48f0b\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_155017-05z48f0b/logs\r\n",,terminal_output +8416,8655022,"TERMINAL",0,0,"srun: error: hkn0402: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +8417,8693501,"TERMINAL",0,0,"i",,terminal_output +8418,8693607,"TERMINAL",0,0,"d",,terminal_output +8419,8693716,"TERMINAL",0,0,"l",,terminal_output +8420,8693901,"TERMINAL",0,0,"in",,terminal_output +8421,8693953,"TERMINAL",0,0,"g",,terminal_output +8422,8694165,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn0402.localdomain: Fri Sep 5 15:51:03 2025Partition dev_cpuonly: 10 nodes idle\rPartition cpuonly: 14 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 4 nodes idle\rPartition accelerated-h200:\t 5 nodes idle",,terminal_output +8423,8695162,"TERMINAL",0,0,"4",,terminal_output +8424,8696173,"TERMINAL",0,0,"5",,terminal_output +8425,8696295,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +8426,8699577,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab +8427,8700587,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1278,0,"",shellscript,selection_mouse +8428,8700603,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1277,0,"",shellscript,selection_command +8429,8701645,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1296,0,"",shellscript,selection_mouse 
+8430,8701645,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1295,0,"",shellscript,selection_command +8431,8702211,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1278,0,"",shellscript,selection_mouse +8432,8702226,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1277,0,"",shellscript,selection_command +8433,8707864,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1278,0,"\n ",shellscript,content +8434,8708341,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1283,0,"-",shellscript,content +8435,8708342,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1284,0,"",shellscript,selection_keyboard +8436,8708494,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1284,0,"-",shellscript,content +8437,8708495,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1285,0,"",shellscript,selection_keyboard +8438,8711778,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1284,1,"",shellscript,content +8439,8711904,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1283,1,"",shellscript,content +8440,8712039,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1279,4,"",shellscript,content +8441,8712387,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1278,1,"",shellscript,content +8442,8712827,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1296,0,"",shellscript,selection_command +8443,8713039,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1314,0,"",shellscript,selection_command +8444,8713183,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1334,0,"",shellscript,selection_command +8445,8713336,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1364,0,"",shellscript,selection_command +8446,8714217,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1380,1,"",shellscript,content +8447,8716372,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +8448,8717304,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1320,0,"",shellscript,selection_mouse +8449,8717316,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1319,0,"",shellscript,selection_command +8450,8717761,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1355,0,"",shellscript,selection_mouse +8451,8718637,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1354,1,"",shellscript,content +8452,8720186,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab +8453,8720296,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1424,0,"",shellscript,selection_mouse +8454,8722763,"TERMINAL",0,0,"bash",,terminal_focus +8455,8723403,"TERMINAL",0,0,"queue",,terminal_command +8456,8723444,"TERMINAL",0,0,"]633;C",,terminal_output +8457,8723702,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 15:51:33 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3466286 accelerat train_to tum_cte0 R 18:35:38\t 1 hkn07363466287 accelerat train_la tum_cte0 R 18:35:38\t 1 hkn07363468835 dev_accel interact tum_cte0 R39:18\t 1 hkn0402",,terminal_output +8458,8724557,"TERMINAL",0,0,"4999",,terminal_output +8459,8724743,"TERMINAL",0,0,"srun",,terminal_focus +8460,8725327,"TERMINAL",0,0,"idling",,terminal_output +8461,8725522,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh ",,terminal_output +8462,8725588,"TERMINAL",0,0,"5404020",,terminal_output 
+8463,8726598,"TERMINAL",0,0,"tokenizer_singl[6@e_gpu.",,terminal_output +8464,8726627,"TERMINAL",0,0,"6111",,terminal_output +8465,8727632,"TERMINAL",0,0,"7222",,terminal_output +8466,8728678,"TERMINAL",0,0,"8333",,terminal_output +8467,8729729,"TERMINAL",0,0,"9444",,terminal_output +8468,8730116,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab +8469,8730754,"TERMINAL",0,0,"40555",,terminal_output +8470,8731795,"TERMINAL",0,0,"1666",,terminal_output +8471,8732884,"TERMINAL",0,0,"2777",,terminal_output +8472,8733868,"TERMINAL",0,0,"3888",,terminal_output +8473,8734989,"TERMINAL",0,0,"4999",,terminal_output +8474,8736046,"TERMINAL",0,0,"5505030",,terminal_output +8475,8737042,"TERMINAL",0,0,"6111",,terminal_output +8476,8738066,"TERMINAL",0,0,"7222",,terminal_output +8477,8739204,"TERMINAL",0,0,"8333",,terminal_output +8478,8740127,"TERMINAL",0,0,"9444",,terminal_output +8479,8741251,"TERMINAL",0,0,"50555",,terminal_output +8480,8742223,"TERMINAL",0,0,"1666",,terminal_output +8481,8743278,"TERMINAL",0,0,"2777",,terminal_output +8482,8743921,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\r\n#SBATCH --job-name=train_tokenizer_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_tokenizer.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=100 \\r\n --log \\r\n --name=coinrun-tokenizer-dev-$slurm_job_id \\r\n --tags tokenizer coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 200 \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 10 \\r\n --val_steps 50 \\r\n --data_dir $array_records_dir_train\r\n",,terminal_output 
+8483,8744060,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1362839\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1757077935\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757081535\r\nSLURM_PMI2_SRUN_PORT=37571\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3468835\r\nSLURM_PTY_PORT=43005\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=33\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=196\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=34789\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3468835\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=34789\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +8484,8744158,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +8485,8744379,"TERMINAL",0,0,"3999",,terminal_output +8486,8745347,"TERMINAL",0,0,"56:006:0040",,terminal_output +8487,8746412,"TERMINAL",0,0,"6111",,terminal_output +8488,8747531,"TERMINAL",0,0,"7222",,terminal_output +8489,8748492,"TERMINAL",0,0,"8333",,terminal_output +8490,8749510,"TERMINAL",0,0,"9444",,terminal_output +8491,8750594,"TERMINAL",0,0,"2:00555",,terminal_output +8492,8751562,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +8493,8751583,"TERMINAL",0,0,"1666",,terminal_output +8494,8752387,"TERMINAL",0,0,"wandb: creating run\r\nwandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_155201-qrch1ogo\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-tokenizer-dev-3468835\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/qrch1ogo\r\n",,terminal_output +8495,8752627,"TERMINAL",0,0,"2777",,terminal_output +8496,8753664,"TERMINAL",0,0,"3888",,terminal_output +8497,8754706,"TERMINAL",0,0,"4999",,terminal_output +8498,8755787,"TERMINAL",0,0,"5101050",,terminal_output +8499,8755814,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['decoder', 'encoder', 'vq']\r\nParameter counts:\r\n{'decoder': 16858736, 'encoder': 16858752, 'vq': 32768, 'total': 33750256}\r\nStarting training from step 0...\r\n",,terminal_output +8500,8756790,"TERMINAL",0,0,"6111",,terminal_output +8501,8757833,"TERMINAL",0,0,"7222",,terminal_output +8502,8758950,"TERMINAL",0,0,"8333",,terminal_output +8503,8759918,"TERMINAL",0,0,"9444",,terminal_output +8504,8760994,"TERMINAL",0,0,"10555",,terminal_output +8505,8762016,"TERMINAL",0,0,"1666",,terminal_output +8506,8763039,"TERMINAL",0,0,"2777",,terminal_output +8507,8764173,"TERMINAL",0,0,"3888",,terminal_output +8508,8764174,"TERMINAL",0,0,"2025-09-05 15:52:13.885701: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:52:13.886106: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:52:13.886128: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:52:13.886245: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:52:13.887786: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8509,8765108,"TERMINAL",0,0,"4999",,terminal_output +8510,8766151,"TERMINAL",0,0,"5202040:00",,terminal_output +8511,8767253,"TERMINAL",0,0,"6111",,terminal_output +8512,8768262,"TERMINAL",0,0,"7222",,terminal_output +8513,8769364,"TERMINAL",0,0,"8444",,terminal_output +8514,8770412,"TERMINAL",0,0,"20555",,terminal_output +8515,8771448,"TERMINAL",0,0,"1666",,terminal_output +8516,8772409,"TERMINAL",0,0,"2777",,terminal_output +8517,8773447,"TERMINAL",0,0,"3888",,terminal_output +8518,8774507,"TERMINAL",0,0,"4999",,terminal_output +8519,8775642,"TERMINAL",0,0,"5303010",,terminal_output +8520,8776563,"TERMINAL",0,0,"6111",,terminal_output +8521,8777602,"TERMINAL",0,0,"7222",,terminal_output +8522,8778664,"TERMINAL",0,0,"8333",,terminal_output +8523,8779693,"TERMINAL",0,0,"9444",,terminal_output +8524,8780720,"TERMINAL",0,0,"30555",,terminal_output +8525,8781761,"TERMINAL",0,0,"1666",,terminal_output +8526,8782791,"TERMINAL",0,0,"2777",,terminal_output +8527,8783833,"TERMINAL",0,0,"3888",,terminal_output +8528,8784953,"TERMINAL",0,0,"4999",,terminal_output +8529,8785983,"TERMINAL",0,0,"5404020",,terminal_output +8530,8787009,"TERMINAL",0,0,"6111",,terminal_output +8531,8788456,"TERMINAL",0,0,"7333",,terminal_output +8532,8789421,"TERMINAL",0,0,"9444",,terminal_output +8533,8790489,"TERMINAL",0,0,"40555",,terminal_output +8534,8791501,"TERMINAL",0,0,"1666",,terminal_output +8535,8792593,"TERMINAL",0,0,"2777",,terminal_output +8536,8793658,"TERMINAL",0,0,"3888",,terminal_output +8537,8794644,"TERMINAL",0,0,"4999",,terminal_output +8538,8795673,"TERMINAL",0,0,"5505030",,terminal_output +8539,8796707,"TERMINAL",0,0,"6111",,terminal_output +8540,8797748,"TERMINAL",0,0,"7222",,terminal_output +8541,8798791,"TERMINAL",0,0,"8333",,terminal_output +8542,8799847,"TERMINAL",0,0,"9444",,terminal_output +8543,8801028,"TERMINAL",0,0,"Step 0, loss: 0.2621576189994812\r\nStep 1, loss: 0.19697336852550507\r\nStep 2, loss: 0.2387561947107315\r\nStep 3, loss: 0.2858649492263794\r\nStep 4, loss: 0.23166227340698242\r\nStep 5, loss: 0.23567236959934235\r\nStep 6, loss: 0.1995115876197815\r\nStep 7, loss: 0.20674186944961548\r\nStep 8, loss: 0.20116974413394928\r\nStep 9, loss: 0.1974135935306549\r\nCalculating validation metrics...\r\n",,terminal_output +8544,8801211,"TERMINAL",0,0,"50555",,terminal_output +8545,8802367,"TERMINAL",0,0,"1666",,terminal_output +8546,8803398,"TERMINAL",0,0,"2888",,terminal_output +8547,8804362,"TERMINAL",0,0,"4999",,terminal_output +8548,8805435,"TERMINAL",0,0,"57:007:0040",,terminal_output +8549,8805722,"TERMINAL",0,0,"2025-09-05 15:52:55.449652: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8550,8806422,"TERMINAL",0,0,"6111",,terminal_output +8551,8807482,"TERMINAL",0,0,"7222",,terminal_output +8552,8808512,"TERMINAL",0,0,"8333",,terminal_output +8553,8809632,"TERMINAL",0,0,"9444",,terminal_output +8554,8810655,"TERMINAL",0,0,"3:00555",,terminal_output +8555,8811616,"TERMINAL",0,0,"1666",,terminal_output +8556,8812647,"TERMINAL",0,0,"2777",,terminal_output +8557,8813688,"TERMINAL",0,0,"3888",,terminal_output +8558,8814911,"TERMINAL",0,0,"4999",,terminal_output +8559,8815947,"TERMINAL",0,0,"5101050",,terminal_output +8560,8816910,"TERMINAL",0,0,"6111",,terminal_output +8561,8817824,"TERMINAL",0,0,"7222",,terminal_output +8562,8818867,"TERMINAL",0,0,"8333",,terminal_output +8563,8819909,"TERMINAL",0,0,"9444",,terminal_output +8564,8821003,"TERMINAL",0,0,"10555",,terminal_output +8565,8821988,"TERMINAL",0,0,"1666",,terminal_output +8566,8823061,"TERMINAL",0,0,"2777",,terminal_output +8567,8824071,"TERMINAL",0,0,"3888",,terminal_output +8568,8825116,"TERMINAL",0,0,"4999",,terminal_output +8569,8826139,"TERMINAL",0,0,"520201:00",,terminal_output +8570,8827245,"TERMINAL",0,0,"6111",,terminal_output +8571,8828216,"TERMINAL",0,0,"7222",,terminal_output +8572,8829295,"TERMINAL",0,0,"8333",,terminal_output +8573,8830325,"TERMINAL",0,0,"9555",,terminal_output +8574,8831450,"TERMINAL",0,0,"21666",,terminal_output +8575,8832467,"TERMINAL",0,0,"2777",,terminal_output +8576,8833439,"TERMINAL",0,0,"3888",,terminal_output +8577,8834458,"TERMINAL",0,0,"4999",,terminal_output +8578,8835577,"TERMINAL",0,0,"5303010",,terminal_output +8579,8836544,"TERMINAL",0,0,"6111",,terminal_output +8580,8837580,"TERMINAL",0,0,"7222",,terminal_output +8581,8838618,"TERMINAL",0,0,"8333",,terminal_output +8582,8839713,"TERMINAL",0,0,"9444",,terminal_output +8583,8840707,"TERMINAL",0,0,"30555",,terminal_output +8584,8841768,"TERMINAL",0,0,"1666",,terminal_output +8585,8842791,"TERMINAL",0,0,"2777",,terminal_output +8586,8843938,"TERMINAL",0,0,"3888",,terminal_output +8587,8844963,"TERMINAL",0,0,"4999",,terminal_output +8588,8845911,"TERMINAL",0,0,"5404020",,terminal_output +8589,8847011,"TERMINAL",0,0,"6111",,terminal_output +8590,8847978,"TERMINAL",0,0,"7222",,terminal_output +8591,8849068,"TERMINAL",0,0,"8333",,terminal_output +8592,8850047,"TERMINAL",0,0,"9444",,terminal_output +8593,8851117,"TERMINAL",0,0,"40555",,terminal_output +8594,8852135,"TERMINAL",0,0,"1666",,terminal_output +8595,8853256,"TERMINAL",0,0,"2777",,terminal_output +8596,8854229,"TERMINAL",0,0,"3888",,terminal_output +8597,8855261,"TERMINAL",0,0,"4999",,terminal_output +8598,8856328,"TERMINAL",0,0,"5515131",,terminal_output +8599,8857357,"TERMINAL",0,0,"7222",,terminal_output +8600,8858405,"TERMINAL",0,0,"8333",,terminal_output +8601,8859430,"TERMINAL",0,0,"9444",,terminal_output +8602,8860589,"TERMINAL",0,0,"50555",,terminal_output +8603,8861511,"TERMINAL",0,0,"1666",,terminal_output +8604,8862575,"TERMINAL",0,0,"2777",,terminal_output +8605,8863702,"TERMINAL",0,0,"3888",,terminal_output +8606,8864633,"TERMINAL",0,0,"4999",,terminal_output +8607,8865669,"TERMINAL",0,0,"58:008:0040",,terminal_output +8608,8866778,"TERMINAL",0,0,"6111",,terminal_output +8609,8867796,"TERMINAL",0,0,"7222",,terminal_output +8610,8868794,"TERMINAL",0,0,"8333",,terminal_output +8611,8869953,"TERMINAL",0,0,"9444",,terminal_output +8612,8870972,"TERMINAL",0,0,"4:00555",,terminal_output 
+8613,8871995,"TERMINAL",0,0,"1666",,terminal_output +8614,8872962,"TERMINAL",0,0,"2777",,terminal_output +8615,8874028,"TERMINAL",0,0,"3888",,terminal_output +8616,8875073,"TERMINAL",0,0,"4999",,terminal_output +8617,8876071,"TERMINAL",0,0,"5101050",,terminal_output +8618,8877169,"TERMINAL",0,0,"6111",,terminal_output +8619,8878197,"TERMINAL",0,0,"7222",,terminal_output +8620,8879193,"TERMINAL",0,0,"8333",,terminal_output +8621,8879511,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +8622,8880277,"TERMINAL",0,0,"9444",,terminal_output +8623,8881277,"TERMINAL",0,0,"10666",,terminal_output +8624,8881551,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1627,0,"",shellscript,selection_mouse +8625,8882087,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1648,0,"",shellscript,selection_mouse +8626,8882383,"TERMINAL",0,0,"2777",,terminal_output +8627,8883242,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1647,0,"",shellscript,selection_command +8628,8883379,"TERMINAL",0,0,"3888",,terminal_output +8629,8884433,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1628,20," --val_steps 50 \",shellscript,selection_command +8630,8884434,"TERMINAL",0,0,"4999",,terminal_output +8631,8884820,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1604,44," --val_interval 10 \\n --val_steps 50 \",shellscript,selection_command +8632,8884965,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1560,88," --val_data_dir $array_records_dir_val \\n --val_interval 10 \\n --val_steps 50 \",shellscript,selection_command +8633,8885391,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1560,89,"",shellscript,content +8634,8885440,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1564,0,"",shellscript,selection_command +8635,8885476,"TERMINAL",0,0,"520202:00",,terminal_output +8636,8886496,"TERMINAL",0,0,"6111",,terminal_output +8637,8887553,"TERMINAL",0,0,"7222",,terminal_output +8638,8888606,"TERMINAL",0,0,"8333",,terminal_output +8639,8889661,"TERMINAL",0,0,"9444",,terminal_output +8640,8890169,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +8641,8890682,"TERMINAL",0,0,"20555",,terminal_output +8642,8891856,"TERMINAL",0,0,"1666",,terminal_output +8643,8892861,"TERMINAL",0,0,"2777",,terminal_output +8644,8893793,"TERMINAL",0,0,"3888",,terminal_output +8645,8894892,"TERMINAL",0,0,"4999",,terminal_output +8646,8895878,"TERMINAL",0,0,"5303010",,terminal_output +8647,8896914,"TERMINAL",0,0,"6111",,terminal_output +8648,8897981,"TERMINAL",0,0,"7222",,terminal_output +8649,8899239,"TERMINAL",0,0,"8333",,terminal_output +8650,8900284,"TERMINAL",0,0,"9444",,terminal_output +8651,8901289,"TERMINAL",0,0,"30666",,terminal_output +8652,8902324,"TERMINAL",0,0,"2777",,terminal_output +8653,8903439,"TERMINAL",0,0,"3888",,terminal_output +8654,8904429,"TERMINAL",0,0,"4999",,terminal_output +8655,8905477,"TERMINAL",0,0,"5404020",,terminal_output +8656,8906514,"TERMINAL",0,0,"6111",,terminal_output +8657,8907645,"TERMINAL",0,0,"7222",,terminal_output +8658,8908687,"TERMINAL",0,0,"8333",,terminal_output +8659,8909598,"TERMINAL",0,0,"9444",,terminal_output +8660,8910635,"TERMINAL",0,0,"40555",,terminal_output +8661,8911668,"TERMINAL",0,0,"1666",,terminal_output +8662,8912711,"TERMINAL",0,0,"2777",,terminal_output +8663,8913744,"TERMINAL",0,0,"3888",,terminal_output +8664,8914791,"TERMINAL",0,0,"4999",,terminal_output +8665,8915925,"TERMINAL",0,0,"5505030",,terminal_output 
+8666,8916955,"TERMINAL",0,0,"6111",,terminal_output +8667,8917920,"TERMINAL",0,0,"7222",,terminal_output +8668,8918995,"TERMINAL",0,0,"8333",,terminal_output +8669,8920020,"TERMINAL",0,0,"9444",,terminal_output +8670,8921146,"TERMINAL",0,0,"50555",,terminal_output +8671,8922279,"TERMINAL",0,0,"1666",,terminal_output +8672,8923401,"TERMINAL",0,0,"2888",,terminal_output +8673,8924430,"TERMINAL",0,0,"4999",,terminal_output +8674,8925391,"TERMINAL",0,0,"59:009:0040",,terminal_output +8675,8926471,"TERMINAL",0,0,"6111",,terminal_output +8676,8927431,"TERMINAL",0,0,"7222",,terminal_output +8677,8928493,"TERMINAL",0,0,"8333",,terminal_output +8678,8929507,"TERMINAL",0,0,"9444",,terminal_output +8679,8929711,"TERMINAL",0,0,"Step 10, validation loss: 0.18187496066093445\r\nStep 10, loss: 0.1460321992635727\r\nStep 11, loss: 0.1712970733642578\r\nStep 12, loss: 0.14481860399246216\r\nStep 13, loss: 0.17531439661979675\r\nStep 14, loss: 0.17896421253681183\r\nStep 15, loss: 0.19768431782722473\r\nStep 16, loss: 0.16900265216827393\r\nStep 17, loss: 0.19003376364707947\r\nStep 18, loss: 0.15590409934520721\r\nStep 19, loss: 0.15347588062286377\r\nCalculating validation metrics...\r\nStep 20, validation loss: 0.14868588745594025\r\nStep 20, loss: 0.17579180002212524\r\nStep 21, loss: 0.1402260810136795\r\nStep 22, loss: 0.1402587592601776\r\nStep 23, loss: 0.14610332250595093\r\nStep 24, loss: 0.13572093844413757\r\nStep 25, loss: 0.13391639292240143\r\nStep 26, loss: 0.11312375217676163\r\nStep 27, loss: 0.15244707465171814\r\nStep 28, loss: 0.15283985435962677\r\nStep 29, loss: 0.13718965649604797\r\nCalculating validation metrics...\r\nStep 30, validation loss: 0.13765759766101837\r\nStep 30, loss: 0.17064014077186584\r\nStep 31, loss: 0.12281255424022675\r\nStep 32, loss: 0.11974624544382095\r\nStep 33, loss: 0.14825329184532166\r\nStep 34, loss: 0.12291015684604645\r\nStep 35, loss: 0.16604965925216675\r\nStep 36, loss: 0.11892037838697433\r\nStep 37, loss: 0.10691781342029572\r\nStep 38, loss: 0.09853478521108627\r\nStep 39, loss: 0.09658374637365341\r\nCalculating validation metrics...\r\nStep 40, validation loss: 0.11540428549051285\r\nStep 40, loss: 0.13238421082496643\r\nStep 41, loss: 0.11523795872926712\r\nStep 42, loss: 0.11773140728473663\r\nStep 43, loss: 0.13079145550727844\r\nStep 44, loss: 0.13574346899986267\r\nStep 45, loss: 0.11259935051202774\r\nStep 46, loss: 0.10639111697673798\r\nStep 47, loss: 0.10747742652893066\r\nStep 48, loss: 0.09892918169498444\r\nStep 49, loss: 0.10342658311128616\r\nCalculating validation metrics...\r\nStep 50, validation loss: 0.11283054202795029\r\nStep 50, loss: 0.12395506352186203\r\nStep 51, loss: 0.10058515518903732\r\nStep 52, loss: 0.09901111572980881\r\nStep 53, loss: 0.1407279074192047\r\nStep 54, loss: 0.10172712057828903\r\nStep 55, loss: 0.14542528986930847\r\nStep 56, loss: 0.1538066416978836\r\nStep 57, loss: 0.11065907031297684\r\nStep 58, loss: 0.1388990432024002\r\nStep 59, loss: 0.12373080849647522\r\nCalculating validation metrics...\r\nStep 60, validation loss: 0.12430300563573837\r\nStep 60, loss: 0.10972626507282257\r\nStep 61, loss: 0.13690009713172913\r\nStep 62, loss: 0.09644867479801178\r\nStep 63, loss: 0.11174969375133514\r\nStep 64, loss: 0.10339435189962387\r\nStep 65, loss: 0.1341489553451538\r\nStep 66, loss: 0.10440479218959808\r\nStep 67, loss: 0.10439705103635788\r\nStep 68, loss: 0.11961660534143448\r\nStep 69, loss: 0.11122949421405792\r\nCalculating validation metrics...\r\nStep 70, validation loss: 
0.0991554856300354\r\nStep 70, loss: 0.09937877207994461\r\nStep 71, loss: 0.09287536144256592\r\nStep 72, loss: 0.07230173051357269\r\nStep 73, loss: 0.11810803413391113\r\nStep 74, loss: 0.0843992829322815\r\nStep 75, loss: 0.12040487676858902\r\nStep 76, loss: 0.09682764858007431\r\nStep 77, loss: 0.09416238963603973\r\nStep 78, loss: 0.09660561382770538\r\nStep 79, loss: 0.09503719955682755\r\nCalculating validation metrics...\r\nStep 80, validation loss: 0.10872339457273483\r\nStep 80, loss: 0.11055693030357361\r\nStep 81, loss: 0.10903745144605637\r\nStep 82, loss: 0.08702915906906128\r\nStep 83, loss: 0.10747984051704407\r\nStep 84, loss: 0.08675380051136017\r\nStep 85, loss: 0.07848641276359558\r\nStep 86, loss: 0.09089099615812302\r\nStep 87, loss: 0.10623789578676224\r\nStep 88, loss: 0.12308396399021149\r\nStep 89, loss: 0.11174097657203674\r\nCalculating validation metrics...\r\nStep 90, validation loss: 0.09634791314601898\r\nStep 90, loss: 0.09366447478532791\r\nStep 91, loss: 0.09256725013256073\r\nStep 92, loss: 0.10207675397396088\r\nStep 93, loss: 0.1033632755279541\r\nStep 94, loss: 0.08453680574893951\r\nStep 95, loss: 0.08819650113582611\r\nStep 96, loss: 0.09541334211826324\r\nStep 97, loss: 0.08158412575721741\r\nStep 98, loss: 0.0894506573677063\r\nStep 99, loss: 0.08139083534479141\r\nCalculating validation metrics...\r\nStep 100, validation loss: 0.09245947003364563\r\nSaved checkpoint at step 100\r\nStep 100, loss: 0.08017443865537643\r\nStep 101, loss: 0.0853368490934372\r\nStep 102, loss: 0.10362497717142105\r\nStep 103, loss: 0.08803144097328186\r\nStep 104, loss: 0.0731319859623909\r\nStep 105, loss: 0.10547707974910736\r\nStep 106, loss: 0.08872571587562561\r\nStep 107, loss: 0.09918766468763351\r\nStep 108, loss: 0.11389628052711487\r\nStep 109, loss: 0.08524921536445618\r\nCalculating validation metrics...\r\nStep 110, validation loss: 0.09094291925430298\r\nStep 110, loss: 0.10075535625219345\r\nStep 111, loss: 0.0951254665851593\r\nStep 112, loss: 0.07482922822237015\r\nStep 113, loss: 0.08090869337320328\r\nStep 114, loss: 0.08707627654075623\r\nStep 115, loss: 0.08405175060033798\r\nStep 116, loss: 0.08948099613189697\r\nStep 117, loss: 0.07551295310258865\r\nStep 118, loss: 0.10672882199287415\r\nStep 119, loss: 0.07707519829273224\r\nCalculating validation metrics...\r\nStep 120, validation loss: 0.07647018879652023\r\nStep 120, loss: 0.07320641726255417\r\nStep 121, loss: 0.10054738074541092\r\nStep 122, loss: 0.0862056091427803\r\nStep 123, loss: 0.09223911166191101\r\nStep 124, loss: 0.07955249398946762\r\nStep 125, loss: 0.07523972541093826\r\nStep 126, loss: 0.07100744545459747\r\nStep 127, loss: 0.0779755488038063\r\nStep 128, loss: 0.08777017146348953\r\nStep 129, loss: 0.08433938026428223\r\nCalculating validation metrics...\r\nStep 130, validation loss: 0.08397282660007477\r\nStep 130, loss: 0.09202032536268234\r\nStep 131, loss: 0.07978766411542892\r\nStep 132, loss: 0.08946399390697479\r\nStep 133, loss: 0.07694211602210999\r\nStep 134, loss: 0.0872776061296463\r\nStep 135, loss: 0.07367205619812012\r\nStep 136, loss: 0.09138592332601547\r\nStep 137, loss: 0.08840078115463257\r\nStep 138, loss: 0.08120749890804291\r\nStep 139, loss: 0.08273623138666153\r\nCalculating validation metrics...\r\nStep 140, validation loss: 0.07758229970932007\r\nStep 140, loss: 0.0892852172255516\r\nStep 141, loss: 0.06943343579769135\r\nStep 142, loss: 0.0694546177983284\r\nStep 143, loss: 0.07064647227525711\r\nStep 144, loss: 0.0724049061536789\r\nStep 145, 
loss: 0.0633174329996109\r\nStep 146, loss: 0.07066572457551956\r\nStep 147, loss: 0.0692739188671112\r\nStep 148, loss: 0.06583620607852936\r\nStep 149, loss: 0.07165002077817917\r\nCalculating validation metrics...\r\nStep 150, validation loss: 0.07329834997653961\r\nStep 150, loss: 0.06329204142093658\r\nStep 151, loss: 0.06664171069860458\r\nStep 152, loss: 0.062354907393455505\r\nStep 153, loss: 0.07677270472049713\r\nStep 154, loss: 0.07087738811969757\r\nStep 155, loss: 0.07549387961626053\r\nStep 156, loss: 0.11238604784011841\r\nStep 157, loss: 0.08208107948303223\r\nStep 158, loss: 0.06930659711360931\r\nStep 159, loss: 0.07341207563877106\r\nCalculating validation metrics...\r\nStep 160, validation loss: 0.0708177462220192\r\nStep 160, loss: 0.07256186008453369\r\nStep 161, loss: 0.08049368113279343\r\nStep 162, loss: 0.07719960808753967\r\nStep 163, loss: 0.07508951425552368\r\nStep 164, loss: 0.06332002580165863\r\nStep 165, loss: 0.07934826612472534\r\nStep 166, loss: 0.0667673796415329\r\nStep 167, loss: 0.0736018493771553\r\nStep 168, loss: 0.06537581980228424\r\nStep 169, loss: 0.06136263906955719\r\nCalculating validation metrics...\r\nStep 170, validation loss: 0.07275910675525665\r\nStep 170, loss: 0.06972520053386688\r\nStep 171, loss: 0.08074561506509781\r\nStep 172, loss: 0.07730477303266525\r\nStep 173, loss: 0.07991638034582138\r\nStep 174, loss: 0.068448506295681\r\nStep 175, loss: 0.07280027866363525\r\nStep 176, loss: 0.07981234043836594\r\nStep 177, loss: 0.060561083257198334\r\nStep 178, loss: 0.06595361232757568\r\nStep 179, loss: 0.06908052414655685\r\nCalculating validation metrics...\r\nStep 180, validation loss: 0.06621947139501572\r\nStep 180, loss: 0.07320982217788696\r\nStep 181, loss: 0.07080576568841934\r\nStep 182, loss: 0.06703071296215057\r\nStep 183, loss: 0.06997152417898178\r\nStep 184, loss: 0.06756695359945297\r\nStep 185, loss: 0.07311617583036423\r\nStep 186, loss: 0.06432139128446579\r\nStep 187, loss: 0.07327717542648315\r\nStep 188, loss: 0.07340525090694427\r\nStep 189, loss: 0.07438525557518005\r\nCalculating validation metrics...\r\nStep 190, validation loss: 0.07065940648317337\r\nStep 190, loss: 0.07426071166992188\r\nStep 191, loss: 0.05727018043398857\r\nStep 192, loss: 0.07497460395097733\r\nStep 193, loss: 0.06436926126480103\r\nStep 194, loss: 0.07450353354215622\r\nStep 195, loss: 0.08314775675535202\r\nStep 196, loss: 0.0687609612941742\r\nStep 197, loss: 0.06916137039661407\r\n",,terminal_output +8680,8930669,"TERMINAL",0,0,"5:00555",,terminal_output +8681,8931632,"TERMINAL",0,0,"1666",,terminal_output +8682,8932635,"TERMINAL",0,0,"2777",,terminal_output +8683,8933675,"TERMINAL",0,0,"3888",,terminal_output +8684,8934165,"TERMINAL",0,0,"Step 198, loss: 0.05582619458436966\r\nStep 199, loss: 0.06539875268936157\r\nCalculating validation metrics...\r\nStep 200, validation loss: 0.06883565336465836\r\nSaved checkpoint at step 200\r\n",,terminal_output +8685,8934719,"TERMINAL",0,0,"4999",,terminal_output +8686,8935762,"TERMINAL",0,0,"5101050",,terminal_output +8687,8935801,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-tokenizer-dev-3468835 at: https://wandb.ai/instant-uv/jafar/runs/qrch1ogo\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_155201-qrch1ogo/logs\r\n",,terminal_output 
+8688,8936800,"TERMINAL",0,0,"/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 20 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +8689,8936811,"TERMINAL",0,0,"6111",,terminal_output +8690,8937166,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +8691,8937835,"TERMINAL",0,0,"7222",,terminal_output +8692,8938743,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh ",,terminal_output +8693,8938882,"TERMINAL",0,0,"8333",,terminal_output +8694,8939322,"TERMINAL",0,0,"",,terminal_output +8695,8939960,"TERMINAL",0,0,"",,terminal_output +8696,8939960,"TERMINAL",0,0,"9444",,terminal_output +8697,8940114,"TERMINAL",0,0,"\r\r\n\r",,terminal_output +8698,8940239,"TERMINAL",0,0,"",,terminal_output +8699,8940375,"TERMINAL",0,0,"",,terminal_output +8700,8940567,"TERMINAL",0,0,"",,terminal_output +8701,8940698,"TERMINAL",0,0,"",,terminal_output +8702,8940880,"TERMINAL",0,0,"",,terminal_output +8703,8940972,"TERMINAL",0,0,"10555",,terminal_output +8704,8941019,"TERMINAL",0,0,"",,terminal_output +8705,8941160,"TERMINAL",0,0,"",,terminal_output +8706,8941287,"TERMINAL",0,0,"",,terminal_output +8707,8941424,"TERMINAL",0,0,"",,terminal_output +8708,8941509,"TERMINAL",0,0,"",,terminal_output +8709,8941689,"TERMINAL",0,0,"",,terminal_output +8710,8941927,"TERMINAL",0,0,"",,terminal_output +8711,8942029,"TERMINAL",0,0,"1666",,terminal_output +8712,8942329,"TERMINAL",0,0,"l",,terminal_output +8713,8942393,"TERMINAL",0,0,"a",,terminal_output +8714,8942578,"TERMINAL",0,0,"m_single_gpu.sh ",,terminal_output +8715,8943039,"TERMINAL",0,0,"2777",,terminal_output +8716,8943515,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --job-name=train_lam_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_lam.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=100 \\r\n --log \\r\n --name=coinrun-lam-dev-$slurm_job_id \\r\n --tags lam coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 
\\r\n --wsd_decay_steps 0 \\r\n --num_steps 200 \\r\n --data_dir $array_records_dir_train\r\n\r\n",,terminal_output +8717,8943650,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1362839\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1757077935\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757081535\r\nSLURM_PMI2_SRUN_PORT=37571\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3468835\r\nSLURM_PTY_PORT=43005\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=33\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=196\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=34789\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3468835\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=34789\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +8718,8943741,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +8719,8944197,"TERMINAL",0,0,"3888",,terminal_output +8720,8945134,"TERMINAL",0,0,"4999",,terminal_output +8721,8946197,"TERMINAL",0,0,"520203:00",,terminal_output +8722,8947207,"TERMINAL",0,0,"6111",,terminal_output +8723,8948284,"TERMINAL",0,0,"7222",,terminal_output +8724,8949362,"TERMINAL",0,0,"8444",,terminal_output +8725,8950354,"TERMINAL",0,0,"20555",,terminal_output +8726,8951060,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +8727,8951390,"TERMINAL",0,0,"1666",,terminal_output +8728,8951773,"TERMINAL",0,0,"wandb: creating run\r\n",,terminal_output +8729,8952174,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_155520-oxqhhgpq\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-lam-dev-3468835\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/oxqhhgpq\r\n",,terminal_output +8730,8952442,"TERMINAL",0,0,"2777",,terminal_output +8731,8953488,"TERMINAL",0,0,"3888",,terminal_output +8732,8954531,"TERMINAL",0,0,"4999",,terminal_output +8733,8955656,"TERMINAL",0,0,"5303010",,terminal_output +8734,8955662,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['action_in', 'action_up', 'decoder', 'encoder', 'patch_up', 'vq']\r\nParameter counts:\r\n{'action_in': 768, 'action_up': 16896, 'decoder': 17474816, 'encoder': 17228832, 'patch_up': 393728, 'vq': 192, 'total': 35115232}\r\nStarting training from step 0...\r\n",,terminal_output +8735,8956607,"TERMINAL",0,0,"6111",,terminal_output +8736,8957646,"TERMINAL",0,0,"7222",,terminal_output +8737,8958688,"TERMINAL",0,0,"8333",,terminal_output +8738,8959723,"TERMINAL",0,0,"9444",,terminal_output +8739,8960773,"TERMINAL",0,0,"30555",,terminal_output +8740,8961818,"TERMINAL",0,0,"1666",,terminal_output +8741,8962862,"TERMINAL",0,0,"2777",,terminal_output +8742,8963901,"TERMINAL",0,0,"3888",,terminal_output +8743,8964350,"TERMINAL",0,0,"2025-09-05 15:55:34.037068: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:55:34.038234: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:55:34.038254: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:55:34.038726: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8744,8964938,"TERMINAL",0,0,"4999",,terminal_output +8745,8966044,"TERMINAL",0,0,"5404020",,terminal_output +8746,8967124,"TERMINAL",0,0,"6111",,terminal_output +8747,8968155,"TERMINAL",0,0,"7222",,terminal_output +8748,8969116,"TERMINAL",0,0,"8333",,terminal_output +8749,8970244,"TERMINAL",0,0,"9444",,terminal_output +8750,8971286,"TERMINAL",0,0,"40555",,terminal_output +8751,8972360,"TERMINAL",0,0,"1666",,terminal_output +8752,8973370,"TERMINAL",0,0,"2888",,terminal_output +8753,8974362,"TERMINAL",0,0,"4999",,terminal_output +8754,8975425,"TERMINAL",0,0,"5505030",,terminal_output +8755,8976415,"TERMINAL",0,0,"6111",,terminal_output +8756,8977450,"TERMINAL",0,0,"7222",,terminal_output +8757,8978493,"TERMINAL",0,0,"8333",,terminal_output +8758,8979626,"TERMINAL",0,0,"9444",,terminal_output +8759,8980642,"TERMINAL",0,0,"50555",,terminal_output +8760,8981630,"TERMINAL",0,0,"1666",,terminal_output +8761,8982690,"TERMINAL",0,0,"2777",,terminal_output +8762,8983721,"TERMINAL",0,0,"3888",,terminal_output +8763,8984761,"TERMINAL",0,0,"4999",,terminal_output +8764,8985807,"TERMINAL",0,0,"540:0040:0040",,terminal_output +8765,8986917,"TERMINAL",0,0,"6111",,terminal_output +8766,8987894,"TERMINAL",0,0,"7222",,terminal_output +8767,8989038,"TERMINAL",0,0,"8333",,terminal_output +8768,8990017,"TERMINAL",0,0,"9444",,terminal_output +8769,8991098,"TERMINAL",0,0,"6:00555",,terminal_output +8770,8992111,"TERMINAL",0,0,"1666",,terminal_output +8771,8993235,"TERMINAL",0,0,"2777",,terminal_output +8772,8994261,"TERMINAL",0,0,"3888",,terminal_output +8773,8995214,"TERMINAL",0,0,"4999",,terminal_output +8774,8996294,"TERMINAL",0,0,"5101050",,terminal_output +8775,8997310,"TERMINAL",0,0,"6222",,terminal_output +8776,8998465,"TERMINAL",0,0,"8333",,terminal_output +8777,8999426,"TERMINAL",0,0,"9444",,terminal_output +8778,9000513,"TERMINAL",0,0,"10555",,terminal_output +8779,9001537,"TERMINAL",0,0,"1666",,terminal_output +8780,9002665,"TERMINAL",0,0,"2777",,terminal_output +8781,9003687,"TERMINAL",0,0,"3888",,terminal_output +8782,9004721,"TERMINAL",0,0,"4999",,terminal_output +8783,9005737,"TERMINAL",0,0,"520204:00",,terminal_output +8784,9006725,"TERMINAL",0,0,"6111",,terminal_output +8785,9007785,"TERMINAL",0,0,"7222",,terminal_output +8786,9008814,"TERMINAL",0,0,"8333",,terminal_output +8787,9009945,"TERMINAL",0,0,"9444",,terminal_output +8788,9010952,"TERMINAL",0,0,"20555",,terminal_output +8789,9012087,"TERMINAL",0,0,"1666",,terminal_output +8790,9013303,"TERMINAL",0,0,"2777",,terminal_output +8791,9014065,"TERMINAL",0,0,"3888",,terminal_output +8792,9015107,"TERMINAL",0,0,"4999",,terminal_output +8793,9016161,"TERMINAL",0,0,"5303010",,terminal_output +8794,9017201,"TERMINAL",0,0,"6111",,terminal_output +8795,9018248,"TERMINAL",0,0,"7222",,terminal_output +8796,9019366,"TERMINAL",0,0,"8444",,terminal_output +8797,9020373,"TERMINAL",0,0,"30555",,terminal_output +8798,9021506,"TERMINAL",0,0,"1666",,terminal_output +8799,9022529,"TERMINAL",0,0,"2777",,terminal_output +8800,9023577,"TERMINAL",0,0,"3888",,terminal_output +8801,9024540,"TERMINAL",0,0,"4999",,terminal_output +8802,9025209,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +8803,9025606,"TERMINAL",0,0,"5404020",,terminal_output +8804,9026636,"TERMINAL",0,0,"6111",,terminal_output +8805,9027650,"TERMINAL",0,0,"7222",,terminal_output 
+8806,9028738,"TERMINAL",0,0,"8333",,terminal_output +8807,9029728,"TERMINAL",0,0,"9444",,terminal_output +8808,9030765,"TERMINAL",0,0,"Step 0, loss: 0.29082542657852173\r\nStep 1, loss: 0.2048691064119339\r\nStep 2, loss: 0.19221149384975433\r\nStep 3, loss: 0.20193631947040558\r\nStep 4, loss: 0.1634262055158615\r\nStep 5, loss: 0.16224892437458038\r\nStep 6, loss: 0.1465519517660141\r\nStep 7, loss: 0.13969092071056366\r\nStep 8, loss: 0.13379880785942078\r\nStep 9, loss: 0.13682898879051208\r\nStep 10, loss: 0.10182199627161026\r\nStep 11, loss: 0.10439193993806839\r\nStep 12, loss: 0.09406972676515579\r\nStep 13, loss: 0.09772490710020065\r\nStep 14, loss: 0.09554266184568405\r\nStep 15, loss: 0.09632503986358643\r\nStep 16, loss: 0.0835835412144661\r\nStep 17, loss: 0.09580384939908981\r\nStep 18, loss: 0.07298149913549423\r\nStep 19, loss: 0.07869017869234085\r\nStep 20, loss: 0.07823637127876282\r\nStep 21, loss: 0.06644692271947861\r\nStep 22, loss: 0.061057426035404205\r\nStep 23, loss: 0.06661047786474228\r\nStep 24, loss: 0.05326160788536072\r\nStep 25, loss: 0.05032860487699509\r\nStep 26, loss: 0.04621792584657669\r\nStep 27, loss: 0.058809492737054825\r\nStep 28, loss: 0.05668637156486511\r\nStep 29, loss: 0.049171317368745804\r\nStep 30, loss: 0.054714083671569824\r\nStep 31, loss: 0.03886327147483826\r\nStep 32, loss: 0.04377226158976555\r\nStep 33, loss: 0.04706248268485069\r\nStep 34, loss: 0.03870921954512596\r\nStep 35, loss: 0.04759908840060234\r\nStep 36, loss: 0.03624310344457626\r\nStep 37, loss: 0.03723645582795143\r\nStep 38, loss: 0.03415235877037048\r\nStep 39, loss: 0.03045961633324623\r\nStep 40, loss: 0.04029426723718643\r\nStep 41, loss: 0.037842318415641785\r\nStep 42, loss: 0.036140669137239456\r\nStep 43, loss: 0.042215485125780106\r\nStep 44, loss: 0.04040057957172394\r\nStep 45, loss: 0.03682219609618187\r\nStep 46, loss: 0.02909393236041069\r\nStep 47, loss: 0.03433133289217949\r\nStep 48, loss: 0.02926039509475231\r\nStep 49, loss: 0.029214780777692795\r\nStep 50, loss: 0.03843312710523605\r\nStep 51, loss: 0.025400172919034958\r\nStep 52, loss: 0.0252959243953228\r\nStep 53, loss: 0.03099111281335354\r\nStep 54, loss: 0.030360329896211624\r\nStep 55, loss: 0.030434750020503998\r\nStep 56, loss: 0.03504445031285286\r\nStep 57, loss: 0.026979556307196617\r\nStep 58, loss: 0.023744165897369385\r\nStep 59, loss: 0.025236567482352257\r\nStep 60, loss: 0.025721365585923195\r\nStep 61, loss: 0.023945394903421402\r\nStep 62, loss: 0.019217371940612793\r\nStep 63, loss: 0.030162263661623\r\nStep 64, loss: 0.019047735258936882\r\nStep 65, loss: 0.028320234268903732\r\nStep 66, loss: 0.025090985000133514\r\nStep 67, loss: 0.026432154700160027\r\nStep 68, loss: 0.023692764341831207\r\nStep 69, loss: 0.026204457506537437\r\nStep 70, loss: 0.022219838574528694\r\nStep 71, loss: 0.02456802874803543\r\nStep 72, loss: 0.020066484808921814\r\nStep 73, loss: 0.026353834196925163\r\nStep 74, loss: 0.023041803389787674\r\nStep 75, loss: 0.025038868188858032\r\nStep 76, loss: 0.02404751628637314\r\nStep 77, loss: 0.017286183312535286\r\nStep 78, loss: 0.021259160712361336\r\nStep 79, loss: 0.019932588562369347\r\nStep 80, loss: 0.02013363130390644\r\nStep 81, loss: 0.023084068670868874\r\nStep 82, loss: 0.024414854124188423\r\nStep 83, loss: 0.026525352150201797\r\nStep 84, loss: 0.01507299579679966\r\nStep 85, loss: 0.01850030943751335\r\nStep 86, loss: 0.018153125420212746\r\nStep 87, loss: 0.021576229482889175\r\nStep 88, loss: 0.0203157477080822\r\nStep 89, loss: 
0.022011112421751022\r\nStep 90, loss: 0.022103428840637207\r\nStep 91, loss: 0.017125708982348442\r\nStep 92, loss: 0.018620772287249565\r\nStep 93, loss: 0.023652445524930954\r\nStep 94, loss: 0.016419129446148872\r\nStep 95, loss: 0.019779052585363388\r\nStep 96, loss: 0.02462187595665455\r\nStep 97, loss: 0.013301949948072433\r\nStep 98, loss: 0.020797014236450195\r\nStep 99, loss: 0.016959479078650475\r\nSaved checkpoint at step 100\r\nStep 100, loss: 0.014280087314546108\r\nStep 101, loss: 0.015398509800434113\r\nStep 102, loss: 0.021299662068486214\r\nStep 103, loss: 0.01845368556678295\r\nStep 104, loss: 0.014964827336370945\r\nStep 105, loss: 0.021878650411963463\r\nStep 106, loss: 0.01783316768705845\r\nStep 107, loss: 0.01999477483332157\r\nStep 108, loss: 0.020448556169867516\r\nStep 109, loss: 0.017638100311160088\r\nStep 110, loss: 0.021010704338550568\r\nStep 111, loss: 0.021663282066583633\r\nStep 112, loss: 0.02006286010146141\r\nStep 113, loss: 0.018050743266940117\r\nStep 114, loss: 0.017534755170345306\r\nStep 115, loss: 0.01856238953769207\r\nStep 116, loss: 0.02143116109073162\r\nStep 117, loss: 0.014868938364088535\r\nStep 118, loss: 0.01818583719432354\r\nStep 119, loss: 0.02024712599813938\r\nStep 120, loss: 0.015612361021339893\r\nStep 121, loss: 0.020772771909832954\r\nStep 122, loss: 0.02284320816397667\r\nStep 123, loss: 0.017088936641812325\r\nStep 124, loss: 0.01777857542037964\r\nStep 125, loss: 0.015260602347552776\r\nStep 126, loss: 0.012052659876644611\r\nStep 127, loss: 0.017362497746944427\r\nStep 128, loss: 0.02183559536933899\r\nStep 129, loss: 0.015694834291934967\r\nStep 130, loss: 0.01950552873313427\r\nStep 131, loss: 0.017693782225251198\r\nStep 132, loss: 0.016871673986315727\r\nStep 133, loss: 0.017513863742351532\r\nStep 134, loss: 0.01661726087331772\r\nStep 135, loss: 0.01714961975812912\r\nStep 136, loss: 0.015073807910084724\r\nStep 137, loss: 0.018432624638080597\r\nStep 138, loss: 0.016114436089992523\r\nStep 139, loss: 0.01654084585607052\r\nStep 140, loss: 0.01476135104894638\r\nStep 141, loss: 0.014641030691564083\r\nStep 142, loss: 0.016761479899287224\r\nStep 143, loss: 0.016827326267957687\r\nStep 144, loss: 0.014375269412994385\r\nStep 145, loss: 0.015380159951746464\r\nStep 146, loss: 0.01451564859598875\r\nStep 147, loss: 0.014944633468985558\r\nStep 148, loss: 0.013574398122727871\r\nStep 149, loss: 0.015518540516495705\r\nStep 150, loss: 0.014933833852410316\r\nStep 151, loss: 0.015372075140476227\r\nStep 152, loss: 0.01231596153229475\r\nStep 153, loss: 0.01539556309580803\r\nStep 154, loss: 0.015142295509576797\r\nStep 155, loss: 0.015786105766892433\r\nStep 156, loss: 0.02043990045785904\r\nStep 157, loss: 0.015890201553702354\r\nStep 158, loss: 0.012537137605249882\r\nStep 159, loss: 0.01571078971028328\r\nStep 160, loss: 0.014786813408136368\r\nStep 161, loss: 0.014251478016376495\r\nStep 162, loss: 0.016027238219976425\r\nStep 163, loss: 0.015812069177627563\r\nStep 164, loss: 0.011770876124501228\r\nStep 165, loss: 0.016805432736873627\r\nStep 166, loss: 0.015529119409620762\r\nStep 167, loss: 0.014419076964259148\r\nStep 168, loss: 0.016051633283495903\r\nStep 169, loss: 0.012988136149942875\r\nStep 170, loss: 0.013591167517006397\r\nStep 171, loss: 0.01437724195420742\r\nStep 172, loss: 0.01483831461519003\r\nStep 173, loss: 0.017630159854888916\r\nStep 174, loss: 0.013004233129322529\r\nStep 175, loss: 0.015921808779239655\r\nStep 176, loss: 0.014098438434302807\r\nStep 177, loss: 0.015263441018760204\r\nStep 178, 
loss: 0.014908174984157085\r\nStep 179, loss: 0.013653873465955257\r\nStep 180, loss: 0.018078844994306564\r\nStep 181, loss: 0.015561181120574474\r\nStep 182, loss: 0.013101051561534405\r\nStep 183, loss: 0.012764105573296547\r\nStep 184, loss: 0.014194772578775883\r\nStep 185, loss: 0.014551274478435516\r\nStep 186, loss: 0.013629589229822159\r\nStep 187, loss: 0.013752375729382038\r\nStep 188, loss: 0.01368360873311758\r\nStep 189, loss: 0.01491300854831934\r\nStep 190, loss: 0.016657283529639244\r\nStep 191, loss: 0.013172263279557228\r\nStep 192, loss: 0.011906351894140244\r\nStep 193, loss: 0.011473177000880241\r\nStep 194, loss: 0.012833449058234692\r\nStep 195, loss: 0.018449127674102783\r\nStep 196, loss: 0.012154367752373219\r\nStep 197, loss: 0.01526846643537283\r\nStep 198, loss: 0.011575196869671345\r\nStep 199, loss: 0.014514456503093243\r\nSaved checkpoint at step 200\r\n",,terminal_output +8809,9030874,"TERMINAL",0,0,"40555",,terminal_output +8810,9031819,"TERMINAL",0,0,"1666",,terminal_output +8811,9032301,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-lam-dev-3468835 at: https://wandb.ai/instant-uv/jafar/runs/oxqhhgpq\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_155520-oxqhhgpq/logs\r\n",,terminal_output +8812,9032920,"TERMINAL",0,0,"2777",,terminal_output +8813,9033107,"TERMINAL",0,0,"/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 10 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +8814,9033393,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +8815,9033966,"TERMINAL",0,0,"3888",,terminal_output +8816,9034974,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh ",,terminal_output +8817,9034982,"TERMINAL",0,0,"4999",,terminal_output +8818,9036009,"TERMINAL",0,0,"5505030",,terminal_output +8819,9036061,"TERMINAL",0,0,"",,terminal_output +8820,9036567,"TERMINAL",0,0,"",,terminal_output +8821,9036687,"TERMINAL",0,0,"\r\r\n\r",,terminal_output +8822,9036784,"TERMINAL",0,0,"",,terminal_output +8823,9036975,"TERMINAL",0,0,"",,terminal_output +8824,9037074,"TERMINAL",0,0,"6111",,terminal_output +8825,9037283,"TERMINAL",0,0,"",,terminal_output +8826,9037725,"TERMINAL",0,0,"",,terminal_output +8827,9037839,"TERMINAL",0,0,"",,terminal_output +8828,9038000,"TERMINAL",0,0,"",,terminal_output +8829,9038097,"TERMINAL",0,0,"7222",,terminal_output +8830,9038214,"TERMINAL",0,0,"",,terminal_output +8831,9038331,"TERMINAL",0,0,"",,terminal_output +8832,9038524,"TERMINAL",0,0,"",,terminal_output +8833,9038745,"TERMINAL",0,0,"",,terminal_output +8834,9038855,"TERMINAL",0,0,"",,terminal_output +8835,9039028,"TERMINAL",0,0,"",,terminal_output +8836,9039224,"TERMINAL",0,0,"",,terminal_output +8837,9039224,"TERMINAL",0,0,"8333",,terminal_output +8838,9039506,"TERMINAL",0,0,"",,terminal_output +8839,9040241,"TERMINAL",0,0,"9444",,terminal_output +8840,9040710,"TERMINAL",0,0,"_",,terminal_output +8841,9040795,"TERMINAL",0,0,"d",,terminal_output +8842,9041014,"TERMINAL",0,0,"yn_single_gpu.sh ",,terminal_output +8843,9041367,"TERMINAL",0,0,"50555",,terminal_output +8844,9043720,"TERMINAL",0,0,"1888",,terminal_output +8845,9044738,"TERMINAL",0,0,"4999",,terminal_output 
+8846,9045770,"TERMINAL",0,0,"51:001:0040",,terminal_output +8847,9046735,"TERMINAL",0,0,"6111",,terminal_output +8848,9047772,"TERMINAL",0,0,"7222",,terminal_output +8849,9048815,"TERMINAL",0,0,"8333",,terminal_output +8850,9049974,"TERMINAL",0,0,"9444",,terminal_output +8851,9051100,"TERMINAL",0,0,"7:00555",,terminal_output +8852,9052022,"TERMINAL",0,0,"1666",,terminal_output +8853,9052994,"TERMINAL",0,0,"2777",,terminal_output +8854,9054061,"TERMINAL",0,0,"3888",,terminal_output +8855,9055073,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +8856,9055327,"TERMINAL",0,0,"4999",,terminal_output +8857,9056168,"TERMINAL",0,0,"5101050",,terminal_output +8858,9057160,"TERMINAL",0,0,"6111",,terminal_output +8859,9058178,"TERMINAL",0,0,"7222",,terminal_output +8860,9059339,"TERMINAL",0,0,"8333",,terminal_output +8861,9060374,"TERMINAL",0,0,"9444",,terminal_output +8862,9061435,"TERMINAL",0,0,"10666",,terminal_output +8863,9062357,"TERMINAL",0,0,"2777",,terminal_output +8864,9063486,"TERMINAL",0,0,"3888",,terminal_output +8865,9064639,"TERMINAL",0,0,"4999",,terminal_output +8866,9065477,"TERMINAL",0,0,"520205:00",,terminal_output +8867,9065953,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\r\n#SBATCH --job-name=train_dyn_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/interactive/3468835\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/interactive/3468835\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=1000 \\r\n --log \\r\n --name=coinrun-dyn-dev-$slurm_job_id \\r\n --tags dyn coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 200 \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 10 \\r\n --val_steps 50 \\r\n --data_dir $array_records_dir_train \\r\n --lam_checkpoint $lam_checkpoint \\r\n --tokenizer_checkpoint $tokenizer_checkpoint\r\n",,terminal_output 
+8868,9066060,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1362839\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1757077935\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757081535\r\nSLURM_PMI2_SRUN_PORT=37571\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3468835\r\nSLURM_PTY_PORT=43005\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=33\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=196\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=34789\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3468835\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=34789\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +8869,9066160,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +8870,9066556,"TERMINAL",0,0,"6111",,terminal_output +8871,9067586,"TERMINAL",0,0,"7222",,terminal_output +8872,9068712,"TERMINAL",0,0,"8333",,terminal_output +8873,9069736,"TERMINAL",0,0,"9444",,terminal_output +8874,9070697,"TERMINAL",0,0,"20555",,terminal_output +8875,9071739,"TERMINAL",0,0,"1666",,terminal_output +8876,9072775,"TERMINAL",0,0,"2777",,terminal_output +8877,9073817,"TERMINAL",0,0,"3888",,terminal_output +8878,9074890,"TERMINAL",0,0,"4999",,terminal_output +8879,9075908,"TERMINAL",0,0,"5303010",,terminal_output +8880,9076336,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +8881,9076954,"TERMINAL",0,0,"6111",,terminal_output +8882,9077082,"TERMINAL",0,0,"wandb: creating run\r\n",,terminal_output +8883,9077150,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_155726-86m0lb4s\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-dyn-dev-3468835\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/86m0lb4s\r\n",,terminal_output +8884,9078023,"TERMINAL",0,0,"7222",,terminal_output +8885,9079082,"TERMINAL",0,0,"8333",,terminal_output +8886,9080106,"TERMINAL",0,0,"9444",,terminal_output +8887,9081205,"TERMINAL",0,0,"30555",,terminal_output +8888,9082191,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. 
Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +8889,9082251,"TERMINAL",0,0,"1666",,terminal_output +8890,9083349,"TERMINAL",0,0,"2777",,terminal_output +8891,9084274,"TERMINAL",0,0,"3999",,terminal_output +8892,9085334,"TERMINAL",0,0,"5404020",,terminal_output +8893,9086370,"TERMINAL",0,0,"6111",,terminal_output +8894,9087408,"TERMINAL",0,0,"7222",,terminal_output +8895,9087908,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35115232, 'tokenizer': 33750256, 'total': 95421392}\r\nStarting training from step 0...\r\n",,terminal_output +8896,9088445,"TERMINAL",0,0,"8333",,terminal_output +8897,9089493,"TERMINAL",0,0,"9444",,terminal_output +8898,9090531,"TERMINAL",0,0,"40555",,terminal_output +8899,9091577,"TERMINAL",0,0,"1666",,terminal_output +8900,9092656,"TERMINAL",0,0,"2777",,terminal_output +8901,9093698,"TERMINAL",0,0,"3888",,terminal_output +8902,9094726,"TERMINAL",0,0,"4999",,terminal_output +8903,9095748,"TERMINAL",0,0,"5505030",,terminal_output +8904,9096786,"TERMINAL",0,0,"6111",,terminal_output +8905,9097885,"TERMINAL",0,0,"7222",,terminal_output +8906,9098914,"TERMINAL",0,0,"8333",,terminal_output +8907,9099906,"TERMINAL",0,0,"9444",,terminal_output +8908,9100661,"TERMINAL",0,0,"2025-09-05 15:57:50.320789: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:57:50.321194: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:57:50.321755: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:57:50.322254: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:57:50.323485: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 15:57:50.324791: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +8909,9101064,"TERMINAL",0,0,"50555",,terminal_output +8910,9102088,"TERMINAL",0,0,"1666",,terminal_output +8911,9103043,"TERMINAL",0,0,"2777",,terminal_output +8912,9104088,"TERMINAL",0,0,"3888",,terminal_output +8913,9105137,"TERMINAL",0,0,"4999",,terminal_output +8914,9106175,"TERMINAL",0,0,"52:002:0040",,terminal_output +8915,9107215,"TERMINAL",0,0,"6111",,terminal_output +8916,9108366,"TERMINAL",0,0,"7222",,terminal_output +8917,9109393,"TERMINAL",0,0,"8444",,terminal_output +8918,9110350,"TERMINAL",0,0,"8:00555",,terminal_output +8919,9111390,"TERMINAL",0,0,"1666",,terminal_output +8920,9112541,"TERMINAL",0,0,"2777",,terminal_output +8921,9113495,"TERMINAL",0,0,"3888",,terminal_output +8922,9114524,"TERMINAL",0,0,"4999",,terminal_output +8923,9115768,"TERMINAL",0,0,"5101050",,terminal_output +8924,9116641,"TERMINAL",0,0,"6111",,terminal_output +8925,9117725,"TERMINAL",0,0,"7222",,terminal_output +8926,9118708,"TERMINAL",0,0,"8333",,terminal_output +8927,9119758,"TERMINAL",0,0,"9444",,terminal_output +8928,9120788,"TERMINAL",0,0,"10555",,terminal_output +8929,9121852,"TERMINAL",0,0,"1666",,terminal_output +8930,9122865,"TERMINAL",0,0,"2777",,terminal_output +8931,9124009,"TERMINAL",0,0,"3888",,terminal_output +8932,9124957,"TERMINAL",0,0,"4999",,terminal_output +8933,9125999,"TERMINAL",0,0,"520206:00",,terminal_output +8934,9127491,"TERMINAL",0,0,"6111",,terminal_output +8935,9128219,"TERMINAL",0,0,"7222",,terminal_output +8936,9129227,"TERMINAL",0,0,"8333",,terminal_output +8937,9130177,"TERMINAL",0,0,"9444",,terminal_output +8938,9131212,"TERMINAL",0,0,"20555",,terminal_output +8939,9132303,"TERMINAL",0,0,"1666",,terminal_output +8940,9133321,"TERMINAL",0,0,"2888",,terminal_output +8941,9134367,"TERMINAL",0,0,"4999",,terminal_output +8942,9135502,"TERMINAL",0,0,"5303010",,terminal_output +8943,9136503,"TERMINAL",0,0,"6111",,terminal_output +8944,9137518,"TERMINAL",0,0,"7222",,terminal_output +8945,9138499,"TERMINAL",0,0,"8333",,terminal_output +8946,9139655,"TERMINAL",0,0,"9444",,terminal_output +8947,9140693,"TERMINAL",0,0,"30555",,terminal_output +8948,9141745,"TERMINAL",0,0,"1666",,terminal_output +8949,9142721,"TERMINAL",0,0,"2777",,terminal_output +8950,9143772,"TERMINAL",0,0,"3888",,terminal_output +8951,9144753,"TERMINAL",0,0,"4999",,terminal_output +8952,9145799,"TERMINAL",0,0,"5404020",,terminal_output +8953,9146828,"TERMINAL",0,0,"6111",,terminal_output +8954,9147873,"TERMINAL",0,0,"7222",,terminal_output +8955,9148995,"TERMINAL",0,0,"8333",,terminal_output +8956,9150019,"TERMINAL",0,0,"9444",,terminal_output +8957,9151036,"TERMINAL",0,0,"40555",,terminal_output +8958,9152070,"TERMINAL",0,0,"1666",,terminal_output +8959,9153201,"TERMINAL",0,0,"2777",,terminal_output +8960,9154219,"TERMINAL",0,0,"3888",,terminal_output +8961,9155251,"TERMINAL",0,0,"4999",,terminal_output +8962,9156207,"TERMINAL",0,0,"5505030",,terminal_output +8963,9157290,"TERMINAL",0,0,"6111",,terminal_output +8964,9158420,"TERMINAL",0,0,"7333",,terminal_output +8965,9159364,"TERMINAL",0,0,"9444",,terminal_output +8966,9160489,"TERMINAL",0,0,"50555",,terminal_output +8967,9161480,"TERMINAL",0,0,"1666",,terminal_output +8968,9162512,"TERMINAL",0,0,"2777",,terminal_output +8969,9163514,"TERMINAL",0,0,"3888",,terminal_output +8970,9164674,"TERMINAL",0,0,"4999",,terminal_output +8971,9165679,"TERMINAL",0,0,"53:003:0040",,terminal_output 
+8972,9166743,"TERMINAL",0,0,"6111",,terminal_output +8973,9167121,"TERMINAL",0,0,"Step 0, loss: 13.707112312316895\r\nStep 1, loss: 12.787692070007324\r\nStep 2, loss: 14.445314407348633\r\nStep 3, loss: 6.839857578277588\r\nStep 4, loss: 5.966392993927002\r\nStep 5, loss: 9.156126022338867\r\nStep 6, loss: 8.733874320983887\r\nStep 7, loss: 7.992668151855469\r\nStep 8, loss: 5.01971960067749\r\nStep 9, loss: 4.375117778778076\r\nCalculating validation metrics...\r\n",,terminal_output +8974,9167728,"TERMINAL",0,0,"7222",,terminal_output +8975,9168753,"TERMINAL",0,0,"8333",,terminal_output +8976,9169770,"TERMINAL",0,0,"9444",,terminal_output +8977,9170808,"TERMINAL",0,0,"9:00555",,terminal_output +8978,9171280,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_dynamics.py"", line 418, in \r\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val, optimizer.model)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_dynamics.py"", line 171, in calculate_validation_metrics\r\n loss, recon, metrics = val_step(genie, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/flax/nnx/transforms/compilation.py"", line 431, in __call__\r\n pure_args_out, pure_kwargs_out, pure_out = self.jitted_fn(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/flax/nnx/transforms/compilation.py"", line 126, in __call__\r\n out = self.f(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_dynamics.py"", line 161, in val_step\r\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_dynamics.py"", line 103, in dynamics_loss_fn\r\n outputs = model(inputs, training=True)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py"", line 161, in __call__\r\n outputs[""mask_rng""] = batch[""mask_rng""]\r\nKeyError: 'mask_rng'\r\n",,terminal_output +8979,9171845,"TERMINAL",0,0,"1666",,terminal_output +8980,9172883,"TERMINAL",0,0,"2777",,terminal_output +8981,9173265,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-dyn-dev-3468835 at: https://wandb.ai/instant-uv/jafar/runs/86m0lb4s\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_155726-86m0lb4s/logs\r\n",,terminal_output +8982,9173936,"TERMINAL",0,0,"3888",,terminal_output +8983,9174408,"TERMINAL",0,0,"/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 20 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +8984,9174774,"TERMINAL",0,0,"srun: error: hkn0402: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +8985,9175012,"TERMINAL",0,0,"4999",,terminal_output 
+8986,9176091,"TERMINAL",0,0,"5101050",,terminal_output +8987,9177060,"TERMINAL",0,0,"6111",,terminal_output +8988,9178105,"TERMINAL",0,0,"7222",,terminal_output +8989,9179190,"TERMINAL",0,0,"8333",,terminal_output +8990,9180196,"TERMINAL",0,0,"9444",,terminal_output +8991,9181354,"TERMINAL",0,0,"10555",,terminal_output +8992,9182378,"TERMINAL",0,0,"1777",,terminal_output +8993,9183403,"TERMINAL",0,0,"3888",,terminal_output +8994,9184372,"TERMINAL",0,0,"4999",,terminal_output +8995,9185456,"TERMINAL",0,0,"520207:00",,terminal_output +8996,9186201,"train_lam.py",0,0,"",python,tab +8997,9186678,"TERMINAL",0,0,"6111",,terminal_output +8998,9187469,"train_dynamics.py",0,0,"",python,tab +8999,9187613,"TERMINAL",0,0,"7222",,terminal_output +9000,9188569,"TERMINAL",0,0,"8333",,terminal_output +9001,9189637,"TERMINAL",0,0,"9444",,terminal_output +9002,9190703,"TERMINAL",0,0,"20555",,terminal_output +9003,9191771,"TERMINAL",0,0,"1666",,terminal_output +9004,9192745,"TERMINAL",0,0,"2777",,terminal_output +9005,9193874,"TERMINAL",0,0,"3888",,terminal_output +9006,9194812,"TERMINAL",0,0,"4999",,terminal_output +9007,9195857,"TERMINAL",0,0,"5303010",,terminal_output +9008,9196894,"TERMINAL",0,0,"6111",,terminal_output +9009,9197992,"TERMINAL",0,0,"7222",,terminal_output +9010,9198973,"TERMINAL",0,0,"8333",,terminal_output +9011,9200112,"TERMINAL",0,0,"9444",,terminal_output +9012,9201117,"TERMINAL",0,0,"30555",,terminal_output +9013,9202145,"TERMINAL",0,0,"1666",,terminal_output +9014,9203158,"TERMINAL",0,0,"2777",,terminal_output +9015,9204295,"TERMINAL",0,0,"3888",,terminal_output +9016,9205009,"train_dynamics.py",14426,0,"",python,selection_mouse +9017,9205236,"TERMINAL",0,0,"4999",,terminal_output +9018,9205686,"train_dynamics.py",14419,0,"",python,selection_mouse +9019,9205818,"train_dynamics.py",14414,8,"mask_rng",python,selection_mouse +9020,9206334,"TERMINAL",0,0,"5414121",,terminal_output +9021,9207359,"TERMINAL",0,0,"7222",,terminal_output +9022,9208404,"TERMINAL",0,0,"8333",,terminal_output +9023,9209422,"TERMINAL",0,0,"9444",,terminal_output +9024,9210542,"TERMINAL",0,0,"40555",,terminal_output +9025,9211410,"train_dynamics.py",14428,0,"",python,selection_mouse +9026,9211529,"TERMINAL",0,0,"1666",,terminal_output +9027,9211583,"train_dynamics.py",14423,9,"_rng_mask",python,selection_mouse +9028,9212586,"TERMINAL",0,0,"2777",,terminal_output +9029,9213707,"TERMINAL",0,0,"3888",,terminal_output +9030,9214729,"TERMINAL",0,0,"4999",,terminal_output +9031,9215744,"TERMINAL",0,0,"5505030",,terminal_output +9032,9216735,"TERMINAL",0,0,"6111",,terminal_output +9033,9217768,"TERMINAL",0,0,"7222",,terminal_output +9034,9218814,"TERMINAL",0,0,"8333",,terminal_output +9035,9219869,"TERMINAL",0,0,"9444",,terminal_output +9036,9220919,"TERMINAL",0,0,"50555",,terminal_output +9037,9222136,"TERMINAL",0,0,"1666",,terminal_output +9038,9222318,"TERMINAL",0,0,"\r(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +9039,9223069,"TERMINAL",0,0,"2777",,terminal_output +9040,9224105,"TERMINAL",0,0,"3888",,terminal_output +9041,9225409,"TERMINAL",0,0,"4999",,terminal_output +9042,9226217,"TERMINAL",0,0,"54:004:0040",,terminal_output +9043,9227150,"TERMINAL",0,0,"6111",,terminal_output +9044,9228202,"TERMINAL",0,0,"7222",,terminal_output +9045,9229355,"TERMINAL",0,0,"8333",,terminal_output +9046,9230278,"TERMINAL",0,0,"9555",,terminal_output +9047,9231331,"TERMINAL",0,0,"6:00:01666",,terminal_output +9048,9232459,"TERMINAL",0,0,"2777",,terminal_output +9049,9233489,"TERMINAL",0,0,"3888",,terminal_output 
+9050,9234458,"TERMINAL",0,0,"4999",,terminal_output +9051,9235523,"TERMINAL",0,0,"5101050",,terminal_output +9052,9236547,"TERMINAL",0,0,"6111",,terminal_output +9053,9236916,"train_dynamics.py",14738,0,"",python,selection_mouse +9054,9237802,"train_dynamics.py",14738,0,"\n ",python,content +9055,9237805,"TERMINAL",0,0,"7222",,terminal_output +9056,9238601,"TERMINAL",0,0,"8333",,terminal_output +9057,9239249,"train_dynamics.py",14739,16,"",python,content +9058,9239686,"train_dynamics.py",14361,0,"",python,selection_mouse +9059,9239711,"TERMINAL",0,0,"9444",,terminal_output +9060,9240749,"TERMINAL",0,0,"10555",,terminal_output +9061,9240893,"train_dynamics.py",14415,0,"",python,selection_command +9062,9241069,"train_dynamics.py",14476,0,"",python,selection_command +9063,9241253,"train_dynamics.py",14558,0,"",python,selection_command +9064,9241345,"train_dynamics.py",14604,0,"",python,selection_command +9065,9241497,"train_dynamics.py",14630,0,"",python,selection_command +9066,9241648,"train_dynamics.py",14632,0,"",python,selection_command +9067,9241744,"TERMINAL",0,0,"1666",,terminal_output +9068,9241777,"train_dynamics.py",14669,0,"",python,selection_command +9069,9241942,"train_dynamics.py",14713,0,"",python,selection_command +9070,9242268,"train_dynamics.py",14739,0,"",python,selection_command +9071,9242484,"train_dynamics.py",14739,0,"\n rng, _rng_mask = jax.random.split(rng, 2)",python,content +9072,9242541,"train_dynamics.py",14752,0,"",python,selection_command +9073,9242775,"TERMINAL",0,0,"2777",,terminal_output +9074,9243080,"train_dynamics.py",14739,0,"",python,selection_command +9075,9243475,"train_dynamics.py",14752,0,"",python,selection_command +9076,9243835,"TERMINAL",0,0,"3888",,terminal_output +9077,9244554,"train_dynamics.py",14752,0," ",python,content +9078,9244858,"TERMINAL",0,0,"4999",,terminal_output +9079,9245047,"train_dynamics.py",14755,0,"",python,selection_command +9080,9245348,"train_dynamics.py",14755,42,"",python,content +9081,9245400,"train_dynamics.py",14754,0,"",python,selection_command +9082,9245520,"train_dynamics.py",14754,1,"",python,content +9083,9245546,"train_dynamics.py",14753,0,"",python,selection_command +9084,9245923,"TERMINAL",0,0,"520208:00",,terminal_output +9085,9246961,"TERMINAL",0,0,"6111",,terminal_output +9086,9247568,"train_dynamics.py",14754,0," ",python,content +9087,9247586,"train_dynamics.py",14754,0,"",python,selection_command +9088,9247974,"train_dynamics.py",14755,0," rng, _rng_mask = jax.random.split(rng, 2)",python,content +9089,9248008,"train_dynamics.py",14755,0,"",python,selection_command +9090,9248008,"TERMINAL",0,0,"7222",,terminal_output +9091,9248532,"train_dynamics.py",14739,0,"",python,selection_command +9092,9249075,"train_dynamics.py",14739,1,"",python,content +9093,9249075,"train_dynamics.py",14755,0,"",python,selection_command +9094,9249075,"TERMINAL",0,0,"8333",,terminal_output +9095,9249321,"train_dynamics.py",14813,0,"",python,selection_command +9096,9250154,"TERMINAL",0,0,"9444",,terminal_output +9097,9251157,"TERMINAL",0,0,"20555",,terminal_output +9098,9251374,"train_dynamics.py",14770,0,"",python,selection_mouse +9099,9252124,"TERMINAL",0,0,"1666",,terminal_output +9100,9252332,"train_dynamics.py",14769,0,"",python,selection_mouse +9101,9253175,"TERMINAL",0,0,"2777",,terminal_output +9102,9253622,"train_dynamics.py",14769,0,"_",python,content +9103,9253623,"train_dynamics.py",14770,0,"",python,selection_keyboard +9104,9253910,"train_dynamics.py",14770,0,"v",python,content 
+9105,9253911,"train_dynamics.py",14771,0,"",python,selection_keyboard +9106,9253961,"train_dynamics.py",14771,0,"a",python,content +9107,9253962,"train_dynamics.py",14772,0,"",python,selection_keyboard +9108,9253979,"train_dynamics.py",14772,0,"l",python,content +9109,9253980,"train_dynamics.py",14773,0,"",python,selection_keyboard +9110,9254254,"TERMINAL",0,0,"3888",,terminal_output +9111,9254662,"train_dynamics.py",14772,0,"",python,selection_command +9112,9255262,"TERMINAL",0,0,"4999",,terminal_output +9113,9256416,"TERMINAL",0,0,"5313111",,terminal_output +9114,9257341,"TERMINAL",0,0,"7222",,terminal_output +9115,9258372,"TERMINAL",0,0,"8333",,terminal_output +9116,9259408,"TERMINAL",0,0,"9444",,terminal_output +9117,9259618,"train_dynamics.py",14986,0,"",python,selection_mouse +9118,9260518,"TERMINAL",0,0,"30555",,terminal_output +9119,9261310,"train_dynamics.py",14986,0,",",python,content +9120,9261311,"train_dynamics.py",14987,0,"",python,selection_keyboard +9121,9261488,"TERMINAL",0,0,"1666",,terminal_output +9122,9262528,"TERMINAL",0,0,"2777",,terminal_output +9123,9263464,"train_dynamics.py",14987,0," ",python,content +9124,9263464,"train_dynamics.py",14988,0,"",python,selection_keyboard +9125,9263570,"TERMINAL",0,0,"3888",,terminal_output +9126,9264287,"train_dynamics.py",14988,0,"r",python,content +9127,9264288,"train_dynamics.py",14989,0,"",python,selection_keyboard +9128,9264627,"TERMINAL",0,0,"4999",,terminal_output +9129,9264995,"train_dynamics.py",14988,1,"",python,content +9130,9265540,"train_dynamics.py",14988,0,"_",python,content +9131,9265541,"train_dynamics.py",14989,0,"",python,selection_keyboard +9132,9265641,"TERMINAL",0,0,"5404020",,terminal_output +9133,9265819,"train_dynamics.py",14989,0,"r",python,content +9134,9265821,"train_dynamics.py",14990,0,"",python,selection_keyboard +9135,9266016,"train_dynamics.py",14990,0,"n",python,content +9136,9266017,"train_dynamics.py",14991,0,"",python,selection_keyboard +9137,9266768,"TERMINAL",0,0,"6111",,terminal_output +9138,9267258,"train_dynamics.py",14991,0,"g",python,content +9139,9267259,"train_dynamics.py",14992,0,"",python,selection_keyboard +9140,9267429,"train_dynamics.py",14992,0,"_",python,content +9141,9267431,"train_dynamics.py",14993,0,"",python,selection_keyboard +9142,9267694,"train_dynamics.py",14993,0,"m",python,content +9143,9267695,"train_dynamics.py",14994,0,"",python,selection_keyboard +9144,9267763,"TERMINAL",0,0,"7222",,terminal_output +9145,9267820,"train_dynamics.py",14994,0,"a",python,content +9146,9267822,"train_dynamics.py",14995,0,"",python,selection_keyboard +9147,9267874,"train_dynamics.py",14995,0,"s",python,content +9148,9267875,"train_dynamics.py",14996,0,"",python,selection_keyboard +9149,9267967,"train_dynamics.py",14996,0,"k",python,content +9150,9267968,"train_dynamics.py",14997,0,"",python,selection_keyboard +9151,9268226,"train_dynamics.py",14997,0,"_",python,content +9152,9268227,"train_dynamics.py",14998,0,"",python,selection_keyboard +9153,9268345,"train_dynamics.py",14998,0,"v",python,content +9154,9268346,"train_dynamics.py",14999,0,"",python,selection_keyboard +9155,9268545,"train_dynamics.py",14999,0,"a",python,content +9156,9268546,"train_dynamics.py",15000,0,"",python,selection_keyboard +9157,9268627,"train_dynamics.py",15000,0,"l",python,content +9158,9268628,"train_dynamics.py",15001,0,"",python,selection_keyboard +9159,9268800,"TERMINAL",0,0,"8333",,terminal_output +9160,9269811,"TERMINAL",0,0,"9444",,terminal_output +9161,9270845,"TERMINAL",0,0,"40555",,terminal_output 
+9162,9270879,"train_dynamics.py",15000,0,"",python,selection_command +9163,9271943,"TERMINAL",0,0,"1666",,terminal_output +9164,9273012,"TERMINAL",0,0,"2777",,terminal_output +9165,9274011,"TERMINAL",0,0,"3888",,terminal_output +9166,9275045,"TERMINAL",0,0,"4999",,terminal_output +9167,9276064,"TERMINAL",0,0,"5505030",,terminal_output +9168,9277119,"TERMINAL",0,0,"6111",,terminal_output +9169,9278151,"TERMINAL",0,0,"7222",,terminal_output +9170,9279190,"TERMINAL",0,0,"8333",,terminal_output +9171,9280220,"TERMINAL",0,0,"9444",,terminal_output +9172,9281297,"TERMINAL",0,0,"50555",,terminal_output +9173,9282067,"train_dynamics.py",14948,0,"",python,selection_mouse +9174,9282285,"train_dynamics.py",5265,0,"",python,selection_command +9175,9282374,"TERMINAL",0,0,"1777",,terminal_output +9176,9283448,"TERMINAL",0,0,"3888",,terminal_output +9177,9283620,"train_dynamics.py",5316,0,"",python,selection_mouse +9178,9284378,"TERMINAL",0,0,"4999",,terminal_output +9179,9284554,"train_dynamics.py",5315,0,"",python,selection_mouse +9180,9285427,"TERMINAL",0,0,"55:005:0040",,terminal_output +9181,9285595,"train_dynamics.py",5315,0,",",python,content +9182,9285597,"train_dynamics.py",5316,0,"",python,selection_keyboard +9183,9285710,"train_dynamics.py",5316,0," ",python,content +9184,9285711,"train_dynamics.py",5317,0,"",python,selection_keyboard +9185,9285912,"train_dynamics.py",5317,0,"r",python,content +9186,9285912,"train_dynamics.py",5318,0,"",python,selection_keyboard +9187,9285981,"train_dynamics.py",5318,0,"n",python,content +9188,9285981,"train_dynamics.py",5319,0,"",python,selection_keyboard +9189,9286039,"train_dynamics.py",5319,0,"g",python,content +9190,9286039,"train_dynamics.py",5320,0,"",python,selection_keyboard +9191,9286478,"TERMINAL",0,0,"6111",,terminal_output +9192,9287495,"TERMINAL",0,0,"7222",,terminal_output +9193,9288441,"train_dynamics.py",5320,0,"_",python,content +9194,9288442,"train_dynamics.py",5321,0,"",python,selection_keyboard +9195,9288554,"TERMINAL",0,0,"8333",,terminal_output +9196,9288728,"train_dynamics.py",5321,0,"m",python,content +9197,9288728,"train_dynamics.py",5322,0,"",python,selection_keyboard +9198,9288779,"train_dynamics.py",5322,0,"a",python,content +9199,9288780,"train_dynamics.py",5323,0,"",python,selection_keyboard +9200,9288895,"train_dynamics.py",5323,0,"s",python,content +9201,9288896,"train_dynamics.py",5324,0,"",python,selection_keyboard +9202,9288941,"train_dynamics.py",5324,0,"k",python,content +9203,9288941,"train_dynamics.py",5325,0,"",python,selection_keyboard +9204,9289597,"TERMINAL",0,0,"9444",,terminal_output +9205,9290740,"TERMINAL",0,0,"1:00555",,terminal_output +9206,9291737,"TERMINAL",0,0,"1666",,terminal_output +9207,9292722,"TERMINAL",0,0,"2777",,terminal_output +9208,9293798,"TERMINAL",0,0,"3888",,terminal_output +9209,9294813,"TERMINAL",0,0,"4999",,terminal_output +9210,9295856,"TERMINAL",0,0,"5101050",,terminal_output +9211,9296921,"TERMINAL",0,0,"6111",,terminal_output +9212,9297948,"TERMINAL",0,0,"7222",,terminal_output +9213,9297962,"train_dynamics.py",5550,0,"",python,selection_mouse +9214,9298193,"train_dynamics.py",5547,3,"ss)",python,selection_mouse +9215,9298194,"train_dynamics.py",5543,7,"d(loss)",python,selection_mouse +9216,9298330,"train_dynamics.py",5538,12,"append(loss)",python,selection_mouse +9217,9298331,"train_dynamics.py",5534,16,"tep.append(loss)",python,selection_mouse +9218,9298331,"train_dynamics.py",5474,76," recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)",python,selection_mouse 
+9219,9298331,"train_dynamics.py",5470,80,"oss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)",python,selection_mouse +9220,9298332,"train_dynamics.py",5469,81,"loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)",python,selection_mouse +9221,9298362,"train_dynamics.py",5468,82," loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)",python,selection_mouse +9222,9298407,"train_dynamics.py",5464,86," loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)",python,selection_mouse +9223,9298437,"train_dynamics.py",5462,88," loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)",python,selection_mouse +9224,9298496,"train_dynamics.py",5461,89," loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)",python,selection_mouse +9225,9298522,"train_dynamics.py",5424,126," inputs = dict(videos=videos)\n loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)",python,selection_mouse +9226,9299016,"TERMINAL",0,0,"8333",,terminal_output +9227,9299676,"train_dynamics.py",5432,0,"",python,selection_command +9228,9300127,"TERMINAL",0,0,"9444",,terminal_output +9229,9301075,"TERMINAL",0,0,"10555",,terminal_output +9230,9302152,"TERMINAL",0,0,"1666",,terminal_output +9231,9303162,"TERMINAL",0,0,"2777",,terminal_output +9232,9304368,"TERMINAL",0,0,"3999",,terminal_output +9233,9305468,"TERMINAL",0,0,"520209:00",,terminal_output +9234,9306728,"TERMINAL",0,0,"6111",,terminal_output +9235,9307613,"TERMINAL",0,0,"7222",,terminal_output +9236,9308713,"TERMINAL",0,0,"8333",,terminal_output +9237,9309664,"TERMINAL",0,0,"9444",,terminal_output +9238,9309829,"train_dynamics.py",5591,0,"",python,selection_mouse +9239,9310011,"train_dynamics.py",5589,2,"s)",python,selection_mouse +9240,9310012,"train_dynamics.py",5544,47,"(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9241,9310170,"train_dynamics.py",5485,106,"rics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9242,9310170,"train_dynamics.py",5483,108,"etrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9243,9310171,"train_dynamics.py",5443,148,"ct(videos=videos)\n loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9244,9310171,"train_dynamics.py",5406,185,"n val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9245,9310171,"train_dynamics.py",5339,252,"0\n loss_per_step = []\n metrics_per_step = []\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9246,9310171,"train_dynamics.py",5270,321,"late_validation_metrics(val_dataloader, genie, rng_mask):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9247,9310172,"train_dynamics.py",5259,332,"\n\ndef calculate_validation_metrics(val_dataloader, genie, rng_mask):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n for videos in val_dataloader:\n 
inputs = dict(videos=videos)\n loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9248,9310186,"train_dynamics.py",5232,359,"eturn loss, recon, metrics\n\n\ndef calculate_validation_metrics(val_dataloader, genie, rng_mask):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9249,9310207,"train_dynamics.py",5231,360,"return loss, recon, metrics\n\n\ndef calculate_validation_metrics(val_dataloader, genie, rng_mask):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9250,9310253,"train_dynamics.py",5168,423,"(loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n return loss, recon, metrics\n\n\ndef calculate_validation_metrics(val_dataloader, genie, rng_mask):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9251,9310309,"train_dynamics.py",5149,442," genie.eval()\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n return loss, recon, metrics\n\n\ndef calculate_validation_metrics(val_dataloader, genie, rng_mask):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9252,9310341,"train_dynamics.py",5103,488," """"""Evaluate model and compute metrics""""""\n genie.eval()\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n return loss, recon, metrics\n\n\ndef calculate_validation_metrics(val_dataloader, genie, rng_mask):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9253,9310379,"train_dynamics.py",5024,567,"ef val_step(genie: Genie, inputs: dict) -> tuple[jax.Array, jax.Array, dict]:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n return loss, recon, metrics\n\n\ndef calculate_validation_metrics(val_dataloader, genie, rng_mask):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9254,9310428,"train_dynamics.py",5023,568,"def val_step(genie: Genie, inputs: dict) -> tuple[jax.Array, jax.Array, dict]:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n return loss, recon, metrics\n\n\ndef calculate_validation_metrics(val_dataloader, genie, rng_mask):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(genie, inputs)\n 
loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9255,9310637,"TERMINAL",0,0,"20555",,terminal_output +9256,9311711,"TERMINAL",0,0,"1666",,terminal_output +9257,9312744,"TERMINAL",0,0,"2777",,terminal_output +9258,9313786,"TERMINAL",0,0,"3888",,terminal_output +9259,9314027,"train_dynamics.py",5591,0,"",python,selection_mouse +9260,9314711,"train_dynamics.py",5550,41,"\n metrics_per_step.append(metrics)",python,selection_mouse +9261,9314745,"train_dynamics.py",5539,52,"ppend(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9262,9314783,"train_dynamics.py",5476,115,"econ, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9263,9314784,"train_dynamics.py",5469,122,"loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9264,9314862,"train_dynamics.py",5429,162," inputs = dict(videos=videos)\n loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9265,9314863,"train_dynamics.py",5426,165," inputs = dict(videos=videos)\n loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9266,9314863,"train_dynamics.py",5424,167," inputs = dict(videos=videos)\n loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9267,9314863,"train_dynamics.py",5390,201," for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,selection_mouse +9268,9314912,"TERMINAL",0,0,"4999",,terminal_output +9269,9315726,"train_dynamics.py",5390,201,"K",python,content +9270,9315877,"TERMINAL",0,0,"5303010",,terminal_output +9271,9316011,"train_dynamics.py",5391,0,"u",python,content +9272,9316013,"train_dynamics.py",5392,0,"",python,selection_keyboard +9273,9316128,"train_dynamics.py",5392,0,"s",python,content +9274,9316130,"train_dynamics.py",5393,0,"",python,selection_keyboard +9275,9316905,"TERMINAL",0,0,"6111",,terminal_output +9276,9317329,"train_dynamics.py",5390,3," for videos in val_dataloader:\n inputs = dict(videos=videos)\n loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)",python,content +9277,9317959,"TERMINAL",0,0,"7222",,terminal_output +9278,9318992,"TERMINAL",0,0,"8333",,terminal_output +9279,9319858,"train_dynamics.py",5394,0,"",python,selection_command +9280,9320086,"TERMINAL",0,0,"9444",,terminal_output +9281,9321098,"TERMINAL",0,0,"30555",,terminal_output +9282,9322160,"TERMINAL",0,0,"1666",,terminal_output +9283,9323179,"TERMINAL",0,0,"2777",,terminal_output +9284,9324225,"TERMINAL",0,0,"3888",,terminal_output +9285,9325278,"TERMINAL",0,0,"4999",,terminal_output +9286,9326344,"TERMINAL",0,0,"5414121",,terminal_output +9287,9327371,"TERMINAL",0,0,"7222",,terminal_output +9288,9328422,"TERMINAL",0,0,"8333",,terminal_output +9289,9329452,"TERMINAL",0,0,"9444",,terminal_output +9290,9330511,"TERMINAL",0,0,"40555",,terminal_output +9291,9331542,"TERMINAL",0,0,"1666",,terminal_output +9292,9332586,"TERMINAL",0,0,"2777",,terminal_output +9293,9333632,"TERMINAL",0,0,"3888",,terminal_output +9294,9334725,"TERMINAL",0,0,"4999",,terminal_output 
+9295,9335527,"train_dynamics.py",5390,0,"",python,selection_command +9296,9335757,"TERMINAL",0,0,"5505030",,terminal_output +9297,9336762,"TERMINAL",0,0,"6111",,terminal_output +9298,9337827,"TERMINAL",0,0,"7222",,terminal_output +9299,9338852,"TERMINAL",0,0,"8333",,terminal_output +9300,9339128,"train_dynamics.py",5424,0," rng, rng_mask = jax.random.split(rng_mask, 2)\n",python,content +9301,9339528,"train_dynamics.py",5478,0," inputs = dict(videos=videos, mask_rng=rng_mask)\n",python,content +9302,9339892,"TERMINAL",0,0,"9444",,terminal_output +9303,9339942,"train_dynamics.py",5534,37,"",python,content +9304,9340955,"TERMINAL",0,0,"50555",,terminal_output +9305,9342020,"TERMINAL",0,0,"1666",,terminal_output +9306,9343056,"TERMINAL",0,0,"2777",,terminal_output +9307,9344076,"TERMINAL",0,0,"3888",,terminal_output +9308,9345128,"TERMINAL",0,0,"4999",,terminal_output +9309,9346161,"TERMINAL",0,0,"56:006:0040",,terminal_output +9310,9347533,"TERMINAL",0,0,"6111",,terminal_output +9311,9348260,"TERMINAL",0,0,"7222",,terminal_output +9312,9349300,"TERMINAL",0,0,"8444",,terminal_output +9313,9350406,"TERMINAL",0,0,"2:00555",,terminal_output +9314,9351447,"TERMINAL",0,0,"1666",,terminal_output +9315,9352570,"TERMINAL",0,0,"2777",,terminal_output +9316,9353613,"TERMINAL",0,0,"3888",,terminal_output +9317,9354559,"TERMINAL",0,0,"4999",,terminal_output +9318,9355570,"TERMINAL",0,0,"5101050",,terminal_output +9319,9356615,"TERMINAL",0,0,"6111",,terminal_output +9320,9357786,"TERMINAL",0,0,"7222",,terminal_output +9321,9358728,"TERMINAL",0,0,"8333",,terminal_output +9322,9359468,"train_dynamics.py",5442,0,"",python,selection_mouse +9323,9359628,"train_dynamics.py",5437,8,"rng_mask",python,selection_mouse +9324,9359744,"TERMINAL",0,0,"9444",,terminal_output +9325,9360832,"TERMINAL",0,0,"10555",,terminal_output +9326,9361844,"TERMINAL",0,0,"1666",,terminal_output +9327,9361936,"train_dynamics.py",5487,0,"",python,selection_mouse +9328,9362879,"TERMINAL",0,0,"2777",,terminal_output +9329,9363922,"TERMINAL",0,0,"3888",,terminal_output +9330,9365166,"TERMINAL",0,0,"4999",,terminal_output +9331,9366011,"TERMINAL",0,0,"5202050:00",,terminal_output +9332,9366869,"train_dynamics.py",5537,0,"",python,selection_mouse +9333,9367059,"TERMINAL",0,0,"6111",,terminal_output +9334,9367802,"train_dynamics.py",5437,0,"",python,selection_mouse +9335,9368114,"TERMINAL",0,0,"7222",,terminal_output +9336,9369251,"TERMINAL",0,0,"8333",,terminal_output +9337,9369842,"train_dynamics.py",5340,0,"",python,selection_mouse +9338,9370275,"TERMINAL",0,0,"9444",,terminal_output +9339,9370361,"train_dynamics.py",5324,16,"k):\n step = 0",python,selection_mouse +9340,9371115,"train_dynamics.py",5324,0,"",python,selection_mouse +9341,9371288,"TERMINAL",0,0,"20555",,terminal_output +9342,9371678,"train_dynamics.py",5325,0,"",python,selection_command +9343,9371952,"train_dynamics.py",5324,1,"",python,content +9344,9372054,"train_dynamics.py",5323,1,"",python,content +9345,9372211,"train_dynamics.py",5322,1,"",python,content +9346,9372299,"TERMINAL",0,0,"1777",,terminal_output +9347,9372337,"train_dynamics.py",5321,1,"",python,content +9348,9372652,"train_dynamics.py",5320,1,"",python,content +9349,9373361,"TERMINAL",0,0,"3888",,terminal_output +9350,9374503,"TERMINAL",0,0,"4999",,terminal_output +9351,9375425,"TERMINAL",0,0,"5303010",,terminal_output +9352,9376522,"TERMINAL",0,0,"6111",,terminal_output +9353,9377249,"train_dynamics.py",5468,0,"",python,selection_mouse +9354,9377510,"TERMINAL",0,0,"7222",,terminal_output 
+9355,9377921,"train_dynamics.py",5467,1,"",python,content +9356,9378068,"train_dynamics.py",5466,1,"",python,content +9357,9378211,"train_dynamics.py",5465,1,"",python,content +9358,9378373,"train_dynamics.py",5464,1,"",python,content +9359,9378545,"TERMINAL",0,0,"8333",,terminal_output +9360,9378822,"train_dynamics.py",5463,1,"",python,content +9361,9379590,"TERMINAL",0,0,"9444",,terminal_output +9362,9380640,"TERMINAL",0,0,"30555",,terminal_output +9363,9381679,"TERMINAL",0,0,"1666",,terminal_output +9364,9382763,"TERMINAL",0,0,"2777",,terminal_output +9365,9383800,"TERMINAL",0,0,"3888",,terminal_output +9366,9384828,"TERMINAL",0,0,"4999",,terminal_output +9367,9385912,"TERMINAL",0,0,"5404020",,terminal_output +9368,9386920,"TERMINAL",0,0,"6111",,terminal_output +9369,9388007,"TERMINAL",0,0,"7222",,terminal_output +9370,9388983,"TERMINAL",0,0,"8333",,terminal_output +9371,9390098,"TERMINAL",0,0,"9444",,terminal_output +9372,9391073,"TERMINAL",0,0,"40555",,terminal_output +9373,9392141,"TERMINAL",0,0,"1666",,terminal_output +9374,9393317,"train_dynamics.py",15173,0,"",python,selection_command +9375,9393380,"TERMINAL",0,0,"2777",,terminal_output +9376,9394244,"TERMINAL",0,0,"3888",,terminal_output +9377,9395097,"train_dynamics.py",14440,0,"",python,selection_mouse +9378,9395268,"TERMINAL",0,0,"4999",,terminal_output +9379,9395371,"train_dynamics.py",14438,3,"rng",python,selection_mouse +9380,9396292,"TERMINAL",0,0,"5515131",,terminal_output +9381,9397420,"TERMINAL",0,0,"7222",,terminal_output +9382,9398380,"TERMINAL",0,0,"8333",,terminal_output +9383,9399436,"TERMINAL",0,0,"9444",,terminal_output +9384,9400593,"TERMINAL",0,0,"50555",,terminal_output +9385,9401561,"TERMINAL",0,0,"1666",,terminal_output +9386,9402646,"TERMINAL",0,0,"2777",,terminal_output +9387,9403659,"TERMINAL",0,0,"3888",,terminal_output +9388,9404691,"TERMINAL",0,0,"4999",,terminal_output +9389,9406759,"TERMINAL",0,0,"57:017:0141",,terminal_output +9390,9407796,"TERMINAL",0,0,"7222",,terminal_output +9391,9408860,"TERMINAL",0,0,"8333",,terminal_output +9392,9409933,"TERMINAL",0,0,"9444",,terminal_output +9393,9410795,"train_dynamics.py",5463,0,"",python,selection_mouse +9394,9410907,"TERMINAL",0,0,"3:00555",,terminal_output +9395,9411953,"TERMINAL",0,0,"1666",,terminal_output +9396,9413010,"TERMINAL",0,0,"2777",,terminal_output +9397,9414030,"TERMINAL",0,0,"3888",,terminal_output +9398,9415257,"TERMINAL",0,0,"4999",,terminal_output +9399,9416011,"train_dynamics.py",5486,0,"",python,selection_mouse +9400,9416228,"TERMINAL",0,0,"5101050",,terminal_output +9401,9416619,"train_dynamics.py",5439,0,"",python,selection_mouse +9402,9417183,"TERMINAL",0,0,"6111",,terminal_output +9403,9418215,"TERMINAL",0,0,"7222",,terminal_output +9404,9419270,"TERMINAL",0,0,"8333",,terminal_output +9405,9420019,"train_dynamics.py",5432,0,"",python,selection_mouse +9406,9420307,"TERMINAL",0,0,"9555",,terminal_output +9407,9420546,"train_dynamics.py",5435,0,"",python,selection_mouse +9408,9420711,"train_dynamics.py",5432,8,"rng_mask",python,selection_mouse +9409,9421350,"TERMINAL",0,0,"11666",,terminal_output +9410,9422508,"TERMINAL",0,0,"2777",,terminal_output +9411,9423544,"TERMINAL",0,0,"3888",,terminal_output +9412,9424232,"train_dynamics.py",5435,0,"",python,selection_mouse +9413,9424479,"TERMINAL",0,0,"4999",,terminal_output +9414,9425585,"TERMINAL",0,0,"520201:00",,terminal_output +9415,9426338,"train_dynamics.py",5667,0,"",python,selection_mouse +9416,9426577,"TERMINAL",0,0,"6111",,terminal_output 
+9417,9427069,"train_dynamics.py",5484,0,"",python,selection_mouse +9418,9427253,"train_dynamics.py",5484,1," ",python,selection_mouse +9419,9427609,"TERMINAL",0,0,"7222",,terminal_output +9420,9427712,"train_dynamics.py",5437,0,"",python,selection_mouse +9421,9427840,"train_dynamics.py",5432,8,"rng_mask",python,selection_mouse +9422,9428651,"TERMINAL",0,0,"8333",,terminal_output +9423,9429868,"TERMINAL",0,0,"9444",,terminal_output +9424,9430801,"TERMINAL",0,0,"20555",,terminal_output +9425,9431422,"train_dynamics.py",5440,0,"",python,selection_command +9426,9431448,"train_dynamics.py",5514,0,"_",python,content +9427,9431449,"train_dynamics.py",5432,0,"_",python,content +9428,9431775,"TERMINAL",0,0,"1666",,terminal_output +9429,9432852,"TERMINAL",0,0,"2777",,terminal_output +9430,9433916,"TERMINAL",0,0,"3888",,terminal_output +9431,9435236,"TERMINAL",0,0,"4999",,terminal_output +9432,9435556,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh ",,terminal_output +9433,9436002,"TERMINAL",0,0,"5303010",,terminal_output +9434,9436045,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\r\n#SBATCH --job-name=train_dyn_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/interactive/3468835\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/interactive/3468835\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=1000 \\r\n --log \\r\n --name=coinrun-dyn-dev-$slurm_job_id \\r\n --tags dyn coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 200 \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 10 \\r\n --val_steps 50 \\r\n --data_dir $array_records_dir_train \\r\n --lam_checkpoint $lam_checkpoint \\r\n --tokenizer_checkpoint $tokenizer_checkpoint\r\n",,terminal_output 
+9435,9436136,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1362839\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1757077935\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757081535\r\nSLURM_PMI2_SRUN_PORT=37571\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3468835\r\nSLURM_PTY_PORT=43005\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=33\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=196\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=34789\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3468835\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=34789\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +9436,9436276,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +9437,9437048,"TERMINAL",0,0,"6111",,terminal_output +9438,9438109,"TERMINAL",0,0,"7222",,terminal_output +9439,9439096,"TERMINAL",0,0,"8333",,terminal_output +9440,9440144,"TERMINAL",0,0,"9444",,terminal_output +9441,9441183,"TERMINAL",0,0,"30555",,terminal_output +9442,9442280,"TERMINAL",0,0,"1666",,terminal_output +9443,9443277,"TERMINAL",0,0,"2888",,terminal_output +9444,9444334,"TERMINAL",0,0,"4999",,terminal_output +9445,9445388,"TERMINAL",0,0,"5404020",,terminal_output +9446,9446032,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +9447,9446571,"TERMINAL",0,0,"6111",,terminal_output +9448,9446837,"TERMINAL",0,0,"wandb: creating run\r\nwandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_160335-bb7pfnj2\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-dyn-dev-3468835\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/bb7pfnj2\r\n",,terminal_output +9449,9447493,"TERMINAL",0,0,"7222",,terminal_output +9450,9448620,"TERMINAL",0,0,"8333",,terminal_output +9451,9449646,"TERMINAL",0,0,"9444",,terminal_output +9452,9450580,"TERMINAL",0,0,"40555",,terminal_output +9453,9451613,"TERMINAL",0,0,"1666",,terminal_output +9454,9452000,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. 
Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +9455,9452654,"TERMINAL",0,0,"2777",,terminal_output +9456,9453744,"TERMINAL",0,0,"3888",,terminal_output +9457,9454764,"TERMINAL",0,0,"4999",,terminal_output +9458,9455796,"TERMINAL",0,0,"5505030",,terminal_output +9459,9456812,"TERMINAL",0,0,"6111",,terminal_output +9460,9457853,"TERMINAL",0,0,"7222",,terminal_output +9461,9457909,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35115232, 'tokenizer': 33750256, 'total': 95421392}\r\nStarting training from step 0...\r\n",,terminal_output +9462,9458904,"TERMINAL",0,0,"8333",,terminal_output +9463,9459991,"TERMINAL",0,0,"9444",,terminal_output +9464,9460984,"TERMINAL",0,0,"50555",,terminal_output +9465,9462154,"TERMINAL",0,0,"1666",,terminal_output +9466,9463136,"TERMINAL",0,0,"2777",,terminal_output +9467,9464178,"TERMINAL",0,0,"3888",,terminal_output +9468,9465204,"TERMINAL",0,0,"4999",,terminal_output +9469,9466226,"TERMINAL",0,0,"58:008:0040",,terminal_output +9470,9467241,"TERMINAL",0,0,"6111",,terminal_output +9471,9468383,"TERMINAL",0,0,"7333",,terminal_output +9472,9469355,"TERMINAL",0,0,"9444",,terminal_output +9473,9470385,"TERMINAL",0,0,"4:00555",,terminal_output +9474,9470810,"TERMINAL",0,0,"2025-09-05 16:04:00.542945: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 16:04:00.543352: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 16:04:00.543908: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 16:04:00.544401: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 16:04:00.545627: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 16:04:00.546937: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +9475,9471478,"TERMINAL",0,0,"1666",,terminal_output +9476,9472689,"TERMINAL",0,0,"2777",,terminal_output +9477,9473609,"TERMINAL",0,0,"3888",,terminal_output +9478,9474633,"TERMINAL",0,0,"4999",,terminal_output +9479,9475590,"TERMINAL",0,0,"5101050",,terminal_output +9480,9476610,"TERMINAL",0,0,"6111",,terminal_output +9481,9478048,"TERMINAL",0,0,"7222",,terminal_output +9482,9479136,"TERMINAL",0,0,"8333",,terminal_output +9483,9480130,"TERMINAL",0,0,"9444",,terminal_output +9484,9481286,"TERMINAL",0,0,"10555",,terminal_output +9485,9482302,"TERMINAL",0,0,"1666",,terminal_output +9486,9483276,"TERMINAL",0,0,"2777",,terminal_output +9487,9484350,"TERMINAL",0,0,"3999",,terminal_output +9488,9485371,"TERMINAL",0,0,"520202:00",,terminal_output +9489,9486394,"TERMINAL",0,0,"6111",,terminal_output +9490,9487532,"TERMINAL",0,0,"7222",,terminal_output +9491,9488487,"TERMINAL",0,0,"8333",,terminal_output +9492,9489510,"TERMINAL",0,0,"9444",,terminal_output +9493,9490555,"TERMINAL",0,0,"20555",,terminal_output +9494,9491599,"TERMINAL",0,0,"1666",,terminal_output +9495,9492723,"TERMINAL",0,0,"2777",,terminal_output +9496,9493779,"TERMINAL",0,0,"3888",,terminal_output +9497,9494802,"TERMINAL",0,0,"4999",,terminal_output +9498,9495826,"TERMINAL",0,0,"5303010",,terminal_output +9499,9496812,"TERMINAL",0,0,"6111",,terminal_output +9500,9497861,"TERMINAL",0,0,"7222",,terminal_output +9501,9498910,"TERMINAL",0,0,"8333",,terminal_output +9502,9500027,"TERMINAL",0,0,"9444",,terminal_output +9503,9501055,"TERMINAL",0,0,"30555",,terminal_output +9504,9502053,"TERMINAL",0,0,"1666",,terminal_output +9505,9503145,"TERMINAL",0,0,"2777",,terminal_output +9506,9504223,"TERMINAL",0,0,"3888",,terminal_output +9507,9505184,"TERMINAL",0,0,"4999",,terminal_output +9508,9506271,"TERMINAL",0,0,"5404020",,terminal_output +9509,9507277,"TERMINAL",0,0,"6111",,terminal_output +9510,9508428,"TERMINAL",0,0,"8333",,terminal_output +9511,9509359,"TERMINAL",0,0,"9444",,terminal_output +9512,9510462,"TERMINAL",0,0,"40555",,terminal_output +9513,9511487,"TERMINAL",0,0,"1666",,terminal_output +9514,9512619,"TERMINAL",0,0,"2777",,terminal_output +9515,9513648,"TERMINAL",0,0,"3888",,terminal_output +9516,9514661,"TERMINAL",0,0,"4999",,terminal_output +9517,9515643,"TERMINAL",0,0,"5505030",,terminal_output +9518,9516677,"TERMINAL",0,0,"6111",,terminal_output +9519,9517728,"TERMINAL",0,0,"7222",,terminal_output +9520,9518770,"TERMINAL",0,0,"8333",,terminal_output +9521,9519885,"TERMINAL",0,0,"9444",,terminal_output +9522,9520857,"TERMINAL",0,0,"50555",,terminal_output +9523,9521939,"TERMINAL",0,0,"1666",,terminal_output +9524,9522962,"TERMINAL",0,0,"2777",,terminal_output +9525,9523988,"TERMINAL",0,0,"3888",,terminal_output +9526,9525094,"TERMINAL",0,0,"4999",,terminal_output +9527,9526076,"TERMINAL",0,0,"59:009:0040",,terminal_output +9528,9528794,"TERMINAL",0,0,"6333",,terminal_output +9529,9529800,"TERMINAL",0,0,"9444",,terminal_output +9530,9530860,"TERMINAL",0,0,"5:00555",,terminal_output +9531,9531881,"TERMINAL",0,0,"1666",,terminal_output +9532,9532928,"TERMINAL",0,0,"2777",,terminal_output +9533,9533962,"TERMINAL",0,0,"3888",,terminal_output +9534,9535107,"TERMINAL",0,0,"4999",,terminal_output +9535,9536043,"TERMINAL",0,0,"5101050",,terminal_output +9536,9536069,"TERMINAL",0,0,"Step 0, loss: 13.707112312316895\r\nStep 1, loss: 12.787692070007324\r\nStep 2, loss: 
14.445314407348633\r\nStep 3, loss: 6.839857578277588\r\nStep 4, loss: 5.966392993927002\r\nStep 5, loss: 9.156126022338867\r\nStep 6, loss: 8.733874320983887\r\nStep 7, loss: 7.992668151855469\r\nStep 8, loss: 5.01971960067749\r\nStep 9, loss: 4.375117778778076\r\nCalculating validation metrics...\r\n",,terminal_output +9537,9537115,"TERMINAL",0,0,"6111",,terminal_output +9538,9538126,"TERMINAL",0,0,"7222",,terminal_output +9539,9539165,"TERMINAL",0,0,"8333",,terminal_output +9540,9540208,"TERMINAL",0,0,"9444",,terminal_output +9541,9541231,"TERMINAL",0,0,"10555",,terminal_output +9542,9542274,"TERMINAL",0,0,"1777",,terminal_output +9543,9543311,"TERMINAL",0,0,"3888",,terminal_output +9544,9544351,"TERMINAL",0,0,"4999",,terminal_output +9545,9545388,"TERMINAL",0,0,"520203:00",,terminal_output +9546,9546444,"TERMINAL",0,0,"6111",,terminal_output +9547,9547539,"TERMINAL",0,0,"7222",,terminal_output +9548,9548563,"TERMINAL",0,0,"8333",,terminal_output +9549,9549605,"TERMINAL",0,0,"9444",,terminal_output +9550,9550579,"TERMINAL",0,0,"20555",,terminal_output +9551,9551614,"TERMINAL",0,0,"1666",,terminal_output +9552,9552674,"TERMINAL",0,0,"2777",,terminal_output +9553,9553690,"TERMINAL",0,0,"3888",,terminal_output +9554,9554821,"TERMINAL",0,0,"4999",,terminal_output +9555,9555763,"TERMINAL",0,0,"5303010",,terminal_output +9556,9556801,"TERMINAL",0,0,"6111",,terminal_output +9557,9557843,"TERMINAL",0,0,"7222",,terminal_output +9558,9558884,"TERMINAL",0,0,"8333",,terminal_output +9559,9559933,"TERMINAL",0,0,"9444",,terminal_output +9560,9560960,"TERMINAL",0,0,"30555",,terminal_output +9561,9562010,"TERMINAL",0,0,"1666",,terminal_output +9562,9563040,"TERMINAL",0,0,"2777",,terminal_output +9563,9564218,"TERMINAL",0,0,"3888",,terminal_output +9564,9564694,"models/lam.py",0,0,"",python,tab +9565,9565205,"TERMINAL",0,0,"4999",,terminal_output +9566,9566160,"TERMINAL",0,0,"5404020",,terminal_output +9567,9567187,"TERMINAL",0,0,"6111",,terminal_output +9568,9568326,"TERMINAL",0,0,"7222",,terminal_output +9569,9569350,"TERMINAL",0,0,"8333",,terminal_output +9570,9570374,"TERMINAL",0,0,"9555",,terminal_output +9571,9571343,"TERMINAL",0,0,"41666",,terminal_output +9572,9572427,"TERMINAL",0,0,"2777",,terminal_output +9573,9573542,"TERMINAL",0,0,"3888",,terminal_output +9574,9574511,"TERMINAL",0,0,"4999",,terminal_output +9575,9575591,"TERMINAL",0,0,"5505030",,terminal_output +9576,9576589,"TERMINAL",0,0,"6111",,terminal_output +9577,9577612,"TERMINAL",0,0,"7222",,terminal_output +9578,9578668,"TERMINAL",0,0,"8333",,terminal_output +9579,9579801,"TERMINAL",0,0,"9444",,terminal_output +9580,9580793,"TERMINAL",0,0,"50555",,terminal_output +9581,9581843,"TERMINAL",0,0,"1666",,terminal_output +9582,9582798,"TERMINAL",0,0,"2777",,terminal_output +9583,9583846,"TERMINAL",0,0,"3888",,terminal_output +9584,9584885,"TERMINAL",0,0,"4999",,terminal_output +9585,9585918,"TERMINAL",0,0,"550:0050:0040",,terminal_output +9586,9586958,"TERMINAL",0,0,"6111",,terminal_output +9587,9587996,"TERMINAL",0,0,"7222",,terminal_output +9588,9589029,"TERMINAL",0,0,"8333",,terminal_output +9589,9590075,"TERMINAL",0,0,"9444",,terminal_output +9590,9591161,"TERMINAL",0,0,"6:00555",,terminal_output +9591,9592193,"TERMINAL",0,0,"1666",,terminal_output +9592,9593282,"TERMINAL",0,0,"2777",,terminal_output +9593,9594230,"TERMINAL",0,0,"3888",,terminal_output +9594,9595260,"TERMINAL",0,0,"4999",,terminal_output +9595,9596301,"TERMINAL",0,0,"5111151",,terminal_output +9596,9597391,"TERMINAL",0,0,"7222",,terminal_output 
+9597,9598426,"TERMINAL",0,0,"8333",,terminal_output +9598,9599409,"TERMINAL",0,0,"9444",,terminal_output +9599,9600445,"TERMINAL",0,0,"10555",,terminal_output +9600,9601518,"TERMINAL",0,0,"1666",,terminal_output +9601,9602638,"TERMINAL",0,0,"2777",,terminal_output +9602,9603654,"TERMINAL",0,0,"3888",,terminal_output +9603,9604586,"TERMINAL",0,0,"4999",,terminal_output +9604,9605793,"TERMINAL",0,0,"520204:00",,terminal_output +9605,9606799,"TERMINAL",0,0,"6111",,terminal_output +9606,9607799,"TERMINAL",0,0,"7222",,terminal_output +9607,9608835,"TERMINAL",0,0,"8333",,terminal_output +9608,9609874,"TERMINAL",0,0,"9444",,terminal_output +9609,9610911,"TERMINAL",0,0,"20555",,terminal_output +9610,9611944,"TERMINAL",0,0,"1666",,terminal_output +9611,9612980,"TERMINAL",0,0,"2777",,terminal_output +9612,9614021,"TERMINAL",0,0,"3888",,terminal_output +9613,9615124,"TERMINAL",0,0,"4999",,terminal_output +9614,9616086,"TERMINAL",0,0,"5303010",,terminal_output +9615,9617130,"TERMINAL",0,0,"6111",,terminal_output +9616,9618202,"TERMINAL",0,0,"7222",,terminal_output +9617,9619206,"TERMINAL",0,0,"8333",,terminal_output +9618,9620242,"TERMINAL",0,0,"9444",,terminal_output +9619,9621303,"TERMINAL",0,0,"30666",,terminal_output +9620,9622310,"TERMINAL",0,0,"2777",,terminal_output +9621,9623419,"TERMINAL",0,0,"3888",,terminal_output +9622,9624387,"TERMINAL",0,0,"4999",,terminal_output +9623,9625427,"TERMINAL",0,0,"5404020",,terminal_output +9624,9626495,"TERMINAL",0,0,"6111",,terminal_output +9625,9627502,"TERMINAL",0,0,"7222",,terminal_output +9626,9628659,"TERMINAL",0,0,"8333",,terminal_output +9627,9629659,"TERMINAL",0,0,"9444",,terminal_output +9628,9630689,"TERMINAL",0,0,"40555",,terminal_output +9629,9631655,"TERMINAL",0,0,"1666",,terminal_output +9630,9632705,"TERMINAL",0,0,"2777",,terminal_output +9631,9633760,"TERMINAL",0,0,"3888",,terminal_output +9632,9634761,"TERMINAL",0,0,"4999",,terminal_output +9633,9635878,"TERMINAL",0,0,"5505030",,terminal_output +9634,9636842,"TERMINAL",0,0,"6111",,terminal_output +9635,9637877,"TERMINAL",0,0,"7222",,terminal_output +9636,9638926,"TERMINAL",0,0,"8333",,terminal_output +9637,9639960,"TERMINAL",0,0,"9444",,terminal_output +9638,9640999,"TERMINAL",0,0,"50555",,terminal_output +9639,9642050,"TERMINAL",0,0,"1666",,terminal_output +9640,9643088,"TERMINAL",0,0,"2777",,terminal_output +9641,9644131,"TERMINAL",0,0,"3888",,terminal_output +9642,9645167,"TERMINAL",0,0,"4999",,terminal_output +9643,9646214,"TERMINAL",0,0,"51:001:0040",,terminal_output +9644,9647379,"TERMINAL",0,0,"6111",,terminal_output +9645,9648412,"TERMINAL",0,0,"7333",,terminal_output +9646,9650861,"TERMINAL",0,0,"9555",,terminal_output +9647,9651897,"TERMINAL",0,0,"7:01666",,terminal_output +9648,9652935,"TERMINAL",0,0,"2777",,terminal_output +9649,9653973,"TERMINAL",0,0,"3888",,terminal_output +9650,9655017,"TERMINAL",0,0,"4999",,terminal_output +9651,9656066,"TERMINAL",0,0,"5101050",,terminal_output +9652,9657160,"TERMINAL",0,0,"6111",,terminal_output +9653,9658148,"TERMINAL",0,0,"7222",,terminal_output +9654,9659185,"TERMINAL",0,0,"8333",,terminal_output +9655,9660277,"TERMINAL",0,0,"9444",,terminal_output +9656,9661264,"TERMINAL",0,0,"10555",,terminal_output +9657,9662302,"TERMINAL",0,0,"1777",,terminal_output +9658,9663462,"TERMINAL",0,0,"3888",,terminal_output +9659,9664382,"TERMINAL",0,0,"4999",,terminal_output +9660,9665504,"TERMINAL",0,0,"520205:00",,terminal_output +9661,9666468,"TERMINAL",0,0,"6111",,terminal_output +9662,9667592,"TERMINAL",0,0,"7222",,terminal_output 
+9663,9668552,"TERMINAL",0,0,"8333",,terminal_output +9664,9669697,"TERMINAL",0,0,"9444",,terminal_output +9665,9670727,"TERMINAL",0,0,"20555",,terminal_output +9666,9671710,"TERMINAL",0,0,"1666",,terminal_output +9667,9672711,"TERMINAL",0,0,"2777",,terminal_output +9668,9673846,"TERMINAL",0,0,"3888",,terminal_output +9669,9674832,"TERMINAL",0,0,"4999",,terminal_output +9670,9675848,"TERMINAL",0,0,"5303010",,terminal_output +9671,9676871,"TERMINAL",0,0,"6111",,terminal_output +9672,9677894,"TERMINAL",0,0,"7222",,terminal_output +9673,9678934,"TERMINAL",0,0,"8333",,terminal_output +9674,9679971,"TERMINAL",0,0,"9444",,terminal_output +9675,9681030,"TERMINAL",0,0,"30555",,terminal_output +9676,9682140,"TERMINAL",0,0,"1666",,terminal_output +9677,9683117,"TERMINAL",0,0,"2777",,terminal_output +9678,9684142,"TERMINAL",0,0,"3888",,terminal_output +9679,9685192,"TERMINAL",0,0,"4999",,terminal_output +9680,9686301,"TERMINAL",0,0,"5404020",,terminal_output +9681,9687322,"TERMINAL",0,0,"6111",,terminal_output +9682,9688340,"TERMINAL",0,0,"7333",,terminal_output +9683,9689352,"TERMINAL",0,0,"9444",,terminal_output +9684,9690601,"TERMINAL",0,0,"40555",,terminal_output +9685,9691528,"TERMINAL",0,0,"1666",,terminal_output +9686,9692450,"TERMINAL",0,0,"2777",,terminal_output +9687,9693499,"TERMINAL",0,0,"3888",,terminal_output +9688,9694617,"TERMINAL",0,0,"4999",,terminal_output +9689,9695605,"TERMINAL",0,0,"5505030",,terminal_output +9690,9696621,"TERMINAL",0,0,"6111",,terminal_output +9691,9697665,"TERMINAL",0,0,"7222",,terminal_output +9692,9698710,"TERMINAL",0,0,"8333",,terminal_output +9693,9699857,"TERMINAL",0,0,"9444",,terminal_output +9694,9700848,"TERMINAL",0,0,"50555",,terminal_output +9695,9701848,"TERMINAL",0,0,"1666",,terminal_output +9696,9702913,"TERMINAL",0,0,"2777",,terminal_output +9697,9703937,"TERMINAL",0,0,"3888",,terminal_output +9698,9704978,"TERMINAL",0,0,"4999",,terminal_output +9699,9705663,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +9700,9706036,"TERMINAL",0,0,"52:002:0040",,terminal_output +9701,9707196,"TERMINAL",0,0,"6111",,terminal_output +9702,9708139,"TERMINAL",0,0,"7222",,terminal_output +9703,9709225,"TERMINAL",0,0,"8333",,terminal_output +9704,9710190,"TERMINAL",0,0,"9444",,terminal_output +9705,9711283,"TERMINAL",0,0,"8:00555",,terminal_output +9706,9712295,"TERMINAL",0,0,"1777",,terminal_output +9707,9713333,"TERMINAL",0,0,"3888",,terminal_output +9708,9714363,"TERMINAL",0,0,"4999",,terminal_output +9709,9715630,"TERMINAL",0,0,"5101050",,terminal_output +9710,9716510,"TERMINAL",0,0,"6111",,terminal_output +9711,9717487,"TERMINAL",0,0,"7222",,terminal_output +9712,9718528,"TERMINAL",0,0,"8333",,terminal_output +9713,9719563,"TERMINAL",0,0,"9444",,terminal_output +9714,9720606,"TERMINAL",0,0,"10555",,terminal_output +9715,9721649,"TERMINAL",0,0,"1666",,terminal_output +9716,9722707,"TERMINAL",0,0,"2777",,terminal_output +9717,9723872,"TERMINAL",0,0,"3888",,terminal_output +9718,9724764,"TERMINAL",0,0,"4999",,terminal_output +9719,9725844,"TERMINAL",0,0,"520206:00",,terminal_output +9720,9726945,"TERMINAL",0,0,"6111",,terminal_output +9721,9727883,"TERMINAL",0,0,"7222",,terminal_output +9722,9728927,"TERMINAL",0,0,"8333",,terminal_output +9723,9729961,"TERMINAL",0,0,"9444",,terminal_output +9724,9731008,"TERMINAL",0,0,"20555",,terminal_output +9725,9732060,"TERMINAL",0,0,"1666",,terminal_output +9726,9733073,"TERMINAL",0,0,"2777",,terminal_output +9727,9734217,"TERMINAL",0,0,"3888",,terminal_output 
+9728,9735151,"TERMINAL",0,0,"4999",,terminal_output +9729,9736263,"TERMINAL",0,0,"5303010",,terminal_output +9730,9737261,"TERMINAL",0,0,"6111",,terminal_output +9731,9738217,"TERMINAL",0,0,"Step 10, validation loss: 3.863799810409546\r\nStep 10, loss: 5.617998123168945\r\nStep 11, loss: 3.658097267150879\r\nStep 12, loss: 3.9051384925842285\r\nStep 13, loss: 3.421496629714966\r\nStep 14, loss: 3.9468939304351807\r\nStep 15, loss: 5.51287317276001\r\nStep 16, loss: 3.4131057262420654\r\nStep 17, loss: 3.312298536300659\r\nStep 18, loss: 4.716933727264404\r\nStep 19, loss: 4.017162322998047\r\nCalculating validation metrics...\r\nStep 20, validation loss: 2.7066314220428467\r\nStep 20, loss: 3.035166025161743\r\nStep 21, loss: 2.194017171859741\r\nStep 22, loss: 2.0624258518218994\r\nStep 23, loss: 2.398887872695923\r\nStep 24, loss: 2.3067803382873535\r\nStep 25, loss: 1.6381253004074097\r\nStep 26, loss: 1.5757440328598022\r\nStep 27, loss: 2.545316219329834\r\nStep 28, loss: 1.8925243616104126\r\nStep 29, loss: 1.8141918182373047\r\nCalculating validation metrics...\r\nStep 30, validation loss: 1.902382254600525\r\nStep 30, loss: 1.6641029119491577\r\nStep 31, loss: 1.8452732563018799\r\nStep 32, loss: 1.7666065692901611\r\nStep 33, loss: 1.4141861200332642\r\nStep 34, loss: 1.6031861305236816\r\nStep 35, loss: 1.549225926399231\r\nStep 36, loss: 1.6716769933700562\r\nStep 37, loss: 1.598390817642212\r\nStep 38, loss: 1.8403428792953491\r\nStep 39, loss: 1.9032573699951172\r\nCalculating validation metrics...\r\nStep 40, validation loss: 1.690502405166626\r\nStep 40, loss: 1.5007492303848267\r\nStep 41, loss: 1.2747361660003662\r\nStep 42, loss: 1.7683488130569458\r\nStep 43, loss: 1.2164154052734375\r\nStep 44, loss: 1.5611039400100708\r\nStep 45, loss: 1.784223198890686\r\nStep 46, loss: 1.5254615545272827\r\nStep 47, loss: 1.7472668886184692\r\nStep 48, loss: 1.2970725297927856\r\nStep 49, loss: 1.6757519245147705\r\nCalculating validation metrics...\r\nStep 50, validation loss: 1.4558123350143433\r\nStep 50, loss: 1.4130017757415771\r\nStep 51, loss: 1.2574968338012695\r\nStep 52, loss: 1.6169488430023193\r\nStep 53, loss: 1.2867003679275513\r\nStep 54, loss: 1.4802948236465454\r\nStep 55, loss: 1.635909914970398\r\nStep 56, loss: 2.1223909854888916\r\nStep 57, loss: 1.710260033607483\r\nStep 58, loss: 1.5560634136199951\r\nStep 59, loss: 1.840280294418335\r\nCalculating validation metrics...\r\nStep 60, validation loss: 1.9126529693603516\r\nStep 60, loss: 2.2006616592407227\r\nStep 61, loss: 1.9962691068649292\r\nStep 62, loss: 1.6735620498657227\r\nStep 63, loss: 1.6704058647155762\r\nStep 64, loss: 1.4653749465942383\r\nStep 65, loss: 1.8832337856292725\r\nStep 66, loss: 1.5651105642318726\r\nStep 67, loss: 1.8137803077697754\r\nStep 68, loss: 1.5393142700195312\r\nStep 69, loss: 1.9694292545318604\r\nCalculating validation metrics...\r\nStep 70, validation loss: 1.6601284742355347\r\nStep 70, loss: 1.9931340217590332\r\nStep 71, loss: 1.4294160604476929\r\nStep 72, loss: 1.7146375179290771\r\nStep 73, loss: 1.38381028175354\r\nStep 74, loss: 1.4439661502838135\r\nStep 75, loss: 1.4158592224121094\r\nStep 76, loss: 1.9410786628723145\r\nStep 77, loss: 1.2250840663909912\r\nStep 78, loss: 2.383310317993164\r\nStep 79, loss: 2.7957451343536377\r\nCalculating validation metrics...\r\nStep 80, validation loss: 1.6752173900604248\r\nStep 80, loss: 2.2438559532165527\r\nStep 81, loss: 2.408881902694702\r\nStep 82, loss: 1.50490403175354\r\nStep 83, loss: 1.547531008720398\r\nStep 84, 
loss: 2.4689486026763916\r\nStep 85, loss: 1.402790904045105\r\nStep 86, loss: 1.3607187271118164\r\nStep 87, loss: 1.6103236675262451\r\nStep 88, loss: 1.4436708688735962\r\nStep 89, loss: 1.0464810132980347\r\nCalculating validation metrics...\r\nStep 90, validation loss: 1.5799601078033447\r\nStep 90, loss: 1.3662998676300049\r\nStep 91, loss: 1.4120216369628906\r\nStep 92, loss: 1.2001478672027588\r\nStep 93, loss: 1.2785634994506836\r\nStep 94, loss: 1.2166292667388916\r\nStep 95, loss: 1.3877538442611694\r\nStep 96, loss: 1.2869967222213745\r\nStep 97, loss: 1.301251769065857\r\nStep 98, loss: 1.4590044021606445\r\nStep 99, loss: 1.2045484781265259\r\nCalculating validation metrics...\r\nStep 100, validation loss: 1.4659712314605713\r\nStep 100, loss: 2.8209402561187744\r\nStep 101, loss: 1.510357141494751\r\nStep 102, loss: 1.2913997173309326\r\nStep 103, loss: 1.5385509729385376\r\nStep 104, loss: 1.885851502418518\r\nStep 105, loss: 1.3162516355514526\r\nStep 106, loss: 1.6132906675338745\r\nStep 107, loss: 1.4784572124481201\r\nStep 108, loss: 1.423385500907898\r\nStep 109, loss: 1.181691288948059\r\nCalculating validation metrics...\r\nStep 110, validation loss: 1.5012677907943726\r\nStep 110, loss: 1.6853578090667725\r\nStep 111, loss: 1.2308719158172607\r\nStep 112, loss: 0.8081416487693787\r\nStep 113, loss: 1.659087061882019\r\nStep 114, loss: 2.0554471015930176\r\nStep 115, loss: 1.2311298847198486\r\nStep 116, loss: 1.6897703409194946\r\nStep 117, loss: 1.665964961051941\r\nStep 118, loss: 1.0422885417938232\r\nStep 119, loss: 1.4493454694747925\r\nCalculating validation metrics...\r\nStep 120, validation loss: 1.2377139329910278\r\nStep 120, loss: 1.096556544303894\r\nStep 121, loss: 1.428924322128296\r\nStep 122, loss: 1.2504318952560425\r\nStep 123, loss: 1.1306169033050537\r\nStep 124, loss: 1.3851771354675293\r\nStep 125, loss: 0.9719951152801514\r\nStep 126, loss: 1.0026214122772217\r\nStep 127, loss: 1.2429043054580688\r\nStep 128, loss: 1.189037799835205\r\nStep 129, loss: 1.7428635358810425\r\nCalculating validation metrics...\r\nStep 130, validation loss: 1.250633716583252\r\nStep 130, loss: 0.9150975346565247\r\nStep 131, loss: 1.6795315742492676\r\nStep 132, loss: 1.3100148439407349\r\nStep 133, loss: 1.1524235010147095\r\nStep 134, loss: 1.1549766063690186\r\nStep 135, loss: 1.2108114957809448\r\nStep 136, loss: 1.0983374118804932\r\nStep 137, loss: 1.221045970916748\r\nStep 138, loss: 0.9965833425521851\r\nStep 139, loss: 1.1118370294570923\r\nCalculating validation metrics...\r\nStep 140, validation loss: 1.1454304456710815\r\nStep 140, loss: 0.8275948166847229\r\nStep 141, loss: 0.9517509341239929\r\nStep 142, loss: 1.1883511543273926\r\nStep 143, loss: 1.0015233755111694\r\nStep 144, loss: 0.9272937774658203\r\nStep 145, loss: 0.8591479659080505\r\nStep 146, loss: 1.0562242269515991\r\nStep 147, loss: 1.2644447088241577\r\nStep 148, loss: 1.0331653356552124\r\nStep 149, loss: 1.1727200746536255\r\nCalculating validation metrics...\r\nStep 150, validation loss: 1.0384365320205688\r\nStep 150, loss: 0.8564679026603699\r\nStep 151, loss: 1.1845380067825317\r\nStep 152, loss: 0.9024031758308411\r\nStep 153, loss: 1.1990989446640015\r\nStep 154, loss: 1.3838838338851929\r\nStep 155, loss: 0.9469202160835266\r\nStep 156, loss: 1.4537367820739746\r\nStep 157, loss: 0.941718339920044\r\nStep 158, loss: 0.9372869729995728\r\nStep 159, loss: 0.7135844230651855\r\nCalculating validation metrics...\r\nStep 160, validation loss: 1.07765793800354\r\nStep 160, loss: 
0.9519727230072021\r\nStep 161, loss: 0.9659063220024109\r\nStep 162, loss: 0.8567101359367371\r\nStep 163, loss: 1.4933159351348877\r\nStep 164, loss: 1.0984652042388916\r\nStep 165, loss: 1.805280089378357\r\nStep 166, loss: 1.161780595779419\r\nStep 167, loss: 1.1788370609283447\r\nStep 168, loss: 1.075250267982483\r\nStep 169, loss: 0.7789218425750732\r\nCalculating validation metrics...\r\nStep 170, validation loss: 0.9729978442192078\r\nStep 170, loss: 1.005738377571106\r\nStep 171, loss: 0.8488935828208923\r\nStep 172, loss: 0.9200100302696228\r\nStep 173, loss: 0.8432302474975586\r\nStep 174, loss: 0.8818174004554749\r\nStep 175, loss: 0.8978248238563538\r\nStep 176, loss: 0.8968703150749207\r\nStep 177, loss: 0.9918755292892456\r\nStep 178, loss: 1.067613959312439\r\nStep 179, loss: 0.965050995349884\r\nCalculating validation metrics...\r\nStep 180, validation loss: 0.8967819809913635\r\nStep 180, loss: 1.0185922384262085\r\nStep 181, loss: 0.9348647594451904\r\nStep 182, loss: 0.8532124757766724\r\nStep 183, loss: 0.8919010162353516\r\nStep 184, loss: 0.8528591394424438\r\nStep 185, loss: 0.8829078674316406\r\nStep 186, loss: 0.8773826956748962\r\nStep 187, loss: 0.824509859085083\r\nStep 188, loss: 0.7707624435424805\r\nStep 189, loss: 1.063322901725769\r\nCalculating validation metrics...\r\nStep 190, validation loss: 0.97890305519104\r\nStep 190, loss: 1.1584973335266113\r\nStep 191, loss: 0.8137894868850708\r\nStep 192, loss: 0.9858308434486389\r\nStep 193, loss: 1.050197958946228\r\nStep 194, loss: 0.9639319181442261\r\nStep 195, loss: 1.0246282815933228\r\nStep 196, loss: 0.8736329078674316\r\nStep 197, loss: 0.896998405456543\r\nStep 198, loss: 1.0483462810516357\r\nStep 199, loss: 1.0809338092803955\r\nCalculating validation metrics...\r\nStep 200, validation loss: 1.0224435329437256\r\n",,terminal_output +9732,9738269,"TERMINAL",0,0,"7222",,terminal_output +9733,9739307,"TERMINAL",0,0,"9444",,terminal_output +9734,9740260,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-dyn-dev-3468835 at: https://wandb.ai/instant-uv/jafar/runs/bb7pfnj2\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_160335-bb7pfnj2/logs\r\n",,terminal_output +9735,9740359,"TERMINAL",0,0,"30555",,terminal_output +9736,9741386,"TERMINAL",0,0,"1666",,terminal_output +9737,9741896,"TERMINAL",0,0,"/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 20 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +9738,9742211,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +9739,9742499,"TERMINAL",0,0,"2777",,terminal_output +9740,9743534,"TERMINAL",0,0,"3888",,terminal_output +9741,9744559,"TERMINAL",0,0,"4999",,terminal_output +9742,9745585,"TERMINAL",0,0,"5404020",,terminal_output +9743,9746583,"TERMINAL",0,0,"6111",,terminal_output +9744,9747702,"TERMINAL",0,0,"7222",,terminal_output +9745,9748756,"TERMINAL",0,0,"8333",,terminal_output +9746,9749698,"TERMINAL",0,0,"9444",,terminal_output +9747,9750805,"TERMINAL",0,0,"40555",,terminal_output +9748,9751778,"TERMINAL",0,0,"1666",,terminal_output +9749,9752893,"TERMINAL",0,0,"2777",,terminal_output +9750,9754270,"TERMINAL",0,0,"3888",,terminal_output 
+9751,9755271,"TERMINAL",0,0,"4999",,terminal_output +9752,9756324,"TERMINAL",0,0,"5515131",,terminal_output +9753,9757460,"TERMINAL",0,0,"7222",,terminal_output +9754,9758598,"TERMINAL",0,0,"8333",,terminal_output +9755,9759590,"TERMINAL",0,0,"9444",,terminal_output +9756,9760634,"TERMINAL",0,0,"50555",,terminal_output +9757,9761619,"TERMINAL",0,0,"1666",,terminal_output +9758,9762541,"TERMINAL",0,0,"2777",,terminal_output +9759,9763708,"TERMINAL",0,0,"3888",,terminal_output +9760,9764734,"TERMINAL",0,0,"4999",,terminal_output +9761,9765755,"TERMINAL",0,0,"53:003:0040",,terminal_output +9762,9766789,"TERMINAL",0,0,"6111",,terminal_output +9763,9767765,"TERMINAL",0,0,"7222",,terminal_output +9764,9768929,"TERMINAL",0,0,"8333",,terminal_output +9765,9769858,"TERMINAL",0,0,"9444",,terminal_output +9766,9772870,"TERMINAL",0,0,"9:00777",,terminal_output +9767,9773923,"TERMINAL",0,0,"3888",,terminal_output +9768,9774941,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1919,0,"",shellscript,selection_mouse +9769,9775017,"TERMINAL",0,0,"4999",,terminal_output +9770,9775387,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1895,0,"",shellscript,selection_mouse +9771,9776027,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1895,0,"\n ",shellscript,content +9772,9776132,"TERMINAL",0,0,"5101050",,terminal_output +9773,9776779,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1900,0,"-",shellscript,content +9774,9776780,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1901,0,"",shellscript,selection_keyboard +9775,9776901,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1901,0,"-",shellscript,content +9776,9776902,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1902,0,"",shellscript,selection_keyboard +9777,9777021,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1902,0,"w",shellscript,content +9778,9777022,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1903,0,"",shellscript,selection_keyboard +9779,9777043,"TERMINAL",0,0,"6111",,terminal_output +9780,9777202,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1903,0,"a",shellscript,content +9781,9777203,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1904,0,"",shellscript,selection_keyboard +9782,9777255,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1904,0,"n",shellscript,content +9783,9777258,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1905,0,"",shellscript,selection_keyboard +9784,9777381,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1905,0,"d",shellscript,content +9785,9777382,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1906,0,"",shellscript,selection_keyboard +9786,9777461,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1906,0,"b",shellscript,content +9787,9777461,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1907,0,"",shellscript,selection_keyboard +9788,9777804,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1907,0,"_",shellscript,content +9789,9777805,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1908,0,"",shellscript,selection_keyboard +9790,9778076,"TERMINAL",0,0,"7222",,terminal_output +9791,9778121,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1908,0,"i",shellscript,content +9792,9778122,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1909,0,"",shellscript,selection_keyboard +9793,9778209,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1909,0,"d",shellscript,content 
+9794,9778210,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1910,0,"",shellscript,selection_keyboard +9795,9778522,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1910,0,"=",shellscript,content +9796,9778523,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1911,0,"",shellscript,selection_keyboard +9797,9778903,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1911,0,"bb7pfnj2",shellscript,content +9798,9779108,"TERMINAL",0,0,"8333",,terminal_output +9799,9779729,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1919,0," ",shellscript,content +9800,9779730,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1920,0,"",shellscript,selection_keyboard +9801,9779927,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1920,0,"\",shellscript,content +9802,9779928,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1921,0,"",shellscript,selection_keyboard +9803,9780264,"TERMINAL",0,0,"9444",,terminal_output +9804,9781119,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1911,0,"",shellscript,selection_mouse +9805,9781345,"TERMINAL",0,0,"10666",,terminal_output +9806,9781429,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1910,1,"",shellscript,content +9807,9781522,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1910,0," ",shellscript,content +9808,9781523,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1911,0,"",shellscript,selection_keyboard +9809,9782384,"TERMINAL",0,0,"2777",,terminal_output +9810,9783391,"TERMINAL",0,0,"3888",,terminal_output +9811,9784424,"TERMINAL",0,0,"4999",,terminal_output +9812,9784464,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1802,0,"",shellscript,selection_mouse +9813,9785294,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1801,1,"",shellscript,content +9814,9785338,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1801,0,"3",shellscript,content +9815,9785339,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1802,0,"",shellscript,selection_keyboard +9816,9785495,"TERMINAL",0,0,"520207:00",,terminal_output +9817,9786593,"TERMINAL",0,0,"6111",,terminal_output +9818,9787562,"TERMINAL",0,0,"7222",,terminal_output +9819,9788686,"TERMINAL",0,0,"8333",,terminal_output +9820,9789727,"TERMINAL",0,0,"9444",,terminal_output +9821,9790742,"TERMINAL",0,0,"20555",,terminal_output +9822,9791764,"TERMINAL",0,0,"1666",,terminal_output +9823,9792777,"TERMINAL",0,0,"2777",,terminal_output +9824,9793408,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1921,0,"",shellscript,selection_mouse +9825,9793813,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1921,0,"\n ",shellscript,content +9826,9793904,"TERMINAL",0,0,"3888",,terminal_output +9827,9794639,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1926,0,"-",shellscript,content +9828,9794640,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1927,0,"",shellscript,selection_keyboard +9829,9794760,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1927,0,"-",shellscript,content +9830,9794761,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1928,0,"",shellscript,selection_keyboard +9831,9794912,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1928,0,"r",shellscript,content +9832,9794913,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1929,0,"",shellscript,selection_keyboard +9833,9794913,"TERMINAL",0,0,"4999",,terminal_output +9834,9795082,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1929,0,"e",shellscript,content 
+9835,9795082,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1930,0,"",shellscript,selection_keyboard +9836,9795401,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1930,0,"s",shellscript,content +9837,9795401,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1931,0,"",shellscript,selection_keyboard +9838,9795510,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1931,0,"t",shellscript,content +9839,9795511,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1932,0,"",shellscript,selection_keyboard +9840,9795718,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1932,0,"o",shellscript,content +9841,9795718,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1933,0,"",shellscript,selection_keyboard +9842,9795836,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1933,0,"r",shellscript,content +9843,9795837,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1934,0,"",shellscript,selection_keyboard +9844,9795995,"TERMINAL",0,0,"5303010",,terminal_output +9845,9796003,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1934,0,"e",shellscript,content +9846,9796003,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1935,0,"",shellscript,selection_keyboard +9847,9796704,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1935,0,"-",shellscript,content +9848,9796705,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1936,0,"",shellscript,selection_keyboard +9849,9796970,"TERMINAL",0,0,"6111",,terminal_output +9850,9797202,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1935,1,"",shellscript,content +9851,9797444,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1935,0,"_",shellscript,content +9852,9797444,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1936,0,"",shellscript,selection_keyboard +9853,9797930,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1936,0,"c",shellscript,content +9854,9797931,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1937,0,"",shellscript,selection_keyboard +9855,9798027,"TERMINAL",0,0,"7222",,terminal_output +9856,9798079,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1937,0,"k",shellscript,content +9857,9798080,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1938,0,"",shellscript,selection_keyboard +9858,9798370,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1938,0,"p",shellscript,content +9859,9798371,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1939,0,"",shellscript,selection_keyboard +9860,9798394,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1939,0,"t",shellscript,content +9861,9798395,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1940,0,"",shellscript,selection_keyboard +9862,9798995,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1940,0," ",shellscript,content +9863,9798996,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1941,0,"",shellscript,selection_keyboard +9864,9799054,"TERMINAL",0,0,"8333",,terminal_output +9865,9799259,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1941,0,"\",shellscript,content +9866,9799260,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1942,0,"",shellscript,selection_keyboard +9867,9799668,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1941,0,"",shellscript,selection_command +9868,9800221,"TERMINAL",0,0,"9444",,terminal_output +9869,9801468,"TERMINAL",0,0,"30555",,terminal_output +9870,9802252,"TERMINAL",0,0,"1666",,terminal_output 
+9871,9803235,"TERMINAL",0,0,"2777",,terminal_output +9872,9804343,"TERMINAL",0,0,"3999",,terminal_output +9873,9805385,"TERMINAL",0,0,"5404020",,terminal_output +9874,9806385,"train_dynamics.py",0,0,"",python,tab +9875,9806386,"train_dynamics.py",1495,0,"",python,selection_mouse +9876,9806510,"TERMINAL",0,0,"6111",,terminal_output +9877,9807452,"TERMINAL",0,0,"7222",,terminal_output +9878,9808462,"TERMINAL",0,0,"8333",,terminal_output +9879,9809542,"TERMINAL",0,0,"9444",,terminal_output +9880,9810613,"TERMINAL",0,0,"40555",,terminal_output +9881,9810976,"train_dynamics.py",2501,0,"",python,selection_mouse +9882,9811589,"TERMINAL",0,0,"1666",,terminal_output +9883,9812279,"train_dynamics.py",2500,0,"",python,selection_command +9884,9812667,"TERMINAL",0,0,"2777",,terminal_output +9885,9813712,"TERMINAL",0,0,"3888",,terminal_output +9886,9814800,"TERMINAL",0,0,"4999",,terminal_output +9887,9815796,"TERMINAL",0,0,"5505030",,terminal_output +9888,9816826,"TERMINAL",0,0,"6111",,terminal_output +9889,9817870,"TERMINAL",0,0,"7222",,terminal_output +9890,9818508,"train_dynamics.py",2524,0,"",python,selection_keyboard +9891,9818562,"train_dynamics.py",2535,0,"",python,selection_keyboard +9892,9819105,"TERMINAL",0,0,"8333Every 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 16:09:48 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3466286 accelerat train_to tum_cte0 R 18:53:53\t 1 hkn07363466287 accelerat train_la tum_cte0 R 18:53:53\t 1 hkn07363468835 dev_accel interact tum_cte0 R57:33\t 1 hkn0402",,terminal_output +9893,9819244,"TERMINAL",0,0,"Python",,terminal_focus +9894,9820049,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 16:09:49 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3466286 accelerat train_to tum_cte0 R 18:53:54\t 1 hkn07363466287 accelerat train_la tum_cte0 R 18:53:54\t 1 hkn07363468835 dev_accel interact tum_cte0 R57:34\t 1 hkn0402",,terminal_output +9895,9821096,"TERMINAL",0,0,"50555",,terminal_output +9896,9821394,"TERMINAL",0,0,"source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate",,terminal_command +9897,9821440,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +9898,9821483,"TERMINAL",0,0,"/home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/python",,terminal_command +9899,9821526,"TERMINAL",0,0,"]633;CPython 3.10.18 (main, Jun 4 2025, 17:36:27) [Clang 20.1.4 ] on linux\r\nType ""help"", ""copyright"", ""credits"" or ""license"" for more information.\r\n",,terminal_output +9900,9821709,"TERMINAL",0,0,">>> ",,terminal_output +9901,9821776,"TERMINAL",0,0,"val_steps: int = 50\r\n>>> ",,terminal_output +9902,9822131,"TERMINAL",0,0,"1666",,terminal_output +9903,9823173,"TERMINAL",0,0,"2777",,terminal_output +9904,9823848,"TERMINAL",0,0,"watch",,terminal_focus +9905,9824202,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 16:09:53 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3466286 accelerat train_to tum_cte0 R 18:53:58\t 1 hkn07363466287 accelerat train_la tum_cte0 R 18:53:58\t 1 hkn07363468835 dev_accel interact tum_cte0 R57:38\t 1 hkn0402",,terminal_output +9906,9825227,"TERMINAL",0,0,"4999",,terminal_output +9907,9825447,"train_dynamics.py",644,0,"",python,selection_mouse +9908,9826068,"train_dynamics.py",868,0,"",python,selection_mouse +9909,9826268,"TERMINAL",0,0,"54:004:0040",,terminal_output +9910,9826617,"train_dynamics.py",861,0,"",python,selection_mouse 
+9911,9826747,"train_dynamics.py",851,12,"restore_ckpt",python,selection_mouse +9912,9827407,"TERMINAL",0,0,"6222",,terminal_output +9913,9827871,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +9914,9827872,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2054,0,"",shellscript,selection_mouse +9915,9828341,"TERMINAL",0,0,"8333",,terminal_output +9916,9829451,"TERMINAL",0,0,"srun",,terminal_focus +9917,9829643,"TERMINAL",0,0,"9444",,terminal_output +9918,9830427,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh ",,terminal_output +9919,9830477,"TERMINAL",0,0,"10:00555",,terminal_output +9920,9831552,"TERMINAL",0,0,"1666",,terminal_output +9921,9831552,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\r\n#SBATCH --job-name=train_dyn_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/interactive/3468835\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/interactive/3468835\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=1000 \\r\n --log \\r\n --name=coinrun-dyn-dev-$slurm_job_id \\r\n --tags dyn coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 300 \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 10 \\r\n --val_steps 50 \\r\n --wandb_id bb7pfnj2 \\r\n --restore_ckpt \\r\n --data_dir $array_records_dir_train \\r\n --lam_checkpoint $lam_checkpoint \\r\n --tokenizer_checkpoint $tokenizer_checkpoint\r\n",,terminal_output 
+9922,9831683,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1362839\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1757077935\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757081535\r\nSLURM_PMI2_SRUN_PORT=37571\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3468835\r\nSLURM_PTY_PORT=43005\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=33\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=196\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=34789\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3468835\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=34789\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +9923,9831777,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +9924,9832562,"TERMINAL",0,0,"2777",,terminal_output +9925,9833568,"TERMINAL",0,0,"3888",,terminal_output +9926,9834654,"TERMINAL",0,0,"4999",,terminal_output +9927,9835654,"TERMINAL",0,0,"5101050",,terminal_output +9928,9836828,"TERMINAL",0,0,"6111",,terminal_output +9929,9837742,"TERMINAL",0,0,"7222",,terminal_output +9930,9838868,"TERMINAL",0,0,"8333",,terminal_output +9931,9839826,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +9932,9840065,"TERMINAL",0,0,"9444",,terminal_output +9933,9841161,"TERMINAL",0,0,"10555",,terminal_output +9934,9841724,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +9935,9842044,"TERMINAL",0,0,"1666",,terminal_output +9936,9842298,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1601,0,"",shellscript,selection_mouse +9937,9842430,"TERMINAL",0,0,"wandb: creating run\r\n",,terminal_output +9938,9842619,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_161011-bb7pfnj2\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Resuming run coinrun-dyn-dev-3468835\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/bb7pfnj2\r\n",,terminal_output +9939,9843034,"TERMINAL",0,0,"2777",,terminal_output +9940,9843263,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1600,1,"",shellscript,content +9941,9844053,"TERMINAL",0,0,"3888",,terminal_output +9942,9845245,"TERMINAL",0,0,"4999",,terminal_output +9943,9845334,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1801,0,"",shellscript,selection_mouse +9944,9845653,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1800,1,"",shellscript,content +9945,9845763,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1800,0,"w",shellscript,content +9946,9845764,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1801,0,"",shellscript,selection_keyboard +9947,9846266,"TERMINAL",0,0,"520208:00",,terminal_output +9948,9846771,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1800,1,"",shellscript,content +9949,9846852,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1800,0,"2",shellscript,content +9950,9846853,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1801,0,"",shellscript,selection_keyboard +9951,9846909,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35115232, 'tokenizer': 33750256, 'total': 95421392}\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_dynamics.py"", line 375, in \r\n restored = checkpoint_manager.restore(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/checkpoint_manager.py"", line 1621, in restore\r\n raise FileNotFoundError(f'No steps found in {self.directory}.')\r\nFileNotFoundError: No steps found in /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3468835.\r\n",,terminal_output +9952,9847294,"TERMINAL",0,0,"6111",,terminal_output +9953,9847893,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1915,0,"",shellscript,selection_mouse +9954,9847992,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-dyn-dev-3468835 at: https://wandb.ai/instant-uv/jafar/runs/bb7pfnj2\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_161011-bb7pfnj2/logs\r\n",,terminal_output +9955,9848059,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1910,8,"bb7pfnj2",shellscript,selection_mouse +9956,9848189,"TERMINAL",0,0,"7222",,terminal_output +9957,9848709,"TERMINAL",0,0,"srun: error: hkn0402: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +9958,9849272,"TERMINAL",0,0,"8333",,terminal_output 
+9959,9849292,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1917,0,"",shellscript,selection_command +9960,9849775,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1893,0,"",shellscript,selection_command +9961,9850277,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1872,0,"",shellscript,selection_command +9962,9850314,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1828,0,"",shellscript,selection_command +9963,9850370,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1804,0,"",shellscript,selection_command +9964,9850393,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1780,0,"",shellscript,selection_command +9965,9850394,"TERMINAL",0,0,"9555",,terminal_output +9966,9850488,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1756,0,"",shellscript,selection_command +9967,9850624,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1780,0,"",shellscript,selection_command +9968,9850784,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1804,0,"",shellscript,selection_command +9969,9850937,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1828,0,"",shellscript,selection_command +9970,9851075,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1872,0,"",shellscript,selection_command +9971,9851208,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1893,0,"",shellscript,selection_command +9972,9851317,"TERMINAL",0,0,"21666",,terminal_output +9973,9851568,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1917,0,"",shellscript,selection_command +9974,9851982,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1895,47,"",shellscript,content +9975,9852007,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1899,0,"",shellscript,selection_command +9976,9852439,"TERMINAL",0,0,"2777",,terminal_output +9977,9853494,"TERMINAL",0,0,"3888",,terminal_output +9978,9854743,"TERMINAL",0,0,"4999",,terminal_output +9979,9855574,"TERMINAL",0,0,"5303010",,terminal_output +9980,9856604,"TERMINAL",0,0,"6111",,terminal_output +9981,9857238,"TERMINAL",0,0,"watch",,terminal_focus +9982,9857602,"TERMINAL",0,0,"7222",,terminal_output +9983,9858166,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +9984,9861323,"TERMINAL",0,0,"srun",,terminal_focus +9985,9864730,"TERMINAL",0,0,"",,terminal_focus +9986,9867102,"TERMINAL",0,0,"source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate",,terminal_command +9987,9867187,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +9988,9869143,"TERMINAL",0,0,"salloc_node",,terminal_command +9989,9869194,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3469346\r\nsalloc: job 3469346 queued and waiting for resources\r\n",,terminal_output +9990,9871487,"TERMINAL",0,0,"srun",,terminal_focus +9991,9872390,"TERMINAL",0,0,"[?2004l\r\r\nexit\r\nsrun: error: hkn0402: task 0: Exited with exit code 1\r\nsalloc: Relinquishing job allocation 3468835\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +9992,9874019,"TERMINAL",0,0,"salloc",,terminal_focus +9993,9874892,"TERMINAL",0,0,"^Csalloc: Job allocation 3469346 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +9994,9875283,"TERMINAL",0,0,"salloc_node",,terminal_command +9995,9875341,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3469347\r\nsalloc: job 3469347 queued and waiting for resources\r\n",,terminal_output 
+9996,9877208,"TERMINAL",0,0,"qu",,terminal_output +9997,9877345,"TERMINAL",0,0,"e",,terminal_output +9998,9877742,"TERMINAL",0,0," ",,terminal_output +9999,9877801,"TERMINAL",0,0," ",,terminal_output +10000,9877946,"TERMINAL",0,0," ",,terminal_output +10001,9878165,"TERMINAL",0,0,"^Csalloc: Job allocation 3469347 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +10002,9879429,"TERMINAL",0,0,"bash",,terminal_focus +10003,9882137,"TERMINAL",0,0,"salloc_node",,terminal_command +10004,9882219,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3469349\r\nsalloc: job 3469349 queued and waiting for resources\r\n",,terminal_output +10005,9883852,"TERMINAL",0,0,"bash",,terminal_focus +10006,9885026,"TERMINAL",0,0,"queue",,terminal_command +10007,9885185,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 16:10:54 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON) 3466286 accelerat train_to tum_cte0 R 18:54:59\t 1 hkn07363466287 accelerat train_la tum_cte0 R 18:54:59\t 1 hkn07363469349 dev_accel interact tum_cte0 PD\t0:00\t 1 (Priority)",,terminal_output +10008,9886107,"TERMINAL",0,0,"55:005:00",,terminal_output +10009,9887152,"TERMINAL",0,0,"611",,terminal_output +10010,9887892,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +10011,9888659,"TERMINAL",0,0,"idling",,terminal_command +10012,9888758,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Fri Sep 5 16:10:58 2025Partition dev_cpuonly:\t 2 nodes idle\rPartition cpuonly: 21 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated: 32 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 5 nodes idle\rPartition accelerated-h200:\t 5 nodes idle",,terminal_output +10013,9889911,"TERMINAL",0,0,"9",,terminal_output +10014,9890940,"TERMINAL",0,0,"1:00",,terminal_output +10015,9891736,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +10016,9894259,"TERMINAL",0,0,"salloc",,terminal_focus +10017,9901164,"TERMINAL",0,0,"bash",,terminal_focus +10018,9902444,"TERMINAL",0,0,"queue",,terminal_command +10019,9902564,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 16:11:12 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON) 3466286 accelerat train_to tum_cte0 R 18:55:17\t 1 hkn07363466287 accelerat train_la tum_cte0 R 18:55:17\t 1 hkn07363469349 dev_accel interact tum_cte0 PD\t0:00\t 1 (Priority)",,terminal_output +10020,9903509,"TERMINAL",0,0,"388",,terminal_output +10021,9904530,"TERMINAL",0,0,"499",,terminal_output +10022,9905647,"TERMINAL",0,0,"52020",,terminal_output +10023,9906608,"TERMINAL",0,0,"611",,terminal_output +10024,9907684,"TERMINAL",0,0,"722",,terminal_output +10025,9908690,"TERMINAL",0,0,"833",,terminal_output +10026,9909835,"TERMINAL",0,0,"944",,terminal_output +10027,9910854,"TERMINAL",0,0,"2055",,terminal_output +10028,9911870,"TERMINAL",0,0,"166",,terminal_output +10029,9912861,"TERMINAL",0,0,"277",,terminal_output +10030,9914032,"TERMINAL",0,0,"388",,terminal_output +10031,9914994,"TERMINAL",0,0,"499",,terminal_output +10032,9915314,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +10033,9915991,"TERMINAL",0,0,"53030",,terminal_output +10034,9917052,"TERMINAL",0,0,"611",,terminal_output +10035,9918097,"TERMINAL",0,0,"722",,terminal_output 
+10036,9919135,"TERMINAL",0,0,"833",,terminal_output +10037,9920176,"TERMINAL",0,0,"944",,terminal_output +10038,9921224,"TERMINAL",0,0,"3055",,terminal_output +10039,9922284,"TERMINAL",0,0,"166",,terminal_output +10040,9922998,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +10041,9923356,"TERMINAL",0,0,"288",,terminal_output +10042,9924329,"TERMINAL",0,0,"499",,terminal_output +10043,9925385,"TERMINAL",0,0,"54040",,terminal_output +10044,9926486,"TERMINAL",0,0,"611",,terminal_output +10045,9927436,"TERMINAL",0,0,"722",,terminal_output +10046,9928572,"TERMINAL",0,0,"833",,terminal_output +10047,9929512,"TERMINAL",0,0,"944",,terminal_output +10048,9930622,"TERMINAL",0,0,"4055",,terminal_output +10049,9931586,"TERMINAL",0,0,"166",,terminal_output +10050,9932620,"TERMINAL",0,0,"277",,terminal_output +10051,9933697,"TERMINAL",0,0,"388",,terminal_output +10052,9934819,"TERMINAL",0,0,"499",,terminal_output +10053,9935838,"TERMINAL",0,0,"55050",,terminal_output +10054,9936782,"TERMINAL",0,0,"611",,terminal_output +10055,9937824,"TERMINAL",0,0,"722",,terminal_output +10056,9938916,"TERMINAL",0,0,"833",,terminal_output +10057,9939939,"TERMINAL",0,0,"944",,terminal_output +10058,9940963,"TERMINAL",0,0,"5055",,terminal_output +10059,9941998,"TERMINAL",0,0,"166",,terminal_output +10060,9943041,"TERMINAL",0,0,"277",,terminal_output +10061,9944074,"TERMINAL",0,0,"388",,terminal_output +10062,9945116,"TERMINAL",0,0,"499",,terminal_output +10063,9946154,"TERMINAL",0,0,"56:006:00",,terminal_output +10064,9947198,"TERMINAL",0,0,"611",,terminal_output +10065,9948250,"TERMINAL",0,0,"722",,terminal_output +10066,9949345,"TERMINAL",0,0,"844",,terminal_output +10067,9950377,"TERMINAL",0,0,"2:0055",,terminal_output +10068,9951410,"TERMINAL",0,0,"166",,terminal_output +10069,9952389,"TERMINAL",0,0,"277",,terminal_output +10070,9953429,"TERMINAL",0,0,"388",,terminal_output +10071,9953911,"TERMINAL",0,0,"bash",,terminal_focus +10072,9954462,"TERMINAL",0,0,"499",,terminal_output +10073,9955503,"TERMINAL",0,0,"51010",,terminal_output +10074,9956185,"TERMINAL",0,0,"git status",,terminal_command +10075,9956221,"TERMINAL",0,0,"]633;COn branch validation-loss\r\nYour branch is up to date with 'origin/validation-loss'.\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: train_dynamics.py\r\n\tmodified: train_lam.py\r\n\tmodified: train_tokenizer.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\tlogs/\r\n\toverfit_dir.zip\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\tutils/visualizer.py\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +10076,9956545,"TERMINAL",0,0,"611",,terminal_output +10077,9957653,"TERMINAL",0,0,"722",,terminal_output +10078,9958693,"TERMINAL",0,0,"833",,terminal_output +10079,9959670,"TERMINAL",0,0,"944",,terminal_output +10080,9960737,"TERMINAL",0,0,"1055",,terminal_output +10081,9961757,"TERMINAL",0,0,"166",,terminal_output +10082,9962806,"TERMINAL",0,0,"277",,terminal_output +10083,9963993,"TERMINAL",0,0,"388",,terminal_output +10084,9964919,"TERMINAL",0,0,"499",,terminal_output +10085,9965949,"TERMINAL",0,0,"52020",,terminal_output 
+10086,9966976,"TERMINAL",0,0,"611",,terminal_output +10087,9968010,"TERMINAL",0,0,"722",,terminal_output +10088,9969056,"TERMINAL",0,0,"git commit -am ""val loss implemented for dynamics model""",,terminal_command +10089,9969101,"TERMINAL",0,0,"]633;C",,terminal_output +10090,9969136,"TERMINAL",0,0,"833",,terminal_output +10091,9969243,"TERMINAL",0,0,"[validation-loss 4b964f1] val loss implemented for dynamics model\r\n 3 files changed, 205 insertions(+), 92 deletions(-)\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +10092,9970110,"TERMINAL",0,0,"944",,terminal_output +10093,9970827,"TERMINAL",0,0,"git push",,terminal_command +10094,9970870,"TERMINAL",0,0,"]633;C",,terminal_output +10095,9971176,"TERMINAL",0,0,"2055",,terminal_output +10096,9972297,"TERMINAL",0,0,"166",,terminal_output +10097,9972353,"TERMINAL",0,0,"Enumerating objects: 9, done.\r\nCounting objects: 11% (1/9)\rCounting objects: 22% (2/9)\rCounting objects: 33% (3/9)\rCounting objects: 44% (4/9)\rCounting objects: 55% (5/9)\rCounting objects: 66% (6/9)\rCounting objects: 77% (7/9)\rCounting objects: 88% (8/9)\rCounting objects: 100% (9/9)\rCounting objects: 100% (9/9), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 20% (1/5)\rCompressing objects: 40% (2/5)\rCompressing objects: 60% (3/5)\rCompressing objects: 80% (4/5)\rCompressing objects: 100% (5/5)\rCompressing objects: 100% (5/5), done.\r\nWriting objects: 20% (1/5)\rWriting objects: 40% (2/5)\rWriting objects: 60% (3/5)\rWriting objects: 80% (4/5)\rWriting objects: 100% (5/5)\rWriting objects: 100% (5/5), 2.35 KiB | 600.00 KiB/s, done.\r\nTotal 5 (delta 4), reused 0 (delta 0), pack-reused 0\r\nremote: Resolving deltas: 0% (0/4)\rremote: Resolving deltas: 25% (1/4)\rremote: Resolving deltas: 50% (2/4)\rremote: Resolving deltas: 75% (3/4)\rremote: Resolving deltas: 100% (4/4)\rremote: Resolving deltas: 100% (4/4), completed with 4 local objects.\r\n",,terminal_output +10098,9972718,"TERMINAL",0,0,"To github.com:p-doom/jasmine.git\r\n d53848c..4b964f1 validation-loss -> validation-loss\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +10099,9973232,"TERMINAL",0,0,"277",,terminal_output +10100,9974270,"TERMINAL",0,0,"399",,terminal_output +10101,9975309,"TERMINAL",0,0,"53030",,terminal_output +10102,9976492,"TERMINAL",0,0,"611",,terminal_output +10103,9977398,"TERMINAL",0,0,"722",,terminal_output +10104,9978426,"TERMINAL",0,0,"833",,terminal_output +10105,9979461,"TERMINAL",0,0,"944",,terminal_output +10106,9980586,"TERMINAL",0,0,"3055",,terminal_output +10107,9981566,"TERMINAL",0,0,"166",,terminal_output +10108,9982640,"TERMINAL",0,0,"277",,terminal_output +10109,9983665,"TERMINAL",0,0,"388",,terminal_output +10110,9984668,"TERMINAL",0,0,"499",,terminal_output +10111,9985736,"TERMINAL",0,0,"54040",,terminal_output +10112,9986749,"TERMINAL",0,0,"611",,terminal_output +10113,9987815,"TERMINAL",0,0,"722",,terminal_output +10114,9988893,"TERMINAL",0,0,"833",,terminal_output +10115,9989915,"TERMINAL",0,0,"944",,terminal_output +10116,9990965,"TERMINAL",0,0,"4055",,terminal_output +10117,9991978,"TERMINAL",0,0,"166",,terminal_output +10118,9993019,"TERMINAL",0,0,"277",,terminal_output +10119,9994071,"TERMINAL",0,0,"388",,terminal_output +10120,9995094,"TERMINAL",0,0,"499",,terminal_output +10121,9996143,"TERMINAL",0,0,"55050",,terminal_output +10122,9997184,"TERMINAL",0,0,"611",,terminal_output +10123,9998227,"TERMINAL",0,0,"722",,terminal_output +10124,9999028,"train_dynamics.py",0,0,"",python,tab 
+10125,9999029,"train_dynamics.py",1116,0,"",python,selection_mouse +10126,9999325,"TERMINAL",0,0,"844",,terminal_output +10127,10000357,"TERMINAL",0,0,"5055",,terminal_output +10128,10001346,"TERMINAL",0,0,"166",,terminal_output +10129,10002386,"TERMINAL",0,0,"277",,terminal_output +10130,10003443,"TERMINAL",0,0,"388",,terminal_output +10131,10004482,"TERMINAL",0,0,"499",,terminal_output +10132,10005577,"TERMINAL",0,0,"57:007:00",,terminal_output +10133,10006556,"TERMINAL",0,0,"611",,terminal_output +10134,10007606,"TERMINAL",0,0,"722",,terminal_output +10135,10008745,"TERMINAL",0,0,"833",,terminal_output +10136,10009819,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +10137,10009819,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1601,0,"",shellscript,selection_mouse +10138,10009891,"TERMINAL",0,0,"944",,terminal_output +10139,10010753,"TERMINAL",0,0,"3:0055",,terminal_output +10140,10011117,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +10141,10011855,"TERMINAL",0,0,"166",,terminal_output +10142,10012161,"TERMINAL",0,0,"watch",,terminal_focus +10143,10012840,"TERMINAL",0,0,"277",,terminal_output +10144,10013955,"TERMINAL",0,0,"388",,terminal_output +10145,10014903,"TERMINAL",0,0,"499",,terminal_output +10146,10016074,"TERMINAL",0,0,"51010",,terminal_output +10147,10018009,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 16:13:06 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3466286 accelerat train_to tum_cte0 R 18:57:12\t 1 hkn07363466287 accelerat train_la tum_cte0 R 18:57:12\t 1 hkn07363469349 dev_accel interact tum_cte0 PD\t0:00\t 1 (Priority)",,terminal_output +10148,10019083,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 16:13:08 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3466286 accelerat train_to tum_cte0 R 18:57:13\t 1 hkn07363466287 accelerat train_la tum_cte0 R 18:57:13\t 1 hkn07363469349 dev_accel interact tum_cte0 PD\t0:00\t 1 (Priority)",,terminal_output +10149,10019551,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 16:13:09 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3466286 accelerat train_to tum_cte0 R 18:57:14\t 1 hkn07363466287 accelerat train_la tum_cte0 R 18:57:14\t 1 hkn07363469349 dev_accel interact tum_cte0 PD\t0:00\t 1 (Priority) ",,terminal_output +10150,10019797,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 16:13:09 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3466286 accelerat train_to tum_cte0 R 18:57:14\t 1 hkn07363466287 accelerat train_la tum_cte0 R 18:57:14\t 1 hkn07363469349 dev_accel interact tum_cte0 PD\t0:00\t 1 (Priority)Every 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 16:13:09 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3466286 accelerat train_to tum_cte0 R 18:57:14\t 1 hkn07363466287 accelerat train_la tum_cte0 R 18:57:14\t 1 hkn07363469349 dev_accel interact tum_cte0 PD\t0:00\t 1 (Priority)",,terminal_output +10151,10019823,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 16:13:09 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3466286 accelerat train_to tum_cte0 R 18:57:14\t 1 hkn07363466287 accelerat train_la tum_cte0 R 18:57:14\t 1 hkn07363469349 dev_accel interact tum_cte0 PD\t0:00\t 1 (Priority)",,terminal_output +10152,10019873,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 16:13:09 2025JOBID PARTITION NAME USER ST\tTIME NODES 
NODELIST(REASON)3466286 accelerat train_to tum_cte0 R 18:57:14\t 1 hkn07363466287 accelerat train_la tum_cte0 R 18:57:14\t 1 hkn07363469349 dev_accel interact tum_cte0 PD\t0:00\t 1 (Priority)",,terminal_output +10153,10020929,"TERMINAL",0,0,"1055",,terminal_output +10154,10021954,"TERMINAL",0,0,"166",,terminal_output +10155,10022985,"TERMINAL",0,0,"277",,terminal_output +10156,10024029,"TERMINAL",0,0,"388",,terminal_output +10157,10025151,"TERMINAL",0,0,"499",,terminal_output +10158,10026110,"TERMINAL",0,0,"52020",,terminal_output +10159,10027158,"TERMINAL",0,0,"611",,terminal_output +10160,10028310,"TERMINAL",0,0,"722",,terminal_output +10161,10029227,"TERMINAL",0,0,"833",,terminal_output +10162,10030363,"TERMINAL",0,0,"955",,terminal_output +10163,10031407,"TERMINAL",0,0,"2166",,terminal_output +10164,10032395,"TERMINAL",0,0,"277",,terminal_output +10165,10033428,"TERMINAL",0,0,"388",,terminal_output +10166,10034463,"TERMINAL",0,0,"499",,terminal_output +10167,10035576,"TERMINAL",0,0,"53030",,terminal_output +10168,10036414,"TERMINAL",0,0,"bash",,terminal_focus +10169,10036510,"TERMINAL",0,0,"611",,terminal_output +10170,10037578,"TERMINAL",0,0,"722",,terminal_output +10171,10038582,"TERMINAL",0,0,"833",,terminal_output +10172,10039398,"TERMINAL",0,0,"scancel 3469349",,terminal_command +10173,10039421,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +10174,10039454,"TERMINAL",0,0,"salloc: Job allocation 3469349 has been revoked.\r\nsalloc: Job has been cancelled\r\nsalloc: error: Job submit/allocate failed: Job/step already completing or completed\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +10175,10039622,"TERMINAL",0,0,"\r944",,terminal_output +10176,10040689,"TERMINAL",0,0,"3055",,terminal_output +10177,10041758,"TERMINAL",0,0,"166",,terminal_output +10178,10042788,"TERMINAL",0,0,"277",,terminal_output +10179,10043478,"TERMINAL",0,0,"sync-runner-2",,terminal_command +10180,10043537,"TERMINAL",0,0,"]633;Csending incremental file list\r\n",,terminal_output +10181,10043876,"TERMINAL",0,0,"388",,terminal_output +10182,10044832,"TERMINAL",0,0,"499",,terminal_output +10183,10045878,"TERMINAL",0,0,"54040",,terminal_output +10184,10046953,"TERMINAL",0,0,"611",,terminal_output +10185,10047276,"TERMINAL",0,0,"./\r\ndiff.diff\r\ngenerate_dataset.py\r\ngenie.py\r\nsample.py\r\ntrain_dynamics.py\r\ntrain_lam.py\r\ntrain_tokenizer.py\r\n",,terminal_output +10186,10047550,"TERMINAL",0,0,"watch",,terminal_focus +10187,10047975,"TERMINAL",0,0,"722",,terminal_output +10188,10048180,"TERMINAL",0,0,"input_pipeline/preprocess/\r\ninput_pipeline/preprocess/pngs_to_array_records.py\r\ninput_pipeline/preprocess/video_to_array_records.py\r\nmodels/\r\nmodels/dynamics.py\r\nmodels/lam.py\r\nmodels/tokenizer.py\r\nslurm/jobs/mihir/horeka/coinrun/\r\nslurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh\r\nslurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh\r\nslurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh\r\nutils/\r\nutils/dataloader.py\r\nutils/lr_utils.py\r\nutils/nn.py\r\nutils/parameter_utils.py\r\n",,terminal_output +10189,10048284,"TERMINAL",0,0,"\r\nsent 289,911 bytes received 588 bytes 64,555.33 bytes/sec\r\ntotal size is 128,526,061 speedup is 442.43\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +10190,10048550,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +10191,10049249,"TERMINAL",0,0,"bash",,terminal_focus +10192,10050974,"TERMINAL",0,0,"runner-2",,terminal_command 
+10193,10052331,"TERMINAL",0,0,"bash",,terminal_focus +10194,10060876,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +10195,10063397,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1615,43," --name=coinrun-dyn-dev-$slurm_job_id \\n",shellscript,selection_mouse +10196,10065440,"TERMINAL",0,0,"bash",,terminal_focus +10197,10070044,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",,terminal_command +10198,10070052,"TERMINAL",0,0,"]633;CSubmitted batch job 3469360\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs_2",,terminal_output +10199,10071610,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1614,0,"",shellscript,selection_mouse +10200,10071641,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1613,0,"",shellscript,selection_command +10201,10078709,"TERMINAL",0,0,"bash",,terminal_focus +10202,10080807,"TERMINAL",0,0,"git diff",,terminal_command +10203,10080905,"TERMINAL",0,0,"]633;C[?1h=\r\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +10204,10087079,"TERMINAL",0,0,"git branch",,terminal_command +10205,10087140,"TERMINAL",0,0,"]633;C[?1h=\r add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n maskgit-sampling-iterative-unmasking-fix\r\n metrics-logging-for-dynamics-model\r\n monkey-patch\r\n new-arch-sampling\r\n preprocess_video\r\n refactor-tmp\r\n revised-dataloader\r\n:",,terminal_output +10206,10088357,"TERMINAL",0,0,"\r runner\r\n:",,terminal_output +10207,10088823,"TERMINAL",0,0,"\rM add-wandb-name-and-tags\r\n\r:",,terminal_output +10208,10088921,"TERMINAL",0,0,"\r\r:",,terminal_output +10209,10089105,"TERMINAL",0,0,"\r\r:",,terminal_output +10210,10092905,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +10211,10095999,"TERMINAL",0,0,"git checkout input_pipeline/add-npy2array_record",,terminal_command +10212,10096032,"",0,0,"Switched from branch 'validation-loss' to 'input_pipeline/add-npy2array_record'",,git_branch_checkout +10213,10096106,"TERMINAL",0,0,"]633;CSwitched to branch 'input_pipeline/add-npy2array_record'\r\nYour branch is up to date with 'origin/input_pipeline/add-npy2array_record'.\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +10214,10110301,"TERMINAL",0,0,"cd $ws_dir",,terminal_command +10215,10110332,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared",,terminal_output +10216,10111167,"TERMINAL",0,0,"ls",,terminal_command +10217,10111229,"TERMINAL",0,0,"]633;C",,terminal_output +10218,10111256,"TERMINAL",0,0,"checkpoints count_items.sh data data_atari data_coinrun data_minecraft data_new huggingface logs possibly_corrupt_files_in_this_workspace.txt scripts\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared",,terminal_output +10219,10118801,"TERMINAL",0,0,"ls -l",,terminal_command 
+10220,10118890,"TERMINAL",0,0,"]633;Ctotal 541\r\ndrwxrwx---+ 125 tum_ind3695 hk-project-pai00039 16384 Aug 11 00:17 checkpoints\r\n-rw-rw----+ 1 tum_ind3695 hk-project-pai00039 613 Jul 2 11:25 count_items.sh\r\ndrwxrwx---+ 29 tum_ind3695 hk-project-pai00039 4096 Jul 3 11:23 data\r\ndrwxrwx---+ 6 tum_cte0515 hk-project-p0023960 4096 Aug 12 15:13 data_atari\r\ndrwxrwx---+ 8 tum_cte0515 hk-project-p0023960 4096 Sep 5 10:57 data_coinrun\r\nlrwxrwxrwx 1 tum_cte0515 hk-project-p0023960 8 Aug 11 11:13 data_minecraft -> data_new\r\ndrwxrwx---+ 18 tum_ind3695 hk-project-pai00039 4096 Aug 11 11:13 data_new\r\ndrwxrwx---+ 4 tum_ind3695 hk-project-pai00039 4096 Aug 7 00:43 huggingface\r\ndrwxrwx---+ 6 tum_ind3695 hk-project-pai00039 4096 Jul 15 11:46 logs\r\n-rw-------+ 1 tum_ind3695 root 483818 Jul 21 15:07 possibly_corrupt_files_in_this_workspace.txt\r\ndrwxrwx---+ 2 tum_ind3695 hk-project-pai00039 4096 Jul 23 18:24 scripts\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared",,terminal_output +10221,10122733,"TERMINAL",0,0,"cd data_new/",,terminal_command +10222,10122765,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new",,terminal_output +10223,10123023,"TERMINAL",0,0,"ls",,terminal_command +10224,10123085,"TERMINAL",0,0,"]633;C",,terminal_output +10225,10123244,"TERMINAL",0,0,"open_ai_minecraft open_ai_minecraft_arrayrecords_chunked_compressed open_ai_minecraft_arrayrecords_w_actions open_ai_minecraft_arrayrecords_w_actions_login\r\nopen_ai_minecraft_actions open_ai_minecraft_arrayrecords_chunked_subset open_ai_minecraft_arrayrecords_w_actions_bak open_ai_minecraft_npy\r\nopen_ai_minecraft_arrayrecords open_ai_minecraft_arrayrecords_chunked_train_val_split open_ai_minecraft_arrayrecords_w_actions_compressed open_ai_minecraft_tfrecord_uncurrupted\r\nopen_ai_minecraft_arrayrecords_chunked open_ai_minecraft_arrayrecords_chunked_uncompressed open_ai_minecraft_arrayrecords_w_actions_dev open_ai_minecraft_tfrecord_uncurrupted-2\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new",,terminal_output +10226,10177511,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes_dev.sh",0,0,"# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/holiday/causal/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/interactive/3416521\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=24 \\n --init_lr=0 \\n --darkness_threshold=50 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-causal-test-$slurm_job_id \\n --tags dynamics causal \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir",shellscript,tab 
+10227,10179218,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes_dev.sh",179,0,"",shellscript,selection_mouse +10228,10179352,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes_dev.sh",178,14,"jafa_ws_shared",shellscript,selection_mouse +10229,10179516,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes_dev.sh",119,122,"array_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n",shellscript,selection_mouse +10230,10183010,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes_dev.sh",137,0,"",shellscript,selection_mouse +10231,10183142,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes_dev.sh",136,2,"=/",shellscript,selection_mouse +10232,10183226,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes_dev.sh",136,2,"=/",shellscript,selection_mouse +10233,10183264,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes_dev.sh",136,6,"=/hkfs",shellscript,selection_mouse +10234,10183265,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes_dev.sh",136,7,"=/hkfs/",shellscript,selection_mouse +10235,10183277,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes_dev.sh",136,11,"=/hkfs/work",shellscript,selection_mouse +10236,10183347,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes_dev.sh",136,21,"=/hkfs/work/workspace",shellscript,selection_mouse +10237,10183350,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes_dev.sh",136,29,"=/hkfs/work/workspace/scratch",shellscript,selection_mouse +10238,10183350,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes_dev.sh",136,41,"=/hkfs/work/workspace/scratch/tum_ind3695",shellscript,selection_mouse +10239,10183398,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes_dev.sh",136,56,"=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared",shellscript,selection_mouse +10240,10183488,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes_dev.sh",136,65,"=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new",shellscript,selection_mouse +10241,10183544,"slurm/jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes_dev.sh",136,104,"=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked",shellscript,selection_mouse +10242,10224473,"TERMINAL",0,0,"cd open_ai_minecraft_arrayrecords",,terminal_command +10243,10224499,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords",,terminal_output +10244,10229631,"TERMINAL",0,0,"ls -l | wc -l",,terminal_command +10245,10229673,"TERMINAL",0,0,"]633;C2\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords",,terminal_output +10246,10231443,"TERMINAL",0,0,"ls",,terminal_command +10247,10231549,"TERMINAL",0,0,"]633;C10fps_160x90\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords",,terminal_output +10248,10233034,"TERMINAL",0,0,"cd 10fps_160x90/",,terminal_command +10249,10233081,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords/10fps_160x90",,terminal_output +10250,10235377,"TERMINAL",0,0,"ls -l | wc -l",,terminal_command +10251,10235442,"TERMINAL",0,0,"]633;C",,terminal_output 
+10252,10246573,"TERMINAL",0,0,"57129\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords/10fps_160x90",,terminal_output +10253,10259570,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=4\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=36:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --job-name=train_tokenizer_batch_size_scaling_4_node\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords/10fps_160x90\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=192 \\n --min_lr=0 \\n --max_lr=2.00e-4 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --name=tokenizer-batch-size-scaling-4-node-sqrt-lr-$slurm_job_id \\n --tags tokenizer batch-size-scaling 4-node sqrt-lr \\n --entity instant-uv \\n --project jafar \\n --data_dir $array_records_dir\n",shellscript,tab +10254,10262743,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,0,"",shellscript,selection_mouse +10255,10262917,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,1,"/",shellscript,selection_mouse +10256,10262918,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,3,"/hk",shellscript,selection_mouse +10257,10262918,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,5,"/hkfs",shellscript,selection_mouse +10258,10262951,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,8,"/hkfs/wo",shellscript,selection_mouse +10259,10263019,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,11,"/hkfs/work/",shellscript,selection_mouse +10260,10263019,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,13,"/hkfs/work/wo",shellscript,selection_mouse +10261,10263020,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,15,"/hkfs/work/work",shellscript,selection_mouse +10262,10263020,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,19,"/hkfs/work/workspac",shellscript,selection_mouse +10263,10263193,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,22,"/hkfs/work/workspace/s",shellscript,selection_mouse +10264,10263194,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,26,"/hkfs/work/workspace/scrat",shellscript,selection_mouse +10265,10263194,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,31,"/hkfs/work/workspace/scratch/tu",shellscript,selection_mouse 
+10266,10263195,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",547,19,"\narray_records_dir=",shellscript,selection_mouse +10267,10263195,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,51,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_sh",shellscript,selection_mouse +10268,10263195,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,67,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/op",shellscript,selection_mouse +10269,10263206,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,109,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords/10fps_160x90\n",shellscript,selection_mouse +10270,10263474,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,103,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords/10fps_1",shellscript,selection_mouse +10271,10263501,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,104,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords/10fps_16",shellscript,selection_mouse +10272,10263527,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,105,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords/10fps_160",shellscript,selection_mouse +10273,10263574,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,106,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords/10fps_160x",shellscript,selection_mouse +10274,10263641,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,107,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords/10fps_160x9",shellscript,selection_mouse +10275,10263674,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",566,108,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords/10fps_160x90",shellscript,selection_mouse +10276,10274257,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",727,0,"",shellscript,selection_mouse +10277,10274308,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch",726,0,"",shellscript,selection_command +10278,10299010,"TERMINAL",0,0,"dev",,terminal_command +10279,10299059,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +10280,10320998,"TERMINAL",0,0,"idling",,terminal_command +10281,10321070,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Fri Sep 5 16:18:10 2025Partition dev_cpuonly:\t 4 nodes idle\rPartition cpuonly: 42 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated: 32 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 5 nodes idle\rPartition accelerated-h200:\t 5 nodes idle",,terminal_output +10282,10322093,"TERMINAL",0,0,"1",,terminal_output +10283,10322410,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +10284,10354037,"TERMINAL",0,0,"idling",,terminal_command 
+10285,10354151,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Fri Sep 5 16:18:43 2025Partition dev_cpuonly:\t 9 nodes idle\rPartition cpuonly: 43 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated: 32 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 5 nodes idle\rPartition accelerated-h200:\t 5 nodes idle",,terminal_output +10286,10355141,"TERMINAL",0,0,"4",,terminal_output +10287,10356177,"TERMINAL",0,0,"5",,terminal_output +10288,10357222,"TERMINAL",0,0,"6",,terminal_output +10289,10358253,"TERMINAL",0,0,"7",,terminal_output +10290,10359294,"TERMINAL",0,0,"8",,terminal_output +10291,10360402,"TERMINAL",0,0,"50",,terminal_output +10292,10361375,"TERMINAL",0,0,"1",,terminal_output +10293,10362466,"TERMINAL",0,0,"2",,terminal_output +10294,10363328,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +10295,10404354,"TERMINAL",0,0,"runner",,terminal_command +10296,10404403,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +10297,10408617,"TERMINAL",0,0,"queue",,terminal_command +10298,10408693,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 16:19:38 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3469360 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3466286 accelerat train_to tum_cte0 R 19:03:43\t 1 hkn07363466287 accelerat train_la tum_cte0 R 19:03:43\t 1 hkn0736",,terminal_output +10299,10409422,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +10300,10412009,"TERMINAL",0,0,"queue",,terminal_command +10301,10412113,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 16:19:41 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3469360 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3466286 accelerat train_to tum_cte0 R 19:03:46\t 1 hkn07363466287 accelerat train_la tum_cte0 R 19:03:46\t 1 hkn0736",,terminal_output +10302,10412758,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +10303,10418300,"TERMINAL",0,0,"horeka/jafar_og_reproduction/generate_dataset_10m_gt_actions_train_val_test.sbatch",,terminal_command +10304,10418331,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +10305,10427631,"slurm/jobs/mihir/horeka/lam/train_lam_minecraft_8node-darkness-filter-400M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\n#SBATCH --job-name=train_lam_minecraft_8node_darkness_filter_400M\n#SBATCH --reservation=llmtum\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n 
restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\n# slurm_job_id=$SLURM_JOB_ID\nslurm_job_id=3454954\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n --restore_ckpt \\n --wandb_id $slurm_job_id \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --darkness_threshold=50 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=lam-minecraft-8-node-darkness-filter-400M-$slurm_job_id \\n --tags lam minecraft 8-node darkness-filter 400M \\n --entity instant-uv \\n --project jafar \\n --num_latents=100 \\n --model_dim=1024 \\n --num_blocks=12 \\n --num_heads=16 \\n --latent_dim=64 \\n --ffn_dim=4096 \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +10306,10436827,"TERMINAL",0,0,"cat slurm/jobs/mihir/horeka/lam/train_lam_minecraft_8node-darkness-filter-400M.sbatch",,terminal_command +10307,10436903,"TERMINAL",0,0,"]633;C#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=8\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:4\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\r\n#SBATCH --job-name=train_lam_minecraft_8node_darkness_filter_400M\r\n#SBATCH --reservation=llmtum\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# --- signal trap to requeue job before timeout ---\r\nrequeue_job() {\r\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\r\n scontrol requeue $SLURM_JOB_ID\r\n exit 0\r\n}\r\n\r\ntrap requeue_job sigusr1\r\n\r\n# set checkpoint flag based on restart count\r\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\r\n\r\nif [ $restart_count -eq 0 ]; then\r\n restore_ckpt_flag=""--no-restore-ckpt""\r\nelse\r\n restore_ckpt_flag=""--restore-ckpt""\r\nfi\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\n# slurm_job_id=$SLURM_JOB_ID\r\nslurm_job_id=3454954\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/lam/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_lam.py \\r\n --save_ckpt \\r\n --restore_ckpt \\r\n --wandb_id $slurm_job_id \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=160 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --darkness_threshold=50 \\r\n --log_image_interval=1000 \\r\n --log \\r\n --log_checkpoint_interval=1000 \\r\n --name=lam-minecraft-8-node-darkness-filter-400M-$slurm_job_id \\r\n --tags lam minecraft 8-node darkness-filter 400M \\r\n --entity instant-uv \\r\n --project jafar 
\\r\n --num_latents=100 \\r\n --model_dim=1024 \\r\n --num_blocks=12 \\r\n --num_heads=16 \\r\n --latent_dim=64 \\r\n --ffn_dim=4096 \\r\n --data_dir $array_records_dir &\r\n\r\nchild_pid=$!\r\n\r\nwait $child_pid\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +10308,10449112,"slurm/jobs/mihir/horeka/lam/train_lam_minecraft_8node-darkness-filter-400M.sbatch",0,0,"",shellscript,tab +10309,10453911,"slurm/jobs/mihir/horeka/lam/train_lam_minecraft_8node-darkness-filter-400M.sbatch",487,0,"",shellscript,selection_mouse +10310,10453913,"slurm/jobs/mihir/horeka/lam/train_lam_minecraft_8node-darkness-filter-400M.sbatch",486,0,"",shellscript,selection_command +10311,10468437,"TERMINAL",0,0,"",,terminal_command +10312,10468493,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +10313,10564687,"TERMINAL",0,0,"git diff main",,terminal_command +10314,10564751,"TERMINAL",0,0,"]633;Cwarning: Not a git repository. Use --no-index to compare two paths outside a working tree\r\nusage: git diff --no-index [] \r\n\r\nDiff output format options\r\n -p, --patch generate patch\r\n -s, --no-patch suppress diff output\r\n -u generate patch\r\n -U, --unified[=] generate diffs with lines context\r\n -W, --[no-]function-context\r\n generate diffs with lines context\r\n --raw generate the diff in raw format\r\n --patch-with-raw synonym for '-p --raw'\r\n --patch-with-stat synonym for '-p --stat'\r\n --numstat machine friendly --stat\r\n --shortstat output only the last line of --stat\r\n -X, --dirstat[=...]\r\n output the distribution of relative amount of changes for each sub-directory\r\n --cumulative synonym for --dirstat=cumulative\r\n --dirstat-by-file[=...]\r\n synonym for --dirstat=files,param1,param2...\r\n --check warn if changes introduce conflict markers or whitespace errors\r\n --summary condensed summary such as creations, renames and mode changes\r\n --name-only show only names of changed files\r\n --name-status show only names and status of changed files\r\n --stat[=[,[,]]]\r\n generate diffstat\r\n --stat-width generate diffstat with a given width\r\n --stat-name-width \r\n generate diffstat with a given name width\r\n --stat-graph-width \r\n generate diffstat with a given graph width\r\n --stat-count generate diffstat with limited lines\r\n --[no-]compact-summary\r\n generate compact summary in diffstat\r\n --binary output a binary diff that can be applied\r\n --[no-]full-index show full pre- and post-image object names on the ""index"" lines\r\n --[no-]color[=] show colored diff\r\n --ws-error-highlight \r\n highlight whitespace errors in the 'context', 'old' or 'new' lines in the diff\r\n -z do not munge pathnames and use NULs as output field terminators in --raw or --numstat\r\n --[no-]abbrev[=] use digits to display object names\r\n --src-prefix show the given source prefix instead of ""a/""\r\n --dst-prefix show the given destination prefix instead of ""b/""\r\n --line-prefix \r\n prepend an additional prefix to every line of output\r\n --no-prefix do not show any source or destination prefix\r\n --default-prefix use default prefixes a/ and b/\r\n --inter-hunk-context \r\n show context between diff hunks up to the specified number of lines\r\n --output-indicator-new \r\n specify the character to indicate a new line instead of '+'\r\n --output-indicator-old \r\n specify the character to indicate an old line instead of '-'\r\n --output-indicator-context \r\n specify the character to indicate a context instead of ' '\r\n\r\nDiff rename 
options\r\n -B, --break-rewrites[=[/]]\r\n break complete rewrite changes into pairs of delete and create\r\n -M, --find-renames[=]\r\n detect renames\r\n -D, --irreversible-delete\r\n omit the preimage for deletes\r\n -C, --find-copies[=]\r\n detect copies\r\n --[no-]find-copies-harder\r\n use unmodified files as source to find copies\r\n --no-renames disable rename detection\r\n --[no-]rename-empty use empty blobs as rename source\r\n --[no-]follow continue listing the history of a file beyond renames\r\n -l prevent rename/copy detection if the number of rename/copy targets exceeds given limit\r\n\r\nDiff algorithm options\r\n --minimal produce the smallest possible diff\r\n -w, --ignore-all-space\r\n ignore whitespace when comparing lines\r\n -b, --ignore-space-change\r\n ignore changes in amount of whitespace\r\n --ignore-space-at-eol ignore changes in whitespace at EOL\r\n --ignore-cr-at-eol ignore carrier-return at the end of line\r\n --ignore-blank-lines ignore changes whose lines are all blank\r\n -I, --[no-]ignore-matching-lines \r\n ignore changes whose all lines match \r\n --[no-]indent-heuristic\r\n heuristic to shift diff hunk boundaries for easy reading\r\n --patience generate diff using the ""patience diff"" algorithm\r\n --histogram generate diff using the ""histogram diff"" algorithm\r\n --diff-algorithm \r\n choose a diff algorithm\r\n --anchored generate diff using the ""anchored diff"" algorithm\r\n --word-diff[=] show word diff, using to delimit changed words\r\n --word-diff-regex \r\n use to decide what a word is\r\n --color-words[=]\r\n equivalent to --word-diff=color --word-diff-regex=\r\n --[no-]color-moved[=]\r\n moved lines of code are colored differently\r\n --[no-]color-moved-ws \r\n how white spaces are ignored in --color-moved\r\n\r\nOther diff options\r\n --[no-]relative[=]\r\n when run from subdir, exclude changes outside and show relative paths\r\n -a, --[no-]text treat all files as text\r\n -R swap two inputs, reverse the diff\r\n --[no-]exit-code exit with 1 if there were differences, 0 otherwise\r\n --[no-]quiet disable all output of the program\r\n --[no-]ext-diff allow an external diff helper to be executed\r\n --[no-]textconv run external text conversion filters when comparing binary files\r\n --ignore-submodules[=]\r\n ignore changes to submodules in the diff generation\r\n --submodule[=]\r\n specify how differences in submodules are shown\r\n --ita-invisible-in-index\r\n hide 'git add -N' entries from the index\r\n --ita-visible-in-index\r\n treat 'git add -N' entries as real in the index\r\n -S look for differences that change the number of occurrences of the specified string\r\n -G look for differences that change the number of occurrences of the specified regex\r\n --pickaxe-all show all changes in the changeset with -S or -G\r\n --pickaxe-regex treat in -S as extended POSIX regular expression\r\n -O control the order in which files appear in the output\r\n --rotate-to show the change in the specified path first\r\n --skip-to skip the output to the specified path\r\n --find-object \r\n look for differences that change the number of occurrences of the specified object\r\n --diff-filter [(A|C|D|M|R|T|U|X|B)...[*]]\r\n select files by diff type\r\n --output output to a specific file\r\n\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +10315,10572177,"TERMINAL",0,0,"^C",,terminal_command +10316,10572187,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output 
+10317,10574109,"TERMINAL",0,0,"dev",,terminal_command +10318,10574148,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +10319,10575338,"TERMINAL",0,0,"git diff main",,terminal_command +10320,10575394,"TERMINAL",0,0,"]633;C[?1h=\rdiff --git a/generate_dataset.py b/generate_dataset.py\r\nindex a67c424..eea0b69 100644\r\n--- a/generate_dataset.py\r\n+++ b/generate_dataset.py\r\n@@ -10,6 +10,10 @@ from gym3 import types_np\r\n import numpy as np\r\n from procgen import ProcgenGym3Env\r\n import tyro\r\n+import pickle\r\n+import json\r\n+from array_record.python.array_record_module import ArrayRecordWriter \r\n+\r\n \r\n \r\n @dataclass\r\n@@ -25,31 +29,45 @@ output_dir.mkdir(parents=True, exist_ok=True)\r\n \r\n # --- Generate episodes ---\r\n i = 0\r\n-metadata = []\r\n+episode_metadata = []\r\n while i < args.num_episodes:\r\n seed = np.random.randint(0, 10000)\r\n env = ProcgenGym3Env(num=1, env_name=""coinrun"", start_level=seed)\r\n- dataseq = []\r\n+ observations_seq = []\r\n \r\n # --- Run episode ---\r\n for j in range(1000):\r\n env.act(types_np.sample(env.ac_space, bshape=(env.num,)))\r\n rew, obs, first = env.observe()\r\n- dataseq.append(obs[""rgb""])\r\n:",,terminal_output +10321,10576940,"TERMINAL",0,0,"\r+ observations_seq.append(obs[""rgb""])\r\n:",,terminal_output +10322,10579051,"TERMINAL",0,0,"\r if first:\r\n:",,terminal_output +10323,10580014,"TERMINAL",0,0,"\r break\r\n:",,terminal_output +10324,10580916,"TERMINAL",0,0,"\r \r\n:\r # --- Save episode ---\r\n:\r- if len(dataseq) >= args.min_episode_length:\r\n:\r- episode_data = np.concatenate(dataseq, axis=0)\r\n:\r- episode_path = output_dir / f""episode_{i}.npy""\r\n:\r- np.save(episode_path, episode_data.astype(np.uint8))\r\n:\r- metadata.append({""path"": str(episode_path), ""length"": len(dataseq)})\r\n:\r- print(f""Episode {i} completed, length: {len(dataseq)}"")\r\n:\r+ if len(observations_seq) >= args.min_episode_length:\r\n:\r+ observations_data = np.concatenate(observations_seq, axis=0).astype(np.uint8)\r\n:\r+ episode_path = output_dir / f""episode_{i}.array_record"" \r\n:\r+\r\n:\r+ # --- Save as ArrayRecord ---\r\n:\r+ writer = ArrayRecordWriter(str(episode_path), ""group_size:1"")\r\n:\r+ record = {""raw_video"": observations_data.tobytes(), ""sequence_length"": len(observations_seq)}\r\n:",,terminal_output +10325,10581266,"TERMINAL",0,0,"\r+ writer.write(pickle.dumps(record))\r\n:\r+ writer.close()\r\n:\r+\r\n:\r+ episode_metadata.append({""path"": str(episode_path), ""length"": len(observations_seq)})\r\n:\r+ print(f""Episode {i} completed, length: {len(observations_seq)}"")\r\n:\r i += 1\r\n:\r else:\r\n:\r- print(f""Episode too short ({len(dataseq)}), resampling..."")\r\n:\r+ print(f""Episode too short ({len(observations_seq)}), resampling..."")\r\n:\r \r\n:",,terminal_output +10326,10581428,"TERMINAL",0,0,"\r # --- Save metadata ---\r\n:\r-np.save(output_dir / ""metadata.npy"", metadata)\r\n:\r-print(f""Dataset generated with {len(metadata)} valid episodes"")\r\n:\r+metadata = {\r\n:\r+ ""env"": ""coinrun"",\r\n:\r+ ""num_episodes"": args.num_episodes,\r\n:",,terminal_output +10327,10581760,"TERMINAL",0,0,"\r+ ""avg_episode_len"": np.mean([ep[""length""] for ep in episode_metadata]),\r\n:",,terminal_output +10328,10582225,"TERMINAL",0,0,"\r+ ""episode_metadata"": episode_metadata,\r\n:",,terminal_output +10329,10582630,"TERMINAL",0,0,"\r+}\r\n:\r+with open(output_dir / ""metadata.json"", ""w"") as f:\r\n:\r+ json.dump(metadata, f)\r\n:\r+\r\n:\r+print(f""Dataset 
generated with {len(episode_metadata)} valid episodes"")\r\n:\rdiff --git a/genie.py b/genie.py\r\n:\rindex f6aaf28..2a7b868 100644\r\n:\r--- a/genie.py\r\n:\r+++ b/genie.py\r\n:\r@@ -224,17 +224,13 @@ class Genie(nnx.Module):\r\n:\r vid_embed_BSNM = self.dynamics.patch_embed(token_idxs_BSN)\r\n:\r mask_token_111M = self.dynamics.mask_token.value\r\n:\r mask_expanded_BSN1 = mask_BSN[..., None]\r\n:",,terminal_output +10330,10583037,"TERMINAL",0,0,"\r- vid_embed_BSNM = jnp.where(\r\n:",,terminal_output +10331,10583577,"TERMINAL",0,0,"\r- mask_expanded_BSN1, mask_token_111M, vid_embed_BSNM\r\n:\r- )\r\n:\r+ vid_embed_BSNM = jnp.where(mask_expanded_BSN1, mask_token_111M, vid_embed_BSNM)\r\n:",,terminal_output +10332,10584085,"TERMINAL",0,0,"\r \r\n:\r # --- Predict transition ---\r\n:\r action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\r\n:\r act_embed_BSm1M = self.dynamics.action_up(action_tokens_BSm1L)\r\n:\r act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\r\n:\r- act_embed_BS1M = jnp.reshape(\r\n:\r- act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\r\n:\r- )\r\n:\r+ act_embed_BS1M = jnp.reshape(act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1]))\r\n:\r vid_embed_BSNM += act_embed_BS1M\r\n:\r unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\r\n:\r step_temp = temperature * (1.0 - unmasked_ratio)\r\n:\r@@ -256,13 +252,8 @@ class Genie(nnx.Module):\r\n:\r \r\n:\r # --- Update mask ---\r\n:\r num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\r\n:",,terminal_output +10333,10593575,"TERMINAL",0,0,"\r- final_token_probs_flat_BP = einops.rearrange(\r\n:",,terminal_output +10334,10594036,"TERMINAL",0,0,"\r- final_token_probs_BSN, ""b s n -> b (s n)""\r\n:",,terminal_output +10335,10594792,"TERMINAL",0,0,"\r- )\r\n:\r- idx_mask_P = (\r\n:\r- jnp.arange(final_token_probs_flat_BP.shape[-1])\r\n:\r- <= N - num_unmasked_tokens\r\n:\r- )\r\n:\r+ final_token_probs_flat_BP = einops.rearrange(final_token_probs_BSN, ""b s n -> b (s n)"")\r\n:\r+ idx_mask_P = jnp.arange(final_token_probs_flat_BP.shape[-1]) <= N - num_unmasked_tokens\r\n:\r sorted_idxs_BP = jnp.argsort(final_token_probs_flat_BP, axis=-1)\r\n:\r mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask_P))\r\n:\r mask_flat_BP = einops.rearrange(mask_BSN, ""b s n -> b (s n)"")\r\n:\r@@ -316,12 +307,12 @@ class Genie(nnx.Module):\r\n:\r return final_frames_BSHWC\r\n:\r \r\n:\r def sample_causal(\r\n:\r- self,\r\n:\r- batch: Dict[str, jax.Array],\r\n:\r- seq_len: int,\r\n:\r- temperature: float = 1,\r\n:\r- sample_argmax: bool = False,\r\n:\r- ) -> jax.Array:\r\n:\r+ self,\r\n:\r+ batch: Dict[str, jax.Array],\r\n:\r+ seq_len: int,\r\n:\r+ temperature: float = 1,\r\n:",,terminal_output +10336,10600327,"TERMINAL",0,0,"\r+ sample_argmax: bool = False,\r\n:",,terminal_output +10337,10600860,"TERMINAL",0,0,"\r+ ) -> jax.Array:\r\n:\r """"""\r\n:",,terminal_output +10338,10601051,"TERMINAL",0,0,"\r Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\r\n:\r \r\n:\r@@ -373,14 +364,9 @@ class Genie(nnx.Module):\r\n:",,terminal_output +10339,10601052,"TERMINAL",0,0,"\r action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\r\n:\r act_embed_BSm1M = dynamics_causal.action_up(action_tokens_BSm1L)\r\n:\r act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\r\n:",,terminal_output +10340,10601256,"TERMINAL",0,0,"\r- act_embed_BS1M = jnp.reshape(\r\n:\r- act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\r\n:\r- )\r\n:\r+ 
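The git diff shown in the terminal above replaces the old np.save episode dump in generate_dataset.py with pickled dicts written through ArrayRecordWriter (one record per episode file, "group_size:1"). As a minimal read-back sketch only: the ArrayRecordReader import mirrors the writer import from the diff but is an assumption, and the 64x64x3 frame shape assumed for procgen coinrun observations is not stated in the recording.

    # Minimal sketch: read back one episode written by the updated generate_dataset.py.
    # Assumptions: ArrayRecordReader lives next to ArrayRecordWriter, and coinrun
    # observations are 64x64 RGB uint8 frames.
    import pickle
    import numpy as np
    from array_record.python.array_record_module import ArrayRecordReader

    reader = ArrayRecordReader("episode_0.array_record")
    record = pickle.loads(reader.read_all()[0])  # one pickled dict per episode file
    frames = np.frombuffer(record["raw_video"], dtype=np.uint8)
    frames = frames.reshape(record["sequence_length"], 64, 64, 3)
    print(frames.shape, frames.dtype)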
act_embed_BS1M = jnp.reshape(act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1]))\r\n:\r vid_embed_BSNp1M = jnp.concatenate([act_embed_BS1M, vid_embed_BSNM], axis=2)\r\n:\r- final_logits_BTNp1V = (\r\n:",,terminal_output +10341,10604728,"TERMINAL",0,0,"\r- dynamics_causal.transformer(vid_embed_BSNp1M, (step_t, step_n))\r\n:",,terminal_output +10342,10605613,"TERMINAL",0,0,"\r- / temperature\r\n:\r- )\r\n:\r+ final_logits_BTNp1V = dynamics_causal.transformer(vid_embed_BSNp1M, (step_t, step_n)) / temperature\r\n:\r final_logits_BV = final_logits_BTNp1V[:, step_t, step_n, :]\r\n:\r \r\n:\r # --- Sample new tokens for final frame ---\r\n:\r@@ -390,9 +376,7 @@ class Genie(nnx.Module):\r\n:\r rng, _rng = jax.random.split(rng)\r\n:\r sampled_token_idxs_B = jax.random.categorical(_rng, final_logits_BV)\r\n:\r # Update next tokens only\r\n:\r- token_idxs_BSN = token_idxs_BSN.at[:, step_t, step_n].set(\r\n:\r- sampled_token_idxs_B\r\n:\r- )\r\n:\r+ token_idxs_BSN = token_idxs_BSN.at[:, step_t, step_n].set(sampled_token_idxs_B)\r\n:",,terminal_output +10343,10607717,"TERMINAL",0,0,"\r \r\n:",,terminal_output +10344,10607945,"TERMINAL",0,0,"\r new_carry = (rng, token_idxs_BSN, action_tokens_EL, step_t)\r\n:",,terminal_output +10345,10608089,"TERMINAL",0,0,"\r return new_carry, None\r\n:",,terminal_output +10346,10608237,"TERMINAL",0,0,"\r@@ -440,7 +424,6 @@ class Genie(nnx.Module):\r\n:",,terminal_output +10347,10608469,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +10348,10680728,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes_filter_dark.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_8_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/holiday/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3412401\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 
\\n --init_lr=0 \\n --darkness_threshold=50 \\n --dyna_type=maskgit \\n --num_latent_actions=100 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskgit-8-node-darkness-filter-$slurm_job_id \\n --tags dynamics maskgit 8-node post-launch-main darkness-filter \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +10349,10682602,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_8_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/holiday/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3412401\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --dyna_type=maskgit \\n --num_latent_actions=100 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskgit-8-node-$slurm_job_id \\n --tags dynamics maskgit 8-node post-launch-main \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +10350,10710183,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",2100,0,"",shellscript,selection_mouse +10351,10768521,"TERMINAL",0,0,"bash",,terminal_focus +10352,10770967,"TERMINAL",0,0,"idling",,terminal_command +10353,10771037,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Fri Sep 5 16:25:40 2025Partition dev_cpuonly:\t 9 nodes 
idle\rPartition cpuonly: 16 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated: 32 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 5 nodes idle\rPartition accelerated-h200:\t 5 nodes idle",,terminal_output +10354,10772156,"TERMINAL",0,0,"1",,terminal_output +10355,10773107,"TERMINAL",0,0,"2",,terminal_output +10356,10774203,"TERMINAL",0,0,"3",,terminal_output +10357,10775182,"TERMINAL",0,0,"4",,terminal_output +10358,10776245,"TERMINAL",0,0,"5",,terminal_output +10359,10777367,"TERMINAL",0,0,"6",,terminal_output +10360,10778293,"TERMINAL",0,0,"7",,terminal_output +10361,10779374,"TERMINAL",0,0,"9",,terminal_output +10362,10780450,"TERMINAL",0,0,"50",,terminal_output +10363,10781414,"TERMINAL",0,0,"1",,terminal_output +10364,10782460,"TERMINAL",0,0,"2",,terminal_output +10365,10783496,"TERMINAL",0,0,"3",,terminal_output +10366,10784531,"TERMINAL",0,0,"4",,terminal_output +10367,10785570,"TERMINAL",0,0,"5",,terminal_output +10368,10786649,"TERMINAL",0,0,"6",,terminal_output +10369,10787658,"TERMINAL",0,0,"7",,terminal_output +10370,10788759,"TERMINAL",0,0,"8",,terminal_output +10371,10789794,"TERMINAL",0,0,"9",,terminal_output +10372,10790817,"TERMINAL",0,0,"6:00",,terminal_output +10373,10791852,"TERMINAL",0,0,"1",,terminal_output +10374,10792836,"TERMINAL",0,0,"2",,terminal_output +10375,10793869,"TERMINAL",0,0,"3",,terminal_output +10376,10794973,"TERMINAL",0,0,"4",,terminal_output +10377,10796024,"TERMINAL",0,0,"5",,terminal_output +10378,10797176,"TERMINAL",0,0,"6",,terminal_output +10379,10798037,"TERMINAL",0,0,"7",,terminal_output +10380,10799196,"TERMINAL",0,0,"8",,terminal_output +10381,10800160,"TERMINAL",0,0,"9",,terminal_output +10382,10801232,"TERMINAL",0,0,"10",,terminal_output +10383,10802186,"TERMINAL",0,0,"1",,terminal_output +10384,10803280,"TERMINAL",0,0,"2",,terminal_output +10385,10804317,"TERMINAL",0,0,"3",,terminal_output +10386,10805376,"TERMINAL",0,0,"4",,terminal_output +10387,10806357,"TERMINAL",0,0,"6",,terminal_output +10388,10807395,"TERMINAL",0,0,"7",,terminal_output +10389,10808429,"TERMINAL",0,0,"8",,terminal_output +10390,10809458,"TERMINAL",0,0,"9",,terminal_output +10391,10810504,"TERMINAL",0,0,"20",,terminal_output +10392,10811547,"TERMINAL",0,0,"1",,terminal_output +10393,10812584,"TERMINAL",0,0,"2",,terminal_output +10394,10813724,"TERMINAL",0,0,"3",,terminal_output +10395,10814750,"TERMINAL",0,0,"4",,terminal_output +10396,10815740,"TERMINAL",0,0,"5",,terminal_output +10397,10816811,"TERMINAL",0,0,"6",,terminal_output +10398,10817784,"TERMINAL",0,0,"7",,terminal_output +10399,10818854,"TERMINAL",0,0,"8",,terminal_output +10400,10819889,"TERMINAL",0,0,"9",,terminal_output +10401,10820915,"TERMINAL",0,0,"30",,terminal_output +10402,10822089,"TERMINAL",0,0,"1",,terminal_output +10403,10822981,"TERMINAL",0,0,"2",,terminal_output +10404,10824271,"TERMINAL",0,0,"37",,terminal_output +10405,10825234,"TERMINAL",0,0,"4",,terminal_output +10406,10826263,"TERMINAL",0,0,"5",,terminal_output +10407,10827343,"TERMINAL",0,0,"6",,terminal_output +10408,10828368,"TERMINAL",0,0,"8",,terminal_output +10409,10829404,"TERMINAL",0,0,"9",,terminal_output +10410,10830409,"TERMINAL",0,0,"40",,terminal_output +10411,10831486,"TERMINAL",0,0,"1",,terminal_output +10412,10832499,"TERMINAL",0,0,"2",,terminal_output +10413,10833567,"TERMINAL",0,0,"3",,terminal_output +10414,10834565,"TERMINAL",0,0,"4",,terminal_output +10415,10835638,"TERMINAL",0,0,"5",,terminal_output 
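The 8-node LAM and dynamics scripts opened above pass --darkness_threshold=50 and tag their runs darkness-filter. The actual filtering logic inside train_lam.py / train_dynamics.py is not visible in this recording; the sketch below is only a hypothetical illustration of what a mean-brightness cutoff of 50 on uint8 frames could look like.

    # Hypothetical darkness filter: drop frames whose mean brightness falls
    # below the value passed as --darkness_threshold (50 in the scripts above).
    import numpy as np

    def is_too_dark(frame: np.ndarray, threshold: float = 50.0) -> bool:
        """frame: (H, W, 3) uint8 RGB image."""
        return float(frame.mean()) < threshold

    if __name__ == "__main__":
        dark = np.full((90, 160, 3), 10, dtype=np.uint8)
        bright = np.full((90, 160, 3), 120, dtype=np.uint8)
        print(is_too_dark(dark), is_too_dark(bright))  # True False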
+10416,10836766,"TERMINAL",0,0,"6",,terminal_output +10417,10837736,"TERMINAL",0,0,"7",,terminal_output +10418,10838539,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",0,0,"",plaintext,tab +10419,10838771,"TERMINAL",0,0,"8",,terminal_output +10420,10839145,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +10421,10839777,"TERMINAL",0,0,"9",,terminal_output +10422,10840805,"TERMINAL",0,0,"50",,terminal_output +10423,10841826,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",2452,0,"",shellscript,selection_mouse +10424,10841918,"TERMINAL",0,0,"1",,terminal_output +10425,10842242,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",2436,16,"wait $child_pid\n",shellscript,selection_mouse +10426,10842243,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",2284,168," --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +10427,10842244,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",1981,471," --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskgit-8-node-$slurm_job_id \\n --tags dynamics maskgit 8-node post-launch-main \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +10428,10842244,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",1725,727,"\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --dyna_type=maskgit \\n --num_latent_actions=100 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskgit-8-node-$slurm_job_id \\n --tags dynamics maskgit 8-node post-launch-main \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +10429,10842244,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",1171,1281,"\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/holiday/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3412401\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --dyna_type=maskgit \\n --num_latent_actions=100 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskgit-8-node-$slurm_job_id 
\\n --tags dynamics maskgit 8-node post-launch-main \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +10430,10842245,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",875,1577,"# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/holiday/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3412401\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --dyna_type=maskgit \\n --num_latent_actions=100 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskgit-8-node-$slurm_job_id \\n --tags dynamics maskgit 8-node post-launch-main \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +10431,10842294,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",2436,16,"wait $child_pid\n",shellscript,selection_command +10432,10842295,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",297,2155,"#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_8_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/holiday/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3412401\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --dyna_type=maskgit \\n --num_latent_actions=100 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskgit-8-node-$slurm_job_id \\n --tags dynamics maskgit 8-node post-launch-main \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +10433,10842335,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",0,2452,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_8_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/holiday/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3412401\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --dyna_type=maskgit \\n --num_latent_actions=100 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskgit-8-node-$slurm_job_id \\n --tags dynamics maskgit 8-node post-launch-main \\n --entity instant-uv \\n --project 
jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,selection_mouse +10434,10842981,"TERMINAL",0,0,"2",,terminal_output +10435,10844006,"TERMINAL",0,0,"3",,terminal_output +10436,10844958,"TERMINAL",0,0,"4",,terminal_output +10437,10845319,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",0,0,"",plaintext,tab +10438,10846003,"TERMINAL",0,0,"5",,terminal_output +10439,10846346,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_8_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/holiday/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3412401\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --dyna_type=maskgit \\n --num_latent_actions=100 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskgit-8-node-$slurm_job_id \\n --tags dynamics maskgit 8-node post-launch-main \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",plaintext,content +10440,10847169,"TERMINAL",0,0,"6",,terminal_output +10441,10848077,"TERMINAL",0,0,"7",,terminal_output +10442,10848594,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +10443,10849045,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",0,0,"",plaintext,tab +10444,10849232,"TERMINAL",0,0,"8",,terminal_output 
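The dev script copied above (record 10439) reads from open_ai_minecraft_arrayrecords_chunked, whereas the batch-size-scaling job earlier in this log reads the unchunked 10fps_160x90 records. How the chunked dataset was produced is not part of this recording; the sketch below is purely a hypothetical illustration of splitting one long episode array into fixed-length chunks before writing them out, with the chunk length chosen arbitrarily.

    # Hypothetical chunking helper: split a (T, H, W, C) episode into
    # fixed-length windows, dropping a trailing remainder shorter than chunk_len.
    import numpy as np

    def chunk_episode(frames: np.ndarray, chunk_len: int = 160) -> list[np.ndarray]:
        n_chunks = frames.shape[0] // chunk_len
        return [frames[i * chunk_len:(i + 1) * chunk_len] for i in range(n_chunks)]

    if __name__ == "__main__":
        episode = np.zeros((1000, 90, 160, 3), dtype=np.uint8)
        chunks = chunk_episode(episode)
        print(len(chunks), chunks[0].shape)  # 6 (160, 90, 160, 3)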
+10445,10849846,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +10446,10850252,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",0,0,"",plaintext,tab +10447,10850472,"TERMINAL",0,0,"9",,terminal_output +10448,10850962,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +10449,10851231,"TERMINAL",0,0,"7:00",,terminal_output +10450,10851378,"TERMINAL",0,0,"bash",,terminal_focus +10451,10851850,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +10452,10852262,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",0,0,"",plaintext,tab +10453,10852393,"TERMINAL",0,0,"1",,terminal_output +10454,10853283,"TERMINAL",0,0,"2",,terminal_output +10455,10854342,"TERMINAL",0,0,"4",,terminal_output +10456,10855388,"TERMINAL",0,0,"5",,terminal_output +10457,10856415,"TERMINAL",0,0,"6",,terminal_output +10458,10857046,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",66,0,"",plaintext,selection_mouse +10459,10857456,"TERMINAL",0,0,"7",,terminal_output +10460,10857919,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",38,0,"",plaintext,selection_mouse +10461,10858452,"TERMINAL",0,0,"8",,terminal_output +10462,10858753,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",37,1,"",plaintext,content +10463,10858796,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",37,0,"2",plaintext,content +10464,10858797,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",38,0,"",plaintext,selection_keyboard +10465,10859488,"TERMINAL",0,0,"9",,terminal_output +10466,10859746,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",37,0,"",plaintext,selection_command +10467,10859956,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",55,0,"",plaintext,selection_command +10468,10860130,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",83,0,"",plaintext,selection_command +10469,10860263,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",107,0,"",plaintext,selection_command +10470,10860416,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",139,0,"",plaintext,selection_command +10471,10860564,"TERMINAL",0,0,"10",,terminal_output +10472,10860612,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",107,0,"",plaintext,selection_command +10473,10860767,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",83,0,"",plaintext,selection_command +10474,10861398,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",82,0,"",plaintext,selection_command +10475,10861600,"TERMINAL",0,0,"1",,terminal_output +10476,10862593,"TERMINAL",0,0,"2",,terminal_output +10477,10863537,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",82,2,"",plaintext,content +10478,10863639,"TERMINAL",0,0,"3",,terminal_output +10479,10864447,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",82,0,"0",plaintext,content +10480,10864448,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",83,0,"",plaintext,selection_keyboard +10481,10864577,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",83,0,"0",plaintext,content 
+10482,10864578,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",84,0,"",plaintext,selection_keyboard +10483,10864665,"TERMINAL",0,0,"4",,terminal_output +10484,10864957,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",85,0,"",plaintext,selection_command +10485,10865159,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",86,0,"",plaintext,selection_command +10486,10865708,"TERMINAL",0,0,"5",,terminal_output +10487,10866354,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",85,1,"",plaintext,content +10488,10866361,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",85,0,"3",plaintext,content +10489,10866361,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",86,0,"",plaintext,selection_keyboard +10490,10866746,"TERMINAL",0,0,"6",,terminal_output +10491,10866841,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",85,0,"",plaintext,selection_command +10492,10866930,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",109,0,"",plaintext,selection_command +10493,10867078,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",141,0,"",plaintext,selection_command +10494,10867221,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",167,0,"",plaintext,selection_command +10495,10867425,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",188,0,"",plaintext,selection_command +10496,10867560,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",315,0,"",plaintext,selection_command +10497,10867720,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",441,0,"",plaintext,selection_command +10498,10867809,"TERMINAL",0,0,"7",,terminal_output +10499,10868024,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",315,0,"",plaintext,selection_command +10500,10868180,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",188,0,"",plaintext,selection_command +10501,10868529,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",315,0,"",plaintext,selection_command +10502,10868809,"TERMINAL",0,0,"8",,terminal_output +10503,10869942,"TERMINAL",0,0,"9",,terminal_output +10504,10870972,"TERMINAL",0,0,"20",,terminal_output +10505,10871914,"TERMINAL",0,0,"1",,terminal_output +10506,10872078,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",188,0,"",plaintext,selection_command +10507,10872269,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",315,0,"",plaintext,selection_command +10508,10872489,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",441,0,"",plaintext,selection_command +10509,10872961,"TERMINAL",0,0,"2",,terminal_output +10510,10873595,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",442,0,"",plaintext,selection_command +10511,10874039,"TERMINAL",0,0,"3",,terminal_output +10512,10874105,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",443,0,"",plaintext,selection_command +10513,10874126,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",444,0,"",plaintext,selection_command 
+10514,10874157,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",445,0,"",plaintext,selection_command +10515,10874204,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",446,0,"",plaintext,selection_command +10516,10874243,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",447,0,"",plaintext,selection_command +10517,10874244,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",448,0,"",plaintext,selection_command +10518,10874290,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",449,0,"",plaintext,selection_command +10519,10874336,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",450,0,"",plaintext,selection_command +10520,10874430,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",451,0,"",plaintext,selection_command +10521,10874945,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",452,0,"",plaintext,selection_command +10522,10874953,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",453,0,"",plaintext,selection_command +10523,10875006,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",454,0,"",plaintext,selection_command +10524,10875047,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",455,0,"",plaintext,selection_command +10525,10875048,"TERMINAL",0,0,"4",,terminal_output +10526,10875086,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",456,0,"",plaintext,selection_command +10527,10875091,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",457,0,"",plaintext,selection_command +10528,10875134,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",458,0,"",plaintext,selection_command +10529,10875176,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",459,0,"",plaintext,selection_command +10530,10875177,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",460,0,"",plaintext,selection_command +10531,10875223,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",461,0,"",plaintext,selection_command +10532,10875263,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",462,0,"",plaintext,selection_command +10533,10875264,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",463,0,"",plaintext,selection_command +10534,10875309,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",464,0,"",plaintext,selection_command +10535,10875349,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",465,0,"",plaintext,selection_command +10536,10875362,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",466,0,"",plaintext,selection_command +10537,10875392,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",467,0,"",plaintext,selection_command +10538,10875411,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",468,0,"",plaintext,selection_command +10539,10875457,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",469,0,"",plaintext,selection_command +10540,10875503,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",470,0,"",plaintext,selection_command 
+10541,10877726,"TERMINAL",0,0,"54",,terminal_output +10542,10878856,"TERMINAL",0,0,"8",,terminal_output +10543,10879394,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",471,0,"",plaintext,selection_command +10544,10879635,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",471,0,"_",plaintext,content +10545,10879636,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",472,0,"",plaintext,selection_keyboard +10546,10879793,"TERMINAL",0,0,"9",,terminal_output +10547,10880166,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",472,0,"c",plaintext,content +10548,10880167,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",473,0,"",plaintext,selection_keyboard +10549,10880296,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",473,0,"h",plaintext,content +10550,10880297,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",474,0,"",plaintext,selection_keyboard +10551,10880470,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",474,0,"u",plaintext,content +10552,10880470,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",475,0,"",plaintext,selection_keyboard +10553,10880592,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",475,0,"n",plaintext,content +10554,10880593,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",476,0,"",plaintext,selection_keyboard +10555,10880725,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",476,0,"k",plaintext,content +10556,10880725,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",477,0,"",plaintext,selection_keyboard +10557,10880838,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",477,0,"e",plaintext,content +10558,10880838,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",478,0,"",plaintext,selection_keyboard +10559,10880839,"TERMINAL",0,0,"30",,terminal_output +10560,10880900,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",478,0,"d",plaintext,content +10561,10880901,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",479,0,"",plaintext,selection_keyboard +10562,10881423,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",478,0,"",plaintext,selection_command +10563,10881858,"TERMINAL",0,0,"1",,terminal_output +10564,10882900,"TERMINAL",0,0,"2",,terminal_output +10565,10883924,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",496,0,"",plaintext,selection_command +10566,10884012,"TERMINAL",0,0,"3",,terminal_output +10567,10884416,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",547,0,"",plaintext,selection_command +10568,10884452,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",549,0,"",plaintext,selection_command +10569,10884500,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",600,0,"",plaintext,selection_command +10570,10884537,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",616,0,"",plaintext,selection_command +10571,10884538,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",673,0,"",plaintext,selection_command 
+10572,10884592,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",756,0,"",plaintext,selection_command +10573,10884626,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",806,0,"",plaintext,selection_command +10574,10884809,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",841,0,"",plaintext,selection_command +10575,10884972,"TERMINAL",0,0,"4",,terminal_output +10576,10885290,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",852,0,"",plaintext,selection_command +10577,10885324,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",854,0,"",plaintext,selection_command +10578,10885358,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",856,0,"",plaintext,selection_command +10579,10885406,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",880,0,"",plaintext,selection_command +10580,10885443,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",882,0,"",plaintext,selection_command +10581,10885452,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",926,0,"",plaintext,selection_command +10582,10885489,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",983,0,"",plaintext,selection_command +10583,10885499,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1021,0,"",plaintext,selection_command +10584,10885541,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1054,0,"",plaintext,selection_command +10585,10885588,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1096,0,"",plaintext,selection_command +10586,10885628,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1101,0,"",plaintext,selection_command +10587,10885629,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1140,0,"",plaintext,selection_command +10588,10885675,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1143,0,"",plaintext,selection_command +10589,10885686,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1145,0,"",plaintext,selection_command +10590,10885732,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1146,0,"",plaintext,selection_command +10591,10885772,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1147,0,"",plaintext,selection_command +10592,10885807,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1170,0,"",plaintext,selection_command +10593,10885822,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1177,0,"",plaintext,selection_command +10594,10885974,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1179,0,"",plaintext,selection_command +10595,10886011,"TERMINAL",0,0,"5",,terminal_output +10596,10886167,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1208,0,"",plaintext,selection_command +10597,10886347,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1238,0,"",plaintext,selection_command +10598,10886572,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1264,0,"",plaintext,selection_command +10599,10887177,"TERMINAL",0,0,"6",,terminal_output 
+10600,10888358,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +10601,10888408,"TERMINAL",0,0,"78",,terminal_output +10602,10889116,"TERMINAL",0,0,"8",,terminal_output +10603,10890144,"TERMINAL",0,0,"9",,terminal_output +10604,10891671,"TERMINAL",0,0,"40",,terminal_output +10605,10892226,"TERMINAL",0,0,"1",,terminal_output +10606,10893094,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",0,0,"",shellscript,tab +10607,10893516,"TERMINAL",0,0,"2",,terminal_output +10608,10894270,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1097,0,"",shellscript,selection_mouse +10609,10894310,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1096,0,"",shellscript,selection_command +10610,10894354,"TERMINAL",0,0,"3",,terminal_output +10611,10895367,"TERMINAL",0,0,"5",,terminal_output +10612,10896445,"TERMINAL",0,0,"6",,terminal_output +10613,10896875,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1385,0,"",shellscript,selection_mouse +10614,10897409,"TERMINAL",0,0,"7",,terminal_output +10615,10897483,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1381,0,"",shellscript,selection_mouse +10616,10897642,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1350,38,"open_ai_minecraft_arrayrecords_chunked",shellscript,selection_mouse +10617,10898522,"TERMINAL",0,0,"8",,terminal_output +10618,10899481,"TERMINAL",0,0,"9",,terminal_output +10619,10900520,"TERMINAL",0,0,"50",,terminal_output +10620,10901603,"TERMINAL",0,0,"1",,terminal_output +10621,10902594,"TERMINAL",0,0,"2",,terminal_output +10622,10903648,"TERMINAL",0,0,"3",,terminal_output +10623,10904665,"TERMINAL",0,0,"4",,terminal_output +10624,10905783,"TERMINAL",0,0,"5",,terminal_output +10625,10906754,"TERMINAL",0,0,"6",,terminal_output +10626,10907775,"TERMINAL",0,0,"7",,terminal_output +10627,10908856,"TERMINAL",0,0,"8",,terminal_output +10628,10909879,"TERMINAL",0,0,"9",,terminal_output +10629,10910903,"TERMINAL",0,0,"8:00",,terminal_output +10630,10912078,"TERMINAL",0,0,"1",,terminal_output +10631,10912968,"TERMINAL",0,0,"2",,terminal_output +10632,10913389,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",38,0,"",shellscript,selection_mouse +10633,10913398,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",37,0,"",shellscript,selection_command +10634,10914008,"TERMINAL",0,0,"3",,terminal_output +10635,10914618,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",38,0,"",shellscript,selection_command +10636,10914776,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",37,1,"",shellscript,content +10637,10915154,"TERMINAL",0,0,"42",,terminal_output +10638,10915360,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",37,0,"8",shellscript,content +10639,10915361,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",38,0,"",shellscript,selection_keyboard +10640,10915605,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",37,0,"",shellscript,selection_command +10641,10916193,"TERMINAL",0,0,"5",,terminal_output +10642,10916454,"TERMINAL",0,0,"watch",,terminal_focus +10643,10917135,"TERMINAL",0,0,"6",,terminal_output +10644,10918215,"TERMINAL",0,0,"7",,terminal_output +10645,10919307,"TERMINAL",0,0,"8",,terminal_output 
+10646,10920371,"TERMINAL",0,0,"9",,terminal_output +10647,10921281,"TERMINAL",0,0,"109",,terminal_output +10648,10921510,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2343,0,"",shellscript,selection_mouse +10649,10921516,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2342,0,"",shellscript,selection_command +10650,10921850,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2343,0,"",shellscript,selection_mouse +10651,10921856,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2342,0,"",shellscript,selection_command +10652,10922030,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2342,1,"\",shellscript,selection_mouse +10653,10922030,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2342,0,"",shellscript,selection_mouse +10654,10922031,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2337,5,"4096 ",shellscript,selection_mouse +10655,10922032,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2343,0,"",shellscript,selection_command +10656,10922084,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2324,19,"dyna_ffn_dim=4096 \",shellscript,selection_mouse +10657,10922174,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2323,20,"-dyna_ffn_dim=4096 \",shellscript,selection_mouse +10658,10922181,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2321,22," --dyna_ffn_dim=4096 \",shellscript,selection_mouse +10659,10922216,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2294,49," --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +10660,10922216,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2293,50," --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +10661,10922272,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2292,51," --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +10662,10922313,"TERMINAL",0,0,"2",,terminal_output +10663,10922361,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2265,78," --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +10664,10922453,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2243,100," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +10665,10923326,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2243,100,"",shellscript,content +10666,10923408,"TERMINAL",0,0,"3",,terminal_output +10667,10924390,"TERMINAL",0,0,"4",,terminal_output +10668,10924996,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2243,1,"",shellscript,content +10669,10925029,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2247,0,"",shellscript,selection_command +10670,10925430,"TERMINAL",0,0,"5",,terminal_output +10671,10925464,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2243,0,"\n",shellscript,content +10672,10925467,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2243,0,"",shellscript,selection_command 
+10673,10925867,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2243,0," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,content +10674,10925871,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2243,0,"",shellscript,selection_command +10675,10926468,"TERMINAL",0,0,"6",,terminal_output +10676,10927385,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2243,49,"",shellscript,content +10677,10927460,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2247,0,"",shellscript,selection_command +10678,10927544,"TERMINAL",0,0,"7",,terminal_output +10679,10927687,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2243,52,"",shellscript,content +10680,10927709,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2247,0,"",shellscript,selection_command +10681,10928142,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2221,0,"",shellscript,selection_command +10682,10928344,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2195,0,"",shellscript,selection_command +10683,10928477,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2141,0,"",shellscript,selection_command +10684,10928567,"TERMINAL",0,0,"8",,terminal_output +10685,10929566,"TERMINAL",0,0,"9",,terminal_output +10686,10930080,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2176,0,"",shellscript,selection_mouse +10687,10930273,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2176,1,"p",shellscript,selection_mouse +10688,10930336,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2176,3,"pos",shellscript,selection_mouse +10689,10930337,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2176,5,"post-",shellscript,selection_mouse +10690,10930337,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2176,6,"post-l",shellscript,selection_mouse +10691,10930338,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2176,7,"post-la",shellscript,selection_mouse +10692,10930373,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2176,8,"post-lau",shellscript,selection_mouse +10693,10930374,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2176,9,"post-laun",shellscript,selection_mouse +10694,10930414,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2176,10,"post-launc",shellscript,selection_mouse +10695,10930467,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2176,11,"post-launch",shellscript,selection_mouse +10696,10930467,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2176,12,"post-launch-",shellscript,selection_mouse +10697,10930505,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2176,13,"post-launch-m",shellscript,selection_mouse +10698,10930534,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2176,14,"post-launch-ma",shellscript,selection_mouse +10699,10930578,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2176,15,"post-launch-mai",shellscript,selection_mouse +10700,10930642,"TERMINAL",0,0,"20",,terminal_output 
+10701,10930782,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2176,16,"post-launch-main",shellscript,selection_mouse +10702,10931077,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2176,16,"",shellscript,content +10703,10931527,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2176,0,"c",shellscript,content +10704,10931528,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2177,0,"",shellscript,selection_keyboard +10705,10931646,"TERMINAL",0,0,"120",,terminal_output +10706,10931684,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2177,0,"u",shellscript,content +10707,10931685,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2178,0,"",shellscript,selection_keyboard +10708,10932122,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2177,1,"",shellscript,content +10709,10932294,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2177,0,"h",shellscript,content +10710,10932295,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2178,0,"",shellscript,selection_keyboard +10711,10932415,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2178,0,"u",shellscript,content +10712,10932416,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2179,0,"",shellscript,selection_keyboard +10713,10932566,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2179,0,"n",shellscript,content +10714,10932567,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2180,0,"",shellscript,selection_keyboard +10715,10932659,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2180,0,"k",shellscript,content +10716,10932660,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2181,0,"",shellscript,selection_keyboard +10717,10932700,"TERMINAL",0,0,"2",,terminal_output +10718,10932836,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2181,0,"e",shellscript,content +10719,10932837,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2182,0,"",shellscript,selection_keyboard +10720,10932883,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2182,0,"d",shellscript,content +10721,10932884,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2183,0,"",shellscript,selection_keyboard +10722,10933730,"TERMINAL",0,0,"3",,terminal_output +10723,10934363,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2116,0,"",shellscript,selection_mouse +10724,10934770,"TERMINAL",0,0,"4",,terminal_output +10725,10935151,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2124,0,"",shellscript,selection_mouse +10726,10935807,"TERMINAL",0,0,"5",,terminal_output +10727,10936579,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2124,0,"c",shellscript,content +10728,10936580,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2125,0,"",shellscript,selection_keyboard +10729,10936676,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2125,0,"u",shellscript,content 
+10730,10936677,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2126,0,"",shellscript,selection_keyboard +10731,10936848,"TERMINAL",0,0,"6",,terminal_output +10732,10937078,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2125,1,"",shellscript,content +10733,10937492,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2124,1,"",shellscript,content +10734,10937750,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2124,0,"-",shellscript,content +10735,10937751,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2125,0,"",shellscript,selection_keyboard +10736,10937890,"TERMINAL",0,0,"7",,terminal_output +10737,10938132,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2125,0,"c",shellscript,content +10738,10938133,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2126,0,"",shellscript,selection_keyboard +10739,10938190,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2126,0,"h",shellscript,content +10740,10938190,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2127,0,"",shellscript,selection_keyboard +10741,10938389,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2127,0,"u",shellscript,content +10742,10938390,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2128,0,"",shellscript,selection_keyboard +10743,10938521,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2128,0,"n",shellscript,content +10744,10938522,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2129,0,"",shellscript,selection_keyboard +10745,10938746,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2129,0,"k",shellscript,content +10746,10938747,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2130,0,"",shellscript,selection_keyboard +10747,10938880,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2130,0,"e",shellscript,content +10748,10938881,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2131,0,"",shellscript,selection_keyboard +10749,10938936,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2131,0,"d",shellscript,content +10750,10938937,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2132,0,"",shellscript,selection_keyboard +10751,10938975,"TERMINAL",0,0,"8",,terminal_output +10752,10939986,"TERMINAL",0,0,"9",,terminal_output +10753,10941017,"TERMINAL",0,0,"30",,terminal_output +10754,10941460,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2132,0,"-",shellscript,content +10755,10941461,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2133,0,"",shellscript,selection_keyboard +10756,10941613,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2133,0,"d",shellscript,content +10757,10941613,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2134,0,"",shellscript,selection_keyboard +10758,10941775,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2134,0,"a",shellscript,content +10759,10941776,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2135,0,"",shellscript,selection_keyboard 
+10760,10941950,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2135,0,"t",shellscript,content +10761,10941951,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2136,0,"",shellscript,selection_keyboard +10762,10942046,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2136,0,"a",shellscript,content +10763,10942047,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2137,0,"",shellscript,selection_keyboard +10764,10942088,"TERMINAL",0,0,"1",,terminal_output +10765,10943099,"TERMINAL",0,0,"2",,terminal_output +10766,10943392,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2008,0,"",shellscript,selection_mouse +10767,10944190,"TERMINAL",0,0,"3",,terminal_output +10768,10944716,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2025,0,"",shellscript,selection_mouse +10769,10945158,"TERMINAL",0,0,"4",,terminal_output +10770,10945254,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2008,0,"",shellscript,selection_mouse +10771,10946231,"TERMINAL",0,0,"5",,terminal_output +10772,10946492,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1980,0,"",shellscript,selection_mouse +10773,10946744,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1979,0,"",shellscript,selection_command +10774,10947244,"TERMINAL",0,0,"6",,terminal_output +10775,10947972,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1950,0,"",shellscript,selection_mouse +10776,10948381,"TERMINAL",0,0,"7",,terminal_output +10777,10948484,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1910,0,"",shellscript,selection_mouse +10778,10948658,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1908,3,"160",shellscript,selection_mouse +10779,10949317,"TERMINAL",0,0,"9",,terminal_output +10780,10949406,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1801,0,"",shellscript,selection_mouse +10781,10949423,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1800,0,"",shellscript,selection_command +10782,10949906,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1845,0,"",shellscript,selection_mouse +10783,10950375,"TERMINAL",0,0,"40",,terminal_output +10784,10951441,"TERMINAL",0,0,"1",,terminal_output +10785,10952483,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",528,0,"",shellscript,selection_mouse +10786,10952499,"TERMINAL",0,0,"2",,terminal_output +10787,10953475,"TERMINAL",0,0,"3",,terminal_output +10788,10954516,"TERMINAL",0,0,"4",,terminal_output +10789,10955555,"TERMINAL",0,0,"5",,terminal_output +10790,10955830,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1799,0,"",shellscript,selection_mouse +10791,10956459,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1769,0,"",shellscript,selection_mouse +10792,10956599,"TERMINAL",0,0,"6",,terminal_output +10793,10957633,"TERMINAL",0,0,"7",,terminal_output +10794,10957869,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1985,0,"",shellscript,selection_mouse +10795,10958726,"TERMINAL",0,0,"8",,terminal_output 
+10796,10958783,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1801,0,"",shellscript,selection_mouse +10797,10958801,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1800,0,"",shellscript,selection_command +10798,10959175,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1775,0,"",shellscript,selection_mouse +10799,10959716,"TERMINAL",0,0,"9",,terminal_output +10800,10960234,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1783,0,"\n",shellscript,content +10801,10960752,"TERMINAL",0,0,"50",,terminal_output +10802,10961279,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1784,0," ",shellscript,content +10803,10961709,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1788,0,"-",shellscript,content +10804,10961710,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1789,0,"",shellscript,selection_keyboard +10805,10961804,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1789,0,"-",shellscript,content +10806,10961805,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1790,0,"",shellscript,selection_keyboard +10807,10961805,"TERMINAL",0,0,"1",,terminal_output +10808,10962146,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1790,0,"n",shellscript,content +10809,10962147,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1791,0,"",shellscript,selection_keyboard +10810,10962279,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1791,0,"u",shellscript,content +10811,10962279,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1792,0,"",shellscript,selection_keyboard +10812,10962422,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1792,0,"m",shellscript,content +10813,10962423,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1793,0,"",shellscript,selection_keyboard +10814,10962692,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1793,0,"_",shellscript,content +10815,10962693,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1794,0,"",shellscript,selection_keyboard +10816,10962874,"TERMINAL",0,0,"2",,terminal_output +10817,10962896,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1794,0,"s",shellscript,content +10818,10962897,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1795,0,"",shellscript,selection_keyboard +10819,10963095,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1795,0,"t",shellscript,content +10820,10963096,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1796,0,"",shellscript,selection_keyboard +10821,10963234,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1796,0,"e",shellscript,content +10822,10963235,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1797,0,"",shellscript,selection_keyboard +10823,10963351,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1797,0,"p",shellscript,content +10824,10963352,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1798,0,"",shellscript,selection_keyboard 
+10825,10963448,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1798,0,"s",shellscript,content +10826,10963449,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1799,0,"",shellscript,selection_keyboard +10827,10963875,"TERMINAL",0,0,"3",,terminal_output +10828,10964239,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1799,0,"=",shellscript,content +10829,10964240,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1800,0,"",shellscript,selection_keyboard +10830,10964571,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1800,0,"1",shellscript,content +10831,10964572,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1801,0,"",shellscript,selection_keyboard +10832,10964674,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1801,0,"0",shellscript,content +10833,10964675,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1802,0,"",shellscript,selection_keyboard +10834,10964869,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1802,0,"0",shellscript,content +10835,10964870,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1803,0,"",shellscript,selection_keyboard +10836,10964926,"TERMINAL",0,0,"4",,terminal_output +10837,10965227,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1803,0,"0",shellscript,content +10838,10965228,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1804,0,"",shellscript,selection_keyboard +10839,10965995,"TERMINAL",0,0,"51",,terminal_output +10840,10966142,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1804,0," ",shellscript,content +10841,10966143,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1805,0,"",shellscript,selection_keyboard +10842,10966276,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1805,0,"\",shellscript,content +10843,10966276,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1806,0,"",shellscript,selection_keyboard +10844,10966778,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1805,0,"",shellscript,selection_command +10845,10966992,"TERMINAL",0,0,"6",,terminal_output +10846,10968151,"TERMINAL",0,0,"7",,terminal_output +10847,10969066,"TERMINAL",0,0,"8",,terminal_output +10848,10969754,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1806,0,"\n ",shellscript,content +10849,10970146,"TERMINAL",0,0,"9",,terminal_output +10850,10970558,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1811,0,"-",shellscript,content +10851,10970559,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1812,0,"",shellscript,selection_keyboard +10852,10970676,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1812,0,"-",shellscript,content +10853,10970676,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1813,0,"",shellscript,selection_keyboard +10854,10970970,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1813,0,"n",shellscript,content +10855,10970971,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1814,0,"",shellscript,selection_keyboard 
+10856,10971137,"TERMINAL",0,0,"9:00",,terminal_output +10857,10971309,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1814,0,"u",shellscript,content +10858,10971310,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1815,0,"",shellscript,selection_keyboard +10859,10971426,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1815,0,"m",shellscript,content +10860,10971426,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1816,0,"",shellscript,selection_keyboard +10861,10971703,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1816,0,"_",shellscript,content +10862,10971703,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1817,0,"",shellscript,selection_keyboard +10863,10972175,"TERMINAL",0,0,"1",,terminal_output +10864,10972307,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1817,0,"w",shellscript,content +10865,10972308,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1818,0,"",shellscript,selection_keyboard +10866,10972477,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1818,0,"a",shellscript,content +10867,10972477,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1819,0,"",shellscript,selection_keyboard +10868,10972799,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1819,0,"r",shellscript,content +10869,10972800,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1820,0,"",shellscript,selection_keyboard +10870,10972926,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1820,0,"m",shellscript,content +10871,10972926,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1821,0,"",shellscript,selection_keyboard +10872,10973144,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1821,0,"u",shellscript,content +10873,10973144,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1822,0,"",shellscript,selection_keyboard +10874,10973270,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1822,0,"p",shellscript,content +10875,10973271,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1823,0,"",shellscript,selection_keyboard +10876,10973271,"TERMINAL",0,0,"2",,terminal_output +10877,10973667,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1823,0,"-",shellscript,content +10878,10973668,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1824,0,"",shellscript,selection_keyboard +10879,10973926,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1824,0,"s",shellscript,content +10880,10973926,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1825,0,"",shellscript,selection_keyboard +10881,10974274,"TERMINAL",0,0,"3",,terminal_output +10882,10974634,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1824,1,"",shellscript,content +10883,10975294,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1823,1,"",shellscript,content +10884,10975297,"TERMINAL",0,0,"4",,terminal_output +10885,10975636,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1823,0,"_",shellscript,content 
+10886,10975637,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1824,0,"",shellscript,selection_keyboard +10887,10976090,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1824,0,"s",shellscript,content +10888,10976090,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1825,0,"",shellscript,selection_keyboard +10889,10976337,"TERMINAL",0,0,"6",,terminal_output +10890,10976358,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1825,0,"t",shellscript,content +10891,10976359,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1826,0,"",shellscript,selection_keyboard +10892,10976573,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1826,0,"e",shellscript,content +10893,10976574,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1827,0,"",shellscript,selection_keyboard +10894,10976716,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1827,0,"p",shellscript,content +10895,10976716,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1828,0,"",shellscript,selection_keyboard +10896,10976808,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1828,0,"s",shellscript,content +10897,10976808,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1829,0,"",shellscript,selection_keyboard +10898,10977462,"TERMINAL",0,0,"7",,terminal_output +10899,10977654,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1829,0,"=",shellscript,content +10900,10977654,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1830,0,"",shellscript,selection_keyboard +10901,10977818,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1830,0,"0",shellscript,content +10902,10977818,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1831,0,"",shellscript,selection_keyboard +10903,10978250,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1831,0," ",shellscript,content +10904,10978250,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1832,0,"",shellscript,selection_keyboard +10905,10978415,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1832,0,"\",shellscript,content +10906,10978416,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1833,0,"",shellscript,selection_keyboard +10907,10978455,"TERMINAL",0,0,"8",,terminal_output +10908,10979491,"TERMINAL",0,0,"9",,terminal_output +10909,10980616,"TERMINAL",0,0,"10",,terminal_output +10910,10981440,"slurm/dev/mihir/horeka/train_lam.sh",0,0,"#!/usr/bin/env bash\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\n\ntf_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=""debug""\nslurm_job_id=""0000""\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\npython train_lam.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=5e-7 \\n --max_lr=5e-6 \\n --warmup_steps=125 \\n --log_image_interval=3 \\n --log \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,tab +10911,10981772,"TERMINAL",0,0,"1",,terminal_output 
+10912,10982576,"TERMINAL",0,0,"2",,terminal_output +10913,10983624,"TERMINAL",0,0,"3",,terminal_output +10914,10984900,"TERMINAL",0,0,"4",,terminal_output +10915,10985684,"TERMINAL",0,0,"5",,terminal_output +10916,10986776,"TERMINAL",0,0,"6",,terminal_output +10917,10987764,"TERMINAL",0,0,"7",,terminal_output +10918,10988832,"TERMINAL",0,0,"8",,terminal_output +10919,10989965,"TERMINAL",0,0,"9",,terminal_output +10920,10990976,"TERMINAL",0,0,"20",,terminal_output +10921,10992020,"TERMINAL",0,0,"1",,terminal_output +10922,10992967,"TERMINAL",0,0,"2",,terminal_output +10923,10994052,"TERMINAL",0,0,"3",,terminal_output +10924,10995072,"TERMINAL",0,0,"4",,terminal_output +10925,10996082,"TERMINAL",0,0,"5",,terminal_output +10926,10997116,"TERMINAL",0,0,"6",,terminal_output +10927,10998250,"TERMINAL",0,0,"7",,terminal_output +10928,11000503,"TERMINAL",0,0,"814",,terminal_output +10929,11002047,"TERMINAL",0,0,"310",,terminal_output +10930,11003075,"TERMINAL",0,0,"2",,terminal_output +10931,11004105,"TERMINAL",0,0,"3",,terminal_output +10932,11005263,"TERMINAL",0,0,"4",,terminal_output +10933,11006188,"TERMINAL",0,0,"5",,terminal_output +10934,11007261,"TERMINAL",0,0,"6",,terminal_output +10935,11008280,"TERMINAL",0,0,"7",,terminal_output +10936,11008790,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=00:20:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=10 \\n --log_checkpoint_interval=100 \\n --log \\n --name=coinrun-lam-dev-$slurm_job_id \\n --tags lam coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 200 \\n --data_dir $array_records_dir_train\n\n",shellscript,tab +10937,11009316,"TERMINAL",0,0,"8",,terminal_output +10938,11010334,"TERMINAL",0,0,"40",,terminal_output +10939,11011374,"TERMINAL",0,0,"1",,terminal_output +10940,11012474,"TERMINAL",0,0,"2",,terminal_output +10941,11013454,"TERMINAL",0,0,"3",,terminal_output +10942,11014496,"TERMINAL",0,0,"4",,terminal_output +10943,11014555,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1537,0,"",shellscript,selection_mouse +10944,11014596,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1536,0,"",shellscript,selection_command 
+10945,11014707,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1536,1,"\",shellscript,selection_mouse +10946,11014708,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1536,0,"",shellscript,selection_mouse +10947,11014708,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1534,2,"0 ",shellscript,selection_mouse +10948,11014708,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1529,7,"teps 0 ",shellscript,selection_mouse +10949,11014762,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1537,0,"",shellscript,selection_command +10950,11014763,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1526,11,"y_steps 0 \",shellscript,selection_mouse +10951,11014763,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1524,13,"cay_steps 0 \",shellscript,selection_mouse +10952,11014763,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1523,14,"ecay_steps 0 \",shellscript,selection_mouse +10953,11014959,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1522,15,"decay_steps 0 \",shellscript,selection_mouse +10954,11014959,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1520,17,"d_decay_steps 0 \",shellscript,selection_mouse +10955,11014959,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1519,18,"sd_decay_steps 0 \",shellscript,selection_mouse +10956,11014959,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1518,19,"wsd_decay_steps 0 \",shellscript,selection_mouse +10957,11014959,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1517,20,"-wsd_decay_steps 0 \",shellscript,selection_mouse +10958,11014959,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1516,21,"--wsd_decay_steps 0 \",shellscript,selection_mouse +10959,11014959,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1515,22," --wsd_decay_steps 0 \",shellscript,selection_mouse +10960,11014959,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1514,23," --wsd_decay_steps 0 \",shellscript,selection_mouse +10961,11014959,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1513,24," --wsd_decay_steps 0 \",shellscript,selection_mouse +10962,11014960,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1512,25," --wsd_decay_steps 0 \",shellscript,selection_mouse +10963,11015008,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",1489,48," --warmup_steps 0 \\n --wsd_decay_steps 0 \",shellscript,selection_mouse +10964,11015534,"TERMINAL",0,0,"5",,terminal_output +10965,11016599,"TERMINAL",0,0,"6",,terminal_output +10966,11017636,"TERMINAL",0,0,"7",,terminal_output +10967,11018257,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",0,0,"",shellscript,tab +10968,11018709,"TERMINAL",0,0,"8",,terminal_output +10969,11019108,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",2270,0,"",shellscript,selection_mouse +10970,11019700,"TERMINAL",0,0,"9",,terminal_output +10971,11020040,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1826,0,"",shellscript,selection_mouse +10972,11020733,"TERMINAL",0,0,"50",,terminal_output +10973,11021163,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1825,0,"",shellscript,selection_command +10974,11021370,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1833,0,"\n ",shellscript,content +10975,11021777,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1838,0," --warmup_steps 0 \\n --wsd_decay_steps 0 
\",shellscript,content +10976,11021820,"TERMINAL",0,0,"1",,terminal_output +10977,11022807,"TERMINAL",0,0,"2",,terminal_output +10978,11023448,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1842,0,"",shellscript,selection_mouse +10979,11023772,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1838,4,"",shellscript,content +10980,11023851,"TERMINAL",0,0,"3",,terminal_output +10981,11024897,"TERMINAL",0,0,"4",,terminal_output +10982,11026001,"TERMINAL",0,0,"5",,terminal_output +10983,11027025,"TERMINAL",0,0,"6",,terminal_output +10984,11027265,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1830,0,"",shellscript,selection_mouse +10985,11027579,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1829,1,"",shellscript,content +10986,11027706,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1829,0," ",shellscript,content +10987,11027707,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1830,0,"",shellscript,selection_keyboard +10988,11028008,"TERMINAL",0,0,"7",,terminal_output +10989,11028134,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1806,0,"",shellscript,selection_command +10990,11028823,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1804,0,"",shellscript,selection_command +10991,11029041,"TERMINAL",0,0,"8",,terminal_output +10992,11029376,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1799,1,"",shellscript,content +10993,11029470,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1799,0," ",shellscript,content +10994,11029471,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1800,0,"",shellscript,selection_keyboard +10995,11030091,"TERMINAL",0,0,"9",,terminal_output +10996,11030250,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1799,0,"",shellscript,selection_command +10997,11031106,"TERMINAL",0,0,"30:00",,terminal_output +10998,11032252,"TERMINAL",0,0,"1",,terminal_output +10999,11033272,"TERMINAL",0,0,"2",,terminal_output +11000,11034333,"TERMINAL",0,0,"3",,terminal_output +11001,11035244,"TERMINAL",0,0,"4",,terminal_output +11002,11036277,"TERMINAL",0,0,"5",,terminal_output +11003,11037347,"TERMINAL",0,0,"7",,terminal_output +11004,11038400,"TERMINAL",0,0,"8",,terminal_output +11005,11039386,"TERMINAL",0,0,"9",,terminal_output +11006,11040549,"TERMINAL",0,0,"10",,terminal_output +11007,11041490,"TERMINAL",0,0,"1",,terminal_output +11008,11042508,"TERMINAL",0,0,"2",,terminal_output +11009,11043572,"TERMINAL",0,0,"3",,terminal_output +11010,11044576,"TERMINAL",0,0,"4",,terminal_output +11011,11045608,"TERMINAL",0,0,"5",,terminal_output +11012,11046686,"TERMINAL",0,0,"6",,terminal_output +11013,11047706,"TERMINAL",0,0,"7",,terminal_output +11014,11048715,"TERMINAL",0,0,"8",,terminal_output +11015,11049864,"TERMINAL",0,0,"9",,terminal_output +11016,11050881,"TERMINAL",0,0,"20",,terminal_output +11017,11051916,"TERMINAL",0,0,"1",,terminal_output +11018,11052890,"TERMINAL",0,0,"2",,terminal_output +11019,11053891,"TERMINAL",0,0,"3",,terminal_output +11020,11054990,"TERMINAL",0,0,"4",,terminal_output +11021,11056017,"TERMINAL",0,0,"5",,terminal_output +11022,11057008,"TERMINAL",0,0,"6",,terminal_output +11023,11058059,"TERMINAL",0,0,"7",,terminal_output +11024,11059190,"TERMINAL",0,0,"8",,terminal_output 
+11025,11059358,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",495,0,"",shellscript,selection_mouse +11026,11060121,"TERMINAL",0,0,"9",,terminal_output +11027,11060628,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",497,0,"\n",shellscript,content +11028,11061238,"TERMINAL",0,0,"30",,terminal_output +11029,11061842,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",498,0,"#",shellscript,content +11030,11061843,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",499,0,"",shellscript,selection_keyboard +11031,11062134,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",499,0,"S",shellscript,content +11032,11062135,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",500,0,"",shellscript,selection_keyboard +11033,11062225,"TERMINAL",0,0,"1",,terminal_output +11034,11062309,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",500,0,"B",shellscript,content +11035,11062310,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",501,0,"",shellscript,selection_keyboard +11036,11062406,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",501,0,"A",shellscript,content +11037,11062407,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",502,0,"",shellscript,selection_keyboard +11038,11062511,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",502,0,"T",shellscript,content +11039,11062512,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",503,0,"",shellscript,selection_keyboard +11040,11062691,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",503,0,"C",shellscript,content +11041,11062692,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",504,0,"",shellscript,selection_keyboard +11042,11062800,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",504,0,"h",shellscript,content +11043,11062800,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",505,0,"",shellscript,selection_keyboard +11044,11063275,"TERMINAL",0,0,"2",,terminal_output +11045,11063521,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",504,1,"",shellscript,content +11046,11063886,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",504,0,"H",shellscript,content +11047,11063886,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",505,0,"",shellscript,selection_keyboard +11048,11064216,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",505,0," ",shellscript,content +11049,11064216,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",506,0,"",shellscript,selection_keyboard +11050,11064299,"TERMINAL",0,0,"3",,terminal_output +11051,11064340,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",506,0,"-",shellscript,content +11052,11064341,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",507,0,"",shellscript,selection_keyboard +11053,11064647,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",506,1,"",shellscript,content +11054,11064758,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",505,1,"",shellscript,content 
+11055,11065204,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",505,0," ",shellscript,content +11056,11065205,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",506,0,"",shellscript,selection_keyboard +11057,11065288,"TERMINAL",0,0,"4",,terminal_output +11058,11065357,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",506,0,"-",shellscript,content +11059,11065357,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",507,0,"",shellscript,selection_keyboard +11060,11065501,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",507,0,"-",shellscript,content +11061,11065502,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",508,0,"",shellscript,selection_keyboard +11062,11065909,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",508,0,"r",shellscript,content +11063,11065910,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",509,0,"",shellscript,selection_keyboard +11064,11066093,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",509,0,"e",shellscript,content +11065,11066094,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",510,0,"",shellscript,selection_keyboard +11066,11066272,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",510,0,"s",shellscript,content +11067,11066273,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",511,0,"",shellscript,selection_keyboard +11068,11066332,"TERMINAL",0,0,"6",,terminal_output +11069,11066436,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",511,0,"e",shellscript,content +11070,11066437,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",512,0,"",shellscript,selection_keyboard +11071,11066521,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",512,0,"r",shellscript,content +11072,11066521,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",513,0,"",shellscript,selection_keyboard +11073,11066655,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",513,0,"v",shellscript,content +11074,11066656,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",514,0,"",shellscript,selection_keyboard +11075,11066782,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",514,0,"a",shellscript,content +11076,11066783,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",515,0,"",shellscript,selection_keyboard +11077,11066973,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",515,0,"t",shellscript,content +11078,11066974,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",516,0,"",shellscript,selection_keyboard +11079,11067126,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",516,0,"o",shellscript,content +11080,11067126,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",517,0,"",shellscript,selection_keyboard +11081,11067371,"TERMINAL",0,0,"7",,terminal_output +11082,11067527,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",516,1,"",shellscript,content +11083,11067679,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",516,0,"i",shellscript,content 
+11084,11067679,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",517,0,"",shellscript,selection_keyboard +11085,11067774,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",517,0,"o",shellscript,content +11086,11067774,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",518,0,"",shellscript,selection_keyboard +11087,11067906,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",518,0,"n",shellscript,content +11088,11067907,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",519,0,"",shellscript,selection_keyboard +11089,11068396,"TERMINAL",0,0,"8",,terminal_output +11090,11068652,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",519,0,"=",shellscript,content +11091,11068652,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",520,0,"",shellscript,selection_keyboard +11092,11069009,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",520,0,"l",shellscript,content +11093,11069009,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",521,0,"",shellscript,selection_keyboard +11094,11069131,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",521,0,"l",shellscript,content +11095,11069132,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",522,0,"",shellscript,selection_keyboard +11096,11069304,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",522,0,"m",shellscript,content +11097,11069305,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",523,0,"",shellscript,selection_keyboard +11098,11069436,"TERMINAL",0,0,"9",,terminal_output +11099,11069624,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",523,0,"t",shellscript,content +11100,11069625,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",524,0,"",shellscript,selection_keyboard +11101,11069671,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",524,0,"u",shellscript,content +11102,11069672,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",525,0,"",shellscript,selection_keyboard +11103,11069760,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",525,0,"m",shellscript,content +11104,11069761,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",526,0,"",shellscript,selection_keyboard +11105,11070501,"TERMINAL",0,0,"40",,terminal_output +11106,11071587,"TERMINAL",0,0,"1",,terminal_output +11107,11072544,"TERMINAL",0,0,"2",,terminal_output +11108,11073693,"TERMINAL",0,0,"3",,terminal_output +11109,11074664,"TERMINAL",0,0,"4",,terminal_output +11110,11075653,"TERMINAL",0,0,"5",,terminal_output +11111,11076240,"TERMINAL",0,0,"bash",,terminal_focus +11112,11076718,"TERMINAL",0,0,"6",,terminal_output +11113,11077746,"TERMINAL",0,0,"7",,terminal_output +11114,11078760,"TERMINAL",0,0,"8",,terminal_output +11115,11079863,"TERMINAL",0,0,"9",,terminal_output +11116,11080430,"TERMINAL",0,0,"sbatch slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",,terminal_command +11117,11080519,"TERMINAL",0,0,"]633;CSubmitted batch job 3469457\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +11118,11080894,"TERMINAL",0,0,"50",,terminal_output +11119,11081880,"TERMINAL",0,0,"124",,terminal_output 
+11120,11082900,"TERMINAL",0,0,"2",,terminal_output +11121,11084020,"TERMINAL",0,0,"3",,terminal_output +11122,11085076,"TERMINAL",0,0,"4",,terminal_output +11123,11086061,"TERMINAL",0,0,"5",,terminal_output +11124,11087158,"TERMINAL",0,0,"6",,terminal_output +11125,11088115,"TERMINAL",0,0,"7",,terminal_output +11126,11089154,"TERMINAL",0,0,"8",,terminal_output +11127,11090194,"TERMINAL",0,0,"9",,terminal_output +11128,11090309,"TERMINAL",0,0,"logs",,terminal_command +11129,11090351,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir",,terminal_output +11130,11091225,"TERMINAL",0,0,"1:00",,terminal_output +11131,11092277,"TERMINAL",0,0,"1",,terminal_output +11132,11093469,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",0,0,"",shellscript,tab +11133,11093668,"TERMINAL",0,0,"2",,terminal_output +11134,11094390,"TERMINAL",0,0,"4",,terminal_output +11135,11095406,"TERMINAL",0,0,"5",,terminal_output +11136,11096475,"TERMINAL",0,0,"6",,terminal_output +11137,11097478,"TERMINAL",0,0,"7",,terminal_output +11138,11098476,"TERMINAL",0,0,"8",,terminal_output +11139,11099510,"TERMINAL",0,0,"9",,terminal_output +11140,11100549,"TERMINAL",0,0,"1010",,terminal_output +11141,11101577,"TERMINAL",0,0,"1",,terminal_output +11142,11102629,"TERMINAL",0,0,"2",,terminal_output +11143,11103737,"TERMINAL",0,0,"3",,terminal_output +11144,11104683,"TERMINAL",0,0,"4",,terminal_output +11145,11105725,"TERMINAL",0,0,"5",,terminal_output +11146,11106758,"TERMINAL",0,0,"6",,terminal_output +11147,11107790,"TERMINAL",0,0,"7",,terminal_output +11148,11108836,"TERMINAL",0,0,"8",,terminal_output +11149,11109905,"TERMINAL",0,0,"9",,terminal_output +11150,11110902,"TERMINAL",0,0,"20",,terminal_output +11151,11111019,"TERMINAL",0,0,"cd maskgit/dynamics-cotraining/",,terminal_command +11152,11111089,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining",,terminal_output +11153,11111324,"TERMINAL",0,0,"ls",,terminal_command +11154,11111409,"TERMINAL",0,0,"]633;Ctrain_dynamics_maskgit_8_node_3412350.log train_dynamics_maskgit_8_node_3417226.log train_dynamics_maskgit_8_node_3423234.log train_dynamics_maskgit_8_node_chunked_3469457.log\r\ntrain_dynamics_maskgit_8_node_3412354.log train_dynamics_maskgit_8_node_3418833.log train_dynamics_maskgit_8_node_3423250.log\r\ntrain_dynamics_maskgit_8_node_3417225.log train_dynamics_maskgit_8_node_3418834.log train_dynamics_maskgit_8_node_3429868.log\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining",,terminal_output +11155,11112018,"TERMINAL",0,0,"1",,terminal_output +11156,11112975,"TERMINAL",0,0,"2",,terminal_output +11157,11114001,"TERMINAL",0,0,"3",,terminal_output +11158,11115246,"TERMINAL",0,0,"4",,terminal_output +11159,11116488,"TERMINAL",0,0,"5",,terminal_output +11160,11116808,"TERMINAL",0,0,"tail -f train_dynamics_maskgit_8_node_chunked_3469457.log",,terminal_command +11161,11116869,"TERMINAL",0,0,"]633;CSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn[0703,0706-0707,0711-0715]\r\nGpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\n",,terminal_output +11162,11117259,"TERMINAL",0,0,"6",,terminal_output +11163,11118146,"TERMINAL",0,0,"7",,terminal_output 
+11164,11119182,"TERMINAL",0,0,"8",,terminal_output +11165,11120225,"TERMINAL",0,0,"9",,terminal_output +11166,11121272,"TERMINAL",0,0,"30",,terminal_output +11167,11121665,"TERMINAL",0,0,"╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized 
options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run 
train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\nsrun: error: hkn0713: tasks 20-23: Exited with exit code 2\r\nsrun: error: hkn0714: tasks 24-27: Exited with exit code 2\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ 
Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n",,terminal_output +11168,11122661,"TERMINAL",0,0,"srun: error: hkn0715: tasks 28-31: Exited with exit code 2\r\nsrun: error: hkn0707: tasks 8-10: Exited with exit code 2\r\nsrun: error: hkn0707: task 11: Exited with exit code 2\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --num-warmup-steps │\r\n│ ─────────────────────────────────────────────── │\r\n│ Perhaps you meant: │\r\n│ --warmup-steps INT │\r\n│ Optimization (default: 5000) │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\nsrun: error: hkn0703: tasks 0-3: Exited with exit code 2\r\nsrun: error: hkn0706: tasks 4-7: Exited with exit code 2\r\nsrun: error: hkn0712: tasks 16-19: Exited with exit code 2\r\nsrun: error: hkn0711: tasks 12-15: Exited with exit code 2\r\n",,terminal_output +11169,11124100,"TERMINAL",0,0,"1",,terminal_output +11170,11125266,"TERMINAL",0,0,"4 832",,terminal_output +11171,11126360,"TERMINAL",0,0,"5",,terminal_output +11172,11127341,"TERMINAL",0,0,"7",,terminal_output 
+11173,11128384,"TERMINAL",0,0,"8",,terminal_output +11174,11129417,"TERMINAL",0,0,"9",,terminal_output +11175,11130450,"TERMINAL",0,0,"40",,terminal_output +11176,11131491,"TERMINAL",0,0,"1",,terminal_output +11177,11132525,"TERMINAL",0,0,"2",,terminal_output +11178,11133332,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +11179,11133627,"TERMINAL",0,0,"3",,terminal_output +11180,11134604,"TERMINAL",0,0,"4",,terminal_output +11181,11134708,"TERMINAL",0,0,"\r\n============================= JOB FEEDBACK =============================\r\n\r\nJob ID: 3469457\r\nCluster: hk\r\nUser/Group: tum_cte0515/hk-project-p0023960\r\nAccount: hk-project-p0023960\r\nState: FAILED (exit code 2)\r\nPartition: accelerated\r\nNodes: 8\r\nCores per node: 24\r\nNodelist: hkn[0703,0706-0707,0711-0715]\r\nCPU Utilized: 00:01:33\r\nCPU Efficiency: 1.15% of 02:14:24 core-walltime\r\nJob Wall-clock time: 00:00:42\r\nStarttime: Fri Sep 5 16:30:51 2025\r\nEndtime: Fri Sep 5 16:31:33 2025\r\nMemory Utilized: 4.68 GB (estimated maximum)\r\nMemory Efficiency: 0.00% of 0.00 MB (0.00 MB/node)\r\nEnergy Consumed: 990503 Joule / 275.139722222222 Watthours\r\nAverage node power draw: 23583.4047619048 Watt\r\n",,terminal_output +11182,11135201,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",0,0,"",shellscript,tab +11183,11135736,"TERMINAL",0,0,"5",,terminal_output +11184,11136704,"TERMINAL",0,0,"6",,terminal_output +11185,11137750,"TERMINAL",0,0,"7",,terminal_output +11186,11138751,"TERMINAL",0,0,"8",,terminal_output +11187,11139070,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1842,0,"",shellscript,selection_mouse +11188,11139263,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1842,1,"n",shellscript,selection_mouse +11189,11139328,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1842,2,"nu",shellscript,selection_mouse +11190,11139452,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1842,3,"num",shellscript,selection_mouse +11191,11139789,"TERMINAL",0,0,"9",,terminal_output +11192,11140510,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1842,3,"",shellscript,content +11193,11140840,"TERMINAL",0,0,"50",,terminal_output +11194,11141033,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1842,1,"",shellscript,content +11195,11141925,"TERMINAL",0,0,"1",,terminal_output +11196,11142233,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1841,0,"",shellscript,selection_command +11197,11142713,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1836,23,"",shellscript,content +11198,11142758,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",1840,0,"",shellscript,selection_command +11199,11142911,"TERMINAL",0,0,"2",,terminal_output +11200,11143958,"TERMINAL",0,0,"3",,terminal_output +11201,11145010,"TERMINAL",0,0,"4",,terminal_output +11202,11145994,"TERMINAL",0,0,"^C\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining",,terminal_output +11203,11146071,"TERMINAL",0,0,"5",,terminal_output +11204,11147081,"TERMINAL",0,0,"6",,terminal_output +11205,11147187,"TERMINAL",0,0,"bash",,terminal_focus +11206,11148172,"TERMINAL",0,0,"7",,terminal_output +11207,11149204,"TERMINAL",0,0,"8",,terminal_output 
+11208,11150203,"TERMINAL",0,0,"9",,terminal_output +11209,11150687,"TERMINAL",0,0,"sbatch train_dynamics_maskgit_8_node_chunked_3469457.log",,terminal_command +11210,11150715,"TERMINAL",0,0,"]633;Csbatch: error: Unable to open file train_dynamics_maskgit_8_node_chunked_3469457.log\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +11211,11151246,"TERMINAL",0,0,"2:00",,terminal_output +11212,11152348,"TERMINAL",0,0,"1",,terminal_output +11213,11153388,"TERMINAL",0,0,"39",,terminal_output +11214,11154376,"TERMINAL",0,0,"4",,terminal_output +11215,11155402,"TERMINAL",0,0,"5",,terminal_output +11216,11156459,"TERMINAL",0,0,"6",,terminal_output +11217,11157489,"TERMINAL",0,0,"7",,terminal_output +11218,11157862,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",0,0,"",shellscript,tab +11219,11158524,"TERMINAL",0,0,"8",,terminal_output +11220,11159559,"TERMINAL",0,0,"910",,terminal_output +11221,11160681,"TERMINAL",0,0,"10",,terminal_output +11222,11161639,"TERMINAL",0,0,"1",,terminal_output +11223,11162989,"TERMINAL",0,0,"2",,terminal_output +11224,11163756,"TERMINAL",0,0,"3",,terminal_output +11225,11164755,"TERMINAL",0,0,"4",,terminal_output +11226,11165467,"TERMINAL",0,0,"sbatch slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",,terminal_command +11227,11165517,"TERMINAL",0,0,"]633;CSubmitted batch job 3469458\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +11228,11165796,"TERMINAL",0,0,"5",,terminal_output +11229,11166896,"TERMINAL",0,0,"624",,terminal_output +11230,11167583,"TERMINAL",0,0,"bash",,terminal_focus +11231,11167867,"TERMINAL",0,0,"7",,terminal_output +11232,11168364,"TERMINAL",0,0,"ls",,terminal_command +11233,11168416,"TERMINAL",0,0,"]633;Ctrain_dynamics_maskgit_8_node_3412350.log train_dynamics_maskgit_8_node_3417226.log train_dynamics_maskgit_8_node_3423234.log train_dynamics_maskgit_8_node_chunked_3469457.log\r\ntrain_dynamics_maskgit_8_node_3412354.log train_dynamics_maskgit_8_node_3418833.log train_dynamics_maskgit_8_node_3423250.log\r\ntrain_dynamics_maskgit_8_node_3417225.log train_dynamics_maskgit_8_node_3418834.log train_dynamics_maskgit_8_node_3429868.log\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining",,terminal_output +11234,11168898,"TERMINAL",0,0,"8",,terminal_output +11235,11169941,"TERMINAL",0,0,"9",,terminal_output +11236,11170983,"TERMINAL",0,0,"20",,terminal_output +11237,11172019,"TERMINAL",0,0,"1",,terminal_output +11238,11173058,"TERMINAL",0,0,"2",,terminal_output +11239,11173585,"TERMINAL",0,0,"rm train_dynamics_maskgit_8_node_chunked_3469457.log",,terminal_command +11240,11173624,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining",,terminal_output +11241,11174141,"TERMINAL",0,0,"3",,terminal_output +11242,11174398,"TERMINAL",0,0,"ls",,terminal_command +11243,11174428,"TERMINAL",0,0,"]633;Ctrain_dynamics_maskgit_8_node_3412350.log train_dynamics_maskgit_8_node_3417226.log train_dynamics_maskgit_8_node_3423234.log\r\ntrain_dynamics_maskgit_8_node_3412354.log train_dynamics_maskgit_8_node_3418833.log train_dynamics_maskgit_8_node_3423250.log\r\ntrain_dynamics_maskgit_8_node_3417225.log train_dynamics_maskgit_8_node_3418834.log 
train_dynamics_maskgit_8_node_3429868.log\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining",,terminal_output +11244,11175131,"TERMINAL",0,0,"4",,terminal_output +11245,11176169,"TERMINAL",0,0,"5",,terminal_output +11246,11177203,"TERMINAL",0,0,"6",,terminal_output +11247,11178337,"TERMINAL",0,0,"7",,terminal_output +11248,11179531,"TERMINAL",0,0,"8",,terminal_output +11249,11180510,"TERMINAL",0,0,"30",,terminal_output +11250,11181509,"TERMINAL",0,0,"1",,terminal_output +11251,11182564,"TERMINAL",0,0,"2",,terminal_output +11252,11183686,"TERMINAL",0,0,"3",,terminal_output +11253,11184502,"TERMINAL",0,0,"4",,terminal_output +11254,11185542,"TERMINAL",0,0,"5",,terminal_output +11255,11186574,"TERMINAL",0,0,"6",,terminal_output +11256,11187568,"TERMINAL",0,0,"7",,terminal_output +11257,11188641,"TERMINAL",0,0,"8",,terminal_output +11258,11188942,"TERMINAL",0,0,"watch",,terminal_focus +11259,11189641,"TERMINAL",0,0,"9",,terminal_output +11260,11190434,"TERMINAL",0,0,"",,terminal_focus +11261,11190588,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1991.localdomain: Fri Sep 5 16:32:40 2025Partition dev_cpuonly: 10 nodes idle\rPartition cpuonly: 14 nodes idle\rPartition dev_accelerated:\t 2 nodes idle\rPartition accelerated: 24 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 5 nodes idle\rPartition accelerated-h200:\t 5 nodes idle",,terminal_output +11262,11191607,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1991.localdomain: Fri Sep 5 16:32:41 2025Partition dev_cpuonly: 10 nodes idle\rPartition cpuonly: 14 nodes idle\rPartition dev_accelerated:\t 2 nodes idle\rPartition accelerated: 24 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 5 nodes idle\rPartition accelerated-h200:\t 5 nodes idle",,terminal_output +11263,11192522,"TERMINAL",0,0,"source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate",,terminal_command +11264,11192546,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs_2",,terminal_output +11265,11192575,"TERMINAL",0,0,"queue",,terminal_command +11266,11192668,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 16:32:42 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3469360 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3466286 accelerat train_to tum_cte0 R 19:16:47\t 1 hkn07363466287 accelerat train_la tum_cte0 R 19:16:47\t 1 hkn07363469458 accelerat train_dy tum_cte0 R\t0:26\t 8 hkn[0703,0706-0707,0711-0715]",,terminal_output +11267,11192703,"TERMINAL",0,0,"2\t",,terminal_output +11268,11193677,"TERMINAL",0,0,"3887",,terminal_output +11269,11193683,"TERMINAL",0,0,"35",,terminal_output +11270,11194705,"TERMINAL",0,0,"4998",,terminal_output +11271,11194721,"TERMINAL",0,0,"4\t",,terminal_output +11272,11194999,"TERMINAL",0,0,"bash",,terminal_focus +11273,11195805,"TERMINAL",0,0,"550509",,terminal_output +11274,11195806,"TERMINAL",0,0,"5\t",,terminal_output +11275,11196819,"TERMINAL",0,0,"61130",,terminal_output +11276,11196820,"TERMINAL",0,0,"6\t",,terminal_output +11277,11196934,"TERMINAL",0,0,"tail -f train_dynamics_maskgit_8_node_chunked_3469458.log",,terminal_command 
+11278,11197000,"TERMINAL",0,0,"]633;CSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn[0703,0706-0707,0711-0715]\r\nGpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\n",,terminal_output +11279,11197823,"TERMINAL",0,0,"7221",,terminal_output +11280,11197830,"TERMINAL",0,0,"7\t",,terminal_output +11281,11198906,"TERMINAL",0,0,"8332",,terminal_output +11282,11198908,"TERMINAL",0,0,"8\t",,terminal_output +11283,11200155,"TERMINAL",0,0,"9443",,terminal_output +11284,11200155,"TERMINAL",0,0,"9\t",,terminal_output +11285,11200929,"TERMINAL",0,0,"50554",,terminal_output +11286,11200955,"TERMINAL",0,0,"50\t",,terminal_output +11287,11202029,"TERMINAL",0,0,"1665",,terminal_output +11288,11202029,"TERMINAL",0,0,"1\t",,terminal_output +11289,11203085,"TERMINAL",0,0,"2776",,terminal_output +11290,11203086,"TERMINAL",0,0,"2\t",,terminal_output +11291,11204076,"TERMINAL",0,0,"3887",,terminal_output +11292,11204077,"TERMINAL",0,0,"3\t",,terminal_output +11293,11205249,"TERMINAL",0,0,"4998",,terminal_output +11294,11205249,"TERMINAL",0,0,"4\t",,terminal_output +11295,11206133,"TERMINAL",0,0,"57:007:009",,terminal_output +11296,11206143,"TERMINAL",0,0,"5\t",,terminal_output +11297,11206929,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +11298,11207175,"TERMINAL",0,0,"61140",,terminal_output +11299,11207176,"TERMINAL",0,0,"6\t",,terminal_output +11300,11207946,"TERMINAL",0,0,"wandb: creating run\r\nwandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_163256-3469458\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-maskgit-8-node-chunked-data-3469458\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/3469458\r\n",,terminal_output +11301,11208219,"TERMINAL",0,0,"7221",,terminal_output +11302,11208263,"TERMINAL",0,0,"7\t",,terminal_output +11303,11209256,"TERMINAL",0,0,"8\t",,terminal_output +11304,11209279,"TERMINAL",0,0,"8332",,terminal_output +11305,11210387,"TERMINAL",0,0,"9\t",,terminal_output +11306,11210387,"TERMINAL",0,0,"9554",,terminal_output +11307,11211496,"TERMINAL",0,0,"3:01\t",,terminal_output +11308,11211497,"TERMINAL",0,0,"3:01665",,terminal_output +11309,11212476,"TERMINAL",0,0,"2\t",,terminal_output +11310,11212476,"TERMINAL",0,0,"2776",,terminal_output +11311,11213493,"TERMINAL",0,0,"3\t",,terminal_output +11312,11213497,"TERMINAL",0,0,"3887",,terminal_output +11313,11214448,"TERMINAL",0,0,"4\t",,terminal_output +11314,11214488,"TERMINAL",0,0,"4998",,terminal_output +11315,11215610,"TERMINAL",0,0,"5\t",,terminal_output +11316,11215610,"TERMINAL",0,0,"510109",,terminal_output +11317,11216520,"TERMINAL",0,0,"6\t",,terminal_output +11318,11216537,"TERMINAL",0,0,"61150",,terminal_output +11319,11217557,"TERMINAL",0,0,"7\t",,terminal_output +11320,11217579,"TERMINAL",0,0,"7221",,terminal_output +11321,11218608,"TERMINAL",0,0,"8\t",,terminal_output +11322,11218620,"TERMINAL",0,0,"8332",,terminal_output +11323,11219641,"TERMINAL",0,0,"9\t",,terminal_output +11324,11219673,"TERMINAL",0,0,"9443",,terminal_output +11325,11220674,"TERMINAL",0,0,"10\t",,terminal_output 
+11326,11220706,"TERMINAL",0,0,"10554",,terminal_output +11327,11221713,"TERMINAL",0,0,"1\t",,terminal_output +11328,11221746,"TERMINAL",0,0,"1665",,terminal_output +11329,11222754,"TERMINAL",0,0,"2\t",,terminal_output +11330,11222786,"TERMINAL",0,0,"2776",,terminal_output +11331,11223792,"TERMINAL",0,0,"3\t",,terminal_output +11332,11223829,"TERMINAL",0,0,"3887",,terminal_output +11333,11224864,"TERMINAL",0,0,"4\t",,terminal_output +11334,11224891,"TERMINAL",0,0,"4998",,terminal_output +11335,11225874,"TERMINAL",0,0,"5\t",,terminal_output +11336,11225934,"TERMINAL",0,0,"520209",,terminal_output +11337,11226904,"TERMINAL",0,0,"6\t",,terminal_output +11338,11226971,"TERMINAL",0,0,"6111:00",,terminal_output +11339,11227951,"TERMINAL",0,0,"7\t",,terminal_output +11340,11228005,"TERMINAL",0,0,"7221",,terminal_output +11341,11228978,"TERMINAL",0,0,"8\t",,terminal_output +11342,11229074,"TERMINAL",0,0,"8332",,terminal_output +11343,11230022,"TERMINAL",0,0,"9\t",,terminal_output +11344,11230103,"TERMINAL",0,0,"9443",,terminal_output +11345,11231060,"TERMINAL",0,0,"20\t",,terminal_output +11346,11231123,"TERMINAL",0,0,"20554",,terminal_output +11347,11232097,"TERMINAL",0,0,"1\t",,terminal_output +11348,11232202,"TERMINAL",0,0,"1665",,terminal_output +11349,11233132,"TERMINAL",0,0,"2\t",,terminal_output +11350,11233192,"TERMINAL",0,0,"2776",,terminal_output +11351,11234165,"TERMINAL",0,0,"3\t",,terminal_output +11352,11234224,"TERMINAL",0,0,"3887",,terminal_output +11353,11235208,"TERMINAL",0,0,"4\t",,terminal_output +11354,11235265,"TERMINAL",0,0,"4998",,terminal_output +11355,11236333,"TERMINAL",0,0,"5\t",,terminal_output +11356,11236333,"TERMINAL",0,0,"5313110",,terminal_output +11357,11237288,"TERMINAL",0,0,"6\t",,terminal_output +11358,11237405,"TERMINAL",0,0,"7221",,terminal_output +11359,11238396,"TERMINAL",0,0,"8\t",,terminal_output +11360,11238441,"TERMINAL",0,0,"8332",,terminal_output +11361,11239379,"TERMINAL",0,0,"9\t",,terminal_output +11362,11239425,"TERMINAL",0,0,"9443",,terminal_output +11363,11240392,"TERMINAL",0,0,"30\t",,terminal_output +11364,11240504,"TERMINAL",0,0,"30554",,terminal_output +11365,11241509,"TERMINAL",0,0,"1\t",,terminal_output +11366,11241533,"TERMINAL",0,0,"1665",,terminal_output +11367,11242580,"TERMINAL",0,0,"2\t",,terminal_output +11368,11242581,"TERMINAL",0,0,"2776",,terminal_output +11369,11243501,"TERMINAL",0,0,"3\t",,terminal_output +11370,11243644,"TERMINAL",0,0,"3887",,terminal_output +11371,11244532,"TERMINAL",0,0,"4\t",,terminal_output +11372,11244674,"TERMINAL",0,0,"4998",,terminal_output +11373,11247259,"TERMINAL",0,0,"5414120",,terminal_output +11374,11247259,"TERMINAL",0,0,"54",,terminal_output +11375,11248518,"TERMINAL",0,0,"7332",,terminal_output +11376,11248519,"TERMINAL",0,0,"7\t",,terminal_output +11377,11249519,"TERMINAL",0,0,"9443",,terminal_output +11378,11249553,"TERMINAL",0,0,"9\t",,terminal_output +11379,11250555,"TERMINAL",0,0,"40\t",,terminal_output +11380,11250562,"TERMINAL",0,0,"40554",,terminal_output +11381,11251600,"TERMINAL",0,0,"1\t",,terminal_output +11382,11251601,"TERMINAL",0,0,"1665",,terminal_output +11383,11252627,"TERMINAL",0,0,"2\t",,terminal_output +11384,11252667,"TERMINAL",0,0,"2776",,terminal_output +11385,11253674,"TERMINAL",0,0,"3\t",,terminal_output +11386,11253674,"TERMINAL",0,0,"3887",,terminal_output +11387,11254867,"TERMINAL",0,0,"4\t",,terminal_output +11388,11254885,"TERMINAL",0,0,"4998",,terminal_output +11389,11255842,"TERMINAL",0,0,"5\t",,terminal_output 
+11390,11255843,"TERMINAL",0,0,"550509",,terminal_output +11391,11256898,"TERMINAL",0,0,"6\t",,terminal_output +11392,11256899,"TERMINAL",0,0,"61130",,terminal_output +11393,11257926,"TERMINAL",0,0,"7\t",,terminal_output +11394,11257927,"TERMINAL",0,0,"7221",,terminal_output +11395,11258970,"TERMINAL",0,0,"8\t",,terminal_output +11396,11258970,"TERMINAL",0,0,"8332",,terminal_output +11397,11259887,"TERMINAL",0,0,"9\t",,terminal_output +11398,11259929,"TERMINAL",0,0,"9443",,terminal_output +11399,11261006,"TERMINAL",0,0,"50\t",,terminal_output +11400,11261007,"TERMINAL",0,0,"50554",,terminal_output +11401,11262056,"TERMINAL",0,0,"1\t",,terminal_output +11402,11262056,"TERMINAL",0,0,"1665",,terminal_output +11403,11263004,"TERMINAL",0,0,"2\t",,terminal_output +11404,11263019,"TERMINAL",0,0,"2776",,terminal_output +11405,11264083,"TERMINAL",0,0,"3\t",,terminal_output +11406,11264083,"TERMINAL",0,0,"3887",,terminal_output +11407,11265066,"TERMINAL",0,0,"4\t",,terminal_output +11408,11265119,"TERMINAL",0,0,"4998",,terminal_output +11409,11266106,"TERMINAL",0,0,"5\t",,terminal_output +11410,11266147,"TERMINAL",0,0,"58:008:009",,terminal_output +11411,11267144,"TERMINAL",0,0,"6\t",,terminal_output +11412,11267186,"TERMINAL",0,0,"61140",,terminal_output +11413,11268288,"TERMINAL",0,0,"7\t",,terminal_output +11414,11268289,"TERMINAL",0,0,"7221",,terminal_output +11415,11269308,"TERMINAL",0,0,"8\t",,terminal_output +11416,11269309,"TERMINAL",0,0,"8332",,terminal_output +11417,11270254,"TERMINAL",0,0,"9\t",,terminal_output +11418,11270300,"TERMINAL",0,0,"9554",,terminal_output +11419,11271348,"TERMINAL",0,0,"4:00\t",,terminal_output +11420,11271358,"TERMINAL",0,0,"4:01665",,terminal_output +11421,11272326,"TERMINAL",0,0,"2\t",,terminal_output +11422,11272388,"TERMINAL",0,0,"2776",,terminal_output +11423,11273405,"TERMINAL",0,0,"3\t",,terminal_output +11424,11273417,"TERMINAL",0,0,"3887",,terminal_output +11425,11274426,"TERMINAL",0,0,"4\t",,terminal_output +11426,11274455,"TERMINAL",0,0,"4998",,terminal_output +11427,11275456,"TERMINAL",0,0,"5\t",,terminal_output +11428,11275561,"TERMINAL",0,0,"510109",,terminal_output +11429,11276504,"TERMINAL",0,0,"6\t",,terminal_output +11430,11276531,"TERMINAL",0,0,"61150",,terminal_output +11431,11277516,"TERMINAL",0,0,"7\t",,terminal_output +11432,11277573,"TERMINAL",0,0,"7221",,terminal_output +11433,11278555,"TERMINAL",0,0,"8\t",,terminal_output +11434,11278610,"TERMINAL",0,0,"8332",,terminal_output +11435,11279592,"TERMINAL",0,0,"9\t",,terminal_output +11436,11279689,"TERMINAL",0,0,"9443",,terminal_output +11437,11280630,"TERMINAL",0,0,"10\t",,terminal_output +11438,11280732,"TERMINAL",0,0,"10554",,terminal_output +11439,11281664,"TERMINAL",0,0,"1\t",,terminal_output +11440,11281772,"TERMINAL",0,0,"1665",,terminal_output +11441,11282704,"TERMINAL",0,0,"2\t",,terminal_output +11442,11282789,"TERMINAL",0,0,"2776",,terminal_output +11443,11283757,"TERMINAL",0,0,"3\t",,terminal_output +11444,11283831,"TERMINAL",0,0,"3887",,terminal_output +11445,11284776,"TERMINAL",0,0,"4\t",,terminal_output +11446,11284871,"TERMINAL",0,0,"4998",,terminal_output +11447,11285818,"TERMINAL",0,0,"5\t",,terminal_output +11448,11285916,"TERMINAL",0,0,"520209",,terminal_output +11449,11287004,"TERMINAL",0,0,"6\t",,terminal_output +11450,11287005,"TERMINAL",0,0,"6112:00",,terminal_output +11451,11287898,"TERMINAL",0,0,"7\t",,terminal_output +11452,11288038,"TERMINAL",0,0,"7221",,terminal_output +11453,11288972,"TERMINAL",0,0,"8\t",,terminal_output 
+11454,11289068,"TERMINAL",0,0,"8332",,terminal_output +11455,11290092,"TERMINAL",0,0,"9\t",,terminal_output +11456,11290100,"TERMINAL",0,0,"9443",,terminal_output +11457,11291116,"TERMINAL",0,0,"20\t",,terminal_output +11458,11291140,"TERMINAL",0,0,"20554",,terminal_output +11459,11292054,"TERMINAL",0,0,"1\t",,terminal_output +11460,11292174,"TERMINAL",0,0,"1665",,terminal_output +11461,11293085,"TERMINAL",0,0,"2\t",,terminal_output +11462,11293222,"TERMINAL",0,0,"2776",,terminal_output +11463,11294120,"TERMINAL",0,0,"3\t",,terminal_output +11464,11294259,"TERMINAL",0,0,"3887",,terminal_output +11465,11295165,"TERMINAL",0,0,"4\t",,terminal_output +11466,11295302,"TERMINAL",0,0,"430309",,terminal_output +11467,11296202,"TERMINAL",0,0,"5\t",,terminal_output +11468,11296348,"TERMINAL",0,0,"61110",,terminal_output +11469,11297237,"TERMINAL",0,0,"6\t",,terminal_output +11470,11297375,"TERMINAL",0,0,"7221",,terminal_output +11471,11298291,"TERMINAL",0,0,"7\t",,terminal_output +11472,11298401,"TERMINAL",0,0,"8332",,terminal_output +11473,11299374,"TERMINAL",0,0,"9\t",,terminal_output +11474,11299438,"TERMINAL",0,0,"9443",,terminal_output +11475,11300375,"TERMINAL",0,0,"30\t",,terminal_output +11476,11300554,"TERMINAL",0,0,"30554",,terminal_output +11477,11301387,"TERMINAL",0,0,"1\t",,terminal_output +11478,11301549,"TERMINAL",0,0,"1665",,terminal_output +11479,11302415,"TERMINAL",0,0,"2\t",,terminal_output +11480,11302558,"TERMINAL",0,0,"2776",,terminal_output +11481,11303454,"TERMINAL",0,0,"3\t",,terminal_output +11482,11303589,"TERMINAL",0,0,"3887",,terminal_output +11483,11304541,"TERMINAL",0,0,"4\t",,terminal_output +11484,11304636,"TERMINAL",0,0,"4998",,terminal_output +11485,11305557,"TERMINAL",0,0,"5\t",,terminal_output +11486,11305694,"TERMINAL",0,0,"540409",,terminal_output +11487,11306603,"TERMINAL",0,0,"6\t",,terminal_output +11488,11306741,"TERMINAL",0,0,"61120",,terminal_output +11489,11307607,"TERMINAL",0,0,"7\t",,terminal_output +11490,11307756,"TERMINAL",0,0,"7221",,terminal_output +11491,11307971,"TERMINAL",0,0,"WARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 
18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\nWARNING:absl:Dropping 18 examples of 89394 examples (shard 32).\r\n",,terminal_output +11492,11308641,"TERMINAL",0,0,"8\t",,terminal_output +11493,11308787,"TERMINAL",0,0,"8332",,terminal_output +11494,11308992,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +11495,11309675,"TERMINAL",0,0,"9\t",,terminal_output +11496,11309820,"TERMINAL",0,0,"9443",,terminal_output +11497,11310708,"TERMINAL",0,0,"40\t",,terminal_output +11498,11310855,"TERMINAL",0,0,"40554",,terminal_output +11499,11311749,"TERMINAL",0,0,"1\t",,terminal_output +11500,11311899,"TERMINAL",0,0,"1665",,terminal_output +11501,11312062,"TERMINAL",0,0,"Running on 32 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35118240, 'tokenizer': 33750256, 'total': 95424400}\r\nStarting training from step 0...\r\nRunning on 32 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35118240, 'tokenizer': 33750256, 'total': 95424400}\r\nStarting training from step 0...\r\nRunning on 32 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35118240, 'tokenizer': 33750256, 'total': 95424400}\r\nStarting training from step 0...\r\nRunning on 32 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35118240, 'tokenizer': 33750256, 'total': 95424400}\r\nStarting training from step 0...\r\nRunning on 32 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35118240, 'tokenizer': 33750256, 'total': 95424400}\r\nStarting training from step 0...\r\nRunning on 32 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35118240, 'tokenizer': 33750256, 'total': 95424400}\r\nStarting training from step 0...\r\nRunning on 32 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35118240, 'tokenizer': 33750256, 'total': 95424400}\r\nStarting training from step 0...\r\nRunning on 32 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35118240, 'tokenizer': 33750256, 'total': 95424400}\r\nStarting training from step 0...\r\nRunning on 32 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35118240, 'tokenizer': 33750256, 'total': 95424400}\r\nStarting training from step 0...\r\nRunning on 32 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35118240, 'tokenizer': 33750256, 'total': 95424400}\r\nStarting training from step 0...\r\nRunning on 32 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35118240, 'tokenizer': 33750256, 'total': 95424400}\r\nStarting training from step 0...\r\nRunning on 32 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35118240, 'tokenizer': 33750256, 'total': 95424400}\r\nStarting training from step 0...\r\nRunning on 32 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35118240, 'tokenizer': 33750256, 'total': 95424400}\r\nStarting training from step 0...\r\nRunning on 32 devices.\r\nCounting all components: ['dynamics', 
'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35118240, 'tokenizer': 33750256, 'total': 
95424400}\r\nStarting training from step 0...\r\nRunning on 32 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35118240, 'tokenizer': 33750256, 'total': 95424400}\r\nStarting training from step 0...\r\nRunning on 32 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35118240, 'tokenizer': 33750256, 'total': 95424400}\r\nStarting training from step 0...\r\nRunning on 32 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35118240, 'tokenizer': 33750256, 'total': 95424400}\r\nStarting training from step 0...\r\n",,terminal_output +11502,11312787,"TERMINAL",0,0,"2\t",,terminal_output +11503,11312932,"TERMINAL",0,0,"2776",,terminal_output +11504,11313817,"TERMINAL",0,0,"3\t",,terminal_output +11505,11313980,"TERMINAL",0,0,"3887",,terminal_output +11506,11314854,"TERMINAL",0,0,"4\t",,terminal_output +11507,11315024,"TERMINAL",0,0,"4998",,terminal_output +11508,11315898,"TERMINAL",0,0,"5\t",,terminal_output +11509,11316053,"TERMINAL",0,0,"550509",,terminal_output +11510,11317020,"TERMINAL",0,0,"6\t",,terminal_output +11511,11317116,"TERMINAL",0,0,"61130",,terminal_output +11512,11317988,"TERMINAL",0,0,"7\t",,terminal_output +11513,11318152,"TERMINAL",0,0,"7221",,terminal_output +11514,11319028,"TERMINAL",0,0,"8\t",,terminal_output +11515,11319169,"TERMINAL",0,0,"8332",,terminal_output +11516,11320039,"TERMINAL",0,0,"9\t",,terminal_output +11517,11320202,"TERMINAL",0,0,"9443",,terminal_output +11518,11321115,"TERMINAL",0,0,"50\t",,terminal_output +11519,11321255,"TERMINAL",0,0,"50554",,terminal_output +11520,11322114,"TERMINAL",0,0,"1\t",,terminal_output +11521,11322300,"TERMINAL",0,0,"1776",,terminal_output +11522,11323157,"TERMINAL",0,0,"2\t",,terminal_output +11523,11323324,"TERMINAL",0,0,"3887",,terminal_output +11524,11324193,"TERMINAL",0,0,"3\t",,terminal_output +11525,11324363,"TERMINAL",0,0,"4998",,terminal_output +11526,11325231,"TERMINAL",0,0,"4\t",,terminal_output +11527,11325398,"TERMINAL",0,0,"59:009:009",,terminal_output +11528,11326271,"TERMINAL",0,0,"5\t",,terminal_output +11529,11326432,"TERMINAL",0,0,"61140",,terminal_output +11530,11327367,"TERMINAL",0,0,"7\t",,terminal_output +11531,11327537,"TERMINAL",0,0,"7221",,terminal_output +11532,11328391,"TERMINAL",0,0,"8\t",,terminal_output +11533,11328538,"TERMINAL",0,0,"8332",,terminal_output +11534,11329379,"TERMINAL",0,0,"9\t",,terminal_output +11535,11329553,"TERMINAL",0,0,"9443",,terminal_output +11536,11330415,"TERMINAL",0,0,"5:00\t",,terminal_output +11537,11330595,"TERMINAL",0,0,"5:00554",,terminal_output +11538,11331565,"TERMINAL",0,0,"1\t",,terminal_output +11539,11331624,"TERMINAL",0,0,"1665",,terminal_output +11540,11332592,"TERMINAL",0,0,"2\t",,terminal_output +11541,11332662,"TERMINAL",0,0,"2776",,terminal_output +11542,11333525,"TERMINAL",0,0,"3\t",,terminal_output +11543,11333703,"TERMINAL",0,0,"3887",,terminal_output +11544,11334115,"TERMINAL",0,0,"2025-09-05 16:35:03.594246: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11545,11334638,"TERMINAL",0,0,"4\t",,terminal_output +11546,11334780,"TERMINAL",0,0,"4998",,terminal_output +11547,11335014,"TERMINAL",0,0,"2025-09-05 16:35:04.129837: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11548,11335606,"TERMINAL",0,0,"5\t",,terminal_output +11549,11335780,"TERMINAL",0,0,"510109",,terminal_output +11550,11336632,"TERMINAL",0,0,"6\t",,terminal_output +11551,11336820,"TERMINAL",0,0,"61150",,terminal_output +11552,11337669,"TERMINAL",0,0,"7\t",,terminal_output +11553,11337858,"TERMINAL",0,0,"7221",,terminal_output +11554,11338022,"TERMINAL",0,0,"2025-09-05 16:35:07.269273: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 16:35:07.374492: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11555,11338702,"TERMINAL",0,0,"85",,terminal_output +11556,11338902,"TERMINAL",0,0,"8332",,terminal_output +11557,11339740,"TERMINAL",0,0,"9\t",,terminal_output +11558,11339942,"TERMINAL",0,0,"9443",,terminal_output +11559,11340048,"TERMINAL",0,0,"2025-09-05 16:35:08.928597: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 16:35:09.299506: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 16:35:09.299556: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +11560,11340781,"TERMINAL",0,0,"10\t",,terminal_output +11561,11340991,"TERMINAL",0,0,"10554",,terminal_output +11562,11341907,"TERMINAL",0,0,"1\t",,terminal_output +11563,11342025,"TERMINAL",0,0,"1665",,terminal_output +11564,11342858,"TERMINAL",0,0,"2\t",,terminal_output +11565,11343066,"TERMINAL",0,0,"2776",,terminal_output +11566,11343956,"TERMINAL",0,0,"3\t",,terminal_output +11567,11344107,"TERMINAL",0,0,"3887",,terminal_output +11568,11344985,"TERMINAL",0,0,"4\t",,terminal_output +11569,11345151,"TERMINAL",0,0,"4998",,terminal_output +11570,11345966,"TERMINAL",0,0,"5\t",,terminal_output +11571,11346187,"TERMINAL",0,0,"520209",,terminal_output +11572,11347028,"TERMINAL",0,0,"6\t",,terminal_output +11573,11347232,"TERMINAL",0,0,"6113:00",,terminal_output +11574,11348039,"TERMINAL",0,0,"7\t",,terminal_output +11575,11348274,"TERMINAL",0,0,"7332",,terminal_output +11576,11349087,"TERMINAL",0,0,"86",,terminal_output +11577,11349308,"TERMINAL",0,0,"9443",,terminal_output +11578,11350208,"TERMINAL",0,0,"9\t",,terminal_output +11579,11350351,"TERMINAL",0,0,"20554",,terminal_output +11580,11351221,"TERMINAL",0,0,"20\t",,terminal_output +11581,11351399,"TERMINAL",0,0,"1665",,terminal_output +11582,11352267,"TERMINAL",0,0,"1\t",,terminal_output +11583,11352439,"TERMINAL",0,0,"2776",,terminal_output +11584,11353288,"TERMINAL",0,0,"2\t",,terminal_output +11585,11353479,"TERMINAL",0,0,"3887",,terminal_output +11586,11354266,"TERMINAL",0,0,"33",,terminal_output +11587,11354558,"TERMINAL",0,0,"4998",,terminal_output +11588,11355307,"TERMINAL",0,0,"4\t",,terminal_output +11589,11355583,"TERMINAL",0,0,"530309",,terminal_output +11590,11356461,"TERMINAL",0,0,"6\t",,terminal_output +11591,11356613,"TERMINAL",0,0,"61110",,terminal_output +11592,11357384,"TERMINAL",0,0,"7\t",,terminal_output +11593,11357641,"TERMINAL",0,0,"7221",,terminal_output +11594,11358423,"TERMINAL",0,0,"8\t",,terminal_output +11595,11358701,"TERMINAL",0,0,"8332",,terminal_output +11596,11359457,"TERMINAL",0,0,"9\t",,terminal_output +11597,11359722,"TERMINAL",0,0,"9443",,terminal_output +11598,11360490,"TERMINAL",0,0,"30\t",,terminal_output +11599,11360776,"TERMINAL",0,0,"30554",,terminal_output +11600,11361568,"TERMINAL",0,0,"1\t",,terminal_output +11601,11361798,"TERMINAL",0,0,"1665",,terminal_output +11602,11362595,"TERMINAL",0,0,"2\t",,terminal_output +11603,11362836,"TERMINAL",0,0,"2776",,terminal_output +11604,11363600,"TERMINAL",0,0,"3\t",,terminal_output +11605,11363887,"TERMINAL",0,0,"3887",,terminal_output +11606,11364645,"TERMINAL",0,0,"4\t",,terminal_output +11607,11364949,"TERMINAL",0,0,"4998",,terminal_output +11608,11365677,"TERMINAL",0,0,"5\t",,terminal_output +11609,11365949,"TERMINAL",0,0,"540409",,terminal_output +11610,11366718,"TERMINAL",0,0,"6\t",,terminal_output +11611,11366988,"TERMINAL",0,0,"61120",,terminal_output +11612,11367759,"TERMINAL",0,0,"7\t",,terminal_output +11613,11368133,"TERMINAL",0,0,"7221",,terminal_output +11614,11370581,"TERMINAL",0,0,"8554",,terminal_output +11615,11370581,"TERMINAL",0,0,"8\t",,terminal_output +11616,11371727,"TERMINAL",0,0,"41665",,terminal_output +11617,11371771,"TERMINAL",0,0,"415",,terminal_output +11618,11372766,"TERMINAL",0,0,"2776",,terminal_output +11619,11372773,"TERMINAL",0,0,"2\t",,terminal_output +11620,11373932,"TERMINAL",0,0,"3\t",,terminal_output +11621,11373934,"TERMINAL",0,0,"3887",,terminal_output 
+11622,11374886,"TERMINAL",0,0,"4\t",,terminal_output +11623,11374887,"TERMINAL",0,0,"4998",,terminal_output +11624,11375891,"TERMINAL",0,0,"5\t",,terminal_output +11625,11375905,"TERMINAL",0,0,"550509",,terminal_output +11626,11377026,"TERMINAL",0,0,"66",,terminal_output +11627,11377031,"TERMINAL",0,0,"61130",,terminal_output +11628,11377971,"TERMINAL",0,0,"7\t",,terminal_output +11629,11377993,"TERMINAL",0,0,"7221",,terminal_output +11630,11379075,"TERMINAL",0,0,"8\t",,terminal_output +11631,11379079,"TERMINAL",0,0,"8332",,terminal_output +11632,11380051,"TERMINAL",0,0,"9\t",,terminal_output +11633,11380076,"TERMINAL",0,0,"9443",,terminal_output +11634,11381133,"TERMINAL",0,0,"50\t",,terminal_output +11635,11381134,"TERMINAL",0,0,"50554",,terminal_output +11636,11382153,"TERMINAL",0,0,"1\t",,terminal_output +11637,11382170,"TERMINAL",0,0,"1665",,terminal_output +11638,11383176,"TERMINAL",0,0,"2\t",,terminal_output +11639,11383199,"TERMINAL",0,0,"2776",,terminal_output +11640,11384215,"TERMINAL",0,0,"3\t",,terminal_output +11641,11384241,"TERMINAL",0,0,"3887",,terminal_output +11642,11385257,"TERMINAL",0,0,"47",,terminal_output +11643,11385282,"TERMINAL",0,0,"420:0020:009",,terminal_output +11644,11386310,"TERMINAL",0,0,"5\t",,terminal_output +11645,11386332,"TERMINAL",0,0,"61140",,terminal_output +11646,11387369,"TERMINAL",0,0,"7\t",,terminal_output +11647,11387370,"TERMINAL",0,0,"7221",,terminal_output +11648,11388378,"TERMINAL",0,0,"8\t",,terminal_output +11649,11388412,"TERMINAL",0,0,"8332",,terminal_output +11650,11389416,"TERMINAL",0,0,"9\t",,terminal_output +11651,11389448,"TERMINAL",0,0,"9443",,terminal_output +11652,11390500,"TERMINAL",0,0,"6:00\t",,terminal_output +11653,11390584,"TERMINAL",0,0,"6:00554",,terminal_output +11654,11391499,"TERMINAL",0,0,"18",,terminal_output +11655,11391541,"TERMINAL",0,0,"1665",,terminal_output +11656,11392574,"TERMINAL",0,0,"2\t",,terminal_output +11657,11392574,"TERMINAL",0,0,"2776",,terminal_output +11658,11393575,"TERMINAL",0,0,"3\t",,terminal_output +11659,11393607,"TERMINAL",0,0,"3887",,terminal_output +11660,11394616,"TERMINAL",0,0,"4\t",,terminal_output +11661,11394650,"TERMINAL",0,0,"4998",,terminal_output +11662,11395653,"TERMINAL",0,0,"5\t",,terminal_output +11663,11395688,"TERMINAL",0,0,"510109",,terminal_output +11664,11396694,"TERMINAL",0,0,"6\t",,terminal_output +11665,11396727,"TERMINAL",0,0,"61150",,terminal_output +11666,11397722,"TERMINAL",0,0,"7\t",,terminal_output +11667,11397763,"TERMINAL",0,0,"7221",,terminal_output +11668,11398768,"TERMINAL",0,0,"89",,terminal_output +11669,11398815,"TERMINAL",0,0,"8332",,terminal_output +11670,11399798,"TERMINAL",0,0,"9\t",,terminal_output +11671,11399833,"TERMINAL",0,0,"9443",,terminal_output +11672,11400835,"TERMINAL",0,0,"10\t",,terminal_output +11673,11400876,"TERMINAL",0,0,"10554",,terminal_output +11674,11401871,"TERMINAL",0,0,"1\t",,terminal_output +11675,11401914,"TERMINAL",0,0,"1665",,terminal_output +11676,11402916,"TERMINAL",0,0,"2\t",,terminal_output +11677,11402945,"TERMINAL",0,0,"2776",,terminal_output +11678,11404066,"TERMINAL",0,0,"3\t",,terminal_output +11679,11404066,"TERMINAL",0,0,"3887",,terminal_output +11680,11405091,"TERMINAL",0,0,"4\t",,terminal_output +11681,11405095,"TERMINAL",0,0,"4998",,terminal_output +11682,11406063,"TERMINAL",0,0,"5\t",,terminal_output +11683,11406084,"TERMINAL",0,0,"520209",,terminal_output +11684,11406225,"TERMINAL",0,0,"watch",,terminal_focus +11685,11407101,"TERMINAL",0,0,"6\t",,terminal_output 
+11686,11407142,"TERMINAL",0,0,"6114:00",,terminal_output +11687,11407854,"TERMINAL",0,0,"watch",,terminal_focus +11688,11408096,"TERMINAL",0,0,"7\t",,terminal_output +11689,11408138,"TERMINAL",0,0,"7221",,terminal_output +11690,11409187,"TERMINAL",0,0,"8\t",,terminal_output +11691,11409188,"TERMINAL",0,0,"8332",,terminal_output +11692,11410213,"TERMINAL",0,0,"9\t",,terminal_output +11693,11410213,"TERMINAL",0,0,"9443",,terminal_output +11694,11410687,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",0,0,"",shellscript,tab +11695,11411219,"TERMINAL",0,0,"20\t",,terminal_output +11696,11411270,"TERMINAL",0,0,"20554",,terminal_output +11697,11412163,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset copy.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=00:30:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_8_node_chunked\n#SBATCH --requeue\n#SBATCH --reservation=llmtum\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/holiday/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3412401\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --num_steps 1000 \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --dyna_type=maskgit \\n --num_latent_actions=100 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskgit-8-node-chunked-data-$slurm_job_id \\n --tags dynamics maskgit 8-node chunked \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +11698,11412380,"TERMINAL",0,0,"120",,terminal_output +11699,11412380,"TERMINAL",0,0,"1776",,terminal_output +11700,11413404,"TERMINAL",0,0,"2\t",,terminal_output +11701,11413404,"TERMINAL",0,0,"3887",,terminal_output +11702,11414420,"TERMINAL",0,0,"4\t",,terminal_output 
+11703,11414420,"TERMINAL",0,0,"4998",,terminal_output +11704,11415426,"TERMINAL",0,0,"5\t",,terminal_output +11705,11415431,"TERMINAL",0,0,"530309",,terminal_output +11706,11416468,"TERMINAL",0,0,"6 9",,terminal_output +11707,11416468,"TERMINAL",0,0,"61110",,terminal_output +11708,11417479,"TERMINAL",0,0,"7\t",,terminal_output +11709,11417480,"TERMINAL",0,0,"7221",,terminal_output +11710,11418481,"TERMINAL",0,0,"8\t",,terminal_output +11711,11418515,"TERMINAL",0,0,"8332",,terminal_output +11712,11419531,"TERMINAL",0,0,"91",,terminal_output +11713,11419548,"TERMINAL",0,0,"9443",,terminal_output +11714,11420311,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=00:30:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_8_node_chunked\n#SBATCH --requeue\n#SBATCH --reservation=llmtum\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/holiday/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3412401\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --num_steps 1000 \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --dyna_type=maskgit \\n --num_latent_actions=100 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskgit-8-node-chunked-data-$slurm_job_id \\n --tags dynamics maskgit 8-node chunked \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +11715,11420582,"TERMINAL",0,0,"30\t",,terminal_output +11716,11420621,"TERMINAL",0,0,"30554",,terminal_output +11717,11421616,"TERMINAL",0,0,"1\t",,terminal_output +11718,11421648,"TERMINAL",0,0,"1665",,terminal_output +11719,11422636,"TERMINAL",0,0,"2\t",,terminal_output +11720,11422676,"TERMINAL",0,0,"2776",,terminal_output 
+11721,11423685,"TERMINAL",0,0,"3\t",,terminal_output +11722,11423700,"TERMINAL",0,0,"3887",,terminal_output +11723,11424692,"TERMINAL",0,0,"4\t",,terminal_output +11724,11424731,"TERMINAL",0,0,"4998",,terminal_output +11725,11425769,"TERMINAL",0,0,"5\t",,terminal_output +11726,11425775,"TERMINAL",0,0,"540409",,terminal_output +11727,11426798,"TERMINAL",0,0,"6\t",,terminal_output +11728,11426858,"TERMINAL",0,0,"61120",,terminal_output +11729,11427804,"TERMINAL",0,0,"7\t",,terminal_output +11730,11427849,"TERMINAL",0,0,"7221",,terminal_output +11731,11428072,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1314,0,"",shellscript,selection_mouse +11732,11428173,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1314,2,"/h",shellscript,selection_mouse +11733,11428174,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1314,5,"/hkfs",shellscript,selection_mouse +11734,11428231,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1314,14,"/hkfs/work/wor",shellscript,selection_mouse +11735,11428231,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1295,19,"\narray_records_dir=",shellscript,selection_mouse +11736,11428531,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1314,91,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrec",shellscript,selection_mouse +11737,11428617,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1314,94,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecord",shellscript,selection_mouse +11738,11428618,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1314,95,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords",shellscript,selection_mouse +11739,11428618,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1314,96,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_",shellscript,selection_mouse +11740,11428618,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1314,97,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_c",shellscript,selection_mouse +11741,11428619,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1314,98,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_ch",shellscript,selection_mouse +11742,11428665,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1314,99,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chu",shellscript,selection_mouse +11743,11428710,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1314,100,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chun",shellscript,selection_mouse +11744,11428751,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1314,101,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunk",shellscript,selection_mouse 
+11745,11428755,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1314,104,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n",shellscript,selection_mouse +11746,11428877,"TERMINAL",0,0,"8\t",,terminal_output +11747,11428919,"TERMINAL",0,0,"8332",,terminal_output +11748,11429245,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1314,103,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked",shellscript,selection_mouse +11749,11429924,"TERMINAL",0,0,"9\t",,terminal_output +11750,11429927,"TERMINAL",0,0,"9443",,terminal_output +11751,11430048,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1314,103,"",shellscript,content +11752,11430091,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1313,0,"",shellscript,selection_command +11753,11430436,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1314,0,"",shellscript,selection_command +11754,11430854,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",1314,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords/10fps_160x90",shellscript,content +11755,11430927,"TERMINAL",0,0,"402",,terminal_output +11756,11430974,"TERMINAL",0,0,"40554",,terminal_output +11757,11431973,"TERMINAL",0,0,"1\t",,terminal_output +11758,11432022,"TERMINAL",0,0,"1665",,terminal_output +11759,11433021,"TERMINAL",0,0,"2\t",,terminal_output +11760,11433057,"TERMINAL",0,0,"2776",,terminal_output +11761,11434122,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch",0,0,"",shellscript,tab +11762,11434249,"TERMINAL",0,0,"3\t",,terminal_output +11763,11434249,"TERMINAL",0,0,"3887",,terminal_output +11764,11435078,"TERMINAL",0,0,"4\t",,terminal_output +11765,11435182,"TERMINAL",0,0,"4998",,terminal_output +11766,11436132,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",0,0,"",shellscript,tab +11767,11436294,"TERMINAL",0,0,"5\t",,terminal_output +11768,11436294,"TERMINAL",0,0,"550509",,terminal_output +11769,11437215,"TERMINAL",0,0,"6\t",,terminal_output +11770,11437255,"TERMINAL",0,0,"61130",,terminal_output +11771,11437560,"TERMINAL",0,0,"tail",,terminal_focus +11772,11438249,"TERMINAL",0,0,"7\t",,terminal_output +11773,11438253,"TERMINAL",0,0,"7221",,terminal_output +11774,11438877,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",0,0,"",shellscript,tab +11775,11439240,"TERMINAL",0,0,"8\t",,terminal_output +11776,11439304,"TERMINAL",0,0,"8443",,terminal_output +11777,11440356,"TERMINAL",0,0,"9\t",,terminal_output +11778,11440435,"TERMINAL",0,0,"50554",,terminal_output +11779,11441317,"TERMINAL",0,0,"51\t",,terminal_output +11780,11441382,"TERMINAL",0,0,"1665",,terminal_output +11781,11441483,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",2232,0,"",shellscript,selection_mouse +11782,11442267,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",2231,0,"",shellscript,selection_command +11783,11442341,"TERMINAL",0,0,"2\t",,terminal_output +11784,11442427,"TERMINAL",0,0,"2776",,terminal_output +11785,11442571,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",2231,0,"u",shellscript,content 
+11786,11442572,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",2232,0,"",shellscript,selection_keyboard +11787,11442944,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",2232,0,"n",shellscript,content +11788,11442945,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",2233,0,"",shellscript,selection_keyboard +11789,11443348,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",2299,0,"",shellscript,selection_command +11790,11443381,"TERMINAL",0,0,"3\t",,terminal_output +11791,11443437,"TERMINAL",0,0,"3887",,terminal_output +11792,11443658,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",2298,0,"",shellscript,selection_command +11793,11443821,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",2297,0,"",shellscript,selection_command +11794,11444350,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",2297,0,"u",shellscript,content +11795,11444351,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",2298,0,"",shellscript,selection_keyboard +11796,11444389,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",2298,0,"n",shellscript,content +11797,11444389,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",2299,0,"",shellscript,selection_keyboard +11798,11444468,"TERMINAL",0,0,"4\t",,terminal_output +11799,11444509,"TERMINAL",0,0,"4998",,terminal_output +11800,11445456,"TERMINAL",0,0,"5\t",,terminal_output +11801,11445578,"TERMINAL",0,0,"51:001:009",,terminal_output +11802,11446557,"TERMINAL",0,0,"6\t",,terminal_output +11803,11446561,"TERMINAL",0,0,"61140",,terminal_output +11804,11447532,"TERMINAL",0,0,"7\t",,terminal_output +11805,11447617,"TERMINAL",0,0,"7221",,terminal_output +11806,11448676,"TERMINAL",0,0,"8\t",,terminal_output +11807,11448681,"TERMINAL",0,0,"8332",,terminal_output +11808,11449974,"TERMINAL",0,0,"9\t",,terminal_output +11809,11449974,"TERMINAL",0,0,"9443",,terminal_output +11810,11450828,"TERMINAL",0,0,"7:00\t",,terminal_output +11811,11450943,"TERMINAL",0,0,"7:00554",,terminal_output +11812,11451825,"TERMINAL",0,0,"13",,terminal_output +11813,11451901,"TERMINAL",0,0,"1665",,terminal_output +11814,11452862,"TERMINAL",0,0,"24",,terminal_output +11815,11452950,"TERMINAL",0,0,"2776",,terminal_output +11816,11453912,"TERMINAL",0,0,"3\t",,terminal_output +11817,11453998,"TERMINAL",0,0,"3887",,terminal_output +11818,11454940,"TERMINAL",0,0,"4\t",,terminal_output +11819,11455065,"TERMINAL",0,0,"4998",,terminal_output +11820,11455800,"TERMINAL",0,0,"5\t",,terminal_output +11821,11455936,"TERMINAL",0,0,"510109",,terminal_output +11822,11456937,"TERMINAL",0,0,"6\t",,terminal_output +11823,11456941,"TERMINAL",0,0,"61150",,terminal_output +11824,11457757,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",882,0,"",shellscript,selection_command +11825,11457894,"TERMINAL",0,0,"7\t",,terminal_output +11826,11457987,"TERMINAL",0,0,"7221",,terminal_output +11827,11458915,"TERMINAL",0,0,"8\t",,terminal_output +11828,11459065,"TERMINAL",0,0,"8332",,terminal_output +11829,11459189,"slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",316,0,"",shellscript,selection_command +11830,11459947,"TERMINAL",0,0,"9\t",,terminal_output +11831,11460084,"TERMINAL",0,0,"9443",,terminal_output +11832,11461115,"TERMINAL",0,0,"10\t",,terminal_output 
+11833,11461123,"TERMINAL",0,0,"10554",,terminal_output +11834,11462058,"TERMINAL",0,0,"1\t",,terminal_output +11835,11462160,"TERMINAL",0,0,"1665",,terminal_output +11836,11462835,"TERMINAL",0,0,"bash",,terminal_focus +11837,11463079,"TERMINAL",0,0,"2\t",,terminal_output +11838,11463204,"TERMINAL",0,0,"2776",,terminal_output +11839,11464174,"TERMINAL",0,0,"3\t",,terminal_output +11840,11464224,"TERMINAL",0,0,"3887",,terminal_output +11841,11465203,"TERMINAL",0,0,"4\t",,terminal_output +11842,11465257,"TERMINAL",0,0,"4998",,terminal_output +11843,11466182,"TERMINAL",0,0,"5\t",,terminal_output +11844,11466318,"TERMINAL",0,0,"521215:00",,terminal_output +11845,11466591,"TERMINAL",0,0,"sbatch slurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch",,terminal_command +11846,11466673,"TERMINAL",0,0,"]633;CSubmitted batch job 3469465\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +11847,11467218,"TERMINAL",0,0,"6\t",,terminal_output +11848,11467341,"TERMINAL",0,0,"\r7223469465 accelerat train_dy tum_cte0 R\t0:00\t 8 hkn[0521-0528]1",,terminal_output +11849,11468372,"TERMINAL",0,0,"715",,terminal_output +11850,11468375,"TERMINAL",0,0,"83312",,terminal_output +11851,11469373,"TERMINAL",0,0,"8\t",,terminal_output +11852,11469412,"TERMINAL",0,0,"94423",,terminal_output +11853,11470366,"TERMINAL",0,0,"20\t",,terminal_output +11854,11470455,"TERMINAL",0,0,"205534",,terminal_output +11855,11471441,"TERMINAL",0,0,"1\t",,terminal_output +11856,11471500,"TERMINAL",0,0,"16645",,terminal_output +11857,11472408,"TERMINAL",0,0,"2\t",,terminal_output +11858,11472547,"TERMINAL",0,0,"27756",,terminal_output +11859,11473446,"TERMINAL",0,0,"3\t",,terminal_output +11860,11473585,"TERMINAL",0,0,"38867",,terminal_output +11861,11474482,"TERMINAL",0,0,"4\t",,terminal_output +11862,11474621,"TERMINAL",0,0,"49978",,terminal_output +11863,11475522,"TERMINAL",0,0,"5\t",,terminal_output +11864,11475658,"TERMINAL",0,0,"5303089",,terminal_output +11865,11476591,"TERMINAL",0,0,"6\t",,terminal_output +11866,11476698,"TERMINAL",0,0,"611910",,terminal_output +11867,11477591,"TERMINAL",0,0,"7\t",,terminal_output +11868,11477739,"TERMINAL",0,0,"722101",,terminal_output +11869,11478645,"TERMINAL",0,0,"8\t",,terminal_output +11870,11478747,"TERMINAL",0,0,"83312",,terminal_output +11871,11479669,"TERMINAL",0,0,"9\t",,terminal_output +11872,11479831,"TERMINAL",0,0,"94423",,terminal_output +11873,11480714,"TERMINAL",0,0,"30\t",,terminal_output +11874,11480857,"TERMINAL",0,0,"305534",,terminal_output +11875,11481892,"TERMINAL",0,0,"tail",,terminal_focus +11876,11481965,"TERMINAL",0,0,"1\t",,terminal_output +11877,11482051,"TERMINAL",0,0,"16645",,terminal_output +11878,11482791,"TERMINAL",0,0,"2\t",,terminal_output +11879,11482964,"TERMINAL",0,0,"27756",,terminal_output +11880,11483332,"TERMINAL",0,0,"^C\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining",,terminal_output +11881,11484146,"TERMINAL",0,0,"3\t",,terminal_output +11882,11484316,"TERMINAL",0,0,"38867",,terminal_output +11883,11485166,"TERMINAL",0,0,"4\t",,terminal_output +11884,11485352,"TERMINAL",0,0,"49978",,terminal_output +11885,11486032,"TERMINAL",0,0,"clear",,terminal_command +11886,11486158,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining",,terminal_output +11887,11486271,"TERMINAL",0,0,"5\t",,terminal_output 
+11888,11486414,"TERMINAL",0,0,"5404089",,terminal_output +11889,11487394,"TERMINAL",0,0,"6\t",,terminal_output +11890,11487540,"TERMINAL",0,0,"611920",,terminal_output +11891,11488168,"TERMINAL",0,0,"7\t",,terminal_output +11892,11488169,"TERMINAL",0,0,"722201",,terminal_output +11893,11488999,"TERMINAL",0,0,"8\t",,terminal_output +11894,11489160,"TERMINAL",0,0,"83312",,terminal_output +11895,11490081,"TERMINAL",0,0,"9\t",,terminal_output +11896,11490170,"TERMINAL",0,0,"94423",,terminal_output +11897,11491115,"TERMINAL",0,0,"40\t",,terminal_output +11898,11491208,"TERMINAL",0,0,"405534",,terminal_output +11899,11493823,"TERMINAL",0,0,"18867",,terminal_output +11900,11493826,"TERMINAL",0,0,"119",,terminal_output +11901,11495374,"TERMINAL",0,0,"4505089",,terminal_output +11902,11495374,"TERMINAL",0,0,"44",,terminal_output +11903,11496430,"TERMINAL",0,0,"611930",,terminal_output +11904,11496430,"TERMINAL",0,0,"6\t",,terminal_output +11905,11497588,"TERMINAL",0,0,"722301",,terminal_output +11906,11497618,"TERMINAL",0,0,"7\t",,terminal_output +11907,11498581,"TERMINAL",0,0,"83312",,terminal_output +11908,11498582,"TERMINAL",0,0,"8\t",,terminal_output +11909,11499521,"TERMINAL",0,0,"94423",,terminal_output +11910,11499562,"TERMINAL",0,0,"9\t",,terminal_output +11911,11500629,"TERMINAL",0,0,"505534",,terminal_output +11912,11500637,"TERMINAL",0,0,"50\t",,terminal_output +11913,11501592,"TERMINAL",0,0,"16645",,terminal_output +11914,11501632,"TERMINAL",0,0,"1\t",,terminal_output +11915,11502683,"TERMINAL",0,0,"27756",,terminal_output +11916,11502689,"TERMINAL",0,0,"2\t",,terminal_output +11917,11503675,"TERMINAL",0,0,"38867",,terminal_output +11918,11503686,"TERMINAL",0,0,"3\t",,terminal_output +11919,11504718,"TERMINAL",0,0,"49978",,terminal_output +11920,11504725,"TERMINAL",0,0,"4\t",,terminal_output +11921,11505757,"TERMINAL",0,0,"52:002:0089",,terminal_output +11922,11505770,"TERMINAL",0,0,"5\t",,terminal_output +11923,11506794,"TERMINAL",0,0,"611940",,terminal_output +11924,11506799,"TERMINAL",0,0,"6\t",,terminal_output +11925,11507834,"TERMINAL",0,0,"722401",,terminal_output +11926,11507840,"TERMINAL",0,0,"7\t",,terminal_output +11927,11508924,"TERMINAL",0,0,"8\t",,terminal_output +11928,11508924,"TERMINAL",0,0,"83312",,terminal_output +11929,11509947,"TERMINAL",0,0,"9\t",,terminal_output +11930,11509948,"TERMINAL",0,0,"94423",,terminal_output +11931,11510974,"TERMINAL",0,0,"8:005534",,terminal_output +11932,11510974,"TERMINAL",0,0,"8:00\t",,terminal_output +11933,11512098,"TERMINAL",0,0,"1\t",,terminal_output +11934,11512100,"TERMINAL",0,0,"16645",,terminal_output +11935,11513089,"TERMINAL",0,0,"2\t",,terminal_output +11936,11513090,"TERMINAL",0,0,"27756",,terminal_output +11937,11514158,"TERMINAL",0,0,"3\t",,terminal_output +11938,11514158,"TERMINAL",0,0,"38867",,terminal_output +11939,11515243,"TERMINAL",0,0,"4\t",,terminal_output +11940,11515243,"TERMINAL",0,0,"49978",,terminal_output +11941,11516253,"TERMINAL",0,0,"5\t",,terminal_output +11942,11516254,"TERMINAL",0,0,"5101089",,terminal_output +11943,11517188,"TERMINAL",0,0,"6\t",,terminal_output +11944,11517188,"TERMINAL",0,0,"611950",,terminal_output +11945,11518252,"TERMINAL",0,0,"7\t",,terminal_output +11946,11518253,"TERMINAL",0,0,"722501",,terminal_output +11947,11519266,"TERMINAL",0,0,"8\t",,terminal_output +11948,11519272,"TERMINAL",0,0,"83312",,terminal_output +11949,11520306,"TERMINAL",0,0,"9\t",,terminal_output +11950,11520315,"TERMINAL",0,0,"95534",,terminal_output +11951,11521424,"TERMINAL",0,0,"11\t",,terminal_output 
+11952,11521424,"TERMINAL",0,0,"116645",,terminal_output +11953,11522449,"TERMINAL",0,0,"2\t",,terminal_output +11954,11522456,"TERMINAL",0,0,"27756",,terminal_output +11955,11523464,"TERMINAL",0,0,"3\t",,terminal_output +11956,11523465,"TERMINAL",0,0,"38867",,terminal_output +11957,11524445,"TERMINAL",0,0,"4\t",,terminal_output +11958,11524458,"TERMINAL",0,0,"49978",,terminal_output +11959,11525512,"TERMINAL",0,0,"5\t",,terminal_output +11960,11525513,"TERMINAL",0,0,"5202089",,terminal_output +11961,11526541,"TERMINAL",0,0,"6\t",,terminal_output +11962,11526558,"TERMINAL",0,0,"61196:00",,terminal_output +11963,11527630,"TERMINAL",0,0,"7\t",,terminal_output +11964,11527631,"TERMINAL",0,0,"7221:001",,terminal_output +11965,11528587,"TERMINAL",0,0,"8\t",,terminal_output +11966,11528635,"TERMINAL",0,0,"83312",,terminal_output +11967,11529622,"TERMINAL",0,0,"9\t",,terminal_output +11968,11529731,"TERMINAL",0,0,"94423",,terminal_output +11969,11530700,"TERMINAL",0,0,"20\t",,terminal_output +11970,11530735,"TERMINAL",0,0,"205534",,terminal_output +11971,11531697,"TERMINAL",0,0,"1\t",,terminal_output +11972,11531800,"TERMINAL",0,0,"16645",,terminal_output +11973,11532738,"TERMINAL",0,0,"2\t",,terminal_output +11974,11532805,"TERMINAL",0,0,"27756",,terminal_output +11975,11533783,"TERMINAL",0,0,"3\t",,terminal_output +11976,11533848,"TERMINAL",0,0,"38867",,terminal_output +11977,11534933,"TERMINAL",0,0,"4\t",,terminal_output +11978,11534939,"TERMINAL",0,0,"49978",,terminal_output +11979,11535956,"TERMINAL",0,0,"5\t",,terminal_output +11980,11535957,"TERMINAL",0,0,"5303089",,terminal_output +11981,11536991,"TERMINAL",0,0,"6\t",,terminal_output +11982,11536992,"TERMINAL",0,0,"611910",,terminal_output +11983,11537931,"TERMINAL",0,0,"7\t",,terminal_output +11984,11538008,"TERMINAL",0,0,"722101",,terminal_output +11985,11538964,"TERMINAL",0,0,"8\t",,terminal_output +11986,11539046,"TERMINAL",0,0,"83312",,terminal_output +11987,11540005,"TERMINAL",0,0,"9\t",,terminal_output +11988,11540087,"TERMINAL",0,0,"94423",,terminal_output +11989,11541084,"TERMINAL",0,0,"30\t",,terminal_output +11990,11541125,"TERMINAL",0,0,"305534",,terminal_output +11991,11542084,"TERMINAL",0,0,"1\t",,terminal_output +11992,11542171,"TERMINAL",0,0,"16645",,terminal_output +11993,11543186,"TERMINAL",0,0,"2\t",,terminal_output +11994,11543203,"TERMINAL",0,0,"27756",,terminal_output +11995,11544252,"TERMINAL",0,0,"3\t",,terminal_output +11996,11544253,"TERMINAL",0,0,"38867",,terminal_output +11997,11545218,"TERMINAL",0,0,"4\t",,terminal_output +11998,11545287,"TERMINAL",0,0,"4404089",,terminal_output +11999,11546238,"TERMINAL",0,0,"5\t",,terminal_output +12000,11546374,"TERMINAL",0,0,"611920",,terminal_output +12001,11547364,"TERMINAL",0,0,"6\t",,terminal_output +12002,11547384,"TERMINAL",0,0,"722201",,terminal_output +12003,11548322,"TERMINAL",0,0,"8\t",,terminal_output +12004,11548434,"TERMINAL",0,0,"83312",,terminal_output +12005,11549354,"TERMINAL",0,0,"9\t",,terminal_output +12006,11549502,"TERMINAL",0,0,"94423",,terminal_output +12007,11550502,"TERMINAL",0,0,"40\t",,terminal_output +12008,11550533,"TERMINAL",0,0,"405534",,terminal_output +12009,11551521,"TERMINAL",0,0,"1\t",,terminal_output +12010,11551543,"TERMINAL",0,0,"16645",,terminal_output +12011,11552587,"TERMINAL",0,0,"2\t",,terminal_output +12012,11552588,"TERMINAL",0,0,"27756",,terminal_output +12013,11553573,"TERMINAL",0,0,"3\t",,terminal_output +12014,11553626,"TERMINAL",0,0,"38867",,terminal_output +12015,11554546,"TERMINAL",0,0,"4\t",,terminal_output 
+12016,11554686,"TERMINAL",0,0,"49978",,terminal_output +12017,11555613,"TERMINAL",0,0,"5\t",,terminal_output +12018,11555704,"TERMINAL",0,0,"5505089",,terminal_output +12019,11556623,"TERMINAL",0,0,"6\t",,terminal_output +12020,11556762,"TERMINAL",0,0,"611930",,terminal_output +12021,11557665,"TERMINAL",0,0,"7\t",,terminal_output +12022,11557811,"TERMINAL",0,0,"722301",,terminal_output +12023,11558704,"TERMINAL",0,0,"8\t",,terminal_output +12024,11558854,"TERMINAL",0,0,"83312",,terminal_output +12025,11559744,"TERMINAL",0,0,"9\t",,terminal_output +12026,11559893,"TERMINAL",0,0,"94423",,terminal_output +12027,11560783,"TERMINAL",0,0,"50\t",,terminal_output +12028,11560924,"TERMINAL",0,0,"505534",,terminal_output +12029,11561820,"TERMINAL",0,0,"1\t",,terminal_output +12030,11561961,"TERMINAL",0,0,"16645",,terminal_output +12031,11562866,"TERMINAL",0,0,"2\t",,terminal_output +12032,11563004,"TERMINAL",0,0,"27756",,terminal_output +12033,11563903,"TERMINAL",0,0,"3\t",,terminal_output +12034,11564041,"TERMINAL",0,0,"38867",,terminal_output +12035,11565090,"TERMINAL",0,0,"4\t",,terminal_output +12036,11565110,"TERMINAL",0,0,"49978",,terminal_output +12037,11566064,"TERMINAL",0,0,"5\t",,terminal_output +12038,11566202,"TERMINAL",0,0,"53:003:0089",,terminal_output +12039,11567087,"TERMINAL",0,0,"6\t",,terminal_output +12040,11567154,"TERMINAL",0,0,"611940",,terminal_output +12041,11568058,"TERMINAL",0,0,"7\t",,terminal_output +12042,11568199,"TERMINAL",0,0,"722401",,terminal_output +12043,11569137,"TERMINAL",0,0,"8\t",,terminal_output +12044,11569280,"TERMINAL",0,0,"83312",,terminal_output +12045,11570153,"TERMINAL",0,0,"9\t",,terminal_output +12046,11570295,"TERMINAL",0,0,"95534",,terminal_output +12047,11571169,"TERMINAL",0,0,"9:00\t",,terminal_output +12048,11571320,"TERMINAL",0,0,"9:016645",,terminal_output +12049,11572321,"TERMINAL",0,0,"16",,terminal_output +12050,11572358,"TERMINAL",0,0,"27756",,terminal_output +12051,11573336,"TERMINAL",0,0,"2\t",,terminal_output +12052,11573395,"TERMINAL",0,0,"38867",,terminal_output +12053,11574279,"TERMINAL",0,0,"3\t",,terminal_output +12054,11574434,"TERMINAL",0,0,"49978",,terminal_output +12055,11575322,"TERMINAL",0,0,"5\t",,terminal_output +12056,11575475,"TERMINAL",0,0,"5101089",,terminal_output +12057,11576406,"TERMINAL",0,0,"6\t",,terminal_output +12058,11576544,"TERMINAL",0,0,"611950",,terminal_output +12059,11577402,"TERMINAL",0,0,"7\t",,terminal_output +12060,11577620,"TERMINAL",0,0,"722501",,terminal_output +12061,11578590,"TERMINAL",0,0,"8\t",,terminal_output +12062,11578653,"TERMINAL",0,0,"83312",,terminal_output +12063,11579483,"TERMINAL",0,0,"9\t",,terminal_output +12064,11579674,"TERMINAL",0,0,"94423",,terminal_output +12065,11580622,"TERMINAL",0,0,"10\t",,terminal_output +12066,11580754,"TERMINAL",0,0,"105534",,terminal_output +12067,11581616,"TERMINAL",0,0,"1\t",,terminal_output +12068,11581759,"TERMINAL",0,0,"16645",,terminal_output +12069,11582652,"TERMINAL",0,0,"2\t",,terminal_output +12070,11582796,"TERMINAL",0,0,"27756",,terminal_output +12071,11583687,"TERMINAL",0,0,"3\t",,terminal_output +12072,11583847,"TERMINAL",0,0,"38867",,terminal_output +12073,11584700,"TERMINAL",0,0,"4\t",,terminal_output +12074,11584886,"TERMINAL",0,0,"49978",,terminal_output +12075,11585738,"TERMINAL",0,0,"5\t",,terminal_output +12076,11585929,"TERMINAL",0,0,"5202089",,terminal_output +12077,11586843,"TERMINAL",0,0,"6\t",,terminal_output +12078,11587000,"TERMINAL",0,0,"61197:00",,terminal_output +12079,11587817,"TERMINAL",0,0,"7\t",,terminal_output 
+12080,11588038,"TERMINAL",0,0,"7222:001",,terminal_output +12081,11588898,"TERMINAL",0,0,"8\t",,terminal_output +12082,11589059,"TERMINAL",0,0,"83312",,terminal_output +12083,11589908,"TERMINAL",0,0,"9\t",,terminal_output +12084,11590104,"TERMINAL",0,0,"94423",,terminal_output +12085,11590926,"TERMINAL",0,0,"20\t",,terminal_output +12086,11591141,"TERMINAL",0,0,"205534",,terminal_output +12087,11591979,"TERMINAL",0,0,"1\t",,terminal_output +12088,11592175,"TERMINAL",0,0,"16645",,terminal_output +12089,11593013,"TERMINAL",0,0,"2\t",,terminal_output +12090,11593224,"TERMINAL",0,0,"27756",,terminal_output +12091,11594121,"TERMINAL",0,0,"3\t",,terminal_output +12092,11594273,"TERMINAL",0,0,"39978",,terminal_output +12093,11595093,"TERMINAL",0,0,"4\t",,terminal_output +12094,11595324,"TERMINAL",0,0,"5303089",,terminal_output +12095,11596137,"TERMINAL",0,0,"5\t",,terminal_output +12096,11596373,"TERMINAL",0,0,"611910",,terminal_output +12097,11597174,"TERMINAL",0,0,"6\t",,terminal_output +12098,11597506,"TERMINAL",0,0,"722101",,terminal_output +12099,11598325,"TERMINAL",0,0,"7\t",,terminal_output +12100,11598484,"TERMINAL",0,0,"83312",,terminal_output +12101,11599345,"TERMINAL",0,0,"8\t",,terminal_output +12102,11599491,"TERMINAL",0,0,"94423",,terminal_output +12103,11600296,"TERMINAL",0,0,"9\t",,terminal_output +12104,11600622,"TERMINAL",0,0,"305534",,terminal_output +12105,11601331,"TERMINAL",0,0,"31\t",,terminal_output +12106,11601620,"TERMINAL",0,0,"16645",,terminal_output +12107,11602417,"TERMINAL",0,0,"2\t",,terminal_output +12108,11602625,"TERMINAL",0,0,"27756",,terminal_output +12109,11603543,"TERMINAL",0,0,"3\t",,terminal_output +12110,11603678,"TERMINAL",0,0,"38867",,terminal_output +12111,11604454,"TERMINAL",0,0,"4\t",,terminal_output +12112,11604723,"TERMINAL",0,0,"49978",,terminal_output +12113,11605508,"TERMINAL",0,0,"5\t",,terminal_output +12114,11605745,"TERMINAL",0,0,"5404089",,terminal_output +12115,11606621,"TERMINAL",0,0,"6\t",,terminal_output +12116,11606774,"TERMINAL",0,0,"611920",,terminal_output +12117,11607571,"TERMINAL",0,0,"7\t",,terminal_output +12118,11607823,"TERMINAL",0,0,"722201",,terminal_output +12119,11608617,"TERMINAL",0,0,"8\t",,terminal_output +12120,11608908,"TERMINAL",0,0,"83312",,terminal_output +12121,11609718,"TERMINAL",0,0,"9\t",,terminal_output +12122,11609921,"TERMINAL",0,0,"94423",,terminal_output +12123,11610717,"TERMINAL",0,0,"40\t",,terminal_output +12124,11611017,"TERMINAL",0,0,"405534",,terminal_output +12125,11611762,"TERMINAL",0,0,"1\t",,terminal_output +12126,11611981,"TERMINAL",0,0,"16645",,terminal_output +12127,11612790,"TERMINAL",0,0,"2\t",,terminal_output +12128,11613017,"TERMINAL",0,0,"27756",,terminal_output +12129,11613890,"TERMINAL",0,0,"3\t",,terminal_output +12130,11614060,"TERMINAL",0,0,"38867",,terminal_output +12131,11615024,"TERMINAL",0,0,"4\t",,terminal_output +12132,11615119,"TERMINAL",0,0,"49978",,terminal_output +12133,11617369,"TERMINAL",0,0,"552523031",,terminal_output +12134,11617374,"TERMINAL",0,0,"5\t",,terminal_output +12135,11618699,"TERMINAL",0,0,"83312",,terminal_output +12136,11618699,"TERMINAL",0,0,"8\t",,terminal_output +12137,11619728,"TERMINAL",0,0,"9\t",,terminal_output +12138,11619736,"TERMINAL",0,0,"94423",,terminal_output +12139,11620660,"TERMINAL",0,0,"50\t",,terminal_output +12140,11620724,"TERMINAL",0,0,"505534",,terminal_output +12141,11621709,"TERMINAL",0,0,"1\t",,terminal_output +12142,11621749,"TERMINAL",0,0,"16645",,terminal_output +12143,11622738,"TERMINAL",0,0,"2\t",,terminal_output 
+12144,11622751,"TERMINAL",0,0,"27756",,terminal_output +12145,11623774,"TERMINAL",0,0,"3\t",,terminal_output +12146,11623859,"TERMINAL",0,0,"38867",,terminal_output +12147,11624823,"TERMINAL",0,0,"4\t",,terminal_output +12148,11624841,"TERMINAL",0,0,"49978",,terminal_output +12149,11625850,"TERMINAL",0,0,"5\t",,terminal_output +12150,11625871,"TERMINAL",0,0,"54:004:0089",,terminal_output +12151,11626980,"TERMINAL",0,0,"6\t",,terminal_output +12152,11626980,"TERMINAL",0,0,"611940",,terminal_output +12153,11627914,"TERMINAL",0,0,"7\t",,terminal_output +12154,11627941,"TERMINAL",0,0,"722401",,terminal_output +12155,11628974,"TERMINAL",0,0,"8\t",,terminal_output +12156,11628990,"TERMINAL",0,0,"83312",,terminal_output +12157,11630072,"TERMINAL",0,0,"9\t",,terminal_output +12158,11630089,"TERMINAL",0,0,"94423",,terminal_output +12159,11631056,"TERMINAL",0,0,"40:00\t",,terminal_output +12160,11631061,"TERMINAL",0,0,"40:005534",,terminal_output +12161,11632056,"TERMINAL",0,0,"1\t",,terminal_output +12162,11632088,"TERMINAL",0,0,"16645",,terminal_output +12163,11633142,"TERMINAL",0,0,"2\t",,terminal_output +12164,11633152,"TERMINAL",0,0,"27756",,terminal_output +12165,11634172,"TERMINAL",0,0,"3\t",,terminal_output +12166,11634184,"TERMINAL",0,0,"38867",,terminal_output +12167,11635203,"TERMINAL",0,0,"4\t",,terminal_output +12168,11635203,"TERMINAL",0,0,"49978",,terminal_output +12169,11636211,"TERMINAL",0,0,"5\t",,terminal_output +12170,11636237,"TERMINAL",0,0,"5101089",,terminal_output +12171,11637263,"TERMINAL",0,0,"6\t",,terminal_output +12172,11637296,"TERMINAL",0,0,"6225051",,terminal_output +12173,11638287,"TERMINAL",0,0,"7\t",,terminal_output +12174,11638323,"TERMINAL",0,0,"83312",,terminal_output +12175,11639383,"TERMINAL",0,0,"9\t",,terminal_output +12176,11639384,"TERMINAL",0,0,"94423",,terminal_output +12177,11640356,"TERMINAL",0,0,"10\t",,terminal_output +12178,11640390,"TERMINAL",0,0,"105534",,terminal_output +12179,11641434,"TERMINAL",0,0,"1\t",,terminal_output +12180,11641440,"TERMINAL",0,0,"16645",,terminal_output +12181,11642454,"TERMINAL",0,0,"2\t",,terminal_output +12182,11642486,"TERMINAL",0,0,"27756",,terminal_output +12183,11643638,"TERMINAL",0,0,"3\t",,terminal_output +12184,11643638,"TERMINAL",0,0,"38867",,terminal_output +12185,11644508,"TERMINAL",0,0,"4\t",,terminal_output +12186,11644547,"TERMINAL",0,0,"49978",,terminal_output +12187,11645542,"TERMINAL",0,0,"5\t",,terminal_output +12188,11645588,"TERMINAL",0,0,"5202089",,terminal_output +12189,11646658,"TERMINAL",0,0,"6\t",,terminal_output +12190,11646659,"TERMINAL",0,0,"61198:00",,terminal_output +12191,11647682,"TERMINAL",0,0,"7\t",,terminal_output +12192,11647682,"TERMINAL",0,0,"7223:001",,terminal_output +12193,11648701,"TERMINAL",0,0,"8\t",,terminal_output +12194,11648715,"TERMINAL",0,0,"83312",,terminal_output +12195,11649472,"TERMINAL",0,0,"bash",,terminal_focus +12196,11649692,"TERMINAL",0,0,"9\t",,terminal_output +12197,11649804,"TERMINAL",0,0,"94423",,terminal_output +12198,11650715,"TERMINAL",0,0,"20\t",,terminal_output +12199,11650818,"TERMINAL",0,0,"205534",,terminal_output +12200,11651754,"TERMINAL",0,0,"1\t",,terminal_output +12201,11651822,"TERMINAL",0,0,"16645",,terminal_output +12202,11652807,"TERMINAL",0,0,"2\t",,terminal_output +12203,11652919,"TERMINAL",0,0,"27756",,terminal_output +12204,11653896,"TERMINAL",0,0,"bash",,terminal_focus +12205,11653931,"TERMINAL",0,0,"3\t",,terminal_output +12206,11654020,"TERMINAL",0,0,"38867",,terminal_output +12207,11654865,"TERMINAL",0,0,"4\t",,terminal_output 
+12208,11654934,"TERMINAL",0,0,"49978",,terminal_output +12209,11655937,"TERMINAL",0,0,"5\t",,terminal_output +12210,11656024,"TERMINAL",0,0,"ls",,terminal_command +12211,11656086,"TERMINAL",0,0,"]633;Ctrain_dynamics_maskgit_8_node_3412350.log train_dynamics_maskgit_8_node_3417226.log train_dynamics_maskgit_8_node_3423234.log train_dynamics_maskgit_8_node_chunked_3469458.log\r\ntrain_dynamics_maskgit_8_node_3412354.log train_dynamics_maskgit_8_node_3418833.log train_dynamics_maskgit_8_node_3423250.log train_dynamics_maskgit_8_node_chunked_3469465.log\r\ntrain_dynamics_maskgit_8_node_3417225.log train_dynamics_maskgit_8_node_3418834.log train_dynamics_maskgit_8_node_3429868.log\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining",,terminal_output +12212,11656099,"TERMINAL",0,0,"5303089",,terminal_output +12213,11656934,"TERMINAL",0,0,"6\t",,terminal_output +12214,11657036,"TERMINAL",0,0,"611910",,terminal_output +12215,11657970,"TERMINAL",0,0,"7\t",,terminal_output +12216,11658078,"TERMINAL",0,0,"722101",,terminal_output +12217,11659044,"TERMINAL",0,0,"8\t",,terminal_output +12218,11659092,"TERMINAL",0,0,"83312",,terminal_output +12219,11660041,"TERMINAL",0,0,"9\t",,terminal_output +12220,11660183,"TERMINAL",0,0,"94423",,terminal_output +12221,11661209,"TERMINAL",0,0,"30\t",,terminal_output +12222,11661210,"TERMINAL",0,0,"305534",,terminal_output +12223,11662238,"TERMINAL",0,0,"1\t",,terminal_output +12224,11662238,"TERMINAL",0,0,"16645",,terminal_output +12225,11662432,"TERMINAL",0,0,"tail -f train_dynamics_maskgit_8_node_chunked_3469465.log",,terminal_command +12226,11662486,"TERMINAL",0,0,"]633;CParameter counts:\r\n{'dynamics': 26555904, 'lam': 35118240, 'tokenizer': 33750256, 'total': 95424400}\r\nStarting training from step 0...\r\n2025-09-05 16:40:15.762089: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 16:40:16.283495: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 16:40:18.671567: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 16:40:19.228726: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 16:40:21.842598: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 16:40:21.842643: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 16:40:23.453725: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12227,11663207,"TERMINAL",0,0,"2\t",,terminal_output +12228,11663290,"TERMINAL",0,0,"27756",,terminal_output +12229,11664275,"TERMINAL",0,0,"3\t",,terminal_output +12230,11664275,"TERMINAL",0,0,"38867",,terminal_output +12231,11665224,"TERMINAL",0,0,"4\t",,terminal_output +12232,11665323,"TERMINAL",0,0,"4404089",,terminal_output +12233,11666258,"TERMINAL",0,0,"5\t",,terminal_output +12234,11666362,"TERMINAL",0,0,"611920",,terminal_output +12235,11667375,"TERMINAL",0,0,"6\t",,terminal_output +12236,11667403,"TERMINAL",0,0,"722201",,terminal_output +12237,11668331,"TERMINAL",0,0,"8\t",,terminal_output +12238,11668430,"TERMINAL",0,0,"83312",,terminal_output +12239,11669364,"TERMINAL",0,0,"9\t",,terminal_output +12240,11669467,"TERMINAL",0,0,"94423",,terminal_output +12241,11670394,"TERMINAL",0,0,"40\t",,terminal_output +12242,11670487,"TERMINAL",0,0,"405534",,terminal_output +12243,11671428,"TERMINAL",0,0,"1\t",,terminal_output +12244,11671523,"TERMINAL",0,0,"16645",,terminal_output +12245,11672474,"TERMINAL",0,0,"2\t",,terminal_output +12246,11672571,"TERMINAL",0,0,"27756",,terminal_output +12247,11673507,"TERMINAL",0,0,"3\t",,terminal_output +12248,11673590,"TERMINAL",0,0,"38867",,terminal_output +12249,11674540,"TERMINAL",0,0,"4\t",,terminal_output +12250,11674669,"TERMINAL",0,0,"49978",,terminal_output +12251,11675666,"TERMINAL",0,0,"5\t",,terminal_output +12252,11675667,"TERMINAL",0,0,"5505089",,terminal_output +12253,11676656,"TERMINAL",0,0,"6\t",,terminal_output +12254,11676699,"TERMINAL",0,0,"611930",,terminal_output +12255,11677680,"TERMINAL",0,0,"7\t",,terminal_output +12256,11677738,"TERMINAL",0,0,"722301",,terminal_output +12257,11678706,"TERMINAL",0,0,"8\t",,terminal_output +12258,11678774,"TERMINAL",0,0,"83312",,terminal_output +12259,11679731,"TERMINAL",0,0,"9\t",,terminal_output +12260,11679819,"TERMINAL",0,0,"94423",,terminal_output +12261,11680757,"TERMINAL",0,0,"50\t",,terminal_output +12262,11680854,"TERMINAL",0,0,"505534",,terminal_output +12263,11681795,"TERMINAL",0,0,"1\t",,terminal_output +12264,11681903,"TERMINAL",0,0,"16645",,terminal_output +12265,11682830,"TERMINAL",0,0,"2\t",,terminal_output +12266,11682969,"TERMINAL",0,0,"27756",,terminal_output +12267,11683866,"TERMINAL",0,0,"35",,terminal_output +12268,11684004,"TERMINAL",0,0,"38867",,terminal_output +12269,11684950,"TERMINAL",0,0,"4\t",,terminal_output +12270,11685006,"TERMINAL",0,0,"49978",,terminal_output +12271,11685970,"TERMINAL",0,0,"5\t",,terminal_output +12272,11686050,"TERMINAL",0,0,"55:005:0089",,terminal_output +12273,11686972,"TERMINAL",0,0,"6\t",,terminal_output +12274,11687081,"TERMINAL",0,0,"611940",,terminal_output +12275,11688132,"TERMINAL",0,0,"7\t",,terminal_output 
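(Editor's note, illustrative only.) The `tail -f` output captured above begins with a per-module parameter summary, {'dynamics': 26555904, 'lam': 35118240, 'tokenizer': 33750256, 'total': 95424400}, printed before step 0. As a minimal sketch, assuming Flax-style parameter pytrees (the module names come from the log; the structure and values below are hypothetical, not the project's actual code), such a summary can be produced by summing array sizes over each sub-model's pytree:

    # Hedged sketch: per-module parameter counts from JAX pytrees.
    import numpy as np
    import jax

    def count_params(tree) -> int:
        # Sum the number of elements over every array leaf in the pytree.
        return int(sum(leaf.size for leaf in jax.tree_util.tree_leaves(tree)))

    # Hypothetical parameter pytrees, one per sub-model (names taken from the log).
    params = {
        "dynamics": {"w": np.zeros((64, 64)), "b": np.zeros((64,))},
        "lam": {"embed": np.zeros((128, 32))},
        "tokenizer": {"codebook": np.zeros((1024, 16))},
    }

    counts = {name: count_params(p) for name, p in params.items()}
    counts["total"] = sum(counts.values())
    print("Parameter counts:")
    print(counts)

The recorded run would print its equivalent of this dictionary once at startup, then emit the "Step N, loss: ..." lines seen in the subsequent records.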
+12276,11688132,"TERMINAL",0,0,"722401",,terminal_output +12277,11689074,"TERMINAL",0,0,"8\t",,terminal_output +12278,11689157,"TERMINAL",0,0,"83312",,terminal_output +12279,11690081,"TERMINAL",0,0,"9\t",,terminal_output +12280,11690223,"TERMINAL",0,0,"94423",,terminal_output +12281,11691125,"TERMINAL",0,0,"1:00\t",,terminal_output +12282,11691259,"TERMINAL",0,0,"1:005534",,terminal_output +12283,11692222,"TERMINAL",0,0,"1\t",,terminal_output +12284,11692312,"TERMINAL",0,0,"17756",,terminal_output +12285,11693201,"TERMINAL",0,0,"2\t",,terminal_output +12286,11693347,"TERMINAL",0,0,"38867",,terminal_output +12287,11694264,"TERMINAL",0,0,"3\t",,terminal_output +12288,11694383,"TERMINAL",0,0,"49978",,terminal_output +12289,11695280,"TERMINAL",0,0,"4\t",,terminal_output +12290,11695426,"TERMINAL",0,0,"5101089",,terminal_output +12291,11696317,"TERMINAL",0,0,"6\t",,terminal_output +12292,11696461,"TERMINAL",0,0,"611950",,terminal_output +12293,11697451,"TERMINAL",0,0,"7\t",,terminal_output +12294,11697497,"TERMINAL",0,0,"722501",,terminal_output +12295,11698389,"TERMINAL",0,0,"8\t",,terminal_output +12296,11698530,"TERMINAL",0,0,"83312",,terminal_output +12297,11699425,"TERMINAL",0,0,"9\t",,terminal_output +12298,11699585,"TERMINAL",0,0,"94423",,terminal_output +12299,11700478,"TERMINAL",0,0,"10\t",,terminal_output +12300,11700659,"TERMINAL",0,0,"105534",,terminal_output +12301,11701501,"TERMINAL",0,0,"1\t",,terminal_output +12302,11701679,"TERMINAL",0,0,"16645",,terminal_output +12303,11702536,"TERMINAL",0,0,"2\t",,terminal_output +12304,11702695,"TERMINAL",0,0,"27756",,terminal_output +12305,11703653,"TERMINAL",0,0,"3\t",,terminal_output +12306,11703714,"TERMINAL",0,0,"38867",,terminal_output +12307,11704714,"TERMINAL",0,0,"4\t",,terminal_output +12308,11704750,"TERMINAL",0,0,"49978",,terminal_output +12309,11705743,"TERMINAL",0,0,"5\t",,terminal_output +12310,11705788,"TERMINAL",0,0,"5202089",,terminal_output +12311,11706724,"TERMINAL",0,0,"6\t",,terminal_output +12312,11706901,"TERMINAL",0,0,"61199:00",,terminal_output +12313,11707707,"TERMINAL",0,0,"7\t",,terminal_output +12314,11707865,"TERMINAL",0,0,"7224:001",,terminal_output +12315,11708753,"TERMINAL",0,0,"8\t",,terminal_output +12316,11708904,"TERMINAL",0,0,"83312",,terminal_output +12317,11709782,"TERMINAL",0,0,"9\t",,terminal_output +12318,11709937,"TERMINAL",0,0,"94423",,terminal_output +12319,11710824,"TERMINAL",0,0,"20\t",,terminal_output +12320,11710978,"TERMINAL",0,0,"205534",,terminal_output +12321,11711851,"TERMINAL",0,0,"1\t",,terminal_output +12322,11712025,"TERMINAL",0,0,"16645",,terminal_output +12323,11712890,"TERMINAL",0,0,"2\t",,terminal_output +12324,11713057,"TERMINAL",0,0,"27756",,terminal_output +12325,11713914,"TERMINAL",0,0,"3\t",,terminal_output +12326,11714093,"TERMINAL",0,0,"38867",,terminal_output +12327,11714963,"TERMINAL",0,0,"4\t",,terminal_output +12328,11715140,"TERMINAL",0,0,"49978",,terminal_output +12329,11716002,"TERMINAL",0,0,"5\t",,terminal_output +12330,11716176,"TERMINAL",0,0,"5303089",,terminal_output +12331,11717043,"TERMINAL",0,0,"6\t",,terminal_output +12332,11717206,"TERMINAL",0,0,"611910",,terminal_output +12333,11718060,"TERMINAL",0,0,"7\t",,terminal_output +12334,11718246,"TERMINAL",0,0,"722101",,terminal_output +12335,11719097,"TERMINAL",0,0,"8\t",,terminal_output +12336,11719284,"TERMINAL",0,0,"84423",,terminal_output +12337,11720139,"TERMINAL",0,0,"9\t",,terminal_output +12338,11720322,"TERMINAL",0,0,"305534",,terminal_output +12339,11721207,"TERMINAL",0,0,"30\t",,terminal_output 
+12340,11721372,"TERMINAL",0,0,"16645",,terminal_output +12341,11722216,"TERMINAL",0,0,"1\t",,terminal_output +12342,11722407,"TERMINAL",0,0,"27756",,terminal_output +12343,11723351,"TERMINAL",0,0,"2\t",,terminal_output +12344,11723443,"TERMINAL",0,0,"38867",,terminal_output +12345,11724273,"TERMINAL",0,0,"3\t",,terminal_output +12346,11724478,"TERMINAL",0,0,"49978",,terminal_output +12347,11725331,"TERMINAL",0,0,"5\t",,terminal_output +12348,11725521,"TERMINAL",0,0,"5404089",,terminal_output +12349,11726427,"TERMINAL",0,0,"6\t",,terminal_output +12350,11726583,"TERMINAL",0,0,"611920",,terminal_output +12351,11728632,"TERMINAL",0,0,"7\t",,terminal_output +12352,11728633,"TERMINAL",0,0,"722201",,terminal_output +12353,11728633,"TERMINAL",0,0,"8\t",,terminal_output +12354,11728897,"TERMINAL",0,0,"83312",,terminal_output +12355,11729661,"TERMINAL",0,0,"9\t",,terminal_output +12356,11729838,"TERMINAL",0,0,"94423",,terminal_output +12357,11730679,"TERMINAL",0,0,"40\t",,terminal_output +12358,11730711,"TERMINAL",0,0,"405534",,terminal_output +12359,11731676,"TERMINAL",0,0,"1\t",,terminal_output +12360,11731747,"TERMINAL",0,0,"16645",,terminal_output +12361,11732586,"TERMINAL",0,0,"2\t",,terminal_output +12362,11732795,"TERMINAL",0,0,"27756",,terminal_output +12363,11733707,"TERMINAL",0,0,"3\t",,terminal_output +12364,11733844,"TERMINAL",0,0,"38867",,terminal_output +12365,11734659,"TERMINAL",0,0,"4\t",,terminal_output +12366,11734943,"TERMINAL",0,0,"49978",,terminal_output +12367,11735736,"TERMINAL",0,0,"5\t",,terminal_output +12368,11735907,"TERMINAL",0,0,"5505089",,terminal_output +12369,11736739,"TERMINAL",0,0,"66",,terminal_output +12370,11736945,"TERMINAL",0,0,"611930",,terminal_output +12371,11737769,"TERMINAL",0,0,"7\t",,terminal_output +12372,11737989,"TERMINAL",0,0,"722301",,terminal_output +12373,11740693,"TERMINAL",0,0,"85534",,terminal_output +12374,11740693,"TERMINAL",0,0,"85",,terminal_output +12375,11742397,"TERMINAL",0,0,"517756",,terminal_output +12376,11742398,"TERMINAL",0,0,"51\t",,terminal_output +12377,11743421,"TERMINAL",0,0,"38867",,terminal_output +12378,11743421,"TERMINAL",0,0,"3\t",,terminal_output +12379,11744372,"TERMINAL",0,0,"4\t",,terminal_output +12380,11744373,"TERMINAL",0,0,"49978",,terminal_output +12381,11745469,"TERMINAL",0,0,"56:006:0089",,terminal_output +12382,11745470,"TERMINAL",0,0,"5\t",,terminal_output +12383,11746432,"TERMINAL",0,0,"611940",,terminal_output +12384,11746438,"TERMINAL",0,0,"6\t",,terminal_output +12385,11747513,"TERMINAL",0,0,"722401",,terminal_output +12386,11747514,"TERMINAL",0,0,"7\t",,terminal_output +12387,11748504,"TERMINAL",0,0,"83312",,terminal_output +12388,11748516,"TERMINAL",0,0,"8\t",,terminal_output +12389,11749541,"TERMINAL",0,0,"94423",,terminal_output +12390,11749542,"TERMINAL",0,0,"9\t",,terminal_output +12391,11750575,"TERMINAL",0,0,"2:005534",,terminal_output +12392,11750703,"TERMINAL",0,0,"2:00\t",,terminal_output +12393,11751627,"TERMINAL",0,0,"16645",,terminal_output +12394,11751669,"TERMINAL",0,0,"1\t",,terminal_output +12395,11752729,"TERMINAL",0,0,"27756",,terminal_output +12396,11752730,"TERMINAL",0,0,"2\t",,terminal_output +12397,11753686,"TERMINAL",0,0,"38867",,terminal_output +12398,11753723,"TERMINAL",0,0,"3\t",,terminal_output +12399,11754791,"TERMINAL",0,0,"49978",,terminal_output +12400,11754791,"TERMINAL",0,0,"4\t",,terminal_output +12401,11755773,"TERMINAL",0,0,"5101089",,terminal_output +12402,11755773,"TERMINAL",0,0,"5\t",,terminal_output +12403,11756806,"TERMINAL",0,0,"6\t",,terminal_output 
+12404,11756815,"TERMINAL",0,0,"611950",,terminal_output +12405,11757839,"TERMINAL",0,0,"7\t",,terminal_output +12406,11757846,"TERMINAL",0,0,"722501",,terminal_output +12407,11758885,"TERMINAL",0,0,"86",,terminal_output +12408,11758927,"TERMINAL",0,0,"83312",,terminal_output +12409,11759903,"TERMINAL",0,0,"9\t",,terminal_output +12410,11759927,"TERMINAL",0,0,"94423",,terminal_output +12411,11761034,"TERMINAL",0,0,"10\t",,terminal_output +12412,11761034,"TERMINAL",0,0,"105534",,terminal_output +12413,11762054,"TERMINAL",0,0,"1\t",,terminal_output +12414,11762055,"TERMINAL",0,0,"16645",,terminal_output +12415,11763078,"TERMINAL",0,0,"2\t",,terminal_output +12416,11763078,"TERMINAL",0,0,"27756",,terminal_output +12417,11764106,"TERMINAL",0,0,"3\t",,terminal_output +12418,11764106,"TERMINAL",0,0,"38867",,terminal_output +12419,11765130,"TERMINAL",0,0,"4\t",,terminal_output +12420,11765130,"TERMINAL",0,0,"49978",,terminal_output +12421,11766141,"TERMINAL",0,0,"57",,terminal_output +12422,11766158,"TERMINAL",0,0,"5202089",,terminal_output +12423,11767220,"TERMINAL",0,0,"6\t",,terminal_output +12424,11767220,"TERMINAL",0,0,"611910:00",,terminal_output +12425,11768217,"TERMINAL",0,0,"7\t",,terminal_output +12426,11768219,"TERMINAL",0,0,"7225:001",,terminal_output +12427,11769331,"TERMINAL",0,0,"8\t",,terminal_output +12428,11769338,"TERMINAL",0,0,"83312",,terminal_output +12429,11770292,"TERMINAL",0,0,"95534",,terminal_output +12430,11770292,"TERMINAL",0,0,"9\t",,terminal_output +12431,11771372,"TERMINAL",0,0,"21\t",,terminal_output +12432,11771373,"TERMINAL",0,0,"216645",,terminal_output +12433,11772397,"TERMINAL",0,0,"2\t",,terminal_output +12434,11772397,"TERMINAL",0,0,"27756",,terminal_output +12435,11773419,"TERMINAL",0,0,"3\t",,terminal_output +12436,11773420,"TERMINAL",0,0,"38867",,terminal_output +12437,11774448,"TERMINAL",0,0,"4\t",,terminal_output +12438,11774517,"TERMINAL",0,0,"49978",,terminal_output +12439,11775576,"TERMINAL",0,0,"5\t",,terminal_output +12440,11775576,"TERMINAL",0,0,"5303089",,terminal_output +12441,11776611,"TERMINAL",0,0,"6\t",,terminal_output +12442,11776612,"TERMINAL",0,0,"611910",,terminal_output +12443,11777633,"TERMINAL",0,0,"7\t",,terminal_output +12444,11777634,"TERMINAL",0,0,"722101",,terminal_output +12445,11778573,"TERMINAL",0,0,"8\t",,terminal_output +12446,11778614,"TERMINAL",0,0,"83312",,terminal_output +12447,11779713,"TERMINAL",0,0,"9\t",,terminal_output +12448,11779713,"TERMINAL",0,0,"94423",,terminal_output +12449,11780697,"TERMINAL",0,0,"30\t",,terminal_output +12450,11780698,"TERMINAL",0,0,"305534",,terminal_output +12451,11781679,"TERMINAL",0,0,"1\t",,terminal_output +12452,11781697,"TERMINAL",0,0,"16645",,terminal_output +12453,11782732,"TERMINAL",0,0,"2\t",,terminal_output +12454,11782736,"TERMINAL",0,0,"27756",,terminal_output +12455,11783767,"TERMINAL",0,0,"3\t",,terminal_output +12456,11783776,"TERMINAL",0,0,"38867",,terminal_output +12457,11784791,"TERMINAL",0,0,"4\t",,terminal_output +12458,11784816,"TERMINAL",0,0,"49978",,terminal_output +12459,11785870,"TERMINAL",0,0,"5\t",,terminal_output +12460,11785913,"TERMINAL",0,0,"5404089",,terminal_output +12461,11786937,"TERMINAL",0,0,"6\t",,terminal_output +12462,11786941,"TERMINAL",0,0,"611920",,terminal_output +12463,11787901,"TERMINAL",0,0,"7\t",,terminal_output +12464,11787940,"TERMINAL",0,0,"722201",,terminal_output +12465,11788943,"TERMINAL",0,0,"8\t",,terminal_output +12466,11788973,"TERMINAL",0,0,"83312",,terminal_output +12467,11789984,"TERMINAL",0,0,"9\t",,terminal_output 
+12468,11790020,"TERMINAL",0,0,"94423",,terminal_output +12469,11791047,"TERMINAL",0,0,"40\t",,terminal_output +12470,11791052,"TERMINAL",0,0,"405534",,terminal_output +12471,11792164,"TERMINAL",0,0,"1\t",,terminal_output +12472,11792165,"TERMINAL",0,0,"16645",,terminal_output +12473,11793085,"TERMINAL",0,0,"2\t",,terminal_output +12474,11793123,"TERMINAL",0,0,"27756",,terminal_output +12475,11794119,"TERMINAL",0,0,"3\t",,terminal_output +12476,11794159,"TERMINAL",0,0,"38867",,terminal_output +12477,11795156,"TERMINAL",0,0,"4\t",,terminal_output +12478,11795196,"TERMINAL",0,0,"49978",,terminal_output +12479,11796194,"TERMINAL",0,0,"5\t",,terminal_output +12480,11796231,"TERMINAL",0,0,"5505089",,terminal_output +12481,11797298,"TERMINAL",0,0,"68",,terminal_output +12482,11797303,"TERMINAL",0,0,"6223031",,terminal_output +12483,11798308,"TERMINAL",0,0,"7\t",,terminal_output +12484,11798314,"TERMINAL",0,0,"83312",,terminal_output +12485,11799299,"TERMINAL",0,0,"8\t",,terminal_output +12486,11799361,"TERMINAL",0,0,"94423",,terminal_output +12487,11800460,"TERMINAL",0,0,"50\t",,terminal_output +12488,11800461,"TERMINAL",0,0,"505534",,terminal_output +12489,11801369,"TERMINAL",0,0,"1\t",,terminal_output +12490,11801424,"TERMINAL",0,0,"16645",,terminal_output +12491,11802512,"TERMINAL",0,0,"2\t",,terminal_output +12492,11802512,"TERMINAL",0,0,"27756",,terminal_output +12493,11803531,"TERMINAL",0,0,"3\t",,terminal_output +12494,11803531,"TERMINAL",0,0,"38867",,terminal_output +12495,11804581,"TERMINAL",0,0,"4\t",,terminal_output +12496,11804697,"TERMINAL",0,0,"49978",,terminal_output +12497,11805778,"TERMINAL",0,0,"5\t",,terminal_output +12498,11805780,"TERMINAL",0,0,"57:007:0089",,terminal_output +12499,11806700,"TERMINAL",0,0,"6\t",,terminal_output +12500,11806779,"TERMINAL",0,0,"611940",,terminal_output +12501,11807740,"TERMINAL",0,0,"7\t",,terminal_output +12502,11807824,"TERMINAL",0,0,"722401",,terminal_output +12503,11808848,"TERMINAL",0,0,"8\t",,terminal_output +12504,11808864,"TERMINAL",0,0,"83312",,terminal_output +12505,11809838,"TERMINAL",0,0,"9\t",,terminal_output +12506,11809895,"TERMINAL",0,0,"94423",,terminal_output +12507,11810873,"TERMINAL",0,0,"3:00\t",,terminal_output +12508,11810944,"TERMINAL",0,0,"3:005534",,terminal_output +12509,11811881,"TERMINAL",0,0,"1\t",,terminal_output +12510,11811975,"TERMINAL",0,0,"16645",,terminal_output +12511,11812925,"TERMINAL",0,0,"2\t",,terminal_output +12512,11813015,"TERMINAL",0,0,"27756",,terminal_output +12513,11813951,"TERMINAL",0,0,"3\t",,terminal_output +12514,11814055,"TERMINAL",0,0,"38867",,terminal_output +12515,11814903,"TERMINAL",0,0,"4\t",,terminal_output +12516,11814918,"TERMINAL",0,0,"49978",,terminal_output +12517,11815928,"TERMINAL",0,0,"5\t",,terminal_output +12518,11815952,"TERMINAL",0,0,"5101089",,terminal_output +12519,11816928,"TERMINAL",0,0,"6\t",,terminal_output +12520,11816990,"TERMINAL",0,0,"611950",,terminal_output +12521,11817922,"TERMINAL",0,0,"7\t",,terminal_output +12522,11818063,"TERMINAL",0,0,"722501",,terminal_output +12523,11818989,"TERMINAL",0,0,"8\t",,terminal_output +12524,11819086,"TERMINAL",0,0,"83312",,terminal_output +12525,11819996,"TERMINAL",0,0,"9\t",,terminal_output +12526,11820140,"TERMINAL",0,0,"94423",,terminal_output +12527,11821153,"TERMINAL",0,0,"10\t",,terminal_output +12528,11821154,"TERMINAL",0,0,"105534",,terminal_output +12529,11822109,"TERMINAL",0,0,"1\t",,terminal_output +12530,11822288,"TERMINAL",0,0,"17756",,terminal_output +12531,11823193,"TERMINAL",0,0,"2\t",,terminal_output 
+12532,11823315,"TERMINAL",0,0,"38867",,terminal_output +12533,11824157,"TERMINAL",0,0,"3\t",,terminal_output +12534,11824349,"TERMINAL",0,0,"49978",,terminal_output +12535,11825180,"TERMINAL",0,0,"4\t",,terminal_output +12536,11825391,"TERMINAL",0,0,"5202089",,terminal_output +12537,11826287,"TERMINAL",0,0,"5\t",,terminal_output +12538,11826425,"TERMINAL",0,0,"61191:00",,terminal_output +12539,11827290,"TERMINAL",0,0,"6\t",,terminal_output +12540,11827460,"TERMINAL",0,0,"7226:001",,terminal_output +12541,11828294,"TERMINAL",0,0,"7\t",,terminal_output +12542,11828507,"TERMINAL",0,0,"83312",,terminal_output +12543,11829329,"TERMINAL",0,0,"9\t",,terminal_output +12544,11829540,"TERMINAL",0,0,"94423",,terminal_output +12545,11830361,"TERMINAL",0,0,"20\t",,terminal_output +12546,11830577,"TERMINAL",0,0,"205534",,terminal_output +12547,11831446,"TERMINAL",0,0,"1\t",,terminal_output +12548,11831637,"TERMINAL",0,0,"16645",,terminal_output +12549,11832511,"TERMINAL",0,0,"2\t",,terminal_output +12550,11832705,"TERMINAL",0,0,"27756",,terminal_output +12551,11833473,"TERMINAL",0,0,"3\t",,terminal_output +12552,11833692,"TERMINAL",0,0,"38867",,terminal_output +12553,11834508,"TERMINAL",0,0,"49",,terminal_output +12554,11834742,"TERMINAL",0,0,"49978",,terminal_output +12555,11835577,"TERMINAL",0,0,"5\t",,terminal_output +12556,11835765,"TERMINAL",0,0,"5303089",,terminal_output +12557,11836671,"TERMINAL",0,0,"6\t",,terminal_output +12558,11836812,"TERMINAL",0,0,"611910",,terminal_output +12559,11837619,"TERMINAL",0,0,"7\t",,terminal_output +12560,11837859,"TERMINAL",0,0,"722101",,terminal_output +12561,11838767,"TERMINAL",0,0,"8\t",,terminal_output +12562,11838912,"TERMINAL",0,0,"83312",,terminal_output +12563,11839689,"TERMINAL",0,0,"9\t",,terminal_output +12564,11839952,"TERMINAL",0,0,"94423",,terminal_output +12565,11840732,"TERMINAL",0,0,"30\t",,terminal_output +12566,11840971,"TERMINAL",0,0,"305534",,terminal_output +12567,11841769,"TERMINAL",0,0,"1\t",,terminal_output +12568,11842149,"TERMINAL",0,0,"16645",,terminal_output +12569,11842807,"TERMINAL",0,0,"2\t",,terminal_output +12570,11843052,"TERMINAL",0,0,"27756",,terminal_output +12571,11843847,"TERMINAL",0,0,"3\t",,terminal_output +12572,11844084,"TERMINAL",0,0,"38867",,terminal_output +12573,11844885,"TERMINAL",0,0,"4\t",,terminal_output +12574,11845128,"TERMINAL",0,0,"49978",,terminal_output +12575,11846022,"TERMINAL",0,0,"5\t",,terminal_output +12576,11846159,"TERMINAL",0,0,"5404089",,terminal_output +12577,11847051,"TERMINAL",0,0,"6\t",,terminal_output +12578,11847200,"TERMINAL",0,0,"611920",,terminal_output +12579,11848078,"TERMINAL",0,0,"7\t",,terminal_output +12580,11848235,"TERMINAL",0,0,"722201",,terminal_output +12581,11849129,"TERMINAL",0,0,"8\t",,terminal_output +12582,11849283,"TERMINAL",0,0,"84423",,terminal_output +12583,11850063,"TERMINAL",0,0,"9\t",,terminal_output +12584,11850443,"TERMINAL",0,0,"405534",,terminal_output +12585,11851099,"TERMINAL",0,0,"40\t",,terminal_output +12586,11851456,"TERMINAL",0,0,"16645",,terminal_output +12587,11852186,"TERMINAL",0,0,"1\t",,terminal_output +12588,11852490,"TERMINAL",0,0,"27756",,terminal_output +12589,11853300,"TERMINAL",0,0,"2\t",,terminal_output +12590,11853465,"TERMINAL",0,0,"38867",,terminal_output +12591,11854363,"TERMINAL",0,0,"3\t",,terminal_output +12592,11854501,"TERMINAL",0,0,"49978",,terminal_output +12593,11855254,"TERMINAL",0,0,"4\t",,terminal_output +12594,11855705,"TERMINAL",0,0,"5505089",,terminal_output +12595,11856378,"TERMINAL",0,0,"520",,terminal_output 
+12596,11856587,"TERMINAL",0,0,"611930",,terminal_output +12597,11857394,"TERMINAL",0,0,"7\t",,terminal_output +12598,11857707,"TERMINAL",0,0,"722301",,terminal_output +12599,11858428,"TERMINAL",0,0,"8\t",,terminal_output +12600,11858695,"TERMINAL",0,0,"83312",,terminal_output +12601,11859404,"TERMINAL",0,0,"9\t",,terminal_output +12602,11859750,"TERMINAL",0,0,"94423",,terminal_output +12603,11860465,"TERMINAL",0,0,"50\t",,terminal_output +12604,11860758,"TERMINAL",0,0,"505534",,terminal_output +12605,11861591,"TERMINAL",0,0,"1\t",,terminal_output +12606,11861792,"TERMINAL",0,0,"16645",,terminal_output +12607,11864357,"TERMINAL",0,0,"29978",,terminal_output +12608,11864357,"TERMINAL",0,0,"215",,terminal_output +12609,11865422,"TERMINAL",0,0,"58:008:0089",,terminal_output +12610,11865422,"TERMINAL",0,0,"5\t",,terminal_output +12611,11866404,"TERMINAL",0,0,"611940",,terminal_output +12612,11866405,"TERMINAL",0,0,"6\t",,terminal_output +12613,11867533,"TERMINAL",0,0,"722401",,terminal_output +12614,11867533,"TERMINAL",0,0,"7\t",,terminal_output +12615,11868241,"TERMINAL",0,0,"watch",,terminal_focus +12616,11868472,"TERMINAL",0,0,"83312",,terminal_output +12617,11868506,"TERMINAL",0,0,"8\t",,terminal_output +12618,11869483,"TERMINAL",0,0,"9\t",,terminal_output +12619,11869489,"TERMINAL",0,0,"94423",,terminal_output +12620,11870603,"TERMINAL",0,0,"4:00\t",,terminal_output +12621,11870610,"TERMINAL",0,0,"4:005534",,terminal_output +12622,11871633,"TERMINAL",0,0,"1\t",,terminal_output +12623,11871640,"TERMINAL",0,0,"16645",,terminal_output +12624,11872692,"TERMINAL",0,0,"2\t",,terminal_output +12625,11872692,"TERMINAL",0,0,"27756",,terminal_output +12626,11873705,"TERMINAL",0,0,"3\t",,terminal_output +12627,11873705,"TERMINAL",0,0,"38867",,terminal_output +12628,11874701,"TERMINAL",0,0,"4\t",,terminal_output +12629,11874704,"TERMINAL",0,0,"49978",,terminal_output +12630,11875723,"TERMINAL",0,0,"5\t",,terminal_output +12631,11875738,"TERMINAL",0,0,"5101089",,terminal_output +12632,11876753,"TERMINAL",0,0,"6\t",,terminal_output +12633,11876765,"TERMINAL",0,0,"611950",,terminal_output +12634,11877794,"TERMINAL",0,0,"7\t",,terminal_output +12635,11877801,"TERMINAL",0,0,"722501",,terminal_output +12636,11878828,"TERMINAL",0,0,"8\t",,terminal_output +12637,11878845,"TERMINAL",0,0,"83312",,terminal_output +12638,11879081,"TERMINAL",0,0,"watch",,terminal_focus +12639,11879871,"TERMINAL",0,0,"9\t",,terminal_output +12640,11879907,"TERMINAL",0,0,"94423",,terminal_output +12641,11880901,"TERMINAL",0,0,"10\t",,terminal_output +12642,11880926,"TERMINAL",0,0,"105534",,terminal_output +12643,11881943,"TERMINAL",0,0,"1\t",,terminal_output +12644,11881965,"TERMINAL",0,0,"16645",,terminal_output +12645,11882978,"TERMINAL",0,0,"2\t",,terminal_output +12646,11883006,"TERMINAL",0,0,"27756",,terminal_output +12647,11884104,"TERMINAL",0,0,"3\t",,terminal_output +12648,11884111,"TERMINAL",0,0,"38867",,terminal_output +12649,11885062,"TERMINAL",0,0,"4\t",,terminal_output +12650,11885090,"TERMINAL",0,0,"49978",,terminal_output +12651,11886090,"TERMINAL",0,0,"5\t",,terminal_output +12652,11886141,"TERMINAL",0,0,"5202089",,terminal_output +12653,11887124,"TERMINAL",0,0,"6\t",,terminal_output +12654,11887181,"TERMINAL",0,0,"61192:00",,terminal_output +12655,11888163,"TERMINAL",0,0,"7\t",,terminal_output +12656,11888218,"TERMINAL",0,0,"7227:001",,terminal_output +12657,11889201,"TERMINAL",0,0,"8\t",,terminal_output +12658,11889260,"TERMINAL",0,0,"83312",,terminal_output +12659,11890246,"TERMINAL",0,0,"9\t",,terminal_output 
+12660,11890300,"TERMINAL",0,0,"95534",,terminal_output +12661,11891390,"TERMINAL",0,0,"20\t",,terminal_output +12662,11891390,"TERMINAL",0,0,"216645",,terminal_output +12663,11892318,"TERMINAL",0,0,"2\t",,terminal_output +12664,11892380,"TERMINAL",0,0,"27756",,terminal_output +12665,11893353,"TERMINAL",0,0,"3\t",,terminal_output +12666,11893424,"TERMINAL",0,0,"38867",,terminal_output +12667,11894395,"TERMINAL",0,0,"4\t",,terminal_output +12668,11894490,"TERMINAL",0,0,"49978",,terminal_output +12669,11895426,"TERMINAL",0,0,"5\t",,terminal_output +12670,11895528,"TERMINAL",0,0,"5303089",,terminal_output +12671,11896515,"TERMINAL",0,0,"6\t",,terminal_output +12672,11896570,"TERMINAL",0,0,"611910",,terminal_output +12673,11897537,"TERMINAL",0,0,"7\t",,terminal_output +12674,11897590,"TERMINAL",0,0,"722101",,terminal_output +12675,11898562,"TERMINAL",0,0,"8\t",,terminal_output +12676,11898706,"TERMINAL",0,0,"83312",,terminal_output +12677,11899571,"TERMINAL",0,0,"9\t",,terminal_output +12678,11899715,"TERMINAL",0,0,"94423",,terminal_output +12679,11900720,"TERMINAL",0,0,"30\t",,terminal_output +12680,11900721,"TERMINAL",0,0,"305534",,terminal_output +12681,11901643,"TERMINAL",0,0,"1\t",,terminal_output +12682,11901746,"TERMINAL",0,0,"16645",,terminal_output +12683,11902763,"TERMINAL",0,0,"2\t",,terminal_output +12684,11902777,"TERMINAL",0,0,"27756",,terminal_output +12685,11903743,"TERMINAL",0,0,"3\t",,terminal_output +12686,11903840,"TERMINAL",0,0,"38867",,terminal_output +12687,11904824,"TERMINAL",0,0,"4\t",,terminal_output +12688,11904856,"TERMINAL",0,0,"49978",,terminal_output +12689,11905834,"TERMINAL",0,0,"5\t",,terminal_output +12690,11905881,"TERMINAL",0,0,"5404089",,terminal_output +12691,11906840,"TERMINAL",0,0,"6\t",,terminal_output +12692,11906940,"TERMINAL",0,0,"611920",,terminal_output +12693,11907878,"TERMINAL",0,0,"7\t",,terminal_output +12694,11907979,"TERMINAL",0,0,"722201",,terminal_output +12695,11908906,"TERMINAL",0,0,"8\t",,terminal_output +12696,11909021,"TERMINAL",0,0,"83312",,terminal_output +12697,11909944,"TERMINAL",0,0,"9\t",,terminal_output +12698,11910030,"TERMINAL",0,0,"94423",,terminal_output +12699,11910982,"TERMINAL",0,0,"40\t",,terminal_output +12700,11911070,"TERMINAL",0,0,"405534",,terminal_output +12701,11912081,"TERMINAL",0,0,"1\t",,terminal_output +12702,11912109,"TERMINAL",0,0,"16645",,terminal_output +12703,11913099,"TERMINAL",0,0,"2\t",,terminal_output +12704,11913156,"TERMINAL",0,0,"27756",,terminal_output +12705,11914086,"TERMINAL",0,0,"3\t",,terminal_output +12706,11914256,"TERMINAL",0,0,"38867",,terminal_output +12707,11915154,"TERMINAL",0,0,"4\t",,terminal_output +12708,11915226,"TERMINAL",0,0,"49978",,terminal_output +12709,11916158,"TERMINAL",0,0,"5\t",,terminal_output +12710,11916300,"TERMINAL",0,0,"5505089",,terminal_output +12711,11917310,"TERMINAL",0,0,"6\t",,terminal_output +12712,11917310,"TERMINAL",0,0,"6223031",,terminal_output +12713,11918339,"TERMINAL",0,0,"7\t",,terminal_output +12714,11918340,"TERMINAL",0,0,"83312",,terminal_output +12715,11919357,"TERMINAL",0,0,"8\t",,terminal_output +12716,11919387,"TERMINAL",0,0,"94423",,terminal_output +12717,11920302,"TERMINAL",0,0,"9\t",,terminal_output +12718,11920439,"TERMINAL",0,0,"505534",,terminal_output +12719,11921395,"TERMINAL",0,0,"51\t",,terminal_output +12720,11921496,"TERMINAL",0,0,"16645",,terminal_output +12721,11922371,"TERMINAL",0,0,"2\t",,terminal_output +12722,11922507,"TERMINAL",0,0,"27756",,terminal_output +12723,11923447,"TERMINAL",0,0,"3\t",,terminal_output 
+12724,11923541,"TERMINAL",0,0,"38867",,terminal_output +12725,11924443,"TERMINAL",0,0,"4\t",,terminal_output +12726,11924573,"TERMINAL",0,0,"49978",,terminal_output +12727,11925480,"TERMINAL",0,0,"5\t",,terminal_output +12728,11925614,"TERMINAL",0,0,"59:009:0089",,terminal_output +12729,11926549,"TERMINAL",0,0,"6\t",,terminal_output +12730,11926663,"TERMINAL",0,0,"611940",,terminal_output +12731,11927640,"TERMINAL",0,0,"7\t",,terminal_output +12732,11927679,"TERMINAL",0,0,"722401",,terminal_output +12733,11928595,"TERMINAL",0,0,"8\t",,terminal_output +12734,11928739,"TERMINAL",0,0,"83312",,terminal_output +12735,11929633,"TERMINAL",0,0,"9\t",,terminal_output +12736,11929783,"TERMINAL",0,0,"94423",,terminal_output +12737,11930678,"TERMINAL",0,0,"5:00\t",,terminal_output +12738,11930816,"TERMINAL",0,0,"5:005534",,terminal_output +12739,11931737,"TERMINAL",0,0,"1\t",,terminal_output +12740,11931820,"TERMINAL",0,0,"16645",,terminal_output +12741,11932733,"TERMINAL",0,0,"2\t",,terminal_output +12742,11932869,"TERMINAL",0,0,"27756",,terminal_output +12743,11933786,"TERMINAL",0,0,"3\t",,terminal_output +12744,11933900,"TERMINAL",0,0,"38867",,terminal_output +12745,11934810,"TERMINAL",0,0,"4\t",,terminal_output +12746,11934986,"TERMINAL",0,0,"49978",,terminal_output +12747,11935841,"TERMINAL",0,0,"5\t",,terminal_output +12748,11935972,"TERMINAL",0,0,"5101089",,terminal_output +12749,11936874,"TERMINAL",0,0,"6\t",,terminal_output +12750,11937053,"TERMINAL",0,0,"611950",,terminal_output +12751,11937643,"TERMINAL",0,0,"watch",,terminal_focus +12752,11937906,"TERMINAL",0,0,"7\t",,terminal_output +12753,11938088,"TERMINAL",0,0,"722501",,terminal_output +12754,11938957,"TERMINAL",0,0,"8\t",,terminal_output +12755,11939094,"TERMINAL",0,0,"83312",,terminal_output +12756,11939982,"TERMINAL",0,0,"9\t",,terminal_output +12757,11940133,"TERMINAL",0,0,"94423",,terminal_output +12758,11941066,"TERMINAL",0,0,"10\t",,terminal_output +12759,11941204,"TERMINAL",0,0,"105534",,terminal_output +12760,11942079,"TERMINAL",0,0,"1\t",,terminal_output +12761,11942212,"TERMINAL",0,0,"16645",,terminal_output +12762,11943190,"TERMINAL",0,0,"2\t",,terminal_output +12763,11943253,"TERMINAL",0,0,"27756",,terminal_output +12764,11944234,"TERMINAL",0,0,"3\t",,terminal_output +12765,11944286,"TERMINAL",0,0,"39978",,terminal_output +12766,11945161,"TERMINAL",0,0,"4\t",,terminal_output +12767,11945341,"TERMINAL",0,0,"5202089",,terminal_output +12768,11946197,"TERMINAL",0,0,"5\t",,terminal_output +12769,11946373,"TERMINAL",0,0,"61193:00",,terminal_output +12770,11947306,"TERMINAL",0,0,"6\t",,terminal_output +12771,11947403,"TERMINAL",0,0,"7228:001",,terminal_output +12772,11948338,"TERMINAL",0,0,"7\t",,terminal_output +12773,11948436,"TERMINAL",0,0,"83312",,terminal_output +12774,11949303,"TERMINAL",0,0,"8\t",,terminal_output +12775,11949480,"TERMINAL",0,0,"94423",,terminal_output +12776,11950389,"TERMINAL",0,0,"20\t",,terminal_output +12777,11950528,"TERMINAL",0,0,"205534",,terminal_output +12778,11951397,"TERMINAL",0,0,"1\t",,terminal_output +12779,11951553,"TERMINAL",0,0,"16645",,terminal_output +12780,11952524,"TERMINAL",0,0,"2\t",,terminal_output +12781,11952615,"TERMINAL",0,0,"27756",,terminal_output +12782,11953445,"TERMINAL",0,0,"3\t",,terminal_output +12783,11953623,"TERMINAL",0,0,"38867",,terminal_output +12784,11954500,"TERMINAL",0,0,"4\t",,terminal_output +12785,11954654,"TERMINAL",0,0,"49978",,terminal_output +12786,11955517,"TERMINAL",0,0,"5\t",,terminal_output +12787,11955745,"TERMINAL",0,0,"5303089",,terminal_output 
+12788,11956643,"TERMINAL",0,0,"6\t",,terminal_output +12789,11956740,"TERMINAL",0,0,"611910",,terminal_output +12790,11957523,"TERMINAL",0,0,"Step 0, loss: 12.949273109436035\r\nStep 1, loss: 11.430440902709961\r\nStep 2, loss: 10.384503364562988\r\nStep 3, loss: 9.531347274780273\r\nStep 4, loss: 9.25166130065918\r\nStep 5, loss: 8.985861778259277\r\nStep 6, loss: 8.578680992126465\r\nStep 7, loss: 8.340399742126465\r\nStep 8, loss: 8.152952194213867\r\nStep 9, loss: 8.026482582092285\r\nStep 10, loss: 7.455098628997803\r\nStep 11, loss: 7.713717937469482\r\nStep 12, loss: 7.560019493103027\r\nStep 13, loss: 7.620123863220215\r\nStep 14, loss: 7.55232572555542\r\nStep 15, loss: 7.407571315765381\r\nStep 16, loss: 7.145195960998535\r\nStep 17, loss: 7.073484897613525\r\nStep 18, loss: 6.959710597991943\r\nStep 19, loss: 7.133389949798584\r\nStep 20, loss: 6.902515411376953\r\nStep 21, loss: 6.730587959289551\r\nStep 22, loss: 6.896009922027588\r\nStep 23, loss: 6.658592700958252\r\nStep 24, loss: 6.616141319274902\r\nStep 25, loss: 6.4268083572387695\r\nStep 26, loss: 6.476139545440674\r\nStep 27, loss: 6.152318000793457\r\nStep 28, loss: 6.529809474945068\r\nStep 29, loss: 6.423544883728027\r\nStep 30, loss: 6.3359222412109375\r\nStep 31, loss: 6.279480934143066\r\nStep 32, loss: 6.247717380523682\r\nStep 33, loss: 6.217741966247559\r\nStep 34, loss: 6.116343975067139\r\nStep 35, loss: 6.103456974029541\r\nStep 36, loss: 6.241140365600586\r\nStep 37, loss: 6.257292747497559\r\nStep 38, loss: 6.198233127593994\r\nStep 39, loss: 6.313640117645264\r\nStep 40, loss: 6.024031639099121\r\nStep 41, loss: 6.1269941329956055\r\nStep 42, loss: 6.0780029296875\r\nStep 43, loss: 6.035647392272949\r\nStep 44, loss: 6.149398326873779\r\nStep 45, loss: 6.197230339050293\r\nStep 46, loss: 6.02092170715332\r\nStep 47, loss: 5.914671897888184\r\nStep 48, loss: 6.086199760437012\r\nStep 49, loss: 6.124152660369873\r\nStep 50, loss: 5.895163536071777\r\nStep 51, loss: 6.1387505531311035\r\nStep 52, loss: 5.88333797454834\r\nStep 53, loss: 5.988974571228027\r\nStep 54, loss: 5.96379280090332\r\nStep 55, loss: 5.8824992179870605\r\nStep 56, loss: 6.0373430252075195\r\nStep 57, loss: 5.906609058380127\r\nStep 58, loss: 5.9038238525390625\r\nStep 59, loss: 5.693182945251465\r\nStep 60, loss: 5.782966136932373\r\nStep 61, loss: 6.035395622253418\r\nStep 0, loss: 12.949273109436035\r\nStep 1, loss: 11.430440902709961\r\nStep 2, loss: 10.384503364562988\r\nStep 3, loss: 9.531347274780273\r\nStep 4, loss: 9.25166130065918\r\nStep 5, loss: 8.985861778259277\r\nStep 6, loss: 8.578680992126465\r\nStep 7, loss: 8.340399742126465\r\nStep 8, loss: 8.152952194213867\r\nStep 9, loss: 8.026482582092285\r\nStep 10, loss: 7.455098628997803\r\nStep 11, loss: 7.713717937469482\r\nStep 12, loss: 7.560019493103027\r\nStep 13, loss: 7.620123863220215\r\nStep 14, loss: 7.55232572555542\r\nStep 15, loss: 7.407571315765381\r\nStep 16, loss: 7.145195960998535\r\nStep 17, loss: 7.073484897613525\r\nStep 18, loss: 6.959710597991943\r\nStep 19, loss: 7.133389949798584\r\nStep 20, loss: 6.902515411376953\r\nStep 21, loss: 6.730587959289551\r\nStep 22, loss: 6.896009922027588\r\nStep 23, loss: 6.658592700958252\r\nStep 24, loss: 6.616141319274902\r\nStep 25, loss: 6.4268083572387695\r\nStep 26, loss: 6.476139545440674\r\nStep 27, loss: 6.152318000793457\r\nStep 28, loss: 6.529809474945068\r\nStep 29, loss: 6.423544883728027\r\nStep 30, loss: 6.3359222412109375\r\nStep 62, loss: 5.777761936187744\r\nStep 63, loss: 5.839138507843018\r\nStep 
64, loss: 5.780827045440674\r\nStep 65, loss: 5.784126281738281\r\nStep 66, loss: 5.7698540687561035\r\nStep 67, loss: 5.763111114501953\r\nStep 68, loss: 5.864319801330566\r\nStep 69, loss: 5.71303129196167\r\nStep 70, loss: 5.857616901397705\r\nStep 71, loss: 5.914539337158203\r\nStep 72, loss: 5.763561248779297\r\nStep 73, loss: 5.778350830078125\r\nStep 74, loss: 5.857618808746338\r\nStep 75, loss: 5.826387882232666\r\nStep 76, loss: 5.937601566314697\r\nStep 77, loss: 5.786263942718506\r\nStep 78, loss: 6.130350589752197\r\nStep 79, loss: 5.7508978843688965\r\nStep 80, loss: 5.663431167602539\r\nStep 81, loss: 5.876394271850586\r\nStep 82, loss: 5.745809555053711\r\nStep 83, loss: 5.695242404937744\r\nStep 84, loss: 5.774994850158691\r\nStep 85, loss: 5.626899242401123\r\nStep 86, loss: 5.6362504959106445\r\nStep 87, loss: 5.740806579589844\r\nStep 88, loss: 5.581630229949951\r\nStep 89, loss: 5.6040849685668945\r\nStep 90, loss: 5.731827259063721\r\nStep 91, loss: 5.654018878936768\r\nStep 0, loss: 12.949273109436035\r\nStep 1, loss: 11.430440902709961\r\nStep 2, loss: 10.384503364562988\r\nStep 3, loss: 9.531347274780273\r\nStep 4, loss: 9.25166130065918\r\nStep 5, loss: 8.985861778259277\r\nStep 6, loss: 8.578680992126465\r\nStep 7, loss: 8.340399742126465\r\nStep 8, loss: 8.152952194213867\r\nStep 9, loss: 8.026482582092285\r\nStep 10, loss: 7.455098628997803\r\nStep 11, loss: 7.713717937469482\r\nStep 12, loss: 7.560019493103027\r\nStep 13, loss: 7.620123863220215\r\nStep 14, loss: 7.55232572555542\r\nStep 15, loss: 7.407571315765381\r\nStep 16, loss: 7.145195960998535\r\nStep 17, loss: 7.073484897613525\r\nStep 18, loss: 6.959710597991943\r\nStep 19, loss: 7.133389949798584\r\nStep 20, loss: 6.902515411376953\r\nStep 21, loss: 6.730587959289551\r\nStep 22, loss: 6.896009922027588\r\nStep 23, loss: 6.658592700958252\r\nStep 24, loss: 6.616141319274902\r\nStep 25, loss: 6.4268083572387695\r\nStep 26, loss: 6.476139545440674\r\nStep 27, loss: 6.152318000793457\r\nStep 28, loss: 6.529809474945068\r\nStep 29, loss: 6.423544883728027\r\nStep 30, loss: 6.3359222412109375\r\nStep 0, loss: 12.949273109436035\r\nStep 1, loss: 11.430440902709961\r\nStep 2, loss: 10.384503364562988\r\nStep 3, loss: 9.531347274780273\r\nStep 4, loss: 9.25166130065918\r\nStep 5, loss: 8.985861778259277\r\nStep 6, loss: 8.578680992126465\r\nStep 7, loss: 8.340399742126465\r\nStep 8, loss: 8.152952194213867\r\nStep 9, loss: 8.026482582092285\r\nStep 10, loss: 7.455098628997803\r\nStep 11, loss: 7.713717937469482\r\nStep 12, loss: 7.560019493103027\r\nStep 13, loss: 7.620123863220215\r\nStep 14, loss: 7.55232572555542\r\nStep 15, loss: 7.407571315765381\r\nStep 16, loss: 7.145195960998535\r\nStep 17, loss: 7.073484897613525\r\nStep 18, loss: 6.959710597991943\r\nStep 19, loss: 7.133389949798584\r\nStep 20, loss: 6.902515411376953\r\nStep 21, loss: 6.730587959289551\r\nStep 22, loss: 6.896009922027588\r\nStep 23, loss: 6.658592700958252\r\nStep 24, loss: 6.616141319274902\r\nStep 25, loss: 6.4268083572387695\r\nStep 26, loss: 6.476139545440674\r\nStep 27, loss: 6.152318000793457\r\nStep 28, loss: 6.529809474945068\r\nStep 29, loss: 6.423544883728027\r\nStep 30, loss: 6.3359222412109375\r\nStep 0, loss: 12.949273109436035\r\nStep 1, loss: 11.430440902709961\r\nStep 2, loss: 10.384503364562988\r\nStep 3, loss: 9.531347274780273\r\nStep 4, loss: 9.25166130065918\r\nStep 5, loss: 8.985861778259277\r\nStep 6, loss: 8.578680992126465\r\nStep 7, loss: 8.340399742126465\r\nStep 8, loss: 8.152952194213867\r\nStep 9, loss: 
8.026482582092285\r\nStep 10, loss: 7.455098628997803\r\nStep 11, loss: 7.713717937469482\r\nStep 12, loss: 7.560019493103027\r\nStep 13, loss: 7.620123863220215\r\nStep 14, loss: 7.55232572555542\r\nStep 15, loss: 7.407571315765381\r\nStep 16, loss: 7.145195960998535\r\nStep 17, loss: 7.073484897613525\r\nStep 18, loss: 6.959710597991943\r\nStep 19, loss: 7.133389949798584\r\nStep 20, loss: 6.902515411376953\r\nStep 21, loss: 6.730587959289551\r\nStep 22, loss: 6.896009922027588\r\nStep 23, loss: 6.658592700958252\r\nStep 24, loss: 6.616141319274902\r\nStep 25, loss: 6.4268083572387695\r\nStep 26, loss: 6.476139545440674\r\nStep 27, loss: 6.152318000793457\r\nStep 28, loss: 6.529809474945068\r\nStep 29, loss: 6.423544883728027\r\nStep 30, loss: 6.3359222412109375\r\nStep 0, loss: 12.949273109436035\r\nStep 1, loss: 11.430440902709961\r\nStep 2, loss: 10.384503364562988\r\nStep 3, loss: 9.531347274780273\r\nStep 4, loss: 9.25166130065918\r\nStep 5, loss: 8.985861778259277\r\nStep 6, loss: 8.578680992126465\r\nStep 7, loss: 8.340399742126465\r\nStep 8, loss: 8.152952194213867\r\nStep 9, loss: 8.026482582092285\r\nStep 10, loss: 7.455098628997803\r\nStep 11, loss: 7.713717937469482\r\nStep 12, loss: 7.560019493103027\r\nStep 13, loss: 7.620123863220215\r\nStep 14, loss: 7.55232572555542\r\nStep 15, loss: 7.407571315765381\r\nStep 16, loss: 7.145195960998535\r\nStep 17, loss: 7.073484897613525\r\nStep 18, loss: 6.959710597991943\r\nStep 19, loss: 7.133389949798584\r\nStep 20, loss: 6.902515411376953\r\nStep 21, loss: 6.730587959289551\r\nStep 22, loss: 6.896009922027588\r\nStep 23, loss: 6.658592700958252\r\nStep 24, loss: 6.616141319274902\r\nStep 25, loss: 6.4268083572387695\r\nStep 26, loss: 6.476139545440674\r\nStep 27, loss: 6.152318000793457\r\nStep 28, loss: 6.529809474945068\r\nStep 29, loss: 6.423544883728027\r\nStep 30, loss: 6.3359222412109375\r\nStep 0, loss: 12.949273109436035\r\nStep 1, loss: 11.430440902709961\r\nStep 2, loss: 10.384503364562988\r\nStep 3, loss: 9.531347274780273\r\nStep 4, loss: 9.25166130065918\r\nStep 5, loss: 8.985861778259277\r\nStep 6, loss: 8.578680992126465\r\nStep 7, loss: 8.340399742126465\r\nStep 8, loss: 8.152952194213867\r\nStep 9, loss: 8.026482582092285\r\nStep 10, loss: 7.455098628997803\r\nStep 11, loss: 7.713717937469482\r\nStep 12, loss: 7.560019493103027\r\nStep 13, loss: 7.620123863220215\r\nStep 14, loss: 7.55232572555542\r\nStep 15, loss: 7.407571315765381\r\nStep 16, loss: 7.145195960998535\r\nStep 17, loss: 7.073484897613525\r\nStep 18, loss: 6.959710597991943\r\nStep 19, loss: 7.133389949798584\r\nStep 20, loss: 6.902515411376953\r\nStep 21, loss: 6.730587959289551\r\nStep 22, loss: 6.896009922027588\r\nStep 23, loss: 6.658592700958252\r\nStep 24, loss: 6.616141319274902\r\nStep 25, loss: 6.4268083572387695\r\nStep 26, loss: 6.476139545440674\r\nStep 27, loss: 6.152318000793457\r\nStep 28, loss: 6.529809474945068\r\nStep 29, loss: 6.423544883728027\r\nStep 30, loss: 6.3359222412109375\r\nStep 0, loss: 12.949273109436035\r\nStep 1, loss: 11.430440902709961\r\nStep 2, loss: 10.384503364562988\r\nStep 3, loss: 9.531347274780273\r\nStep 4, loss: 9.25166130065918\r\nStep 5, loss: 8.985861778259277\r\nStep 6, loss: 8.578680992126465\r\nStep 7, loss: 8.340399742126465\r\nStep 8, loss: 8.152952194213867\r\nStep 9, loss: 8.026482582092285\r\nStep 10, loss: 7.455098628997803\r\nStep 11, loss: 7.713717937469482\r\nStep 12, loss: 7.560019493103027\r\nStep 13, loss: 7.620123863220215\r\nStep 14, loss: 7.55232572555542\r\nStep 15, loss: 
7.407571315765381\r\nStep 16, loss: 7.145195960998535\r\nStep 17, loss: 7.073484897613525\r\nStep 18, loss: 6.959710597991943\r\nStep 19, loss: 7.133389949798584\r\nStep 20, loss: 6.902515411376953\r\nStep 21, loss: 6.730587959289551\r\nStep 22, loss: 6.896009922027588\r\nStep 23, loss: 6.658592700958252\r\nStep 24, loss: 6.616141319274902\r\nStep 25, loss: 6.4268083572387695\r\nStep 26, loss: 6.476139545440674\r\nStep 27, loss: 6.152318000793457\r\nStep 28, loss: 6.529809474945068\r\nStep 29, loss: 6.423544883728027\r\nStep 30, loss: 6.3359222412109375\r\nStep 31, loss: 6.279480934143066\r\nStep 32, loss: 6.247717380523682\r\nStep 33, loss: 6.217741966247559\r\nStep 34, loss: 6.116343975067139\r\nStep 35, loss: 6.103456974029541\r\nStep 36, loss: 6.241140365600586\r\nStep 37, loss: 6.257292747497559\r\nStep 38, loss: 6.198233127593994\r\nStep 39, loss: 6.313640117645264\r\nStep 40, loss: 6.024031639099121\r\nStep 41, loss: 6.1269941329956055\r\nStep 42, loss: 6.0780029296875\r\nStep 43, loss: 6.035647392272949\r\nStep 44, loss: 6.149398326873779\r\nStep 45, loss: 6.197230339050293\r\nStep 46, loss: 6.02092170715332\r\nStep 47, loss: 5.914671897888184\r\nStep 48, loss: 6.086199760437012\r\nStep 49, loss: 6.124152660369873\r\nStep 50, loss: 5.895163536071777\r\nStep 51, loss: 6.1387505531311035\r\nStep 52, loss: 5.88333797454834\r\nStep 53, loss: 5.988974571228027\r\nStep 54, loss: 5.96379280090332\r\nStep 55, loss: 5.8824992179870605\r\nStep 56, loss: 6.0373430252075195\r\nStep 57, loss: 5.906609058380127\r\nStep 58, loss: 5.9038238525390625\r\nStep 59, loss: 5.693182945251465\r\nStep 60, loss: 5.782966136932373\r\nStep 61, loss: 6.035395622253418\r\nStep 92, loss: 5.435694694519043\r\nStep 93, loss: 5.665273189544678\r\nStep 94, loss: 5.677303791046143\r\nStep 95, loss: 5.618515968322754\r\nStep 96, loss: 5.68934965133667\r\nStep 97, loss: 5.642867565155029\r\nStep 98, loss: 5.6840128898620605\r\nStep 99, loss: 5.468347072601318\r\nStep 100, loss: 5.604738235473633\r\nStep 101, loss: 5.668688774108887\r\nStep 102, loss: 5.606945037841797\r\nStep 103, loss: 5.629029750823975\r\nStep 104, loss: 5.692020893096924\r\nStep 105, loss: 5.50466251373291\r\nStep 106, loss: 5.762575626373291\r\nStep 107, loss: 5.649835586547852\r\nStep 108, loss: 5.581820011138916\r\nStep 109, loss: 5.673094272613525\r\nStep 110, loss: 5.519227504730225\r\nStep 111, loss: 5.321502208709717\r\nStep 112, loss: 5.587049961090088\r\nStep 113, loss: 5.682382106781006\r\nStep 114, loss: 5.476174354553223\r\nStep 115, loss: 5.305208206176758\r\nStep 116, loss: 5.514387607574463\r\nStep 117, loss: 5.555805683135986\r\nStep 118, loss: 5.512825012207031\r\nStep 119, loss: 5.548687934875488\r\nStep 120, loss: 5.7550129890441895\r\nStep 121, loss: 5.477591037750244\r\nStep 31, loss: 6.279480934143066\r\nStep 32, loss: 6.247717380523682\r\nStep 33, loss: 6.217741966247559\r\nStep 34, loss: 6.116343975067139\r\nStep 35, loss: 6.103456974029541\r\nStep 36, loss: 6.241140365600586\r\nStep 37, loss: 6.257292747497559\r\nStep 38, loss: 6.198233127593994\r\nStep 39, loss: 6.313640117645264\r\nStep 40, loss: 6.024031639099121\r\nStep 41, loss: 6.1269941329956055\r\nStep 42, loss: 6.0780029296875\r\nStep 43, loss: 6.035647392272949\r\nStep 44, loss: 6.149398326873779\r\nStep 45, loss: 6.197230339050293\r\nStep 46, loss: 6.02092170715332\r\nStep 47, loss: 5.914671897888184\r\nStep 48, loss: 6.086199760437012\r\nStep 49, loss: 6.124152660369873\r\nStep 50, loss: 5.895163536071777\r\nStep 51, loss: 6.1387505531311035\r\nStep 52, loss: 
5.88333797454834\r\nStep 53, loss: 5.988974571228027\r\nStep 54, loss: 5.96379280090332\r\nStep 55, loss: 5.8824992179870605\r\nStep 56, loss: 6.0373430252075195\r\nStep 57, loss: 5.906609058380127\r\nStep 58, loss: 5.9038238525390625\r\nStep 59, loss: 5.693182945251465\r\nStep 60, loss: 5.782966136932373\r\nStep 61, loss: 6.035395622253418\r\nStep 31, loss: 6.279480934143066\r\nStep 32, loss: 6.247717380523682\r\nStep 33, loss: 6.217741966247559\r\nStep 34, loss: 6.116343975067139\r\nStep 35, loss: 6.103456974029541\r\nStep 36, loss: 6.241140365600586\r\nStep 37, loss: 6.257292747497559\r\nStep 38, loss: 6.198233127593994\r\nStep 39, loss: 6.313640117645264\r\nStep 40, loss: 6.024031639099121\r\nStep 41, loss: 6.1269941329956055\r\nStep 42, loss: 6.0780029296875\r\nStep 43, loss: 6.035647392272949\r\nStep 44, loss: 6.149398326873779\r\nStep 45, loss: 6.197230339050293\r\nStep 46, loss: 6.02092170715332\r\nStep 47, loss: 5.914671897888184\r\nStep 48, loss: 6.086199760437012\r\nStep 49, loss: 6.124152660369873\r\nStep 50, loss: 5.895163536071777\r\nStep 51, loss: 6.1387505531311035\r\nStep 52, loss: 5.88333797454834\r\nStep 53, loss: 5.988974571228027\r\nStep 54, loss: 5.96379280090332\r\nStep 55, loss: 5.8824992179870605\r\nStep 56, loss: 6.0373430252075195\r\nStep 57, loss: 5.906609058380127\r\nStep 58, loss: 5.9038238525390625\r\nStep 59, loss: 5.693182945251465\r\nStep 60, loss: 5.782966136932373\r\nStep 61, loss: 6.035395622253418\r\nStep 31, loss: 6.279480934143066\r\nStep 32, loss: 6.247717380523682\r\nStep 33, loss: 6.217741966247559\r\nStep 34, loss: 6.116343975067139\r\nStep 35, loss: 6.103456974029541\r\nStep 36, loss: 6.241140365600586\r\nStep 37, loss: 6.257292747497559\r\nStep 38, loss: 6.198233127593994\r\nStep 39, loss: 6.313640117645264\r\nStep 40, loss: 6.024031639099121\r\nStep 41, loss: 6.1269941329956055\r\nStep 42, loss: 6.0780029296875\r\nStep 43, loss: 6.035647392272949\r\nStep 44, loss: 6.149398326873779\r\nStep 45, loss: 6.197230339050293\r\nStep 46, loss: 6.02092170715332\r\nStep 47, loss: 5.914671897888184\r\nStep 48, loss: 6.086199760437012\r\nStep 49, loss: 6.124152660369873\r\nStep 50, loss: 5.895163536071777\r\nStep 51, loss: 6.1387505531311035\r\nStep 52, loss: 5.88333797454834\r\nStep 53, loss: 5.988974571228027\r\nStep 54, loss: 5.96379280090332\r\nStep 55, loss: 5.8824992179870605\r\nStep 56, loss: 6.0373430252075195\r\nStep 57, loss: 5.906609058380127\r\nStep 58, loss: 5.9038238525390625\r\nStep 59, loss: 5.693182945251465\r\nStep 60, loss: 5.782966136932373\r\nStep 61, loss: 6.035395622253418\r\nStep 31, loss: 6.279480934143066\r\nStep 32, loss: 6.247717380523682\r\nStep 33, loss: 6.217741966247559\r\nStep 34, loss: 6.116343975067139\r\nStep 35, loss: 6.103456974029541\r\nStep 36, loss: 6.241140365600586\r\nStep 37, loss: 6.257292747497559\r\nStep 38, loss: 6.198233127593994\r\nStep 39, loss: 6.313640117645264\r\nStep 40, loss: 6.024031639099121\r\nStep 41, loss: 6.1269941329956055\r\nStep 42, loss: 6.0780029296875\r\nStep 43, loss: 6.035647392272949\r\nStep 44, loss: 6.149398326873779\r\nStep 45, loss: 6.197230339050293\r\nStep 46, loss: 6.02092170715332\r\nStep 47, loss: 5.914671897888184\r\nStep 48, loss: 6.086199760437012\r\nStep 49, loss: 6.124152660369873\r\nStep 50, loss: 5.895163536071777\r\nStep 51, loss: 6.1387505531311035\r\nStep 52, loss: 5.88333797454834\r\nStep 53, loss: 5.988974571228027\r\nStep 54, loss: 5.96379280090332\r\nStep 55, loss: 5.8824992179870605\r\nStep 56, loss: 6.0373430252075195\r\nStep 57, loss: 5.906609058380127\r\nStep 
58, loss: 5.9038238525390625\r\n[duplicated terminal scrollback elided: the captured training log covers Step 0 (loss 12.949) through Step 242 (loss 5.069), with each step range (0-30, 31-61, 62-91, 92-121, 122-152, 153-182, 183-212, 213-242) repeated verbatim several times; loss falls from ~12.9 at step 0 to roughly 4.8-5.2 over the final thirty steps]\r\nStep 209, loss: 
5.160239219665527\r\nStep 210, loss: 5.067027568817139\r\nStep 211, loss: 4.976753234863281\r\nStep 212, loss: 4.996925354003906\r\nStep 122, loss: 5.400245666503906\r\nStep 62, loss: 5.777761936187744\r\nStep 63, loss: 5.839138507843018\r\nStep 64, loss: 5.780827045440674\r\nStep 65, loss: 5.784126281738281\r\nStep 66, loss: 5.7698540687561035\r\nStep 67, loss: 5.763111114501953\r\nStep 68, loss: 5.864319801330566\r\nStep 69, loss: 5.71303129196167\r\nStep 70, loss: 5.857616901397705\r\nStep 71, loss: 5.914539337158203\r\nStep 72, loss: 5.763561248779297\r\nStep 73, loss: 5.778350830078125\r\nStep 74, loss: 5.857618808746338\r\nStep 75, loss: 5.826387882232666\r\nStep 76, loss: 5.937601566314697\r\nStep 77, loss: 5.786263942718506\r\nStep 78, loss: 6.130350589752197\r\nStep 79, loss: 5.7508978843688965\r\nStep 80, loss: 5.663431167602539\r\nStep 81, loss: 5.876394271850586\r\nStep 82, loss: 5.745809555053711\r\nStep 83, loss: 5.695242404937744\r\nStep 84, loss: 5.774994850158691\r\nStep 85, loss: 5.626899242401123\r\nStep 86, loss: 5.6362504959106445\r\nStep 87, loss: 5.740806579589844\r\nStep 88, loss: 5.581630229949951\r\nStep 89, loss: 5.6040849685668945\r\nStep 90, loss: 5.731827259063721\r\nStep 91, loss: 5.654018878936768\r\nStep 183, loss: 5.208115577697754\r\nStep 184, loss: 5.09967041015625\r\nStep 185, loss: 5.159571647644043\r\nStep 186, loss: 5.029715538024902\r\nStep 187, loss: 5.00143575668335\r\nStep 188, loss: 5.231716632843018\r\nStep 189, loss: 5.137259483337402\r\nStep 190, loss: 5.068835258483887\r\nStep 191, loss: 5.226624965667725\r\nStep 192, loss: 5.390087604522705\r\nStep 193, loss: 5.277485370635986\r\nStep 194, loss: 5.260311126708984\r\nStep 195, loss: 5.015855312347412\r\nStep 196, loss: 5.083849906921387\r\nStep 197, loss: 5.498945236206055\r\nStep 198, loss: 5.294074058532715\r\nStep 199, loss: 5.049220085144043\r\nStep 200, loss: 5.167503356933594\r\nStep 201, loss: 5.086389064788818\r\nStep 202, loss: 5.06535530090332\r\nStep 203, loss: 5.010951519012451\r\nStep 204, loss: 5.160760402679443\r\nStep 205, loss: 5.302981376647949\r\nStep 206, loss: 5.175704479217529\r\nStep 207, loss: 5.061976432800293\r\nStep 208, loss: 5.101398944854736\r\nStep 209, loss: 5.160239219665527\r\nStep 210, loss: 5.067027568817139\r\nStep 211, loss: 4.976753234863281\r\nStep 212, loss: 4.996925354003906\r\nStep 62, loss: 5.777761936187744\r\nStep 63, loss: 5.839138507843018\r\nStep 64, loss: 5.780827045440674\r\nStep 65, loss: 5.784126281738281\r\nStep 66, loss: 5.7698540687561035\r\nStep 67, loss: 5.763111114501953\r\nStep 68, loss: 5.864319801330566\r\nStep 69, loss: 5.71303129196167\r\nStep 70, loss: 5.857616901397705\r\nStep 71, loss: 5.914539337158203\r\nStep 72, loss: 5.763561248779297\r\nStep 73, loss: 5.778350830078125\r\nStep 74, loss: 5.857618808746338\r\nStep 75, loss: 5.826387882232666\r\nStep 76, loss: 5.937601566314697\r\nStep 77, loss: 5.786263942718506\r\nStep 78, loss: 6.130350589752197\r\nStep 79, loss: 5.7508978843688965\r\nStep 80, loss: 5.663431167602539\r\nStep 81, loss: 5.876394271850586\r\nStep 82, loss: 5.745809555053711\r\nStep 83, loss: 5.695242404937744\r\nStep 84, loss: 5.774994850158691\r\nStep 85, loss: 5.626899242401123\r\nStep 86, loss: 5.6362504959106445\r\nStep 87, loss: 5.740806579589844\r\nStep 88, loss: 5.581630229949951\r\nStep 89, loss: 5.6040849685668945\r\nStep 90, loss: 5.731827259063721\r\nStep 91, loss: 5.654018878936768\r\nStep 62, loss: 5.777761936187744\r\nStep 63, loss: 5.839138507843018\r\nStep 64, loss: 5.780827045440674\r\nStep 
65, loss: 5.784126281738281\r\nStep 66, loss: 5.7698540687561035\r\nStep 67, loss: 5.763111114501953\r\nStep 68, loss: 5.864319801330566\r\nStep 69, loss: 5.71303129196167\r\nStep 70, loss: 5.857616901397705\r\nStep 71, loss: 5.914539337158203\r\nStep 72, loss: 5.763561248779297\r\nStep 73, loss: 5.778350830078125\r\nStep 74, loss: 5.857618808746338\r\nStep 75, loss: 5.826387882232666\r\nStep 76, loss: 5.937601566314697\r\nStep 77, loss: 5.786263942718506\r\nStep 78, loss: 6.130350589752197\r\nStep 79, loss: 5.7508978843688965\r\nStep 80, loss: 5.663431167602539\r\nStep 81, loss: 5.876394271850586\r\nStep 82, loss: 5.745809555053711\r\nStep 83, loss: 5.695242404937744\r\nStep 84, loss: 5.774994850158691\r\nStep 85, loss: 5.626899242401123\r\nStep 86, loss: 5.6362504959106445\r\nStep 87, loss: 5.740806579589844\r\nStep 88, loss: 5.581630229949951\r\nStep 89, loss: 5.6040849685668945\r\nStep 90, loss: 5.731827259063721\r\nStep 91, loss: 5.654018878936768\r\nStep 62, loss: 5.777761936187744\r\nStep 63, loss: 5.839138507843018\r\nStep 64, loss: 5.780827045440674\r\nStep 65, loss: 5.784126281738281\r\nStep 66, loss: 5.7698540687561035\r\nStep 67, loss: 5.763111114501953\r\nStep 68, loss: 5.864319801330566\r\nStep 69, loss: 5.71303129196167\r\nStep 70, loss: 5.857616901397705\r\nStep 71, loss: 5.914539337158203\r\nStep 72, loss: 5.763561248779297\r\nStep 73, loss: 5.778350830078125\r\nStep 74, loss: 5.857618808746338\r\nStep 75, loss: 5.826387882232666\r\nStep 76, loss: 5.937601566314697\r\nStep 77, loss: 5.786263942718506\r\nStep 78, loss: 6.130350589752197\r\nStep 79, loss: 5.7508978843688965\r\nStep 80, loss: 5.663431167602539\r\nStep 81, loss: 5.876394271850586\r\nStep 82, loss: 5.745809555053711\r\nStep 83, loss: 5.695242404937744\r\nStep 84, loss: 5.774994850158691\r\nStep 85, loss: 5.626899242401123\r\nStep 86, loss: 5.6362504959106445\r\nStep 87, loss: 5.740806579589844\r\nStep 88, loss: 5.581630229949951\r\nStep 89, loss: 5.6040849685668945\r\nStep 90, loss: 5.731827259063721\r\nStep 91, loss: 5.654018878936768\r\nStep 183, loss: 5.208115577697754\r\nStep 184, loss: 5.09967041015625\r\nStep 185, loss: 5.159571647644043\r\nStep 186, loss: 5.029715538024902\r\nStep 187, loss: 5.00143575668335\r\nStep 188, loss: 5.231716632843018\r\nStep 189, loss: 5.137259483337402\r\nStep 190, loss: 5.068835258483887\r\nStep 191, loss: 5.226624965667725\r\nStep 192, loss: 5.390087604522705\r\nStep 193, loss: 5.277485370635986\r\nStep 194, loss: 5.260311126708984\r\nStep 195, loss: 5.015855312347412\r\nStep 196, loss: 5.083849906921387\r\nStep 197, loss: 5.498945236206055\r\nStep 198, loss: 5.294074058532715\r\nStep 199, loss: 5.049220085144043\r\nStep 200, loss: 5.167503356933594\r\nStep 201, loss: 5.086389064788818\r\nStep 202, loss: 5.06535530090332\r\nStep 203, loss: 5.010951519012451\r\nStep 204, loss: 5.160760402679443\r\nStep 205, loss: 5.302981376647949\r\nStep 206, loss: 5.175704479217529\r\nStep 207, loss: 5.061976432800293\r\nStep 208, loss: 5.101398944854736\r\nStep 209, loss: 5.160239219665527\r\nStep 210, loss: 5.067027568817139\r\nStep 211, loss: 4.976753234863281\r\nStep 212, loss: 4.996925354003906\r\nStep 213, loss: 5.033364295959473\r\nStep 214, loss: 5.064614772796631\r\nStep 215, loss: 4.9741997718811035\r\nStep 216, loss: 4.983123302459717\r\nStep 217, loss: 5.148205757141113\r\nStep 218, loss: 4.974318504333496\r\nStep 219, loss: 5.155987739562988\r\nStep 220, loss: 5.045630931854248\r\nStep 221, loss: 5.335808277130127\r\nStep 222, loss: 5.223829746246338\r\nStep 223, loss: 
4.957863807678223\r\nStep 224, loss: 5.040962219238281\r\nStep 225, loss: 5.144440650939941\r\nStep 226, loss: 5.1168084144592285\r\nStep 227, loss: 4.904585838317871\r\nStep 228, loss: 5.029483318328857\r\nStep 229, loss: 5.0828447341918945\r\nStep 230, loss: 5.140675067901611\r\nStep 231, loss: 4.984954357147217\r\nStep 232, loss: 4.971665382385254\r\nStep 233, loss: 5.0076189041137695\r\nStep 234, loss: 4.8605265617370605\r\nStep 235, loss: 5.105206489562988\r\nStep 236, loss: 4.828299522399902\r\nStep 237, loss: 5.1811604499816895\r\nStep 238, loss: 4.999291896820068\r\nStep 239, loss: 5.052023410797119\r\nStep 240, loss: 4.781774044036865\r\nStep 241, loss: 4.8852996826171875\r\nStep 0, loss: 12.949273109436035\r\nStep 1, loss: 11.430440902709961\r\nStep 2, loss: 10.384503364562988\r\nStep 3, loss: 9.531347274780273\r\nStep 4, loss: 9.25166130065918\r\nStep 5, loss: 8.985861778259277\r\nStep 6, loss: 8.578680992126465\r\nStep 7, loss: 8.340399742126465\r\nStep 8, loss: 8.152952194213867\r\nStep 9, loss: 8.026482582092285\r\nStep 10, loss: 7.455098628997803\r\nStep 11, loss: 7.713717937469482\r\nStep 12, loss: 7.560019493103027\r\nStep 13, loss: 7.620123863220215\r\nStep 14, loss: 7.55232572555542\r\nStep 15, loss: 7.407571315765381\r\nStep 16, loss: 7.145195960998535\r\nStep 17, loss: 7.073484897613525\r\nStep 18, loss: 6.959710597991943\r\nStep 19, loss: 7.133389949798584\r\nStep 20, loss: 6.902515411376953\r\nStep 21, loss: 6.730587959289551\r\nStep 22, loss: 6.896009922027588\r\nStep 23, loss: 6.658592700958252\r\nStep 24, loss: 6.616141319274902\r\nStep 25, loss: 6.4268083572387695\r\nStep 26, loss: 6.476139545440674\r\nStep 27, loss: 6.152318000793457\r\nStep 28, loss: 6.529809474945068\r\nStep 29, loss: 6.423544883728027\r\nStep 30, loss: 6.3359222412109375\r\nStep 92, loss: 5.435694694519043\r\nStep 93, loss: 5.665273189544678\r\nStep 94, loss: 5.677303791046143\r\nStep 95, loss: 5.618515968322754\r\nStep 96, loss: 5.68934965133667\r\nStep 97, loss: 5.642867565155029\r\nStep 98, loss: 5.6840128898620605\r\nStep 99, loss: 5.468347072601318\r\nStep 100, loss: 5.604738235473633\r\nStep 101, loss: 5.668688774108887\r\nStep 102, loss: 5.606945037841797\r\nStep 103, loss: 5.629029750823975\r\nStep 104, loss: 5.692020893096924\r\nStep 105, loss: 5.50466251373291\r\nStep 106, loss: 5.762575626373291\r\nStep 107, loss: 5.649835586547852\r\nStep 108, loss: 5.581820011138916\r\nStep 109, loss: 5.673094272613525\r\nStep 110, loss: 5.519227504730225\r\nStep 111, loss: 5.321502208709717\r\nStep 112, loss: 5.587049961090088\r\nStep 113, loss: 5.682382106781006\r\nStep 114, loss: 5.476174354553223\r\nStep 115, loss: 5.305208206176758\r\nStep 116, loss: 5.514387607574463\r\nStep 117, loss: 5.555805683135986\r\nStep 118, loss: 5.512825012207031\r\nStep 119, loss: 5.548687934875488\r\nStep 120, loss: 5.7550129890441895\r\nStep 121, loss: 5.477591037750244\r\nStep 213, loss: 5.033364295959473\r\nStep 214, loss: 5.064614772796631\r\nStep 215, loss: 4.9741997718811035\r\nStep 216, loss: 4.983123302459717\r\nStep 217, loss: 5.148205757141113\r\nStep 218, loss: 4.974318504333496\r\nStep 219, loss: 5.155987739562988\r\nStep 220, loss: 5.045630931854248\r\nStep 221, loss: 5.335808277130127\r\nStep 222, loss: 5.223829746246338\r\nStep 223, loss: 4.957863807678223\r\nStep 224, loss: 5.040962219238281\r\nStep 225, loss: 5.144440650939941\r\nStep 226, loss: 5.1168084144592285\r\nStep 227, loss: 4.904585838317871\r\nStep 228, loss: 5.029483318328857\r\nStep 229, loss: 5.0828447341918945\r\nStep 230, loss: 
5.140675067901611\r\nStep 231, loss: 4.984954357147217\r\nStep 232, loss: 4.971665382385254\r\nStep 233, loss: 5.0076189041137695\r\nStep 234, loss: 4.8605265617370605\r\nStep 235, loss: 5.105206489562988\r\nStep 236, loss: 4.828299522399902\r\nStep 237, loss: 5.1811604499816895\r\nStep 238, loss: 4.999291896820068\r\nStep 239, loss: 5.052023410797119\r\nStep 240, loss: 4.781774044036865\r\nStep 241, loss: 4.8852996826171875\r\nStep 92, loss: 5.435694694519043\r\nStep 93, loss: 5.665273189544678\r\nStep 94, loss: 5.677303791046143\r\nStep 95, loss: 5.618515968322754\r\nStep 96, loss: 5.68934965133667\r\nStep 97, loss: 5.642867565155029\r\nStep 98, loss: 5.6840128898620605\r\nStep 99, loss: 5.468347072601318\r\nStep 100, loss: 5.604738235473633\r\nStep 101, loss: 5.668688774108887\r\nStep 102, loss: 5.606945037841797\r\nStep 103, loss: 5.629029750823975\r\nStep 104, loss: 5.692020893096924\r\nStep 105, loss: 5.50466251373291\r\nStep 106, loss: 5.762575626373291\r\nStep 107, loss: 5.649835586547852\r\nStep 108, loss: 5.581820011138916\r\nStep 109, loss: 5.673094272613525\r\nStep 110, loss: 5.519227504730225\r\nStep 111, loss: 5.321502208709717\r\nStep 112, loss: 5.587049961090088\r\nStep 113, loss: 5.682382106781006\r\nStep 114, loss: 5.476174354553223\r\nStep 115, loss: 5.305208206176758\r\nStep 116, loss: 5.514387607574463\r\nStep 117, loss: 5.555805683135986\r\nStep 118, loss: 5.512825012207031\r\nStep 119, loss: 5.548687934875488\r\nStep 120, loss: 5.7550129890441895\r\nStep 121, loss: 5.477591037750244\r\nStep 92, loss: 5.435694694519043\r\nStep 93, loss: 5.665273189544678\r\nStep 94, loss: 5.677303791046143\r\nStep 95, loss: 5.618515968322754\r\nStep 96, loss: 5.68934965133667\r\nStep 97, loss: 5.642867565155029\r\nStep 98, loss: 5.6840128898620605\r\nStep 99, loss: 5.468347072601318\r\nStep 100, loss: 5.604738235473633\r\nStep 101, loss: 5.668688774108887\r\nStep 102, loss: 5.606945037841797\r\nStep 103, loss: 5.629029750823975\r\nStep 104, loss: 5.692020893096924\r\nStep 105, loss: 5.50466251373291\r\nStep 106, loss: 5.762575626373291\r\nStep 107, loss: 5.649835586547852\r\nStep 108, loss: 5.581820011138916\r\nStep 109, loss: 5.673094272613525\r\nStep 110, loss: 5.519227504730225\r\nStep 111, loss: 5.321502208709717\r\nStep 112, loss: 5.587049961090088\r\nStep 113, loss: 5.682382106781006\r\nStep 114, loss: 5.476174354553223\r\nStep 115, loss: 5.305208206176758\r\nStep 116, loss: 5.514387607574463\r\nStep 117, loss: 5.555805683135986\r\nStep 118, loss: 5.512825012207031\r\nStep 119, loss: 5.548687934875488\r\nStep 120, loss: 5.7550129890441895\r\nStep 121, loss: 5.477591037750244\r\nStep 92, loss: 5.435694694519043\r\nStep 93, loss: 5.665273189544678\r\nStep 94, loss: 5.677303791046143\r\nStep 95, loss: 5.618515968322754\r\nStep 96, loss: 5.68934965133667\r\nStep 97, loss: 5.642867565155029\r\nStep 98, loss: 5.6840128898620605\r\nStep 99, loss: 5.468347072601318\r\nStep 100, loss: 5.604738235473633\r\nStep 101, loss: 5.668688774108887\r\nStep 102, loss: 5.606945037841797\r\nStep 103, loss: 5.629029750823975\r\nStep 104, loss: 5.692020893096924\r\nStep 105, loss: 5.50466251373291\r\nStep 106, loss: 5.762575626373291\r\nStep 107, loss: 5.649835586547852\r\nStep 108, loss: 5.581820011138916\r\nStep 109, loss: 5.673094272613525\r\nStep 110, loss: 5.519227504730225\r\nStep 111, loss: 5.321502208709717\r\nStep 112, loss: 5.587049961090088\r\nStep 113, loss: 5.682382106781006\r\nStep 114, loss: 5.476174354553223\r\nStep 115, loss: 5.305208206176758\r\nStep 116, loss: 
5.514387607574463\r\nStep 117, loss: 5.555805683135986\r\nStep 118, loss: 5.512825012207031\r\nStep 119, loss: 5.548687934875488\r\nStep 120, loss: 5.7550129890441895\r\nStep 121, loss: 5.477591037750244\r\nStep 213, loss: 5.033364295959473\r\nStep 214, loss: 5.064614772796631\r\nStep 215, loss: 4.9741997718811035\r\nStep 216, loss: 4.983123302459717\r\nStep 217, loss: 5.148205757141113\r\nStep 218, loss: 4.974318504333496\r\nStep 219, loss: 5.155987739562988\r\nStep 220, loss: 5.045630931854248\r\nStep 221, loss: 5.335808277130127\r\nStep 222, loss: 5.223829746246338\r\nStep 223, loss: 4.957863807678223\r\nStep 224, loss: 5.040962219238281\r\nStep 225, loss: 5.144440650939941\r\nStep 226, loss: 5.1168084144592285\r\nStep 227, loss: 4.904585838317871\r\nStep 228, loss: 5.029483318328857\r\nStep 229, loss: 5.0828447341918945\r\nStep 230, loss: 5.140675067901611\r\nStep 231, loss: 4.984954357147217\r\nStep 232, loss: 4.971665382385254\r\nStep 233, loss: 5.0076189041137695\r\nStep 234, loss: 4.8605265617370605\r\nStep 235, loss: 5.105206489562988\r\nStep 236, loss: 4.828299522399902\r\nStep 237, loss: 5.1811604499816895\r\nStep 238, loss: 4.999291896820068\r\nStep 239, loss: 5.052023410797119\r\nStep 240, loss: 4.781774044036865\r\nStep 241, loss: 4.8852996826171875\r\nStep 242, loss: 5.068990707397461\r\nStep 31, loss: 6.279480934143066\r\nStep 32, loss: 6.247717380523682\r\nStep 33, loss: 6.217741966247559\r\nStep 34, loss: 6.116343975067139\r\nStep 35, loss: 6.103456974029541\r\nStep 36, loss: 6.241140365600586\r\nStep 37, loss: 6.257292747497559\r\nStep 38, loss: 6.198233127593994\r\nStep 39, loss: 6.313640117645264\r\nStep 40, loss: 6.024031639099121\r\nStep 41, loss: 6.1269941329956055\r\nStep 42, loss: 6.0780029296875\r\nStep 43, loss: 6.035647392272949\r\nStep 44, loss: 6.149398326873779\r\nStep 45, loss: 6.197230339050293\r\nStep 46, loss: 6.02092170715332\r\nStep 47, loss: 5.914671897888184\r\nStep 48, loss: 6.086199760437012\r\nStep 49, loss: 6.124152660369873\r\nStep 50, loss: 5.895163536071777\r\nStep 51, loss: 6.1387505531311035\r\nStep 52, loss: 5.88333797454834\r\nStep 53, loss: 5.988974571228027\r\nStep 54, loss: 5.96379280090332\r\nStep 55, loss: 5.8824992179870605\r\nStep 56, loss: 6.0373430252075195\r\nStep 57, loss: 5.906609058380127\r\nStep 58, loss: 5.9038238525390625\r\nStep 59, loss: 5.693182945251465\r\nStep 60, loss: 5.782966136932373\r\nStep 61, loss: 6.035395622253418\r\nStep 122, loss: 5.400245666503906\r\nStep 242, loss: 5.068990707397461\r\nStep 122, loss: 5.400245666503906\r\nStep 122, loss: 5.400245666503906\r\nStep 122, loss: 5.400245666503906\r\nStep 242, loss: 5.068990707397461\r\nStep 0, loss: 12.949273109436035\r\nStep 1, loss: 11.430440902709961\r\nStep 2, loss: 10.384503364562988\r\nStep 3, loss: 9.531347274780273\r\nStep 4, loss: 9.25166130065918\r\nStep 5, loss: 8.985861778259277\r\nStep 6, loss: 8.578680992126465\r\nStep 7, loss: 8.340399742126465\r\nStep 8, loss: 8.152952194213867\r\nStep 9, loss: 8.026482582092285\r\nStep 10, loss: 7.455098628997803\r\nStep 11, loss: 7.713717937469482\r\nStep 12, loss: 7.560019493103027\r\nStep 13, loss: 7.620123863220215\r\nStep 14, loss: 7.55232572555542\r\nStep 15, loss: 7.407571315765381\r\nStep 16, loss: 7.145195960998535\r\nStep 17, loss: 7.073484897613525\r\nStep 18, loss: 6.959710597991943\r\nStep 19, loss: 7.133389949798584\r\nStep 20, loss: 6.902515411376953\r\nStep 21, loss: 6.730587959289551\r\nStep 22, loss: 6.896009922027588\r\nStep 23, loss: 6.658592700958252\r\nStep 24, loss: 
6.616141319274902\r\nStep 25, loss: 6.4268083572387695\r\nStep 26, loss: 6.476139545440674\r\nStep 27, loss: 6.152318000793457\r\nStep 28, loss: 6.529809474945068\r\nStep 29, loss: 6.423544883728027\r\nStep 30, loss: 6.3359222412109375\r\nStep 62, loss: 5.777761936187744\r\nStep 63, loss: 5.839138507843018\r\nStep 64, loss: 5.780827045440674\r\nStep 65, loss: 5.784126281738281\r\nStep 66, loss: 5.7698540687561035\r\nStep 67, loss: 5.763111114501953\r\nStep 68, loss: 5.864319801330566\r\nStep 69, loss: 5.71303129196167\r\nStep 70, loss: 5.857616901397705\r\nStep 71, loss: 5.914539337158203\r\nStep 72, loss: 5.763561248779297\r\nStep 73, loss: 5.778350830078125\r\nStep 74, loss: 5.857618808746338\r\nStep 75, loss: 5.826387882232666\r\nStep 76, loss: 5.937601566314697\r\nStep 77, loss: 5.786263942718506\r\nStep 78, loss: 6.130350589752197\r\nStep 79, loss: 5.7508978843688965\r\nStep 80, loss: 5.663431167602539\r\nStep 81, loss: 5.876394271850586\r\nStep 82, loss: 5.745809555053711\r\nStep 83, loss: 5.695242404937744\r\nStep 84, loss: 5.774994850158691\r\nStep 85, loss: 5.626899242401123\r\nStep 86, loss: 5.6362504959106445\r\nStep 87, loss: 5.740806579589844\r\nStep 88, loss: 5.581630229949951\r\nStep 89, loss: 5.6040849685668945\r\nStep 90, loss: 5.731827259063721\r\nStep 91, loss: 5.654018878936768\r\nStep 0, loss: 12.949273109436035\r\nStep 1, loss: 11.430440902709961\r\nStep 2, loss: 10.384503364562988\r\nStep 3, loss: 9.531347274780273\r\nStep 4, loss: 9.25166130065918\r\nStep 5, loss: 8.985861778259277\r\nStep 6, loss: 8.578680992126465\r\nStep 7, loss: 8.340399742126465\r\nStep 8, loss: 8.152952194213867\r\nStep 9, loss: 8.026482582092285\r\nStep 10, loss: 7.455098628997803\r\nStep 11, loss: 7.713717937469482\r\nStep 12, loss: 7.560019493103027\r\nStep 13, loss: 7.620123863220215\r\nStep 14, loss: 7.55232572555542\r\nStep 15, loss: 7.407571315765381\r\nStep 16, loss: 7.145195960998535\r\nStep 17, loss: 7.073484897613525\r\nStep 18, loss: 6.959710597991943\r\nStep 19, loss: 7.133389949798584\r\nStep 20, loss: 6.902515411376953\r\nStep 21, loss: 6.730587959289551\r\nStep 22, loss: 6.896009922027588\r\nStep 23, loss: 6.658592700958252\r\nStep 24, loss: 6.616141319274902\r\nStep 25, loss: 6.4268083572387695\r\nStep 26, loss: 6.476139545440674\r\nStep 27, loss: 6.152318000793457\r\nStep 28, loss: 6.529809474945068\r\nStep 29, loss: 6.423544883728027\r\nStep 30, loss: 6.3359222412109375\r\nStep 123, loss: 5.591671943664551\r\nStep 124, loss: 5.552427768707275\r\nStep 125, loss: 5.480953693389893\r\nStep 126, loss: 5.44342041015625\r\nStep 127, loss: 5.484697341918945\r\nStep 128, loss: 5.3651628494262695\r\nStep 129, loss: 5.565526485443115\r\nStep 130, loss: 5.390408515930176\r\nStep 131, loss: 5.672516822814941\r\nStep 132, loss: 5.253285884857178\r\nStep 133, loss: 5.514707565307617\r\nStep 134, loss: 5.446695804595947\r\nStep 135, loss: 5.264585971832275\r\nStep 136, loss: 5.206966876983643\r\nStep 137, loss: 5.244028568267822\r\nStep 138, loss: 5.259130001068115\r\nStep 139, loss: 5.409638404846191\r\nStep 140, loss: 5.364806652069092\r\nStep 141, loss: 5.430793285369873\r\nStep 142, loss: 5.492854118347168\r\nStep 143, loss: 5.460545539855957\r\nStep 144, loss: 5.295399188995361\r\nStep 145, loss: 5.345186710357666\r\nStep 146, loss: 5.332350254058838\r\nStep 147, loss: 5.30581521987915\r\nStep 148, loss: 5.272929668426514\r\nStep 149, loss: 5.335284233093262\r\n",,terminal_output +12791,11957627,"TERMINAL",0,0,"Step 150, loss: 5.263681411743164\r\nStep 151, loss: 
5.497401714324951\r\nStep 152, loss: 5.332529544830322\r\nStep 123, loss: 5.591671943664551\r\nStep 124, loss: 5.552427768707275\r\nStep 125, loss: 5.480953693389893\r\nStep 126, loss: 5.44342041015625\r\nStep 127, loss: 5.484697341918945\r\nStep 128, loss: 5.3651628494262695\r\nStep 129, loss: 5.565526485443115\r\nStep 130, loss: 5.390408515930176\r\nStep 131, loss: 5.672516822814941\r\nStep 132, loss: 5.253285884857178\r\nStep 133, loss: 5.514707565307617\r\nStep 134, loss: 5.446695804595947\r\nStep 135, loss: 5.264585971832275\r\nStep 136, loss: 5.206966876983643\r\nStep 137, loss: 5.244028568267822\r\nStep 138, loss: 5.259130001068115\r\nStep 139, loss: 5.409638404846191\r\nStep 140, loss: 5.364806652069092\r\nStep 141, loss: 5.430793285369873\r\nStep 142, loss: 5.492854118347168\r\nStep 143, loss: 5.460545539855957\r\nStep 144, loss: 5.295399188995361\r\nStep 145, loss: 5.345186710357666\r\nStep 146, loss: 5.332350254058838\r\nStep 147, loss: 5.30581521987915\r\nStep 148, loss: 5.272929668426514\r\nStep 149, loss: 5.335284233093262\r\nStep 150, loss: 5.263681411743164\r\nStep 151, loss: 5.497401714324951\r\nStep 152, loss: 5.332529544830322\r\nStep 0, loss: 12.949273109436035\r\nStep 1, loss: 11.430440902709961\r\nStep 2, loss: 10.384503364562988\r\nStep 3, loss: 9.531347274780273\r\nStep 4, loss: 9.25166130065918\r\nStep 5, loss: 8.985861778259277\r\nStep 6, loss: 8.578680992126465\r\nStep 7, loss: 8.340399742126465\r\nStep 8, loss: 8.152952194213867\r\nStep 9, loss: 8.026482582092285\r\nStep 10, loss: 7.455098628997803\r\nStep 11, loss: 7.713717937469482\r\nStep 12, loss: 7.560019493103027\r\nStep 13, loss: 7.620123863220215\r\nStep 14, loss: 7.55232572555542\r\nStep 15, loss: 7.407571315765381\r\nStep 16, loss: 7.145195960998535\r\nStep 17, loss: 7.073484897613525\r\nStep 18, loss: 6.959710597991943\r\nStep 19, loss: 7.133389949798584\r\nStep 20, loss: 6.902515411376953\r\nStep 21, loss: 6.730587959289551\r\nStep 22, loss: 6.896009922027588\r\nStep 23, loss: 6.658592700958252\r\nStep 24, loss: 6.616141319274902\r\nStep 25, loss: 6.4268083572387695\r\nStep 26, loss: 6.476139545440674\r\nStep 27, loss: 6.152318000793457\r\nStep 28, loss: 6.529809474945068\r\nStep 29, loss: 6.423544883728027\r\nStep 30, loss: 6.3359222412109375\r\nStep 0, loss: 12.949273109436035\r\nStep 1, loss: 11.430440902709961\r\nStep 2, loss: 10.384503364562988\r\nStep 3, loss: 9.531347274780273\r\nStep 4, loss: 9.25166130065918\r\nStep 5, loss: 8.985861778259277\r\nStep 6, loss: 8.578680992126465\r\nStep 7, loss: 8.340399742126465\r\nStep 8, loss: 8.152952194213867\r\nStep 9, loss: 8.026482582092285\r\nStep 10, loss: 7.455098628997803\r\nStep 11, loss: 7.713717937469482\r\nStep 12, loss: 7.560019493103027\r\nStep 13, loss: 7.620123863220215\r\nStep 14, loss: 7.55232572555542\r\nStep 15, loss: 7.407571315765381\r\nStep 16, loss: 7.145195960998535\r\nStep 17, loss: 7.073484897613525\r\nStep 18, loss: 6.959710597991943\r\nStep 19, loss: 7.133389949798584\r\nStep 20, loss: 6.902515411376953\r\nStep 21, loss: 6.730587959289551\r\nStep 22, loss: 6.896009922027588\r\nStep 23, loss: 6.658592700958252\r\nStep 24, loss: 6.616141319274902\r\nStep 25, loss: 6.4268083572387695\r\nStep 26, loss: 6.476139545440674\r\nStep 27, loss: 6.152318000793457\r\nStep 28, loss: 6.529809474945068\r\nStep 29, loss: 6.423544883728027\r\nStep 30, loss: 6.3359222412109375\r\nStep 123, loss: 5.591671943664551\r\nStep 124, loss: 5.552427768707275\r\nStep 125, loss: 5.480953693389893\r\nStep 126, loss: 5.44342041015625\r\nStep 127, loss: 
5.484697341918945\r\nStep 128, loss: 5.3651628494262695\r\nStep 129, loss: 5.565526485443115\r\nStep 130, loss: 5.390408515930176\r\nStep 131, loss: 5.672516822814941\r\nStep 132, loss: 5.253285884857178\r\nStep 133, loss: 5.514707565307617\r\nStep 134, loss: 5.446695804595947\r\nStep 135, loss: 5.264585971832275\r\nStep 136, loss: 5.206966876983643\r\nStep 137, loss: 5.244028568267822\r\nStep 138, loss: 5.259130001068115\r\nStep 139, loss: 5.409638404846191\r\nStep 140, loss: 5.364806652069092\r\nStep 141, loss: 5.430793285369873\r\nStep 142, loss: 5.492854118347168\r\nStep 143, loss: 5.460545539855957\r\nStep 144, loss: 5.295399188995361\r\nStep 145, loss: 5.345186710357666\r\nStep 146, loss: 5.332350254058838\r\nStep 147, loss: 5.30581521987915\r\nStep 148, loss: 5.272929668426514\r\nStep 149, loss: 5.335284233093262\r\nStep 150, loss: 5.263681411743164\r\nStep 151, loss: 5.497401714324951\r\nStep 152, loss: 5.332529544830322\r\nStep 31, loss: 6.279480934143066\r\nStep 32, loss: 6.247717380523682\r\nStep 33, loss: 6.217741966247559\r\nStep 34, loss: 6.116343975067139\r\nStep 35, loss: 6.103456974029541\r\nStep 36, loss: 6.241140365600586\r\nStep 37, loss: 6.257292747497559\r\nStep 38, loss: 6.198233127593994\r\nStep 39, loss: 6.313640117645264\r\nStep 40, loss: 6.024031639099121\r\nStep 41, loss: 6.1269941329956055\r\nStep 42, loss: 6.0780029296875\r\nStep 43, loss: 6.035647392272949\r\nStep 44, loss: 6.149398326873779\r\nStep 45, loss: 6.197230339050293\r\nStep 46, loss: 6.02092170715332\r\nStep 47, loss: 5.914671897888184\r\nStep 48, loss: 6.086199760437012\r\nStep 49, loss: 6.124152660369873\r\nStep 50, loss: 5.895163536071777\r\nStep 51, loss: 6.1387505531311035\r\nStep 52, loss: 5.88333797454834\r\nStep 53, loss: 5.988974571228027\r\nStep 54, loss: 5.96379280090332\r\nStep 55, loss: 5.8824992179870605\r\nStep 56, loss: 6.0373430252075195\r\nStep 57, loss: 5.906609058380127\r\nStep 58, loss: 5.9038238525390625\r\nStep 59, loss: 5.693182945251465\r\nStep 60, loss: 5.782966136932373\r\nStep 61, loss: 6.035395622253418\r\nStep 92, loss: 5.435694694519043\r\nStep 93, loss: 5.665273189544678\r\nStep 94, loss: 5.677303791046143\r\nStep 95, loss: 5.618515968322754\r\nStep 96, loss: 5.68934965133667\r\nStep 97, loss: 5.642867565155029\r\nStep 98, loss: 5.6840128898620605\r\nStep 99, loss: 5.468347072601318\r\nStep 100, loss: 5.604738235473633\r\nStep 101, loss: 5.668688774108887\r\nStep 102, loss: 5.606945037841797\r\nStep 103, loss: 5.629029750823975\r\nStep 104, loss: 5.692020893096924\r\nStep 105, loss: 5.50466251373291\r\nStep 106, loss: 5.762575626373291\r\nStep 107, loss: 5.649835586547852\r\nStep 108, loss: 5.581820011138916\r\nStep 109, loss: 5.673094272613525\r\nStep 110, loss: 5.519227504730225\r\nStep 111, loss: 5.321502208709717\r\nStep 112, loss: 5.587049961090088\r\nStep 113, loss: 5.682382106781006\r\nStep 114, loss: 5.476174354553223\r\nStep 115, loss: 5.305208206176758\r\nStep 116, loss: 5.514387607574463\r\nStep 117, loss: 5.555805683135986\r\nStep 118, loss: 5.512825012207031\r\nStep 119, loss: 5.548687934875488\r\nStep 120, loss: 5.7550129890441895\r\nStep 121, loss: 5.477591037750244\r\nStep 31, loss: 6.279480934143066\r\nStep 32, loss: 6.247717380523682\r\nStep 33, loss: 6.217741966247559\r\nStep 34, loss: 6.116343975067139\r\nStep 35, loss: 6.103456974029541\r\nStep 36, loss: 6.241140365600586\r\nStep 37, loss: 6.257292747497559\r\nStep 38, loss: 6.198233127593994\r\nStep 39, loss: 6.313640117645264\r\nStep 40, loss: 6.024031639099121\r\nStep 41, loss: 
6.1269941329956055\r\nStep 42, loss: 6.0780029296875\r\nStep 43, loss: 6.035647392272949\r\nStep 44, loss: 6.149398326873779\r\nStep 45, loss: 6.197230339050293\r\nStep 46, loss: 6.02092170715332\r\nStep 47, loss: 5.914671897888184\r\nStep 48, loss: 6.086199760437012\r\nStep 49, loss: 6.124152660369873\r\nStep 50, loss: 5.895163536071777\r\nStep 51, loss: 6.1387505531311035\r\nStep 52, loss: 5.88333797454834\r\nStep 53, loss: 5.988974571228027\r\nStep 54, loss: 5.96379280090332\r\nStep 55, loss: 5.8824992179870605\r\nStep 56, loss: 6.0373430252075195\r\nStep 57, loss: 5.906609058380127\r\nStep 58, loss: 5.9038238525390625\r\nStep 59, loss: 5.693182945251465\r\nStep 60, loss: 5.782966136932373\r\nStep 61, loss: 6.035395622253418\r\nStep 153, loss: 5.290670394897461\r\nStep 154, loss: 5.283242702484131\r\nStep 155, loss: 5.120883464813232\r\nStep 156, loss: 5.030333042144775\r\nStep 157, loss: 5.376494884490967\r\nStep 158, loss: 5.221498489379883\r\nStep 159, loss: 5.252740859985352\r\nStep 160, loss: 5.322122573852539\r\nStep 161, loss: 5.386086463928223\r\nStep 162, loss: 5.426920413970947\r\nStep 163, loss: 5.201775550842285\r\nStep 164, loss: 5.252358436584473\r\nStep 165, loss: 5.112096309661865\r\nStep 166, loss: 5.3307671546936035\r\nStep 167, loss: 5.441346168518066\r\nStep 168, loss: 5.2537150382995605\r\nStep 169, loss: 5.4077467918396\r\nStep 170, loss: 5.208958148956299\r\nStep 171, loss: 4.978949069976807\r\nStep 172, loss: 5.210290431976318\r\nStep 173, loss: 5.3112053871154785\r\nStep 174, loss: 5.440155029296875\r\nStep 175, loss: 5.209361553192139\r\nStep 176, loss: 5.326188564300537\r\nStep 177, loss: 5.066229820251465\r\nStep 178, loss: 5.061301231384277\r\nStep 179, loss: 5.313010215759277\r\nStep 180, loss: 5.196096420288086\r\nStep 181, loss: 5.29463005065918\r\nStep 182, loss: 5.188632965087891\r\nStep 153, loss: 5.290670394897461\r\nStep 154, loss: 5.283242702484131\r\nStep 155, loss: 5.120883464813232\r\nStep 156, loss: 5.030333042144775\r\nStep 157, loss: 5.376494884490967\r\nStep 158, loss: 5.221498489379883\r\nStep 159, loss: 5.252740859985352\r\nStep 160, loss: 5.322122573852539\r\nStep 161, loss: 5.386086463928223\r\nStep 162, loss: 5.426920413970947\r\nStep 163, loss: 5.201775550842285\r\nStep 164, loss: 5.252358436584473\r\nStep 165, loss: 5.112096309661865\r\nStep 166, loss: 5.3307671546936035\r\nStep 167, loss: 5.441346168518066\r\nStep 168, loss: 5.2537150382995605\r\nStep 169, loss: 5.4077467918396\r\nStep 170, loss: 5.208958148956299\r\nStep 171, loss: 4.978949069976807\r\nStep 172, loss: 5.210290431976318\r\nStep 173, loss: 5.3112053871154785\r\nStep 174, loss: 5.440155029296875\r\nStep 175, loss: 5.209361553192139\r\nStep 176, loss: 5.326188564300537\r\nStep 177, loss: 5.066229820251465\r\nStep 178, loss: 5.061301231384277\r\nStep 179, loss: 5.313010215759277\r\nStep 180, loss: 5.196096420288086\r\nStep 181, loss: 5.29463005065918\r\nStep 182, loss: 5.188632965087891\r\nStep 31, loss: 6.279480934143066\r\nStep 32, loss: 6.247717380523682\r\nStep 33, loss: 6.217741966247559\r\nStep 34, loss: 6.116343975067139\r\nStep 35, loss: 6.103456974029541\r\nStep 36, loss: 6.241140365600586\r\nStep 37, loss: 6.257292747497559\r\nStep 38, loss: 6.198233127593994\r\nStep 39, loss: 6.313640117645264\r\nStep 40, loss: 6.024031639099121\r\nStep 41, loss: 6.1269941329956055\r\nStep 42, loss: 6.0780029296875\r\nStep 43, loss: 6.035647392272949\r\nStep 44, loss: 6.149398326873779\r\nStep 45, loss: 6.197230339050293\r\nStep 46, loss: 6.02092170715332\r\nStep 47, loss: 
5.914671897888184\r\nStep 48, loss: 6.086199760437012\r\nStep 49, loss: 6.124152660369873\r\nStep 50, loss: 5.895163536071777\r\nStep 51, loss: 6.1387505531311035\r\nStep 52, loss: 5.88333797454834\r\nStep 53, loss: 5.988974571228027\r\nStep 54, loss: 5.96379280090332\r\nStep 55, loss: 5.8824992179870605\r\nStep 56, loss: 6.0373430252075195\r\nStep 57, loss: 5.906609058380127\r\nStep 58, loss: 5.9038238525390625\r\nStep 59, loss: 5.693182945251465\r\nStep 60, loss: 5.782966136932373\r\nStep 61, loss: 6.035395622253418\r\nStep 31, loss: 6.279480934143066\r\nStep 32, loss: 6.247717380523682\r\nStep 33, loss: 6.217741966247559\r\nStep 34, loss: 6.116343975067139\r\nStep 35, loss: 6.103456974029541\r\nStep 36, loss: 6.241140365600586\r\nStep 37, loss: 6.257292747497559\r\nStep 38, loss: 6.198233127593994\r\nStep 39, loss: 6.313640117645264\r\nStep 40, loss: 6.024031639099121\r\nStep 41, loss: 6.1269941329956055\r\nStep 42, loss: 6.0780029296875\r\nStep 43, loss: 6.035647392272949\r\nStep 44, loss: 6.149398326873779\r\nStep 45, loss: 6.197230339050293\r\nStep 46, loss: 6.02092170715332\r\nStep 47, loss: 5.914671897888184\r\nStep 48, loss: 6.086199760437012\r\nStep 49, loss: 6.124152660369873\r\nStep 50, loss: 5.895163536071777\r\nStep 51, loss: 6.1387505531311035\r\nStep 52, loss: 5.88333797454834\r\nStep 53, loss: 5.988974571228027\r\nStep 54, loss: 5.96379280090332\r\nStep 55, loss: 5.8824992179870605\r\nStep 56, loss: 6.0373430252075195\r\nStep 57, loss: 5.906609058380127\r\nStep 58, loss: 5.9038238525390625\r\nStep 59, loss: 5.693182945251465\r\nStep 60, loss: 5.782966136932373\r\nStep 61, loss: 6.035395622253418\r\nStep 153, loss: 5.290670394897461\r\nStep 154, loss: 5.283242702484131\r\nStep 155, loss: 5.120883464813232\r\nStep 156, loss: 5.030333042144775\r\nStep 157, loss: 5.376494884490967\r\nStep 158, loss: 5.221498489379883\r\nStep 159, loss: 5.252740859985352\r\nStep 160, loss: 5.322122573852539\r\nStep 161, loss: 5.386086463928223\r\nStep 162, loss: 5.426920413970947\r\nStep 163, loss: 5.201775550842285\r\nStep 164, loss: 5.252358436584473\r\nStep 165, loss: 5.112096309661865\r\nStep 166, loss: 5.3307671546936035\r\nStep 167, loss: 5.441346168518066\r\nStep 168, loss: 5.2537150382995605\r\nStep 169, loss: 5.4077467918396\r\nStep 170, loss: 5.208958148956299\r\nStep 171, loss: 4.978949069976807\r\nStep 172, loss: 5.210290431976318\r\nStep 173, loss: 5.3112053871154785\r\nStep 174, loss: 5.440155029296875\r\nStep 175, loss: 5.209361553192139\r\nStep 176, loss: 5.326188564300537\r\nStep 177, loss: 5.066229820251465\r\nStep 178, loss: 5.061301231384277\r\nStep 179, loss: 5.313010215759277\r\nStep 180, loss: 5.196096420288086\r\nStep 181, loss: 5.29463005065918\r\nStep 182, loss: 5.188632965087891\r\nStep 62, loss: 5.777761936187744\r\nStep 63, loss: 5.839138507843018\r\nStep 64, loss: 5.780827045440674\r\nStep 65, loss: 5.784126281738281\r\nStep 66, loss: 5.7698540687561035\r\nStep 67, loss: 5.763111114501953\r\nStep 68, loss: 5.864319801330566\r\nStep 69, loss: 5.71303129196167\r\nStep 70, loss: 5.857616901397705\r\nStep 71, loss: 5.914539337158203\r\nStep 72, loss: 5.763561248779297\r\nStep 73, loss: 5.778350830078125\r\nStep 74, loss: 5.857618808746338\r\nStep 75, loss: 5.826387882232666\r\nStep 76, loss: 5.937601566314697\r\nStep 77, loss: 5.786263942718506\r\nStep 78, loss: 6.130350589752197\r\nStep 79, loss: 5.7508978843688965\r\nStep 80, loss: 5.663431167602539\r\nStep 81, loss: 5.876394271850586\r\nStep 82, loss: 5.745809555053711\r\nStep 83, loss: 5.695242404937744\r\nStep 84, 
loss: 5.774994850158691\r\nStep 85, loss: 5.626899242401123\r\nStep 86, loss: 5.6362504959106445\r\nStep 87, loss: 5.740806579589844\r\nStep 88, loss: 5.581630229949951\r\nStep 89, loss: 5.6040849685668945\r\nStep 90, loss: 5.731827259063721\r\nStep 91, loss: 5.654018878936768\r\nStep 122, loss: 5.400245666503906\r\nStep 62, loss: 5.777761936187744\r\nStep 63, loss: 5.839138507843018\r\nStep 64, loss: 5.780827045440674\r\nStep 65, loss: 5.784126281738281\r\nStep 66, loss: 5.7698540687561035\r\nStep 67, loss: 5.763111114501953\r\nStep 68, loss: 5.864319801330566\r\nStep 69, loss: 5.71303129196167\r\nStep 70, loss: 5.857616901397705\r\nStep 71, loss: 5.914539337158203\r\nStep 72, loss: 5.763561248779297\r\nStep 73, loss: 5.778350830078125\r\nStep 74, loss: 5.857618808746338\r\nStep 75, loss: 5.826387882232666\r\nStep 76, loss: 5.937601566314697\r\nStep 77, loss: 5.786263942718506\r\nStep 78, loss: 6.130350589752197\r\nStep 79, loss: 5.7508978843688965\r\nStep 80, loss: 5.663431167602539\r\nStep 81, loss: 5.876394271850586\r\nStep 82, loss: 5.745809555053711\r\nStep 83, loss: 5.695242404937744\r\nStep 84, loss: 5.774994850158691\r\nStep 85, loss: 5.626899242401123\r\nStep 86, loss: 5.6362504959106445\r\nStep 87, loss: 5.740806579589844\r\nStep 88, loss: 5.581630229949951\r\nStep 89, loss: 5.6040849685668945\r\nStep 90, loss: 5.731827259063721\r\nStep 91, loss: 5.654018878936768\r\nStep 183, loss: 5.208115577697754\r\nStep 184, loss: 5.09967041015625\r\nStep 185, loss: 5.159571647644043\r\nStep 186, loss: 5.029715538024902\r\nStep 187, loss: 5.00143575668335\r\nStep 188, loss: 5.231716632843018\r\nStep 189, loss: 5.137259483337402\r\nStep 190, loss: 5.068835258483887\r\nStep 191, loss: 5.226624965667725\r\nStep 192, loss: 5.390087604522705\r\nStep 193, loss: 5.277485370635986\r\nStep 194, loss: 5.260311126708984\r\nStep 195, loss: 5.015855312347412\r\nStep 196, loss: 5.083849906921387\r\nStep 197, loss: 5.498945236206055\r\nStep 198, loss: 5.294074058532715\r\nStep 199, loss: 5.049220085144043\r\nStep 200, loss: 5.167503356933594\r\nStep 201, loss: 5.086389064788818\r\nStep 202, loss: 5.06535530090332\r\nStep 203, loss: 5.010951519012451\r\nStep 204, loss: 5.160760402679443\r\nStep 205, loss: 5.302981376647949\r\nStep 206, loss: 5.175704479217529\r\nStep 207, loss: 5.061976432800293\r\nStep 208, loss: 5.101398944854736\r\nStep 209, loss: 5.160239219665527\r\nStep 210, loss: 5.067027568817139\r\nStep 211, loss: 4.976753234863281\r\nStep 212, loss: 4.996925354003906\r\nStep 183, loss: 5.208115577697754\r\nStep 184, loss: 5.09967041015625\r\nStep 185, loss: 5.159571647644043\r\nStep 186, loss: 5.029715538024902\r\nStep 187, loss: 5.00143575668335\r\nStep 188, loss: 5.231716632843018\r\nStep 189, loss: 5.137259483337402\r\nStep 190, loss: 5.068835258483887\r\nStep 191, loss: 5.226624965667725\r\nStep 192, loss: 5.390087604522705\r\nStep 193, loss: 5.277485370635986\r\nStep 194, loss: 5.260311126708984\r\nStep 195, loss: 5.015855312347412\r\nStep 196, loss: 5.083849906921387\r\nStep 197, loss: 5.498945236206055\r\nStep 198, loss: 5.294074058532715\r\nStep 199, loss: 5.049220085144043\r\nStep 200, loss: 5.167503356933594\r\nStep 201, loss: 5.086389064788818\r\nStep 202, loss: 5.06535530090332\r\nStep 203, loss: 5.010951519012451\r\nStep 204, loss: 5.160760402679443\r\nStep 205, loss: 5.302981376647949\r\nStep 206, loss: 5.175704479217529\r\nStep 207, loss: 5.061976432800293\r\nStep 208, loss: 5.101398944854736\r\nStep 209, loss: 5.160239219665527\r\nStep 210, loss: 5.067027568817139\r\nStep 211, 
loss: 4.976753234863281\r\nStep 212, loss: 4.996925354003906\r\nStep 62, loss: 5.777761936187744\r\nStep 63, loss: 5.839138507843018\r\nStep 64, loss: 5.780827045440674\r\nStep 65, loss: 5.784126281738281\r\nStep 66, loss: 5.7698540687561035\r\nStep 67, loss: 5.763111114501953\r\nStep 68, loss: 5.864319801330566\r\nStep 69, loss: 5.71303129196167\r\nStep 70, loss: 5.857616901397705\r\nStep 71, loss: 5.914539337158203\r\nStep 72, loss: 5.763561248779297\r\nStep 73, loss: 5.778350830078125\r\nStep 74, loss: 5.857618808746338\r\nStep 75, loss: 5.826387882232666\r\nStep 76, loss: 5.937601566314697\r\nStep 77, loss: 5.786263942718506\r\nStep 78, loss: 6.130350589752197\r\nStep 79, loss: 5.7508978843688965\r\nStep 80, loss: 5.663431167602539\r\nStep 81, loss: 5.876394271850586\r\nStep 82, loss: 5.745809555053711\r\nStep 83, loss: 5.695242404937744\r\nStep 84, loss: 5.774994850158691\r\nStep 85, loss: 5.626899242401123\r\nStep 86, loss: 5.6362504959106445\r\nStep 87, loss: 5.740806579589844\r\nStep 88, loss: 5.581630229949951\r\nStep 89, loss: 5.6040849685668945\r\nStep 90, loss: 5.731827259063721\r\nStep 91, loss: 5.654018878936768\r\nStep 62, loss: 5.777761936187744\r\nStep 63, loss: 5.839138507843018\r\nStep 64, loss: 5.780827045440674\r\nStep 65, loss: 5.784126281738281\r\nStep 66, loss: 5.7698540687561035\r\nStep 67, loss: 5.763111114501953\r\nStep 68, loss: 5.864319801330566\r\nStep 69, loss: 5.71303129196167\r\nStep 70, loss: 5.857616901397705\r\nStep 71, loss: 5.914539337158203\r\nStep 72, loss: 5.763561248779297\r\nStep 73, loss: 5.778350830078125\r\nStep 74, loss: 5.857618808746338\r\nStep 75, loss: 5.826387882232666\r\nStep 76, loss: 5.937601566314697\r\nStep 77, loss: 5.786263942718506\r\nStep 78, loss: 6.130350589752197\r\nStep 79, loss: 5.7508978843688965\r\nStep 80, loss: 5.663431167602539\r\nStep 81, loss: 5.876394271850586\r\nStep 82, loss: 5.745809555053711\r\nStep 83, loss: 5.695242404937744\r\nStep 84, loss: 5.774994850158691\r\nStep 85, loss: 5.626899242401123\r\nStep 86, loss: 5.6362504959106445\r\nStep 87, loss: 5.740806579589844\r\nStep 88, loss: 5.581630229949951\r\nStep 89, loss: 5.6040849685668945\r\nStep 90, loss: 5.731827259063721\r\nStep 91, loss: 5.654018878936768\r\nStep 183, loss: 5.208115577697754\r\nStep 184, loss: 5.09967041015625\r\nStep 185, loss: 5.159571647644043\r\nStep 186, loss: 5.029715538024902\r\nStep 187, loss: 5.00143575668335\r\nStep 188, loss: 5.231716632843018\r\nStep 189, loss: 5.137259483337402\r\nStep 190, loss: 5.068835258483887\r\nStep 191, loss: 5.226624965667725\r\nStep 192, loss: 5.390087604522705\r\nStep 193, loss: 5.277485370635986\r\nStep 194, loss: 5.260311126708984\r\nStep 195, loss: 5.015855312347412\r\nStep 196, loss: 5.083849906921387\r\nStep 197, loss: 5.498945236206055\r\nStep 198, loss: 5.294074058532715\r\nStep 199, loss: 5.049220085144043\r\nStep 200, loss: 5.167503356933594\r\nStep 201, loss: 5.086389064788818\r\nStep 202, loss: 5.06535530090332\r\nStep 203, loss: 5.010951519012451\r\nStep 204, loss: 5.160760402679443\r\nStep 205, loss: 5.302981376647949\r\nStep 206, loss: 5.175704479217529\r\nStep 207, loss: 5.061976432800293\r\nStep 208, loss: 5.101398944854736\r\nStep 209, loss: 5.160239219665527\r\nStep 210, loss: 5.067027568817139\r\nStep 211, loss: 4.976753234863281\r\nStep 212, loss: 4.996925354003906\r\nStep 92, loss: 5.435694694519043\r\nStep 93, loss: 5.665273189544678\r\nStep 94, loss: 5.677303791046143\r\nStep 95, loss: 5.618515968322754\r\nStep 96, loss: 5.68934965133667\r\nStep 97, loss: 
5.642867565155029\r\nStep 98, loss: 5.6840128898620605\r\nStep 99, loss: 5.468347072601318\r\nStep 100, loss: 5.604738235473633\r\nStep 101, loss: 5.668688774108887\r\nStep 102, loss: 5.606945037841797\r\nStep 103, loss: 5.629029750823975\r\nStep 104, loss: 5.692020893096924\r\nStep 105, loss: 5.50466251373291\r\nStep 106, loss: 5.762575626373291\r\nStep 107, loss: 5.649835586547852\r\nStep 108, loss: 5.581820011138916\r\nStep 109, loss: 5.673094272613525\r\nStep 110, loss: 5.519227504730225\r\nStep 111, loss: 5.321502208709717\r\nStep 112, loss: 5.587049961090088\r\nStep 113, loss: 5.682382106781006\r\nStep 114, loss: 5.476174354553223\r\nStep 115, loss: 5.305208206176758\r\nStep 116, loss: 5.514387607574463\r\nStep 117, loss: 5.555805683135986\r\nStep 118, loss: 5.512825012207031\r\nStep 119, loss: 5.548687934875488\r\nStep 120, loss: 5.7550129890441895\r\nStep 121, loss: 5.477591037750244\r\nStep 123, loss: 5.591671943664551\r\nStep 124, loss: 5.552427768707275\r\nStep 125, loss: 5.480953693389893\r\nStep 126, loss: 5.44342041015625\r\nStep 127, loss: 5.484697341918945\r\nStep 128, loss: 5.3651628494262695\r\nStep 129, loss: 5.565526485443115\r\nStep 130, loss: 5.390408515930176\r\nStep 131, loss: 5.672516822814941\r\nStep 132, loss: 5.253285884857178\r\nStep 133, loss: 5.514707565307617\r\nStep 134, loss: 5.446695804595947\r\nStep 135, loss: 5.264585971832275\r\nStep 136, loss: 5.206966876983643\r\nStep 137, loss: 5.244028568267822\r\nStep 138, loss: 5.259130001068115\r\nStep 139, loss: 5.409638404846191\r\nStep 140, loss: 5.364806652069092\r\nStep 141, loss: 5.430793285369873\r\nStep 142, loss: 5.492854118347168\r\nStep 143, loss: 5.460545539855957\r\nStep 144, loss: 5.295399188995361\r\nStep 145, loss: 5.345186710357666\r\nStep 146, loss: 5.332350254058838\r\nStep 147, loss: 5.30581521987915\r\nStep 148, loss: 5.272929668426514\r\nStep 149, loss: 5.335284233093262\r\nStep 150, loss: 5.263681411743164\r\nStep 151, loss: 5.497401714324951\r\nStep 152, loss: 5.332529544830322\r\nStep 92, loss: 5.435694694519043\r\nStep 93, loss: 5.665273189544678\r\nStep 94, loss: 5.677303791046143\r\nStep 95, loss: 5.618515968322754\r\nStep 96, loss: 5.68934965133667\r\nStep 97, loss: 5.642867565155029\r\nStep 98, loss: 5.6840128898620605\r\nStep 99, loss: 5.468347072601318\r\nStep 100, loss: 5.604738235473633\r\nStep 101, loss: 5.668688774108887\r\nStep 102, loss: 5.606945037841797\r\nStep 103, loss: 5.629029750823975\r\nStep 104, loss: 5.692020893096924\r\nStep 105, loss: 5.50466251373291\r\nStep 106, loss: 5.762575626373291\r\nStep 107, loss: 5.649835586547852\r\nStep 108, loss: 5.581820011138916\r\nStep 109, loss: 5.673094272613525\r\nStep 110, loss: 5.519227504730225\r\nStep 111, loss: 5.321502208709717\r\nStep 112, loss: 5.587049961090088\r\nStep 113, loss: 5.682382106781006\r\nStep 114, loss: 5.476174354553223\r\nStep 115, loss: 5.305208206176758\r\nStep 116, loss: 5.514387607574463\r\nStep 117, loss: 5.555805683135986\r\nStep 118, loss: 5.512825012207031\r\nStep 119, loss: 5.548687934875488\r\nStep 120, loss: 5.7550129890441895\r\nStep 121, loss: 5.477591037750244\r\nStep 213, loss: 5.033364295959473\r\nStep 214, loss: 5.064614772796631\r\nStep 215, loss: 4.9741997718811035\r\nStep 216, loss: 4.983123302459717\r\nStep 217, loss: 5.148205757141113\r\nStep 218, loss: 4.974318504333496\r\nStep 219, loss: 5.155987739562988\r\nStep 220, loss: 5.045630931854248\r\nStep 221, loss: 5.335808277130127\r\nStep 222, loss: 5.223829746246338\r\nStep 223, loss: 4.957863807678223\r\nStep 224, loss: 
5.040962219238281\r\nStep 225, loss: 5.144440650939941\r\nStep 226, loss: 5.1168084144592285\r\nStep 227, loss: 4.904585838317871\r\nStep 228, loss: 5.029483318328857\r\nStep 229, loss: 5.0828447341918945\r\nStep 230, loss: 5.140675067901611\r\nStep 231, loss: 4.984954357147217\r\nStep 232, loss: 4.971665382385254\r\nStep 233, loss: 5.0076189041137695\r\nStep 234, loss: 4.8605265617370605\r\nStep 235, loss: 5.105206489562988\r\nStep 236, loss: 4.828299522399902\r\nStep 237, loss: 5.1811604499816895\r\nStep 238, loss: 4.999291896820068\r\nStep 239, loss: 5.052023410797119\r\nStep 240, loss: 4.781774044036865\r\nStep 241, loss: 4.8852996826171875\r\nStep 213, loss: 5.033364295959473\r\nStep 214, loss: 5.064614772796631\r\nStep 215, loss: 4.9741997718811035\r\nStep 216, loss: 4.983123302459717\r\nStep 217, loss: 5.148205757141113\r\nStep 218, loss: 4.974318504333496\r\nStep 219, loss: 5.155987739562988\r\nStep 220, loss: 5.045630931854248\r\nStep 221, loss: 5.335808277130127\r\nStep 222, loss: 5.223829746246338\r\nStep 223, loss: 4.957863807678223\r\nStep 224, loss: 5.040962219238281\r\nStep 225, loss: 5.144440650939941\r\nStep 226, loss: 5.1168084144592285\r\nStep 227, loss: 4.904585838317871\r\nStep 228, loss: 5.029483318328857\r\nStep 229, loss: 5.0828447341918945\r\nStep 230, loss: 5.140675067901611\r\nStep 231, loss: 4.984954357147217\r\nStep 232, loss: 4.971665382385254\r\nStep 233, loss: 5.0076189041137695\r\nStep 234, loss: 4.8605265617370605\r\nStep 235, loss: 5.105206489562988\r\nStep 236, loss: 4.828299522399902\r\nStep 237, loss: 5.1811604499816895\r\nStep 238, loss: 4.999291896820068\r\nStep 239, loss: 5.052023410797119\r\nStep 240, loss: 4.781774044036865\r\nStep 241, loss: 4.8852996826171875\r\nStep 92, loss: 5.435694694519043\r\nStep 93, loss: 5.665273189544678\r\nStep 94, loss: 5.677303791046143\r\nStep 95, loss: 5.618515968322754\r\nStep 96, loss: 5.68934965133667\r\nStep 97, loss: 5.642867565155029\r\nStep 98, loss: 5.6840128898620605\r\nStep 99, loss: 5.468347072601318\r\nStep 100, loss: 5.604738235473633\r\nStep 101, loss: 5.668688774108887\r\nStep 102, loss: 5.606945037841797\r\nStep 103, loss: 5.629029750823975\r\nStep 104, loss: 5.692020893096924\r\nStep 105, loss: 5.50466251373291\r\nStep 106, loss: 5.762575626373291\r\nStep 107, loss: 5.649835586547852\r\nStep 108, loss: 5.581820011138916\r\nStep 109, loss: 5.673094272613525\r\nStep 110, loss: 5.519227504730225\r\nStep 111, loss: 5.321502208709717\r\nStep 112, loss: 5.587049961090088\r\nStep 113, loss: 5.682382106781006\r\nStep 114, loss: 5.476174354553223\r\nStep 115, loss: 5.305208206176758\r\nStep 116, loss: 5.514387607574463\r\nStep 117, loss: 5.555805683135986\r\nStep 118, loss: 5.512825012207031\r\nStep 119, loss: 5.548687934875488\r\nStep 120, loss: 5.7550129890441895\r\nStep 121, loss: 5.477591037750244\r\nStep 92, loss: 5.435694694519043\r\nStep 93, loss: 5.665273189544678\r\nStep 94, loss: 5.677303791046143\r\nStep 95, loss: 5.618515968322754\r\nStep 96, loss: 5.68934965133667\r\nStep 97, loss: 5.642867565155029\r\nStep 98, loss: 5.6840128898620605\r\nStep 99, loss: 5.468347072601318\r\nStep 100, loss: 5.604738235473633\r\nStep 101, loss: 5.668688774108887\r\nStep 102, loss: 5.606945037841797\r\nStep 103, loss: 5.629029750823975\r\nStep 104, loss: 5.692020893096924\r\nStep 105, loss: 5.50466251373291\r\nStep 106, loss: 5.762575626373291\r\nStep 107, loss: 5.649835586547852\r\nStep 108, loss: 5.581820011138916\r\nStep 109, loss: 5.673094272613525\r\nStep 110, loss: 5.519227504730225\r\nStep 111, loss: 
5.321502208709717\r\n[loss log condensed: steps 0-242 were printed by multiple ranks, interleaved and repeated verbatim; duplicate blocks removed, representative values kept]\r\nStep 0, loss: 12.949273109436035\r\nStep 30, loss: 6.3359222412109375\r\nStep 61, loss: 6.035395622253418\r\nStep 91, loss: 5.654018878936768\r\nStep 121, loss: 5.477591037750244\r\nStep 152, loss: 5.332529544830322\r\nStep 182, loss: 5.188632965087891\r\nStep 212, loss: 4.996925354003906\r\nStep 242, loss: 5.068990707397461\r\nStep 228, loss: 
5.029483318328857\r\nStep 229, loss: 5.0828447341918945\r\nStep 230, loss: 5.140675067901611\r\nStep 231, loss: 4.984954357147217\r\nStep 232, loss: 4.971665382385254\r\nStep 233, loss: 5.0076189041137695\r\nStep 234, loss: 4.8605265617370605\r\nStep 235, loss: 5.105206489562988\r\nStep 236, loss: 4.828299522399902\r\nStep 237, loss: 5.1811604499816895\r\nStep 238, loss: 4.999291896820068\r\nStep 239, loss: 5.052023410797119\r\nStep 240, loss: 4.781774044036865\r\nStep 241, loss: 4.8852996826171875\r\nStep 92, loss: 5.435694694519043\r\nStep 93, loss: 5.665273189544678\r\nStep 94, loss: 5.677303791046143\r\nStep 95, loss: 5.618515968322754\r\nStep 96, loss: 5.68934965133667\r\nStep 97, loss: 5.642867565155029\r\nStep 98, loss: 5.6840128898620605\r\nStep 99, loss: 5.468347072601318\r\nStep 100, loss: 5.604738235473633\r\nStep 101, loss: 5.668688774108887\r\nStep 102, loss: 5.606945037841797\r\nStep 103, loss: 5.629029750823975\r\nStep 104, loss: 5.692020893096924\r\nStep 105, loss: 5.50466251373291\r\nStep 106, loss: 5.762575626373291\r\nStep 107, loss: 5.649835586547852\r\nStep 108, loss: 5.581820011138916\r\nStep 109, loss: 5.673094272613525\r\nStep 110, loss: 5.519227504730225\r\nStep 111, loss: 5.321502208709717\r\nStep 112, loss: 5.587049961090088\r\nStep 113, loss: 5.682382106781006\r\nStep 114, loss: 5.476174354553223\r\nStep 115, loss: 5.305208206176758\r\nStep 116, loss: 5.514387607574463\r\nStep 117, loss: 5.555805683135986\r\nStep 118, loss: 5.512825012207031\r\nStep 119, loss: 5.548687934875488\r\nStep 120, loss: 5.7550129890441895\r\nStep 121, loss: 5.477591037750244\r\nStep 122, loss: 5.400245666503906\r\nStep 31, loss: 6.279480934143066\r\nStep 32, loss: 6.247717380523682\r\nStep 33, loss: 6.217741966247559\r\nStep 34, loss: 6.116343975067139\r\nStep 35, loss: 6.103456974029541\r\nStep 36, loss: 6.241140365600586\r\nStep 37, loss: 6.257292747497559\r\nStep 38, loss: 6.198233127593994\r\nStep 39, loss: 6.313640117645264\r\nStep 40, loss: 6.024031639099121\r\nStep 41, loss: 6.1269941329956055\r\nStep 42, loss: 6.0780029296875\r\nStep 43, loss: 6.035647392272949\r\nStep 44, loss: 6.149398326873779\r\nStep 45, loss: 6.197230339050293\r\nStep 46, loss: 6.02092170715332\r\nStep 47, loss: 5.914671897888184\r\nStep 48, loss: 6.086199760437012\r\nStep 49, loss: 6.124152660369873\r\nStep 50, loss: 5.895163536071777\r\nStep 51, loss: 6.1387505531311035\r\nStep 52, loss: 5.88333797454834\r\nStep 53, loss: 5.988974571228027\r\nStep 54, loss: 5.96379280090332\r\nStep 55, loss: 5.8824992179870605\r\nStep 56, loss: 6.0373430252075195\r\n",,terminal_output +12792,11957629,"TERMINAL",0,0,"7\t",,terminal_output +12793,11957777,"TERMINAL",0,0,"Step 57, loss: 5.906609058380127\r\nStep 58, loss: 5.9038238525390625\r\nStep 59, loss: 5.693182945251465\r\nStep 60, loss: 5.782966136932373\r\nStep 61, loss: 6.035395622253418\r\nStep 242, loss: 5.068990707397461\r\nStep 242, loss: 5.068990707397461\r\nStep 122, loss: 5.400245666503906\r\nStep 122, loss: 5.400245666503906\r\nStep 242, loss: 5.068990707397461\r\nStep 122, loss: 5.400245666503906\r\nStep 123, loss: 5.591671943664551\r\nStep 124, loss: 5.552427768707275\r\nStep 125, loss: 5.480953693389893\r\nStep 126, loss: 5.44342041015625\r\nStep 127, loss: 5.484697341918945\r\nStep 128, loss: 5.3651628494262695\r\nStep 129, loss: 5.565526485443115\r\nStep 130, loss: 5.390408515930176\r\nStep 131, loss: 5.672516822814941\r\nStep 132, loss: 5.253285884857178\r\nStep 133, loss: 5.514707565307617\r\nStep 134, loss: 5.446695804595947\r\nStep 135, loss: 
5.264585971832275\r\nStep 136, loss: 5.206966876983643\r\nStep 137, loss: 5.244028568267822\r\nStep 138, loss: 5.259130001068115\r\nStep 139, loss: 5.409638404846191\r\nStep 140, loss: 5.364806652069092\r\nStep 141, loss: 5.430793285369873\r\nStep 142, loss: 5.492854118347168\r\nStep 143, loss: 5.460545539855957\r\nStep 144, loss: 5.295399188995361\r\nStep 145, loss: 5.345186710357666\r\nStep 146, loss: 5.332350254058838\r\nStep 147, loss: 5.30581521987915\r\nStep 148, loss: 5.272929668426514\r\nStep 149, loss: 5.335284233093262\r\nStep 150, loss: 5.263681411743164\r\nStep 151, loss: 5.497401714324951\r\nStep 152, loss: 5.332529544830322\r\nStep 62, loss: 5.777761936187744\r\nStep 63, loss: 5.839138507843018\r\nStep 64, loss: 5.780827045440674\r\nStep 65, loss: 5.784126281738281\r\nStep 66, loss: 5.7698540687561035\r\nStep 67, loss: 5.763111114501953\r\nStep 68, loss: 5.864319801330566\r\nStep 69, loss: 5.71303129196167\r\nStep 70, loss: 5.857616901397705\r\nStep 71, loss: 5.914539337158203\r\nStep 72, loss: 5.763561248779297\r\nStep 73, loss: 5.778350830078125\r\nStep 74, loss: 5.857618808746338\r\nStep 75, loss: 5.826387882232666\r\nStep 76, loss: 5.937601566314697\r\nStep 77, loss: 5.786263942718506\r\nStep 78, loss: 6.130350589752197\r\nStep 79, loss: 5.7508978843688965\r\nStep 80, loss: 5.663431167602539\r\nStep 81, loss: 5.876394271850586\r\nStep 82, loss: 5.745809555053711\r\nStep 83, loss: 5.695242404937744\r\nStep 84, loss: 5.774994850158691\r\nStep 85, loss: 5.626899242401123\r\nStep 86, loss: 5.6362504959106445\r\nStep 87, loss: 5.740806579589844\r\nStep 88, loss: 5.581630229949951\r\nStep 89, loss: 5.6040849685668945\r\nStep 90, loss: 5.731827259063721\r\nStep 91, loss: 5.654018878936768\r\nStep 123, loss: 5.591671943664551\r\nStep 124, loss: 5.552427768707275\r\nStep 125, loss: 5.480953693389893\r\nStep 126, loss: 5.44342041015625\r\nStep 127, loss: 5.484697341918945\r\nStep 128, loss: 5.3651628494262695\r\nStep 129, loss: 5.565526485443115\r\nStep 130, loss: 5.390408515930176\r\nStep 131, loss: 5.672516822814941\r\nStep 132, loss: 5.253285884857178\r\nStep 133, loss: 5.514707565307617\r\nStep 134, loss: 5.446695804595947\r\nStep 135, loss: 5.264585971832275\r\nStep 136, loss: 5.206966876983643\r\nStep 137, loss: 5.244028568267822\r\nStep 138, loss: 5.259130001068115\r\nStep 139, loss: 5.409638404846191\r\nStep 140, loss: 5.364806652069092\r\nStep 141, loss: 5.430793285369873\r\nStep 142, loss: 5.492854118347168\r\nStep 143, loss: 5.460545539855957\r\nStep 144, loss: 5.295399188995361\r\nStep 145, loss: 5.345186710357666\r\nStep 146, loss: 5.332350254058838\r\nStep 147, loss: 5.30581521987915\r\nStep 148, loss: 5.272929668426514\r\nStep 149, loss: 5.335284233093262\r\nStep 150, loss: 5.263681411743164\r\nStep 151, loss: 5.497401714324951\r\nStep 152, loss: 5.332529544830322\r\nStep 0, loss: 12.949273109436035\r\nStep 1, loss: 11.430440902709961\r\nStep 2, loss: 10.384503364562988\r\nStep 3, loss: 9.531347274780273\r\nStep 4, loss: 9.25166130065918\r\nStep 5, loss: 8.985861778259277\r\nStep 6, loss: 8.578680992126465\r\nStep 7, loss: 8.340399742126465\r\nStep 8, loss: 8.152952194213867\r\nStep 9, loss: 8.026482582092285\r\nStep 10, loss: 7.455098628997803\r\nStep 11, loss: 7.713717937469482\r\nStep 12, loss: 7.560019493103027\r\nStep 13, loss: 7.620123863220215\r\nStep 14, loss: 7.55232572555542\r\nStep 15, loss: 7.407571315765381\r\nStep 16, loss: 7.145195960998535\r\nStep 17, loss: 7.073484897613525\r\nStep 18, loss: 6.959710597991943\r\nStep 19, loss: 
7.133389949798584\r\nStep 20, loss: 6.902515411376953\r\nStep 21, loss: 6.730587959289551\r\nStep 22, loss: 6.896009922027588\r\nStep 23, loss: 6.658592700958252\r\nStep 24, loss: 6.616141319274902\r\nStep 25, loss: 6.4268083572387695\r\nStep 26, loss: 6.476139545440674\r\nStep 27, loss: 6.152318000793457\r\nStep 28, loss: 6.529809474945068\r\nStep 29, loss: 6.423544883728027\r\nStep 30, loss: 6.3359222412109375\r\nStep 123, loss: 5.591671943664551\r\nStep 124, loss: 5.552427768707275\r\nStep 125, loss: 5.480953693389893\r\nStep 126, loss: 5.44342041015625\r\nStep 127, loss: 5.484697341918945\r\nStep 128, loss: 5.3651628494262695\r\nStep 129, loss: 5.565526485443115\r\nStep 130, loss: 5.390408515930176\r\nStep 131, loss: 5.672516822814941\r\nStep 132, loss: 5.253285884857178\r\nStep 133, loss: 5.514707565307617\r\nStep 134, loss: 5.446695804595947\r\nStep 135, loss: 5.264585971832275\r\nStep 136, loss: 5.206966876983643\r\nStep 137, loss: 5.244028568267822\r\nStep 138, loss: 5.259130001068115\r\nStep 139, loss: 5.409638404846191\r\nStep 140, loss: 5.364806652069092\r\nStep 141, loss: 5.430793285369873\r\nStep 142, loss: 5.492854118347168\r\nStep 143, loss: 5.460545539855957\r\nStep 144, loss: 5.295399188995361\r\nStep 145, loss: 5.345186710357666\r\nStep 146, loss: 5.332350254058838\r\nStep 147, loss: 5.30581521987915\r\nStep 148, loss: 5.272929668426514\r\nStep 149, loss: 5.335284233093262\r\nStep 150, loss: 5.263681411743164\r\nStep 151, loss: 5.497401714324951\r\nStep 152, loss: 5.332529544830322\r\nStep 123, loss: 5.591671943664551\r\nStep 124, loss: 5.552427768707275\r\nStep 125, loss: 5.480953693389893\r\nStep 126, loss: 5.44342041015625\r\nStep 127, loss: 5.484697341918945\r\nStep 128, loss: 5.3651628494262695\r\nStep 129, loss: 5.565526485443115\r\nStep 130, loss: 5.390408515930176\r\nStep 131, loss: 5.672516822814941\r\nStep 132, loss: 5.253285884857178\r\nStep 133, loss: 5.514707565307617\r\nStep 134, loss: 5.446695804595947\r\nStep 135, loss: 5.264585971832275\r\nStep 136, loss: 5.206966876983643\r\nStep 137, loss: 5.244028568267822\r\nStep 138, loss: 5.259130001068115\r\nStep 139, loss: 5.409638404846191\r\nStep 140, loss: 5.364806652069092\r\nStep 141, loss: 5.430793285369873\r\nStep 142, loss: 5.492854118347168\r\nStep 143, loss: 5.460545539855957\r\nStep 144, loss: 5.295399188995361\r\nStep 145, loss: 5.345186710357666\r\nStep 146, loss: 5.332350254058838\r\nStep 147, loss: 5.30581521987915\r\nStep 148, loss: 5.272929668426514\r\nStep 149, loss: 5.335284233093262\r\nStep 150, loss: 5.263681411743164\r\nStep 151, loss: 5.497401714324951\r\nStep 152, loss: 5.332529544830322\r\nStep 0, loss: 12.949273109436035\r\nStep 1, loss: 11.430440902709961\r\nStep 2, loss: 10.384503364562988\r\nStep 3, loss: 9.531347274780273\r\nStep 4, loss: 9.25166130065918\r\nStep 5, loss: 8.985861778259277\r\nStep 6, loss: 8.578680992126465\r\nStep 7, loss: 8.340399742126465\r\nStep 8, loss: 8.152952194213867\r\nStep 9, loss: 8.026482582092285\r\nStep 10, loss: 7.455098628997803\r\nStep 11, loss: 7.713717937469482\r\nStep 12, loss: 7.560019493103027\r\nStep 13, loss: 7.620123863220215\r\nStep 14, loss: 7.55232572555542\r\nStep 15, loss: 7.407571315765381\r\nStep 16, loss: 7.145195960998535\r\nStep 17, loss: 7.073484897613525\r\nStep 18, loss: 6.959710597991943\r\nStep 19, loss: 7.133389949798584\r\nStep 20, loss: 6.902515411376953\r\nStep 21, loss: 6.730587959289551\r\nStep 22, loss: 6.896009922027588\r\nStep 23, loss: 6.658592700958252\r\nStep 24, loss: 6.616141319274902\r\nStep 25, loss: 
6.4268083572387695\r\nStep 26, loss: 6.476139545440674\r\nStep 27, loss: 6.152318000793457\r\nStep 28, loss: 6.529809474945068\r\nStep 29, loss: 6.423544883728027\r\nStep 30, loss: 6.3359222412109375\r\nStep 123, loss: 5.591671943664551\r\nStep 124, loss: 5.552427768707275\r\nStep 125, loss: 5.480953693389893\r\nStep 126, loss: 5.44342041015625\r\nStep 127, loss: 5.484697341918945\r\nStep 128, loss: 5.3651628494262695\r\nStep 129, loss: 5.565526485443115\r\nStep 130, loss: 5.390408515930176\r\nStep 131, loss: 5.672516822814941\r\nStep 132, loss: 5.253285884857178\r\nStep 133, loss: 5.514707565307617\r\nStep 134, loss: 5.446695804595947\r\nStep 135, loss: 5.264585971832275\r\nStep 136, loss: 5.206966876983643\r\nStep 137, loss: 5.244028568267822\r\nStep 138, loss: 5.259130001068115\r\nStep 139, loss: 5.409638404846191\r\nStep 140, loss: 5.364806652069092\r\nStep 141, loss: 5.430793285369873\r\nStep 142, loss: 5.492854118347168\r\nStep 143, loss: 5.460545539855957\r\nStep 144, loss: 5.295399188995361\r\nStep 145, loss: 5.345186710357666\r\nStep 146, loss: 5.332350254058838\r\nStep 147, loss: 5.30581521987915\r\nStep 148, loss: 5.272929668426514\r\nStep 149, loss: 5.335284233093262\r\nStep 150, loss: 5.263681411743164\r\nStep 151, loss: 5.497401714324951\r\nStep 152, loss: 5.332529544830322\r\nStep 153, loss: 5.290670394897461\r\nStep 154, loss: 5.283242702484131\r\nStep 155, loss: 5.120883464813232\r\nStep 156, loss: 5.030333042144775\r\nStep 157, loss: 5.376494884490967\r\nStep 158, loss: 5.221498489379883\r\nStep 159, loss: 5.252740859985352\r\nStep 160, loss: 5.322122573852539\r\nStep 161, loss: 5.386086463928223\r\nStep 162, loss: 5.426920413970947\r\nStep 163, loss: 5.201775550842285\r\nStep 164, loss: 5.252358436584473\r\nStep 165, loss: 5.112096309661865\r\nStep 166, loss: 5.3307671546936035\r\nStep 167, loss: 5.441346168518066\r\nStep 168, loss: 5.2537150382995605\r\nStep 169, loss: 5.4077467918396\r\nStep 170, loss: 5.208958148956299\r\nStep 171, loss: 4.978949069976807\r\nStep 172, loss: 5.210290431976318\r\nStep 173, loss: 5.3112053871154785\r\nStep 174, loss: 5.440155029296875\r\nStep 175, loss: 5.209361553192139\r\nStep 176, loss: 5.326188564300537\r\nStep 177, loss: 5.066229820251465\r\nStep 178, loss: 5.061301231384277\r\nStep 179, loss: 5.313010215759277\r\nStep 180, loss: 5.196096420288086\r\nStep 181, loss: 5.29463005065918\r\nStep 182, loss: 5.188632965087891\r\nStep 92, loss: 5.435694694519043\r\nStep 93, loss: 5.665273189544678\r\nStep 94, loss: 5.677303791046143\r\nStep 95, loss: 5.618515968322754\r\nStep 96, loss: 5.68934965133667\r\nStep 97, loss: 5.642867565155029\r\nStep 98, loss: 5.6840128898620605\r\nStep 99, loss: 5.468347072601318\r\nStep 100, loss: 5.604738235473633\r\nStep 101, loss: 5.668688774108887\r\nStep 102, loss: 5.606945037841797\r\nStep 103, loss: 5.629029750823975\r\nStep 104, loss: 5.692020893096924\r\nStep 105, loss: 5.50466251373291\r\nStep 106, loss: 5.762575626373291\r\nStep 107, loss: 5.649835586547852\r\nStep 108, loss: 5.581820011138916\r\nStep 109, loss: 5.673094272613525\r\nStep 110, loss: 5.519227504730225\r\nStep 111, loss: 5.321502208709717\r\nStep 112, loss: 5.587049961090088\r\nStep 113, loss: 5.682382106781006\r\nStep 114, loss: 5.476174354553223\r\nStep 115, loss: 5.305208206176758\r\nStep 116, loss: 5.514387607574463\r\nStep 117, loss: 5.555805683135986\r\nStep 118, loss: 5.512825012207031\r\nStep 119, loss: 5.548687934875488\r\nStep 120, loss: 5.7550129890441895\r\nStep 121, loss: 5.477591037750244\r\nStep 153, loss: 
5.290670394897461\r\nStep 154, loss: 5.283242702484131\r\nStep 155, loss: 5.120883464813232\r\nStep 156, loss: 5.030333042144775\r\nStep 157, loss: 5.376494884490967\r\nStep 158, loss: 5.221498489379883\r\nStep 159, loss: 5.252740859985352\r\nStep 160, loss: 5.322122573852539\r\nStep 161, loss: 5.386086463928223\r\nStep 162, loss: 5.426920413970947\r\nStep 163, loss: 5.201775550842285\r\nStep 164, loss: 5.252358436584473\r\nStep 165, loss: 5.112096309661865\r\nStep 166, loss: 5.3307671546936035\r\nStep 167, loss: 5.441346168518066\r\nStep 168, loss: 5.2537150382995605\r\nStep 169, loss: 5.4077467918396\r\nStep 170, loss: 5.208958148956299\r\nStep 171, loss: 4.978949069976807\r\nStep 172, loss: 5.210290431976318\r\nStep 173, loss: 5.3112053871154785\r\nStep 174, loss: 5.440155029296875\r\nStep 175, loss: 5.209361553192139\r\nStep 176, loss: 5.326188564300537\r\nStep 177, loss: 5.066229820251465\r\nStep 178, loss: 5.061301231384277\r\nStep 179, loss: 5.313010215759277\r\nStep 180, loss: 5.196096420288086\r\nStep 181, loss: 5.29463005065918\r\nStep 182, loss: 5.188632965087891\r\nStep 31, loss: 6.279480934143066\r\nStep 32, loss: 6.247717380523682\r\nStep 33, loss: 6.217741966247559\r\nStep 34, loss: 6.116343975067139\r\nStep 35, loss: 6.103456974029541\r\nStep 36, loss: 6.241140365600586\r\nStep 37, loss: 6.257292747497559\r\nStep 38, loss: 6.198233127593994\r\nStep 39, loss: 6.313640117645264\r\nStep 40, loss: 6.024031639099121\r\nStep 41, loss: 6.1269941329956055\r\nStep 42, loss: 6.0780029296875\r\nStep 43, loss: 6.035647392272949\r\nStep 44, loss: 6.149398326873779\r\nStep 45, loss: 6.197230339050293\r\nStep 46, loss: 6.02092170715332\r\nStep 47, loss: 5.914671897888184\r\nStep 48, loss: 6.086199760437012\r\nStep 49, loss: 6.124152660369873\r\nStep 50, loss: 5.895163536071777\r\nStep 51, loss: 6.1387505531311035\r\nStep 52, loss: 5.88333797454834\r\nStep 53, loss: 5.988974571228027\r\nStep 54, loss: 5.96379280090332\r\nStep 55, loss: 5.8824992179870605\r\nStep 56, loss: 6.0373430252075195\r\nStep 57, loss: 5.906609058380127\r\nStep 58, loss: 5.9038238525390625\r\nStep 59, loss: 5.693182945251465\r\nStep 60, loss: 5.782966136932373\r\nStep 61, loss: 6.035395622253418\r\nStep 153, loss: 5.290670394897461\r\nStep 154, loss: 5.283242702484131\r\nStep 155, loss: 5.120883464813232\r\nStep 156, loss: 5.030333042144775\r\nStep 157, loss: 5.376494884490967\r\nStep 158, loss: 5.221498489379883\r\nStep 159, loss: 5.252740859985352\r\nStep 160, loss: 5.322122573852539\r\nStep 161, loss: 5.386086463928223\r\nStep 162, loss: 5.426920413970947\r\nStep 163, loss: 5.201775550842285\r\nStep 164, loss: 5.252358436584473\r\nStep 165, loss: 5.112096309661865\r\nStep 166, loss: 5.3307671546936035\r\nStep 167, loss: 5.441346168518066\r\nStep 168, loss: 5.2537150382995605\r\nStep 169, loss: 5.4077467918396\r\nStep 170, loss: 5.208958148956299\r\nStep 171, loss: 4.978949069976807\r\nStep 172, loss: 5.210290431976318\r\nStep 173, loss: 5.3112053871154785\r\nStep 174, loss: 5.440155029296875\r\nStep 175, loss: 5.209361553192139\r\nStep 176, loss: 5.326188564300537\r\nStep 177, loss: 5.066229820251465\r\nStep 178, loss: 5.061301231384277\r\nStep 179, loss: 5.313010215759277\r\nStep 180, loss: 5.196096420288086\r\nStep 181, loss: 5.29463005065918\r\nStep 182, loss: 5.188632965087891\r\nStep 153, loss: 5.290670394897461\r\nStep 154, loss: 5.283242702484131\r\nStep 155, loss: 5.120883464813232\r\nStep 156, loss: 5.030333042144775\r\nStep 157, loss: 5.376494884490967\r\nStep 158, loss: 5.221498489379883\r\nStep 159, 
loss: 5.252740859985352\r\nStep 160, loss: 5.322122573852539\r\nStep 161, loss: 5.386086463928223\r\nStep 162, loss: 5.426920413970947\r\nStep 163, loss: 5.201775550842285\r\nStep 164, loss: 5.252358436584473\r\nStep 165, loss: 5.112096309661865\r\nStep 166, loss: 5.3307671546936035\r\nStep 167, loss: 5.441346168518066\r\nStep 168, loss: 5.2537150382995605\r\nStep 169, loss: 5.4077467918396\r\nStep 170, loss: 5.208958148956299\r\nStep 171, loss: 4.978949069976807\r\nStep 172, loss: 5.210290431976318\r\nStep 173, loss: 5.3112053871154785\r\nStep 174, loss: 5.440155029296875\r\nStep 175, loss: 5.209361553192139\r\nStep 176, loss: 5.326188564300537\r\nStep 177, loss: 5.066229820251465\r\nStep 178, loss: 5.061301231384277\r\nStep 179, loss: 5.313010215759277\r\nStep 180, loss: 5.196096420288086\r\nStep 181, loss: 5.29463005065918\r\nStep 182, loss: 5.188632965087891\r\nStep 31, loss: 6.279480934143066\r\nStep 32, loss: 6.247717380523682\r\nStep 33, loss: 6.217741966247559\r\nStep 34, loss: 6.116343975067139\r\nStep 35, loss: 6.103456974029541\r\nStep 36, loss: 6.241140365600586\r\nStep 37, loss: 6.257292747497559\r\nStep 38, loss: 6.198233127593994\r\nStep 39, loss: 6.313640117645264\r\nStep 40, loss: 6.024031639099121\r\nStep 41, loss: 6.1269941329956055\r\nStep 42, loss: 6.0780029296875\r\nStep 43, loss: 6.035647392272949\r\nStep 44, loss: 6.149398326873779\r\nStep 45, loss: 6.197230339050293\r\nStep 46, loss: 6.02092170715332\r\nStep 47, loss: 5.914671897888184\r\nStep 48, loss: 6.086199760437012\r\nStep 49, loss: 6.124152660369873\r\nStep 50, loss: 5.895163536071777\r\nStep 51, loss: 6.1387505531311035\r\nStep 52, loss: 5.88333797454834\r\nStep 53, loss: 5.988974571228027\r\nStep 54, loss: 5.96379280090332\r\nStep 55, loss: 5.8824992179870605\r\nStep 56, loss: 6.0373430252075195\r\nStep 57, loss: 5.906609058380127\r\nStep 58, loss: 5.9038238525390625\r\nStep 59, loss: 5.693182945251465\r\nStep 60, loss: 5.782966136932373\r\nStep 61, loss: 6.035395622253418\r\nStep 153, loss: 5.290670394897461\r\nStep 154, loss: 5.283242702484131\r\nStep 155, loss: 5.120883464813232\r\nStep 156, loss: 5.030333042144775\r\nStep 157, loss: 5.376494884490967\r\nStep 158, loss: 5.221498489379883\r\nStep 159, loss: 5.252740859985352\r\nStep 160, loss: 5.322122573852539\r\nStep 161, loss: 5.386086463928223\r\nStep 162, loss: 5.426920413970947\r\nStep 163, loss: 5.201775550842285\r\nStep 164, loss: 5.252358436584473\r\nStep 165, loss: 5.112096309661865\r\nStep 166, loss: 5.3307671546936035\r\nStep 167, loss: 5.441346168518066\r\nStep 168, loss: 5.2537150382995605\r\nStep 169, loss: 5.4077467918396\r\nStep 170, loss: 5.208958148956299\r\nStep 171, loss: 4.978949069976807\r\nStep 172, loss: 5.210290431976318\r\nStep 173, loss: 5.3112053871154785\r\nStep 174, loss: 5.440155029296875\r\nStep 175, loss: 5.209361553192139\r\nStep 176, loss: 5.326188564300537\r\nStep 177, loss: 5.066229820251465\r\nStep 178, loss: 5.061301231384277\r\nStep 179, loss: 5.313010215759277\r\nStep 180, loss: 5.196096420288086\r\nStep 181, loss: 5.29463005065918\r\nStep 182, loss: 5.188632965087891\r\nStep 183, loss: 5.208115577697754\r\nStep 184, loss: 5.09967041015625\r\nStep 185, loss: 5.159571647644043\r\nStep 186, loss: 5.029715538024902\r\nStep 187, loss: 5.00143575668335\r\nStep 188, loss: 5.231716632843018\r\nStep 189, loss: 5.137259483337402\r\nStep 190, loss: 5.068835258483887\r\nStep 191, loss: 5.226624965667725\r\nStep 192, loss: 5.390087604522705\r\nStep 193, loss: 5.277485370635986\r\nStep 194, loss: 5.260311126708984\r\nStep 
195, loss: 5.015855312347412\r\nStep 196, loss: 5.083849906921387\r\nStep 197, loss: 5.498945236206055\r\nStep 198, loss: 5.294074058532715\r\nStep 199, loss: 5.049220085144043\r\nStep 200, loss: 5.167503356933594\r\nStep 201, loss: 5.086389064788818\r\nStep 202, loss: 5.06535530090332\r\nStep 203, loss: 5.010951519012451\r\nStep 204, loss: 5.160760402679443\r\nStep 205, loss: 5.302981376647949\r\nStep 206, loss: 5.175704479217529\r\nStep 207, loss: 5.061976432800293\r\nStep 208, loss: 5.101398944854736\r\nStep 209, loss: 5.160239219665527\r\nStep 210, loss: 5.067027568817139\r\nStep 211, loss: 4.976753234863281\r\nStep 212, loss: 4.996925354003906\r\nStep 122, loss: 5.400245666503906\r\nStep 183, loss: 5.208115577697754\r\nStep 184, loss: 5.09967041015625\r\nStep 185, loss: 5.159571647644043\r\nStep 186, loss: 5.029715538024902\r\nStep 187, loss: 5.00143575668335\r\nStep 188, loss: 5.231716632843018\r\nStep 189, loss: 5.137259483337402\r\nStep 190, loss: 5.068835258483887\r\nStep 191, loss: 5.226624965667725\r\nStep 192, loss: 5.390087604522705\r\nStep 193, loss: 5.277485370635986\r\nStep 194, loss: 5.260311126708984\r\nStep 195, loss: 5.015855312347412\r\nStep 196, loss: 5.083849906921387\r\nStep 197, loss: 5.498945236206055\r\nStep 198, loss: 5.294074058532715\r\nStep 199, loss: 5.049220085144043\r\nStep 200, loss: 5.167503356933594\r\nStep 201, loss: 5.086389064788818\r\nStep 202, loss: 5.06535530090332\r\nStep 203, loss: 5.010951519012451\r\nStep 204, loss: 5.160760402679443\r\nStep 205, loss: 5.302981376647949\r\nStep 206, loss: 5.175704479217529\r\nStep 207, loss: 5.061976432800293\r\nStep 208, loss: 5.101398944854736\r\nStep 209, loss: 5.160239219665527\r\nStep 210, loss: 5.067027568817139\r\nStep 211, loss: 4.976753234863281\r\nStep 212, loss: 4.996925354003906\r\nStep 62, loss: 5.777761936187744\r\nStep 63, loss: 5.839138507843018\r\nStep 64, loss: 5.780827045440674\r\nStep 65, loss: 5.784126281738281\r\nStep 66, loss: 5.7698540687561035\r\nStep 67, loss: 5.763111114501953\r\nStep 68, loss: 5.864319801330566\r\nStep 69, loss: 5.71303129196167\r\nStep 70, loss: 5.857616901397705\r\nStep 71, loss: 5.914539337158203\r\nStep 72, loss: 5.763561248779297\r\nStep 73, loss: 5.778350830078125\r\nStep 74, loss: 5.857618808746338\r\nStep 75, loss: 5.826387882232666\r\nStep 76, loss: 5.937601566314697\r\nStep 77, loss: 5.786263942718506\r\nStep 78, loss: 6.130350589752197\r\nStep 79, loss: 5.7508978843688965\r\nStep 80, loss: 5.663431167602539\r\nStep 81, loss: 5.876394271850586\r\nStep 82, loss: 5.745809555053711\r\nStep 83, loss: 5.695242404937744\r\nStep 84, loss: 5.774994850158691\r\nStep 85, loss: 5.626899242401123\r\nStep 86, loss: 5.6362504959106445\r\nStep 87, loss: 5.740806579589844\r\nStep 88, loss: 5.581630229949951\r\nStep 89, loss: 5.6040849685668945\r\nStep 90, loss: 5.731827259063721\r\nStep 91, loss: 5.654018878936768\r\nStep 183, loss: 5.208115577697754\r\nStep 184, loss: 5.09967041015625\r\nStep 185, loss: 5.159571647644043\r\nStep 186, loss: 5.029715538024902\r\nStep 187, loss: 5.00143575668335\r\nStep 188, loss: 5.231716632843018\r\nStep 189, loss: 5.137259483337402\r\nStep 190, loss: 5.068835258483887\r\nStep 191, loss: 5.226624965667725\r\nStep 192, loss: 5.390087604522705\r\nStep 193, loss: 5.277485370635986\r\nStep 194, loss: 5.260311126708984\r\nStep 195, loss: 5.015855312347412\r\nStep 196, loss: 5.083849906921387\r\nStep 197, loss: 5.498945236206055\r\nStep 198, loss: 5.294074058532715\r\nStep 199, loss: 5.049220085144043\r\nStep 200, loss: 
5.167503356933594\r\nStep 201, loss: 5.086389064788818\r\nStep 202, loss: 5.06535530090332\r\nStep 203, loss: 5.010951519012451\r\nStep 204, loss: 5.160760402679443\r\nStep 205, loss: 5.302981376647949\r\nStep 206, loss: 5.175704479217529\r\nStep 207, loss: 5.061976432800293\r\nStep 208, loss: 5.101398944854736\r\nStep 209, loss: 5.160239219665527\r\nStep 210, loss: 5.067027568817139\r\nStep 211, loss: 4.976753234863281\r\nStep 212, loss: 4.996925354003906\r\nStep 183, loss: 5.208115577697754\r\nStep 184, loss: 5.09967041015625\r\nStep 185, loss: 5.159571647644043\r\nStep 186, loss: 5.029715538024902\r\nStep 187, loss: 5.00143575668335\r\nStep 188, loss: 5.231716632843018\r\nStep 189, loss: 5.137259483337402\r\nStep 190, loss: 5.068835258483887\r\nStep 191, loss: 5.226624965667725\r\nStep 192, loss: 5.390087604522705\r\nStep 193, loss: 5.277485370635986\r\nStep 194, loss: 5.260311126708984\r\nStep 195, loss: 5.015855312347412\r\nStep 196, loss: 5.083849906921387\r\nStep 197, loss: 5.498945236206055\r\nStep 198, loss: 5.294074058532715\r\nStep 199, loss: 5.049220085144043\r\nStep 200, loss: 5.167503356933594\r\nStep 201, loss: 5.086389064788818\r\nStep 202, loss: 5.06535530090332\r\nStep 203, loss: 5.010951519012451\r\nStep 204, loss: 5.160760402679443\r\nStep 205, loss: 5.302981376647949\r\nStep 206, loss: 5.175704479217529\r\nStep 207, loss: 5.061976432800293\r\nStep 208, loss: 5.101398944854736\r\nStep 209, loss: 5.160239219665527\r\nStep 210, loss: 5.067027568817139\r\nStep 211, loss: 4.976753234863281\r\nStep 212, loss: 4.996925354003906\r\nStep 62, loss: 5.777761936187744\r\nStep 63, loss: 5.839138507843018\r\nStep 64, loss: 5.780827045440674\r\nStep 65, loss: 5.784126281738281\r\nStep 66, loss: 5.7698540687561035\r\nStep 67, loss: 5.763111114501953\r\nStep 68, loss: 5.864319801330566\r\nStep 69, loss: 5.71303129196167\r\nStep 70, loss: 5.857616901397705\r\nStep 71, loss: 5.914539337158203\r\nStep 72, loss: 5.763561248779297\r\nStep 73, loss: 5.778350830078125\r\nStep 74, loss: 5.857618808746338\r\nStep 75, loss: 5.826387882232666\r\nStep 76, loss: 5.937601566314697\r\nStep 77, loss: 5.786263942718506\r\nStep 78, loss: 6.130350589752197\r\nStep 79, loss: 5.7508978843688965\r\nStep 80, loss: 5.663431167602539\r\nStep 81, loss: 5.876394271850586\r\nStep 82, loss: 5.745809555053711\r\nStep 83, loss: 5.695242404937744\r\nStep 84, loss: 5.774994850158691\r\nStep 85, loss: 5.626899242401123\r\nStep 86, loss: 5.6362504959106445\r\nStep 87, loss: 5.740806579589844\r\nStep 88, loss: 5.581630229949951\r\nStep 89, loss: 5.6040849685668945\r\nStep 90, loss: 5.731827259063721\r\nStep 91, loss: 5.654018878936768\r\nStep 183, loss: 5.208115577697754\r\nStep 184, loss: 5.09967041015625\r\nStep 185, loss: 5.159571647644043\r\nStep 186, loss: 5.029715538024902\r\nStep 187, loss: 5.00143575668335\r\nStep 188, loss: 5.231716632843018\r\nStep 189, loss: 5.137259483337402\r\nStep 190, loss: 5.068835258483887\r\nStep 191, loss: 5.226624965667725\r\nStep 192, loss: 5.390087604522705\r\nStep 193, loss: 5.277485370635986\r\nStep 194, loss: 5.260311126708984\r\nStep 195, loss: 5.015855312347412\r\nStep 196, loss: 5.083849906921387\r\nStep 197, loss: 5.498945236206055\r\nStep 198, loss: 5.294074058532715\r\nStep 199, loss: 5.049220085144043\r\nStep 200, loss: 5.167503356933594\r\nStep 201, loss: 5.086389064788818\r\nStep 202, loss: 5.06535530090332\r\nStep 203, loss: 5.010951519012451\r\nStep 204, loss: 5.160760402679443\r\nStep 205, loss: 5.302981376647949\r\nStep 206, loss: 5.175704479217529\r\nStep 207, loss: 
5.061976432800293\r\nStep 208, loss: 5.101398944854736\r\nStep 209, loss: 5.160239219665527\r\nStep 210, loss: 5.067027568817139\r\nStep 211, loss: 4.976753234863281\r\nStep 212, loss: 4.996925354003906\r\nStep 213, loss: 5.033364295959473\r\nStep 214, loss: 5.064614772796631\r\nStep 215, loss: 4.9741997718811035\r\nStep 216, loss: 4.983123302459717\r\nStep 217, loss: 5.148205757141113\r\nStep 218, loss: 4.974318504333496\r\nStep 219, loss: 5.155987739562988\r\nStep 220, loss: 5.045630931854248\r\nStep 221, loss: 5.335808277130127\r\nStep 222, loss: 5.223829746246338\r\nStep 223, loss: 4.957863807678223\r\nStep 224, loss: 5.040962219238281\r\nStep 225, loss: 5.144440650939941\r\nStep 226, loss: 5.1168084144592285\r\nStep 227, loss: 4.904585838317871\r\nStep 228, loss: 5.029483318328857\r\nStep 229, loss: 5.0828447341918945\r\nStep 230, loss: 5.140675067901611\r\nStep 231, loss: 4.984954357147217\r\nStep 232, loss: 4.971665382385254\r\nStep 233, loss: 5.0076189041137695\r\nStep 234, loss: 4.8605265617370605\r\nStep 235, loss: 5.105206489562988\r\nStep 236, loss: 4.828299522399902\r\nStep 237, loss: 5.1811604499816895\r\nStep 238, loss: 4.999291896820068\r\nStep 239, loss: 5.052023410797119\r\nStep 240, loss: 4.781774044036865\r\nStep 241, loss: 4.8852996826171875\r\nStep 123, loss: 5.591671943664551\r\nStep 124, loss: 5.552427768707275\r\nStep 125, loss: 5.480953693389893\r\nStep 126, loss: 5.44342041015625\r\nStep 127, loss: 5.484697341918945\r\nStep 128, loss: 5.3651628494262695\r\nStep 129, loss: 5.565526485443115\r\nStep 130, loss: 5.390408515930176\r\nStep 131, loss: 5.672516822814941\r\nStep 132, loss: 5.253285884857178\r\nStep 133, loss: 5.514707565307617\r\nStep 134, loss: 5.446695804595947\r\nStep 135, loss: 5.264585971832275\r\nStep 136, loss: 5.206966876983643\r\nStep 137, loss: 5.244028568267822\r\nStep 138, loss: 5.259130001068115\r\nStep 139, loss: 5.409638404846191\r\nStep 140, loss: 5.364806652069092\r\nStep 141, loss: 5.430793285369873\r\nStep 142, loss: 5.492854118347168\r\nStep 143, loss: 5.460545539855957\r\nStep 144, loss: 5.295399188995361\r\nStep 145, loss: 5.345186710357666\r\nStep 146, loss: 5.332350254058838\r\nStep 147, loss: 5.30581521987915\r\nStep 148, loss: 5.272929668426514\r\nStep 149, loss: 5.335284233093262\r\nStep 150, loss: 5.263681411743164\r\nStep 151, loss: 5.497401714324951\r\nStep 152, loss: 5.332529544830322\r\nStep 213, loss: 5.033364295959473\r\nStep 214, loss: 5.064614772796631\r\nStep 215, loss: 4.9741997718811035\r\nStep 216, loss: 4.983123302459717\r\nStep 217, loss: 5.148205757141113\r\nStep 218, loss: 4.974318504333496\r\nStep 219, loss: 5.155987739562988\r\nStep 220, loss: 5.045630931854248\r\nStep 221, loss: 5.335808277130127\r\nStep 222, loss: 5.223829746246338\r\nStep 223, loss: 4.957863807678223\r\nStep 224, loss: 5.040962219238281\r\nStep 225, loss: 5.144440650939941\r\nStep 226, loss: 5.1168084144592285\r\nStep 227, loss: 4.904585838317871\r\nStep 228, loss: 5.029483318328857\r\nStep 229, loss: 5.0828447341918945\r\nStep 230, loss: 5.140675067901611\r\nStep 231, loss: 4.984954357147217\r\nStep 232, loss: 4.971665382385254\r\nStep 233, loss: 5.0076189041137695\r\nStep 234, loss: 4.8605265617370605\r\nStep 235, loss: 5.105206489562988\r\nStep 236, loss: 4.828299522399902\r\nStep 237, loss: 5.1811604499816895\r\nStep 238, loss: 4.999291896820068\r\nStep 239, loss: 5.052023410797119\r\nStep 240, loss: 4.781774044036865\r\nStep 241, loss: 4.8852996826171875\r\nStep 92, loss: 5.435694694519043\r\nStep 93, loss: 5.665273189544678\r\nStep 
94, loss: 5.677303791046143\r\nStep 95, loss: 5.618515968322754\r\nStep 96, loss: 5.68934965133667\r\nStep 97, loss: 5.642867565155029\r\nStep 98, loss: 5.6840128898620605\r\nStep 99, loss: 5.468347072601318\r\nStep 100, loss: 5.604738235473633\r\nStep 101, loss: 5.668688774108887\r\nStep 102, loss: 5.606945037841797\r\nStep 103, loss: 5.629029750823975\r\nStep 104, loss: 5.692020893096924\r\nStep 105, loss: 5.50466251373291\r\nStep 106, loss: 5.762575626373291\r\nStep 107, loss: 5.649835586547852\r\nStep 108, loss: 5.581820011138916\r\nStep 109, loss: 5.673094272613525\r\nStep 110, loss: 5.519227504730225\r\nStep 111, loss: 5.321502208709717\r\nStep 112, loss: 5.587049961090088\r\nStep 113, loss: 5.682382106781006\r\nStep 114, loss: 5.476174354553223\r\nStep 115, loss: 5.305208206176758\r\nStep 116, loss: 5.514387607574463\r\nStep 117, loss: 5.555805683135986\r\nStep 118, loss: 5.512825012207031\r\nStep 119, loss: 5.548687934875488\r\nStep 120, loss: 5.7550129890441895\r\nStep 121, loss: 5.477591037750244\r\nStep 213, loss: 5.033364295959473\r\nStep 214, loss: 5.064614772796631\r\nStep 215, loss: 4.9741997718811035\r\nStep 216, loss: 4.983123302459717\r\nStep 217, loss: 5.148205757141113\r\nStep 218, loss: 4.974318504333496\r\nStep 219, loss: 5.155987739562988\r\nStep 220, loss: 5.045630931854248\r\nStep 221, loss: 5.335808277130127\r\nStep 222, loss: 5.223829746246338\r\nStep 223, loss: 4.957863807678223\r\nStep 224, loss: 5.040962219238281\r\nStep 225, loss: 5.144440650939941\r\nStep 226, loss: 5.1168084144592285\r\nStep 227, loss: 4.904585838317871\r\nStep 228, loss: 5.029483318328857\r\nStep 229, loss: 5.0828447341918945\r\nStep 230, loss: 5.140675067901611\r\nStep 231, loss: 4.984954357147217\r\nStep 232, loss: 4.971665382385254\r\nStep 233, loss: 5.0076189041137695\r\nStep 234, loss: 4.8605265617370605\r\nStep 235, loss: 5.105206489562988\r\nStep 236, loss: 4.828299522399902\r\nStep 237, loss: 5.1811604499816895\r\nStep 238, loss: 4.999291896820068\r\nStep 239, loss: 5.052023410797119\r\nStep 240, loss: 4.781774044036865\r\nStep 241, loss: 4.8852996826171875\r\nStep 213, loss: 5.033364295959473\r\nStep 214, loss: 5.064614772796631\r\nStep 215, loss: 4.9741997718811035\r\nStep 216, loss: 4.983123302459717\r\nStep 217, loss: 5.148205757141113\r\nStep 218, loss: 4.974318504333496\r\nStep 219, loss: 5.155987739562988\r\nStep 220, loss: 5.045630931854248\r\nStep 221, loss: 5.335808277130127\r\nStep 222, loss: 5.223829746246338\r\nStep 223, loss: 4.957863807678223\r\nStep 224, loss: 5.040962219238281\r\nStep 225, loss: 5.144440650939941\r\nStep 226, loss: 5.1168084144592285\r\nStep 227, loss: 4.904585838317871\r\nStep 228, loss: 5.029483318328857\r\nStep 229, loss: 5.0828447341918945\r\nStep 230, loss: 5.140675067901611\r\nStep 231, loss: 4.984954357147217\r\nStep 232, loss: 4.971665382385254\r\nStep 233, loss: 5.0076189041137695\r\nStep 234, loss: 4.8605265617370605\r\nStep 235, loss: 5.105206489562988\r\nStep 236, loss: 4.828299522399902\r\nStep 237, loss: 5.1811604499816895\r\nStep 238, loss: 4.999291896820068\r\nStep 239, loss: 5.052023410797119\r\nStep 240, loss: 4.781774044036865\r\nStep 241, loss: 4.8852996826171875\r\nStep 92, loss: 5.435694694519043\r\nStep 93, loss: 5.665273189544678\r\nStep 94, loss: 5.677303791046143\r\nStep 95, loss: 5.618515968322754\r\nStep 96, loss: 5.68934965133667\r\nStep 97, loss: 5.642867565155029\r\nStep 98, loss: 5.6840128898620605\r\nStep 99, loss: 5.468347072601318\r\nStep 100, loss: 5.604738235473633\r\nStep 101, loss: 5.668688774108887\r\nStep 
102, loss: 5.606945037841797\r\nStep 103, loss: 5.629029750823975\r\nStep 104, loss: 5.692020893096924\r\nStep 105, loss: 5.50466251373291\r\nStep 106, loss: 5.762575626373291\r\nStep 107, loss: 5.649835586547852\r\nStep 108, loss: 5.581820011138916\r\nStep 109, loss: 5.673094272613525\r\nStep 110, loss: 5.519227504730225\r\nStep 111, loss: 5.321502208709717\r\nStep 112, loss: 5.587049961090088\r\nStep 113, loss: 5.682382106781006\r\nStep 114, loss: 5.476174354553223\r\nStep 115, loss: 5.305208206176758\r\nStep 116, loss: 5.514387607574463\r\nStep 117, loss: 5.555805683135986\r\nStep 118, loss: 5.512825012207031\r\nStep 119, loss: 5.548687934875488\r\nStep 120, loss: 5.7550129890441895\r\nStep 121, loss: 5.477591037750244\r\nStep 213, loss: 5.033364295959473\r\nStep 214, loss: 5.064614772796631\r\nStep 215, loss: 4.9741997718811035\r\nStep 216, loss: 4.983123302459717\r\nStep 217, loss: 5.148205757141113\r\nStep 218, loss: 4.974318504333496\r\nStep 219, loss: 5.155987739562988\r\nStep 220, loss: 5.045630931854248\r\nStep 221, loss: 5.335808277130127\r\nStep 222, loss: 5.223829746246338\r\nStep 223, loss: 4.957863807678223\r\nStep 224, loss: 5.040962219238281\r\nStep 225, loss: 5.144440650939941\r\nStep 226, loss: 5.1168084144592285\r\nStep 227, loss: 4.904585838317871\r\nStep 228, loss: 5.029483318328857\r\nStep 229, loss: 5.0828447341918945\r\nStep 230, loss: 5.140675067901611\r\nStep 231, loss: 4.984954357147217\r\nStep 232, loss: 4.971665382385254\r\nStep 233, loss: 5.0076189041137695\r\nStep 234, loss: 4.8605265617370605\r\nStep 235, loss: 5.105206489562988\r\nStep 236, loss: 4.828299522399902\r\nStep 237, loss: 5.1811604499816895\r\nStep 238, loss: 4.999291896820068\r\nStep 239, loss: 5.052023410797119\r\nStep 240, loss: 4.781774044036865\r\nStep 241, loss: 4.8852996826171875\r\nStep 242, loss: 5.068990707397461\r\nStep 153, loss: 5.290670394897461\r\nStep 154, loss: 5.283242702484131\r\nStep 155, loss: 5.120883464813232\r\nStep 156, loss: 5.030333042144775\r\nStep 157, loss: 5.376494884490967\r\nStep 158, loss: 5.221498489379883\r\nStep 159, loss: 5.252740859985352\r\nStep 160, loss: 5.322122573852539\r\nStep 161, loss: 5.386086463928223\r\nStep 162, loss: 5.426920413970947\r\nStep 163, loss: 5.201775550842285\r\nStep 164, loss: 5.252358436584473\r\nStep 165, loss: 5.112096309661865\r\nStep 166, loss: 5.3307671546936035\r\nStep 167, loss: 5.441346168518066\r\nStep 168, loss: 5.2537150382995605\r\nStep 169, loss: 5.4077467918396\r\nStep 170, loss: 5.208958148956299\r\nStep 171, loss: 4.978949069976807\r\nStep 172, loss: 5.210290431976318\r\nStep 173, loss: 5.3112053871154785\r\nStep 174, loss: 5.440155029296875\r\nStep 175, loss: 5.209361553192139\r\nStep 176, loss: 5.326188564300537\r\nStep 177, loss: 5.066229820251465\r\nStep 178, loss: 5.061301231384277\r\nStep 179, loss: 5.313010215759277\r\nStep 180, loss: 5.196096420288086\r\nStep 181, loss: 5.29463005065918\r\nStep 182, loss: 5.188632965087891\r\nStep 242, loss: 5.068990707397461\r\nStep 122, loss: 5.400245666503906\r\nStep 242, loss: 5.068990707397461\r\nStep 242, loss: 5.068990707397461\r\nStep 122, loss: 5.400245666503906\r\nStep 242, loss: 5.068990707397461\r\nStep 123, loss: 5.591671943664551\r\nStep 124, loss: 5.552427768707275\r\nStep 125, loss: 5.480953693389893\r\nStep 126, loss: 5.44342041015625\r\nStep 127, loss: 5.484697341918945\r\nStep 128, loss: 5.3651628494262695\r\nStep 129, loss: 5.565526485443115\r\nStep 130, loss: 5.390408515930176\r\nStep 131, loss: 5.672516822814941\r\nStep 132, loss: 
5.253285884857178\r\nStep 133, loss: 5.514707565307617\r\nStep 134, loss: 5.446695804595947\r\nStep 135, loss: 5.264585971832275\r\nStep 136, loss: 5.206966876983643\r\nStep 137, loss: 5.244028568267822\r\nStep 138, loss: 5.259130001068115\r\nStep 139, loss: 5.409638404846191\r\nStep 140, loss: 5.364806652069092\r\nStep 141, loss: 5.430793285369873\r\nStep 142, loss: 5.492854118347168\r\nStep 143, loss: 5.460545539855957\r\nStep 144, loss: 5.295399188995361\r\nStep 145, loss: 5.345186710357666\r\nStep 146, loss: 5.332350254058838\r\nStep 147, loss: 5.30581521987915\r\nStep 148, loss: 5.272929668426514\r\nStep 149, loss: 5.335284233093262\r\nStep 150, loss: 5.263681411743164\r\nStep 151, loss: 5.497401714324951\r\nStep 152, loss: 5.332529544830322\r\nStep 183, loss: 5.208115577697754\r\nStep 184, loss: 5.09967041015625\r\nStep 185, loss: 5.159571647644043\r\nStep 186, loss: 5.029715538024902\r\nStep 187, loss: 5.00143575668335\r\nStep 188, loss: 5.231716632843018\r\nStep 189, loss: 5.137259483337402\r\nStep 190, loss: 5.068835258483887\r\nStep 191, loss: 5.226624965667725\r\nStep 192, loss: 5.390087604522705\r\nStep 193, loss: 5.277485370635986\r\nStep 194, loss: 5.260311126708984\r\nStep 195, loss: 5.015855312347412\r\nStep 196, loss: 5.083849906921387\r\nStep 197, loss: 5.498945236206055\r\nStep 198, loss: 5.294074058532715\r\nStep 199, loss: 5.049220085144043\r\nStep 200, loss: 5.167503356933594\r\nStep 201, loss: 5.086389064788818\r\nStep 202, loss: 5.06535530090332\r\nStep 203, loss: 5.010951519012451\r\nStep 204, loss: 5.160760402679443\r\nStep 205, loss: 5.302981376647949\r\nStep 206, loss: 5.175704479217529\r\nStep 207, loss: 5.061976432800293\r\nStep 208, loss: 5.101398944854736\r\nStep 209, loss: 5.160239219665527\r\nStep 210, loss: 5.067027568817139\r\nStep 211, loss: 4.976753234863281\r\nStep 212, loss: 4.996925354003906\r\nStep 123, loss: 5.591671943664551\r\nStep 124, loss: 5.552427768707275\r\nStep 125, loss: 5.480953693389893\r\nStep 126, loss: 5.44342041015625\r\nStep 127, loss: 5.484697341918945\r\nStep 128, loss: 5.3651628494262695\r\nStep 129, loss: 5.565526485443115\r\nStep 130, loss: 5.390408515930176\r\nStep 131, loss: 5.672516822814941\r\nStep 132, loss: 5.253285884857178\r\nStep 133, loss: 5.514707565307617\r\nStep 134, loss: 5.446695804595947\r\nStep 135, loss: 5.264585971832275\r\nStep 136, loss: 5.206966876983643\r\nStep 137, loss: 5.244028568267822\r\nStep 138, loss: 5.259130001068115\r\nStep 139, loss: 5.409638404846191\r\nStep 140, loss: 5.364806652069092\r\nStep 141, loss: 5.430793285369873\r\nStep 142, loss: 5.492854118347168\r\nStep 143, loss: 5.460545539855957\r\nStep 144, loss: 5.295399188995361\r\nStep 145, loss: 5.345186710357666\r\nStep 146, loss: 5.332350254058838\r\nStep 147, loss: 5.30581521987915\r\nStep 148, loss: 5.272929668426514\r\nStep 149, loss: 5.335284233093262\r\nStep 150, loss: 5.263681411743164\r\nStep 151, loss: 5.497401714324951\r\nStep 152, loss: 5.332529544830322\r\nStep 123, loss: 5.591671943664551\r\nStep 124, loss: 5.552427768707275\r\nStep 125, loss: 5.480953693389893\r\nStep 126, loss: 5.44342041015625\r\nStep 127, loss: 5.484697341918945\r\nStep 128, loss: 5.3651628494262695\r\nStep 129, loss: 5.565526485443115\r\nStep 130, loss: 5.390408515930176\r\nStep 131, loss: 5.672516822814941\r\nStep 132, loss: 5.253285884857178\r\nStep 133, loss: 5.514707565307617\r\nStep 134, loss: 5.446695804595947\r\nStep 135, loss: 5.264585971832275\r\nStep 136, loss: 5.206966876983643\r\nStep 137, loss: 5.244028568267822\r\nStep 138, loss: 
5.259130001068115\r\nStep 139, loss: 5.409638404846191\r\nStep 140, loss: 5.364806652069092\r\nStep 141, loss: 5.430793285369873\r\nStep 142, loss: 5.492854118347168\r\nStep 143, loss: 5.460545539855957\r\nStep 144, loss: 5.295399188995361\r\nStep 145, loss: 5.345186710357666\r\nStep 146, loss: 5.332350254058838\r\nStep 147, loss: 5.30581521987915\r\nStep 148, loss: 5.272929668426514\r\nStep 149, loss: 5.335284233093262\r\nStep 150, loss: 5.263681411743164\r\nStep 151, loss: 5.497401714324951\r\nStep 152, loss: 5.332529544830322\r\nStep 123, loss: 5.591671943664551\r\nStep 124, loss: 5.552427768707275\r\nStep 125, loss: 5.480953693389893\r\nStep 126, loss: 5.44342041015625\r\nStep 127, loss: 5.484697341918945\r\nStep 128, loss: 5.3651628494262695\r\nStep 129, loss: 5.565526485443115\r\nStep 130, loss: 5.390408515930176\r\nStep 131, loss: 5.672516822814941\r\nStep 132, loss: 5.253285884857178\r\nStep 133, loss: 5.514707565307617\r\nStep 134, loss: 5.446695804595947\r\nStep 135, loss: 5.264585971832275\r\nStep 136, loss: 5.206966876983643\r\nStep 137, loss: 5.244028568267822\r\nStep 138, loss: 5.259130001068115\r\nStep 139, loss: 5.409638404846191\r\nStep 140, loss: 5.364806652069092\r\nStep 141, loss: 5.430793285369873\r\nStep 142, loss: 5.492854118347168\r\nStep 143, loss: 5.460545539855957\r\nStep 144, loss: 5.295399188995361\r\nStep 145, loss: 5.345186710357666\r\nStep 146, loss: 5.332350254058838\r\nStep 147, loss: 5.30581521987915\r\nStep 148, loss: 5.272929668426514\r\nStep 149, loss: 5.335284233093262\r\nStep 150, loss: 5.263681411743164\r\nStep 151, loss: 5.497401714324951\r\nStep 152, loss: 5.332529544830322\r\nStep 123, loss: 5.591671943664551\r\nStep 124, loss: 5.552427768707275\r\nStep 125, loss: 5.480953693389893\r\nStep 126, loss: 5.44342041015625\r\nStep 127, loss: 5.484697341918945\r\nStep 128, loss: 5.3651628494262695\r\nStep 129, loss: 5.565526485443115\r\nStep 130, loss: 5.390408515930176\r\nStep 131, loss: 5.672516822814941\r\nStep 132, loss: 5.253285884857178\r\nStep 133, loss: 5.514707565307617\r\nStep 134, loss: 5.446695804595947\r\nStep 135, loss: 5.264585971832275\r\nStep 136, loss: 5.206966876983643\r\nStep 137, loss: 5.244028568267822\r\nStep 138, loss: 5.259130001068115\r\nStep 139, loss: 5.409638404846191\r\nStep 140, loss: 5.364806652069092\r\nStep 141, loss: 5.430793285369873\r\nStep 142, loss: 5.492854118347168\r\nStep 143, loss: 5.460545539855957\r\nStep 144, loss: 5.295399188995361\r\nStep 145, loss: 5.345186710357666\r\nStep 146, loss: 5.332350254058838\r\nStep 147, loss: 5.30581521987915\r\nStep 148, loss: 5.272929668426514\r\nStep 149, loss: 5.335284233093262\r\nStep 150, loss: 5.263681411743164\r\nStep 151, loss: 5.497401714324951\r\nStep 152, loss: 5.332529544830322\r\nStep 123, loss: 5.591671943664551\r\nStep 124, loss: 5.552427768707275\r\nStep 125, loss: 5.480953693389893\r\nStep 126, loss: 5.44342041015625\r\nStep 127, loss: 5.484697341918945\r\nStep 128, loss: 5.3651628494262695\r\nStep 129, loss: 5.565526485443115\r\nStep 130, loss: 5.390408515930176\r\nStep 131, loss: 5.672516822814941\r\nStep 132, loss: 5.253285884857178\r\nStep 133, loss: 5.514707565307617\r\nStep 134, loss: 5.446695804595947\r\nStep 135, loss: 5.264585971832275\r\nStep 136, loss: 5.206966876983643\r\nStep 137, loss: 5.244028568267822\r\nStep 138, loss: 5.259130001068115\r\nStep 139, loss: 5.409638404846191\r\nStep 140, loss: 5.364806652069092\r\nStep 141, loss: 5.430793285369873\r\nStep 142, loss: 5.492854118347168\r\nStep 143, loss: 5.460545539855957\r\nStep 144, loss: 
5.295399188995361\r\nStep 145, loss: 5.345186710357666\r\nStep 146, loss: 5.332350254058838\r\nStep 147, loss: 5.30581521987915\r\nStep 148, loss: 5.272929668426514\r\nStep 149, loss: 5.335284233093262\r\nStep 150, loss: 5.263681411743164\r\nStep 151, loss: 5.497401714324951\r\nStep 152, loss: 5.332529544830322\r\nStep 153, loss: 5.290670394897461\r\nStep 154, loss: 5.283242702484131\r\nStep 155, loss: 5.120883464813232\r\nStep 156, loss: 5.030333042144775\r\nStep 157, loss: 5.376494884490967\r\nStep 158, loss: 5.221498489379883\r\nStep 159, loss: 5.252740859985352\r\nStep 160, loss: 5.322122573852539\r\nStep 161, loss: 5.386086463928223\r\nStep 162, loss: 5.426920413970947\r\nStep 163, loss: 5.201775550842285\r\nStep 164, loss: 5.252358436584473\r\nStep 165, loss: 5.112096309661865\r\nStep 166, loss: 5.3307671546936035\r\nStep 167, loss: 5.441346168518066\r\nStep 168, loss: 5.2537150382995605\r\nStep 169, loss: 5.4077467918396\r\nStep 170, loss: 5.208958148956299\r\nStep 171, loss: 4.978949069976807\r\nStep 172, loss: 5.210290431976318\r\nStep 173, loss: 5.3112053871154785\r\nStep 174, loss: 5.440155029296875\r\nStep 175, loss: 5.209361553192139\r\nStep 176, loss: 5.326188564300537\r\nStep 177, loss: 5.066229820251465\r\nStep 178, loss: 5.061301231384277\r\nStep 179, loss: 5.313010215759277\r\nStep 180, loss: 5.196096420288086\r\nStep 181, loss: 5.29463005065918\r\nStep 182, loss: 5.188632965087891\r\nStep 183, loss: 5.208115577697754\r\nStep 184, loss: 5.09967041015625\r\nStep 185, loss: 5.159571647644043\r\nStep 186, loss: 5.029715538024902\r\nStep 187, loss: 5.00143575668335\r\nStep 188, loss: 5.231716632843018\r\nStep 189, loss: 5.137259483337402\r\nStep 190, loss: 5.068835258483887\r\nStep 191, loss: 5.226624965667725\r\nStep 192, loss: 5.390087604522705\r\nStep 193, loss: 5.277485370635986\r\nStep 194, loss: 5.260311126708984\r\nStep 195, loss: 5.015855312347412\r\nStep 196, loss: 5.083849906921387\r\nStep 197, loss: 5.498945236206055\r\nStep 198, loss: 5.294074058532715\r\nStep 199, loss: 5.049220085144043\r\nStep 200, loss: 5.167503356933594\r\nStep 201, loss: 5.086389064788818\r\nStep 202, loss: 5.06535530090332\r\nStep 203, loss: 5.010951519012451\r\nStep 204, loss: 5.160760402679443\r\nStep 205, loss: 5.302981376647949\r\nStep 206, loss: 5.175704479217529\r\nStep 207, loss: 5.061976432800293\r\nStep 208, loss: 5.101398944854736\r\nStep 209, loss: 5.160239219665527\r\nStep 210, loss: 5.067027568817139\r\nStep 211, loss: 4.976753234863281\r\nStep 212, loss: 4.996925354003906\r\nStep 213, loss: 5.033364295959473\r\nStep 214, loss: 5.064614772796631\r\nStep 215, loss: 4.9741997718811035\r\nStep 216, loss: 4.983123302459717\r\nStep 217, loss: 5.148205757141113\r\nStep 218, loss: 4.974318504333496\r\nStep 219, loss: 5.155987739562988\r\nStep 220, loss: 
5.045630931854248\r\nStep 221, loss: 5.335808277130127\r\nStep 222, loss: 5.223829746246338\r\nStep 223, loss: 4.957863807678223\r\nStep 224, loss: 5.040962219238281\r\nStep 225, loss: 5.144440650939941\r\nStep 226, loss: 5.1168084144592285\r\nStep 227, loss: 4.904585838317871\r\nStep 228, loss: 5.029483318328857\r\nStep 229, loss: 5.0828447341918945\r\nStep 230, loss: 5.140675067901611\r\nStep 231, loss: 4.984954357147217\r\nStep 232, loss: 4.971665382385254\r\nStep 233, loss: 5.0076189041137695\r\nStep 234, loss: 4.8605265617370605\r\nStep 235, loss: 5.105206489562988\r\nStep 236, loss: 4.828299522399902\r\nStep 237, loss: 5.1811604499816895\r\nStep 238, loss: 4.999291896820068\r\nStep 239, loss: 5.052023410797119\r\nStep 240, loss: 4.781774044036865\r\nStep 241, loss: 4.8852996826171875\r\nStep 242, loss: 5.068990707397461\r\nStep 242, loss: 5.068990707397461\r\nStep 242, loss: 5.068990707397461\r\nStep 242, loss: 5.068990707397461\r\nStep 242, loss: 5.068990707397461\r\nStep 242, loss: 5.068990707397461\r\nStep 242, loss: 5.068990707397461\r\n",,terminal_output +12794,11957778,"TERMINAL",0,0,"722101",,terminal_output +12795,11958641,"TERMINAL",0,0,"8\t",,terminal_output +12796,11958885,"TERMINAL",0,0,"83312",,terminal_output +12797,11959657,"TERMINAL",0,0,"9\t",,terminal_output +12798,11959754,"TERMINAL",0,0,"tail",,terminal_focus +12799,11959840,"TERMINAL",0,0,"94423",,terminal_output +12800,11960748,"TERMINAL",0,0,"30\t",,terminal_output +12801,11960890,"TERMINAL",0,0,"305534",,terminal_output +12802,11961739,"TERMINAL",0,0,"1\t",,terminal_output +12803,11961919,"TERMINAL",0,0,"16645",,terminal_output +12804,11962355,"TERMINAL",0,0,"^C\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining",,terminal_output +12805,11962772,"TERMINAL",0,0,"2\t",,terminal_output +12806,11963015,"TERMINAL",0,0,"27756",,terminal_output +12807,11963808,"TERMINAL",0,0,"3\t",,terminal_output +12808,11964019,"TERMINAL",0,0,"38867",,terminal_output +12809,11964849,"TERMINAL",0,0,"4\t",,terminal_output +12810,11965070,"TERMINAL",0,0,"49978",,terminal_output +12811,11965882,"TERMINAL",0,0,"5\t",,terminal_output +12812,11966089,"TERMINAL",0,0,"5404089",,terminal_output +12813,11966969,"TERMINAL",0,0,"6\t",,terminal_output +12814,11967131,"TERMINAL",0,0,"611920",,terminal_output +12815,11967955,"TERMINAL",0,0,"7\t",,terminal_output +12816,11968162,"TERMINAL",0,0,"722201",,terminal_output +12817,11968995,"TERMINAL",0,0,"8\t",,terminal_output +12818,11969195,"TERMINAL",0,0,"83312",,terminal_output +12819,11970137,"TERMINAL",0,0,"9\t",,terminal_output +12820,11970234,"TERMINAL",0,0,"94423",,terminal_output +12821,11971161,"TERMINAL",0,0,"40\t",,terminal_output +12822,11971305,"TERMINAL",0,0,"406645",,terminal_output +12823,11972187,"TERMINAL",0,0,"1\t",,terminal_output +12824,11972324,"TERMINAL",0,0,"27756",,terminal_output +12825,11973210,"TERMINAL",0,0,"2\t",,terminal_output +12826,11973352,"TERMINAL",0,0,"38867",,terminal_output +12827,11974232,"TERMINAL",0,0,"3\t",,terminal_output +12828,11974380,"TERMINAL",0,0,"49978",,terminal_output +12829,11975224,"TERMINAL",0,0,"4\t",,terminal_output +12830,11975549,"TERMINAL",0,0,"5505089",,terminal_output +12831,11976280,"TERMINAL",0,0,"5\t",,terminal_output +12832,11976454,"TERMINAL",0,0,"611930",,terminal_output +12833,11977304,"TERMINAL",0,0,"66",,terminal_output +12834,11977722,"TERMINAL",0,0,"722301",,terminal_output +12835,11978431,"TERMINAL",0,0,"8\t",,terminal_output 
+12836,11978533,"TERMINAL",0,0,"83312",,terminal_output +12837,11979373,"TERMINAL",0,0,"9\t",,terminal_output +12838,11979577,"TERMINAL",0,0,"94423",,terminal_output +12839,11980391,"TERMINAL",0,0,"50\t",,terminal_output +12840,11980605,"TERMINAL",0,0,"505534",,terminal_output +12841,11981503,"TERMINAL",0,0,"1\t",,terminal_output +12842,11981640,"TERMINAL",0,0,"16645",,terminal_output +12843,11982522,"TERMINAL",0,0,"2\t",,terminal_output +12844,11982677,"TERMINAL",0,0,"27756",,terminal_output +12845,11983546,"TERMINAL",0,0,"3\t",,terminal_output +12846,11983751,"TERMINAL",0,0,"38867",,terminal_output +12847,11984544,"TERMINAL",0,0,"4\t",,terminal_output +12848,11984756,"TERMINAL",0,0,"49978",,terminal_output +12849,11987442,"TERMINAL",0,0,"530:0230:024041",,terminal_output +12850,11987443,"TERMINAL",0,0,"55",,terminal_output +12851,11988480,"TERMINAL",0,0,"83312",,terminal_output +12852,11988522,"TERMINAL",0,0,"8\t",,terminal_output +12853,11989549,"TERMINAL",0,0,"94423",,terminal_output +12854,11989549,"TERMINAL",0,0,"9\t",,terminal_output +12855,11990655,"TERMINAL",0,0,"6:005534",,terminal_output +12856,11990655,"TERMINAL",0,0,"6:00\t",,terminal_output +12857,11991644,"TERMINAL",0,0,"16645",,terminal_output +12858,11991644,"TERMINAL",0,0,"1\t",,terminal_output +12859,11992627,"TERMINAL",0,0,"27756",,terminal_output +12860,11992628,"TERMINAL",0,0,"2\t",,terminal_output +12861,11993731,"TERMINAL",0,0,"38867",,terminal_output +12862,11993731,"TERMINAL",0,0,"3\t",,terminal_output +12863,11994719,"TERMINAL",0,0,"4\t",,terminal_output +12864,11994762,"TERMINAL",0,0,"49978",,terminal_output +12865,11996556,"TERMINAL",0,0,"51111950",,terminal_output +12866,11996556,"TERMINAL",0,0,"56",,terminal_output +12867,11997580,"TERMINAL",0,0,"722501",,terminal_output +12868,11997580,"TERMINAL",0,0,"7\t",,terminal_output +12869,11998605,"TERMINAL",0,0,"83312",,terminal_output +12870,11998605,"TERMINAL",0,0,"8\t",,terminal_output +12871,11999629,"TERMINAL",0,0,"9\t",,terminal_output +12872,11999630,"TERMINAL",0,0,"94423",,terminal_output +12873,12000633,"TERMINAL",0,0,"10\t",,terminal_output +12874,12000639,"TERMINAL",0,0,"105534",,terminal_output +12875,12001655,"TERMINAL",0,0,"1\t",,terminal_output +12876,12001669,"TERMINAL",0,0,"16645",,terminal_output +12877,12002727,"TERMINAL",0,0,"27756",,terminal_output +12878,12002768,"TERMINAL",0,0,"2\t",,terminal_output +12879,12003747,"TERMINAL",0,0,"38867",,terminal_output +12880,12003747,"TERMINAL",0,0,"3\t",,terminal_output +12881,12004761,"TERMINAL",0,0,"49978",,terminal_output +12882,12004762,"TERMINAL",0,0,"4\t",,terminal_output +12883,12005888,"TERMINAL",0,0,"5\t",,terminal_output +12884,12005888,"TERMINAL",0,0,"5202089",,terminal_output +12885,12006941,"TERMINAL",0,0,"6\t",,terminal_output +12886,12006942,"TERMINAL",0,0,"61194:00",,terminal_output +12887,12007900,"TERMINAL",0,0,"7\t",,terminal_output +12888,12007922,"TERMINAL",0,0,"7229:001",,terminal_output +12889,12008917,"TERMINAL",0,0,"8\t",,terminal_output +12890,12008920,"TERMINAL",0,0,"83312",,terminal_output +12891,12009959,"TERMINAL",0,0,"9\t",,terminal_output +12892,12009959,"TERMINAL",0,0,"94423",,terminal_output +12893,12010988,"TERMINAL",0,0,"20\t",,terminal_output +12894,12010994,"TERMINAL",0,0,"205534",,terminal_output +12895,12012122,"TERMINAL",0,0,"1\t",,terminal_output +12896,12012133,"TERMINAL",0,0,"16645",,terminal_output +12897,12013070,"TERMINAL",0,0,"2\t",,terminal_output +12898,12013070,"TERMINAL",0,0,"27756",,terminal_output +12899,12014176,"TERMINAL",0,0,"38867",,terminal_output 
+12900,12014176,"TERMINAL",0,0,"3\t",,terminal_output +12901,12015199,"TERMINAL",0,0,"49978",,terminal_output +12902,12015199,"TERMINAL",0,0,"4\t",,terminal_output +12903,12016183,"TERMINAL",0,0,"5303089",,terminal_output +12904,12016185,"TERMINAL",0,0,"5\t",,terminal_output +12905,12017219,"TERMINAL",0,0,"6\t",,terminal_output +12906,12017220,"TERMINAL",0,0,"611910",,terminal_output +12907,12018373,"TERMINAL",0,0,"7\t",,terminal_output +12908,12018373,"TERMINAL",0,0,"722101",,terminal_output +12909,12019372,"TERMINAL",0,0,"84423",,terminal_output +12910,12019372,"TERMINAL",0,0,"8\t",,terminal_output +12911,12020336,"TERMINAL",0,0,"30\t",,terminal_output +12912,12020336,"TERMINAL",0,0,"305534",,terminal_output +12913,12021378,"TERMINAL",0,0,"1\t",,terminal_output +12914,12021378,"TERMINAL",0,0,"16645",,terminal_output +12915,12022404,"TERMINAL",0,0,"2\t",,terminal_output +12916,12022404,"TERMINAL",0,0,"27756",,terminal_output +12917,12023420,"TERMINAL",0,0,"3\t",,terminal_output +12918,12023428,"TERMINAL",0,0,"38867",,terminal_output +12919,12024455,"TERMINAL",0,0,"4\t",,terminal_output +12920,12024469,"TERMINAL",0,0,"49978",,terminal_output +12921,12025532,"TERMINAL",0,0,"5\t",,terminal_output +12922,12025532,"TERMINAL",0,0,"5404089",,terminal_output +12923,12026617,"TERMINAL",0,0,"6\t",,terminal_output +12924,12026618,"TERMINAL",0,0,"611920",,terminal_output +12925,12027586,"TERMINAL",0,0,"7\t",,terminal_output +12926,12027587,"TERMINAL",0,0,"722201",,terminal_output +12927,12028610,"TERMINAL",0,0,"8\t",,terminal_output +12928,12028632,"TERMINAL",0,0,"83312",,terminal_output +12929,12029775,"TERMINAL",0,0,"9\t",,terminal_output +12930,12029775,"TERMINAL",0,0,"94423",,terminal_output +12931,12030757,"TERMINAL",0,0,"40\t",,terminal_output +12932,12030757,"TERMINAL",0,0,"405534",,terminal_output +12933,12031764,"TERMINAL",0,0,"1\t",,terminal_output +12934,12031773,"TERMINAL",0,0,"16645",,terminal_output +12935,12032779,"TERMINAL",0,0,"2\t",,terminal_output +12936,12032779,"TERMINAL",0,0,"27756",,terminal_output +12937,12033834,"TERMINAL",0,0,"3\t",,terminal_output +12938,12033834,"TERMINAL",0,0,"38867",,terminal_output +12939,12034854,"TERMINAL",0,0,"4\t",,terminal_output +12940,12034854,"TERMINAL",0,0,"49978",,terminal_output +12941,12035891,"TERMINAL",0,0,"5\t",,terminal_output +12942,12035899,"TERMINAL",0,0,"5505089",,terminal_output +12943,12036911,"TERMINAL",0,0,"6\t",,terminal_output +12944,12036917,"TERMINAL",0,0,"611930",,terminal_output +12945,12037976,"TERMINAL",0,0,"7\t",,terminal_output +12946,12038029,"TERMINAL",0,0,"722301",,terminal_output +12947,12039059,"TERMINAL",0,0,"8\t",,terminal_output +12948,12039067,"TERMINAL",0,0,"83312",,terminal_output +12949,12040082,"TERMINAL",0,0,"9\t",,terminal_output +12950,12040121,"TERMINAL",0,0,"94423",,terminal_output +12951,12041068,"TERMINAL",0,0,"50\t",,terminal_output +12952,12041068,"TERMINAL",0,0,"505534",,terminal_output +12953,12042227,"TERMINAL",0,0,"16645",,terminal_output +12954,12042228,"TERMINAL",0,0,"17",,terminal_output +12955,12043250,"TERMINAL",0,0,"2\t",,terminal_output +12956,12043251,"TERMINAL",0,0,"27756",,terminal_output +12957,12044270,"TERMINAL",0,0,"3\t",,terminal_output +12958,12044270,"TERMINAL",0,0,"38867",,terminal_output +12959,12045241,"TERMINAL",0,0,"4\t",,terminal_output +12960,12045241,"TERMINAL",0,0,"49978",,terminal_output +12961,12046327,"TERMINAL",0,0,"5\t",,terminal_output +12962,12046327,"TERMINAL",0,0,"51:011:01940",,terminal_output +12963,12047347,"TERMINAL",0,0,"6\t",,terminal_output 
+12964,12047347,"TERMINAL",0,0,"722401",,terminal_output +12965,12048366,"TERMINAL",0,0,"8\t",,terminal_output +12966,12048367,"TERMINAL",0,0,"83312",,terminal_output +12967,12049409,"TERMINAL",0,0,"9\t",,terminal_output +12968,12049451,"TERMINAL",0,0,"94423",,terminal_output +12969,12050523,"TERMINAL",0,0,"7:00\t",,terminal_output +12970,12050523,"TERMINAL",0,0,"7:005534",,terminal_output +12971,12051443,"TERMINAL",0,0,"1\t",,terminal_output +12972,12051452,"TERMINAL",0,0,"16645",,terminal_output +12973,12052479,"TERMINAL",0,0,"2\t",,terminal_output +12974,12052494,"TERMINAL",0,0,"27756",,terminal_output +12975,12053593,"TERMINAL",0,0,"3\t",,terminal_output +12976,12053594,"TERMINAL",0,0,"38867",,terminal_output +12977,12054617,"TERMINAL",0,0,"4\t",,terminal_output +12978,12054617,"TERMINAL",0,0,"49978",,terminal_output +12979,12055598,"TERMINAL",0,0,"5\t",,terminal_output +12980,12055619,"TERMINAL",0,0,"5101089",,terminal_output +12981,12056666,"TERMINAL",0,0,"6\t",,terminal_output +12982,12056666,"TERMINAL",0,0,"611950",,terminal_output +12983,12057661,"TERMINAL",0,0,"7\t",,terminal_output +12984,12057700,"TERMINAL",0,0,"722501",,terminal_output +12985,12058733,"TERMINAL",0,0,"8\t",,terminal_output +12986,12058749,"TERMINAL",0,0,"83312",,terminal_output +12987,12059835,"TERMINAL",0,0,"9\t",,terminal_output +12988,12059840,"TERMINAL",0,0,"94423",,terminal_output +12989,12060788,"TERMINAL",0,0,"10\t",,terminal_output +12990,12060798,"TERMINAL",0,0,"105534",,terminal_output +12991,12061813,"TERMINAL",0,0,"1\t",,terminal_output +12992,12061846,"TERMINAL",0,0,"16645",,terminal_output +12993,12062850,"TERMINAL",0,0,"2\t",,terminal_output +12994,12062888,"TERMINAL",0,0,"27756",,terminal_output +12995,12063885,"TERMINAL",0,0,"3\t",,terminal_output +12996,12063919,"TERMINAL",0,0,"38867",,terminal_output +12997,12064967,"TERMINAL",0,0,"4\t",,terminal_output +12998,12064973,"TERMINAL",0,0,"49978",,terminal_output +12999,12065961,"TERMINAL",0,0,"5\t",,terminal_output +13000,12065989,"TERMINAL",0,0,"5202089",,terminal_output +13001,12067103,"TERMINAL",0,0,"6\t",,terminal_output +13002,12067103,"TERMINAL",0,0,"61195:00",,terminal_output +13003,12068134,"TERMINAL",0,0,"7\t",,terminal_output +13004,12068140,"TERMINAL",0,0,"72210:001",,terminal_output +13005,12069080,"TERMINAL",0,0,"8\t",,terminal_output +13006,12069105,"TERMINAL",0,0,"83312",,terminal_output +13007,12070112,"TERMINAL",0,0,"9\t",,terminal_output +13008,12070176,"TERMINAL",0,0,"94423",,terminal_output +13009,12071218,"TERMINAL",0,0,"20\t",,terminal_output +13010,12071218,"TERMINAL",0,0,"205534",,terminal_output +13011,12072191,"TERMINAL",0,0,"1\t",,terminal_output +13012,12072217,"TERMINAL",0,0,"16645",,terminal_output +13013,12073256,"TERMINAL",0,0,"2\t",,terminal_output +13014,12073258,"TERMINAL",0,0,"27756",,terminal_output +13015,12074387,"TERMINAL",0,0,"3\t",,terminal_output +13016,12074388,"TERMINAL",0,0,"39978",,terminal_output +13017,12075321,"TERMINAL",0,0,"4\t",,terminal_output +13018,12075341,"TERMINAL",0,0,"5303089",,terminal_output +13019,12076441,"TERMINAL",0,0,"6\t",,terminal_output +13020,12076447,"TERMINAL",0,0,"611910",,terminal_output +13021,12077403,"TERMINAL",0,0,"7\t",,terminal_output +13022,12077421,"TERMINAL",0,0,"722101",,terminal_output +13023,12078477,"TERMINAL",0,0,"8\t",,terminal_output +13024,12078477,"TERMINAL",0,0,"83312",,terminal_output +13025,12079489,"TERMINAL",0,0,"9\t",,terminal_output +13026,12079523,"TERMINAL",0,0,"94423",,terminal_output +13027,12080517,"TERMINAL",0,0,"30\t",,terminal_output 
+13028,12080552,"TERMINAL",0,0,"305534",,terminal_output +13029,12081647,"TERMINAL",0,0,"1\t",,terminal_output +13030,12081656,"TERMINAL",0,0,"16645",,terminal_output +13031,12082663,"TERMINAL",0,0,"2\t",,terminal_output +13032,12082669,"TERMINAL",0,0,"27756",,terminal_output +13033,12083715,"TERMINAL",0,0,"3\t",,terminal_output +13034,12083716,"TERMINAL",0,0,"38867",,terminal_output +13035,12084783,"TERMINAL",0,0,"4\t",,terminal_output +13036,12084783,"TERMINAL",0,0,"49978",,terminal_output +13037,12085769,"TERMINAL",0,0,"5\t",,terminal_output +13038,12085807,"TERMINAL",0,0,"5404089",,terminal_output +13039,12086762,"TERMINAL",0,0,"6\t",,terminal_output +13040,12086821,"TERMINAL",0,0,"611920",,terminal_output +13041,12087805,"TERMINAL",0,0,"7\t",,terminal_output +13042,12087853,"TERMINAL",0,0,"722201",,terminal_output +13043,12088831,"TERMINAL",0,0,"8\t",,terminal_output +13044,12088894,"TERMINAL",0,0,"83312",,terminal_output +13045,12089878,"TERMINAL",0,0,"9\t",,terminal_output +13046,12089970,"TERMINAL",0,0,"94423",,terminal_output +13047,12090919,"TERMINAL",0,0,"40\t",,terminal_output +13048,12091017,"TERMINAL",0,0,"405534",,terminal_output +13049,12091952,"TERMINAL",0,0,"1\t",,terminal_output +13050,12092054,"TERMINAL",0,0,"16645",,terminal_output +13051,12092978,"TERMINAL",0,0,"2\t",,terminal_output +13052,12093077,"TERMINAL",0,0,"27756",,terminal_output +13053,12094017,"TERMINAL",0,0,"3\t",,terminal_output +13054,12094113,"TERMINAL",0,0,"38867",,terminal_output +13055,12095065,"TERMINAL",0,0,"4\t",,terminal_output +13056,12095161,"TERMINAL",0,0,"49978",,terminal_output +13057,12096096,"TERMINAL",0,0,"5\t",,terminal_output +13058,12096248,"TERMINAL",0,0,"5505089",,terminal_output +13059,12097146,"TERMINAL",0,0,"6\t",,terminal_output +13060,12097249,"TERMINAL",0,0,"611930",,terminal_output +13061,12098257,"TERMINAL",0,0,"7\t",,terminal_output +13062,12098288,"TERMINAL",0,0,"733312",,terminal_output +13063,12099280,"TERMINAL",0,0,"8\t",,terminal_output +13064,12099338,"TERMINAL",0,0,"94423",,terminal_output +13065,12100253,"TERMINAL",0,0,"9\t",,terminal_output +13066,12100391,"TERMINAL",0,0,"505534",,terminal_output +13067,12101484,"TERMINAL",0,0,"50\t",,terminal_output +13068,12101485,"TERMINAL",0,0,"16645",,terminal_output +13069,12102336,"TERMINAL",0,0,"2\t",,terminal_output +13070,12102454,"TERMINAL",0,0,"27756",,terminal_output +13071,12103385,"TERMINAL",0,0,"3\t",,terminal_output +13072,12103556,"TERMINAL",0,0,"38867",,terminal_output +13073,12104425,"TERMINAL",0,0,"4\t",,terminal_output +13074,12104597,"TERMINAL",0,0,"49978",,terminal_output +13075,12105516,"TERMINAL",0,0,"5\t",,terminal_output +13076,12105607,"TERMINAL",0,0,"52:002:0089",,terminal_output +13077,12106554,"TERMINAL",0,0,"6\t",,terminal_output +13078,12106653,"TERMINAL",0,0,"611940",,terminal_output +13079,12107571,"TERMINAL",0,0,"7\t",,terminal_output +13080,12107708,"TERMINAL",0,0,"722401",,terminal_output +13081,12110484,"TERMINAL",0,0,"85534",,terminal_output +13082,12110484,"TERMINAL",0,0,"86",,terminal_output +13083,12111805,"TERMINAL",0,0,"8:016645",,terminal_output +13084,12111805,"TERMINAL",0,0,"8:015",,terminal_output +13085,12112792,"TERMINAL",0,0,"2\t",,terminal_output +13086,12112792,"TERMINAL",0,0,"27756",,terminal_output +13087,12113823,"TERMINAL",0,0,"3\t",,terminal_output +13088,12113825,"TERMINAL",0,0,"38867",,terminal_output +13089,12114921,"TERMINAL",0,0,"4\t",,terminal_output +13090,12114921,"TERMINAL",0,0,"49978",,terminal_output +13091,12116317,"TERMINAL",0,0,"5\t",,terminal_output 
+13092,12116317,"TERMINAL",0,0,"5101089",,terminal_output +13093,12116948,"TERMINAL",0,0,"6\t",,terminal_output +13094,12116963,"TERMINAL",0,0,"611950",,terminal_output +13095,12117981,"TERMINAL",0,0,"7\t",,terminal_output +13096,12117998,"TERMINAL",0,0,"722501",,terminal_output +13097,12119019,"TERMINAL",0,0,"8\t",,terminal_output +13098,12119042,"TERMINAL",0,0,"83312",,terminal_output +13099,12120070,"TERMINAL",0,0,"9\t",,terminal_output +13100,12120087,"TERMINAL",0,0,"94423",,terminal_output +13101,12121108,"TERMINAL",0,0,"10\t",,terminal_output +13102,12121129,"TERMINAL",0,0,"105534",,terminal_output +13103,12122214,"TERMINAL",0,0,"1\t",,terminal_output +13104,12122214,"TERMINAL",0,0,"16645",,terminal_output +13105,12123229,"TERMINAL",0,0,"2\t",,terminal_output +13106,12123229,"TERMINAL",0,0,"27756",,terminal_output +13107,12124241,"TERMINAL",0,0,"3\t",,terminal_output +13108,12124297,"TERMINAL",0,0,"38867",,terminal_output +13109,12125273,"TERMINAL",0,0,"4\t",,terminal_output +13110,12125321,"TERMINAL",0,0,"4202089",,terminal_output +13111,12126273,"TERMINAL",0,0,"5\t",,terminal_output +13112,12126346,"TERMINAL",0,0,"61196:00",,terminal_output +13113,12127319,"TERMINAL",0,0,"7\t",,terminal_output +13114,12127375,"TERMINAL",0,0,"7221:001",,terminal_output +13115,12128346,"TERMINAL",0,0,"8\t",,terminal_output +13116,12128415,"TERMINAL",0,0,"83312",,terminal_output +13117,12129381,"TERMINAL",0,0,"9\t",,terminal_output +13118,12129472,"TERMINAL",0,0,"94423",,terminal_output +13119,12130425,"TERMINAL",0,0,"20\t",,terminal_output +13120,12130484,"TERMINAL",0,0,"205534",,terminal_output +13121,12131520,"TERMINAL",0,0,"1\t",,terminal_output +13122,12131529,"TERMINAL",0,0,"16645",,terminal_output +13123,12132544,"TERMINAL",0,0,"2\t",,terminal_output +13124,12132563,"TERMINAL",0,0,"27756",,terminal_output +13125,12133534,"TERMINAL",0,0,"3\t",,terminal_output +13126,12133630,"TERMINAL",0,0,"38867",,terminal_output +13127,12134694,"TERMINAL",0,0,"4\t",,terminal_output +13128,12134694,"TERMINAL",0,0,"49978",,terminal_output +13129,12135718,"TERMINAL",0,0,"5\t",,terminal_output +13130,12135719,"TERMINAL",0,0,"5303089",,terminal_output +13131,12136650,"TERMINAL",0,0,"6\t",,terminal_output +13132,12136703,"TERMINAL",0,0,"611910",,terminal_output +13133,12137672,"TERMINAL",0,0,"7\t",,terminal_output +13134,12137737,"TERMINAL",0,0,"722101",,terminal_output +13135,12138707,"TERMINAL",0,0,"8\t",,terminal_output +13136,12138826,"TERMINAL",0,0,"83312",,terminal_output +13137,12139744,"TERMINAL",0,0,"9\t",,terminal_output +13138,12139887,"TERMINAL",0,0,"94423",,terminal_output +13139,12140781,"TERMINAL",0,0,"30\t",,terminal_output +13140,12140880,"TERMINAL",0,0,"305534",,terminal_output +13141,12141886,"TERMINAL",0,0,"1\t",,terminal_output +13142,12141892,"TERMINAL",0,0,"16645",,terminal_output +13143,12142856,"TERMINAL",0,0,"2\t",,terminal_output +13144,12142931,"TERMINAL",0,0,"27756",,terminal_output +13145,12143910,"TERMINAL",0,0,"3\t",,terminal_output +13146,12143968,"TERMINAL",0,0,"38867",,terminal_output +13147,12144923,"TERMINAL",0,0,"4\t",,terminal_output +13148,12145020,"TERMINAL",0,0,"49978",,terminal_output +13149,12145967,"TERMINAL",0,0,"5\t",,terminal_output +13150,12146061,"TERMINAL",0,0,"5404089",,terminal_output +13151,12147000,"TERMINAL",0,0,"6\t",,terminal_output +13152,12147077,"TERMINAL",0,0,"611920",,terminal_output +13153,12148051,"TERMINAL",0,0,"7\t",,terminal_output +13154,12148112,"TERMINAL",0,0,"722201",,terminal_output +13155,12149089,"TERMINAL",0,0,"8\t",,terminal_output 
+13156,12149154,"TERMINAL",0,0,"83312",,terminal_output +13157,12150113,"TERMINAL",0,0,"9\t",,terminal_output +13158,12150210,"TERMINAL",0,0,"94423",,terminal_output +13159,12151181,"TERMINAL",0,0,"40\t",,terminal_output +13160,12151238,"TERMINAL",0,0,"405534",,terminal_output +13161,12152208,"TERMINAL",0,0,"1\t",,terminal_output +13162,12152261,"TERMINAL",0,0,"16645",,terminal_output +13163,12153205,"TERMINAL",0,0,"2\t",,terminal_output +13164,12153297,"TERMINAL",0,0,"28867",,terminal_output +13165,12154242,"TERMINAL",0,0,"3\t",,terminal_output +13166,12154337,"TERMINAL",0,0,"49978",,terminal_output +13167,12155277,"TERMINAL",0,0,"4\t",,terminal_output +13168,12155373,"TERMINAL",0,0,"5505089",,terminal_output +13169,12156406,"TERMINAL",0,0,"6\t",,terminal_output +13170,12156411,"TERMINAL",0,0,"611930",,terminal_output +13171,12157422,"TERMINAL",0,0,"7\t",,terminal_output +13172,12157458,"TERMINAL",0,0,"722301",,terminal_output +13173,12158487,"TERMINAL",0,0,"8\t",,terminal_output +13174,12158493,"TERMINAL",0,0,"83312",,terminal_output +13175,12159417,"TERMINAL",0,0,"9\t",,terminal_output +13176,12159524,"TERMINAL",0,0,"94423",,terminal_output +13177,12160499,"TERMINAL",0,0,"50\t",,terminal_output +13178,12160561,"TERMINAL",0,0,"505534",,terminal_output +13179,12161483,"TERMINAL",0,0,"1\t",,terminal_output +13180,12161630,"TERMINAL",0,0,"16645",,terminal_output +13181,12162548,"TERMINAL",0,0,"2\t",,terminal_output +13182,12162638,"TERMINAL",0,0,"27756",,terminal_output +13183,12163676,"TERMINAL",0,0,"3\t",,terminal_output +13184,12163676,"TERMINAL",0,0,"38867",,terminal_output +13185,12164584,"TERMINAL",0,0,"4\t",,terminal_output +13186,12164722,"TERMINAL",0,0,"49978",,terminal_output +13187,12165717,"TERMINAL",0,0,"5\t",,terminal_output +13188,12165757,"TERMINAL",0,0,"53:003:0089",,terminal_output +13189,12166668,"TERMINAL",0,0,"6\t",,terminal_output +13190,12166809,"TERMINAL",0,0,"611940",,terminal_output +13191,12167701,"TERMINAL",0,0,"7\t",,terminal_output +13192,12167824,"TERMINAL",0,0,"722401",,terminal_output +13193,12168794,"TERMINAL",0,0,"8\t",,terminal_output +13194,12168896,"TERMINAL",0,0,"83312",,terminal_output +13195,12169826,"TERMINAL",0,0,"9\t",,terminal_output +13196,12169898,"TERMINAL",0,0,"94423",,terminal_output +13197,12170835,"TERMINAL",0,0,"9:00\t",,terminal_output +13198,12170974,"TERMINAL",0,0,"9:005534",,terminal_output +13199,12171852,"TERMINAL",0,0,"1\t",,terminal_output +13200,12171989,"TERMINAL",0,0,"16645",,terminal_output +13201,12172898,"TERMINAL",0,0,"2\t",,terminal_output +13202,12173036,"TERMINAL",0,0,"27756",,terminal_output +13203,12173927,"TERMINAL",0,0,"3\t",,terminal_output +13204,12174063,"TERMINAL",0,0,"38867",,terminal_output +13205,12174965,"TERMINAL",0,0,"4\t",,terminal_output +13206,12175119,"TERMINAL",0,0,"49978",,terminal_output +13207,12176000,"TERMINAL",0,0,"5\t",,terminal_output +13208,12176141,"TERMINAL",0,0,"5101089",,terminal_output +13209,12177091,"TERMINAL",0,0,"6\t",,terminal_output +13210,12177176,"TERMINAL",0,0,"611950",,terminal_output +13211,12178107,"TERMINAL",0,0,"7\t",,terminal_output +13212,12178257,"TERMINAL",0,0,"722501",,terminal_output +13213,12179180,"TERMINAL",0,0,"8\t",,terminal_output +13214,12179275,"TERMINAL",0,0,"83312",,terminal_output +13215,12180164,"TERMINAL",0,0,"9\t",,terminal_output +13216,12180374,"TERMINAL",0,0,"95534",,terminal_output +13217,12181162,"TERMINAL",0,0,"10\t",,terminal_output +13218,12181313,"TERMINAL",0,0,"116645",,terminal_output +13219,12182349,"TERMINAL",0,0,"1\t",,terminal_output 
+13220,12182352,"TERMINAL",0,0,"27756",,terminal_output +13221,12183337,"TERMINAL",0,0,"2\t",,terminal_output +13222,12183439,"TERMINAL",0,0,"38867",,terminal_output +13223,12184358,"TERMINAL",0,0,"3\t",,terminal_output +13224,12184486,"TERMINAL",0,0,"49978",,terminal_output +13225,12185317,"TERMINAL",0,0,"5\t",,terminal_output +13226,12185470,"TERMINAL",0,0,"5202089",,terminal_output +13227,12186136,"TERMINAL",0,0,"watch",,terminal_focus +13228,12186372,"TERMINAL",0,0,"6\t",,terminal_output +13229,12186521,"TERMINAL",0,0,"61197:00",,terminal_output +13230,12187421,"TERMINAL",0,0,"7\t",,terminal_output +13231,12187577,"TERMINAL",0,0,"7222:001",,terminal_output +13232,12188455,"TERMINAL",0,0,"8\t",,terminal_output +13233,12188659,"TERMINAL",0,0,"83312",,terminal_output +13234,12189473,"TERMINAL",0,0,"9\t",,terminal_output +13235,12189617,"TERMINAL",0,0,"94423",,terminal_output +13236,12190617,"TERMINAL",0,0,"20\t",,terminal_output +13237,12190657,"TERMINAL",0,0,"205534",,terminal_output +13238,12191575,"TERMINAL",0,0,"1\t",,terminal_output +13239,12191719,"TERMINAL",0,0,"16645",,terminal_output +13240,12192664,"TERMINAL",0,0,"2\t",,terminal_output +13241,12192737,"TERMINAL",0,0,"27756",,terminal_output +13242,12193604,"TERMINAL",0,0,"3\t",,terminal_output +13243,12193791,"TERMINAL",0,0,"38867",,terminal_output +13244,12194701,"TERMINAL",0,0,"4\t",,terminal_output +13245,12194819,"TERMINAL",0,0,"49978",,terminal_output +13246,12195717,"TERMINAL",0,0,"5\t",,terminal_output +13247,12195856,"TERMINAL",0,0,"5303089",,terminal_output +13248,12196766,"TERMINAL",0,0,"6\t",,terminal_output +13249,12196906,"TERMINAL",0,0,"611910",,terminal_output +13250,12197744,"TERMINAL",0,0,"7\t",,terminal_output +13251,12197924,"TERMINAL",0,0,"722101",,terminal_output +13252,12198814,"TERMINAL",0,0,"8\t",,terminal_output +13253,12198971,"TERMINAL",0,0,"83312",,terminal_output +13254,12199828,"TERMINAL",0,0,"9\t",,terminal_output +13255,12199998,"TERMINAL",0,0,"94423",,terminal_output +13256,12200861,"TERMINAL",0,0,"30\t",,terminal_output +13257,12201036,"TERMINAL",0,0,"305534",,terminal_output +13258,12201898,"TERMINAL",0,0,"1\t",,terminal_output +13259,12202078,"TERMINAL",0,0,"16645",,terminal_output +13260,12202928,"TERMINAL",0,0,"2\t",,terminal_output +13261,12203116,"TERMINAL",0,0,"27756",,terminal_output +13262,12203971,"TERMINAL",0,0,"3\t",,terminal_output +13263,12204154,"TERMINAL",0,0,"38867",,terminal_output +13264,12205001,"TERMINAL",0,0,"4\t",,terminal_output +13265,12205201,"TERMINAL",0,0,"49978",,terminal_output +13266,12206028,"TERMINAL",0,0,"5\t",,terminal_output +13267,12206231,"TERMINAL",0,0,"5404089",,terminal_output +13268,12207128,"TERMINAL",0,0,"6\t",,terminal_output +13269,12207270,"TERMINAL",0,0,"611920",,terminal_output +13270,12208236,"TERMINAL",0,0,"7\t",,terminal_output +13271,12208368,"TERMINAL",0,0,"733212",,terminal_output +13272,12209141,"TERMINAL",0,0,"8\t",,terminal_output +13273,12209350,"TERMINAL",0,0,"94423",,terminal_output +13274,12210219,"TERMINAL",0,0,"9\t",,terminal_output +13275,12210374,"TERMINAL",0,0,"405534",,terminal_output +13276,12211221,"TERMINAL",0,0,"40\t",,terminal_output +13277,12211417,"TERMINAL",0,0,"16645",,terminal_output +13278,12212254,"TERMINAL",0,0,"1\t",,terminal_output +13279,12212453,"TERMINAL",0,0,"27756",,terminal_output +13280,12213348,"TERMINAL",0,0,"2\t",,terminal_output +13281,12213490,"TERMINAL",0,0,"38867",,terminal_output +13282,12214375,"TERMINAL",0,0,"4\t",,terminal_output +13283,12214527,"TERMINAL",0,0,"49978",,terminal_output 
+13284,12215493,"TERMINAL",0,0,"5\t",,terminal_output +13285,12215600,"TERMINAL",0,0,"5505089",,terminal_output +13286,12216519,"TERMINAL",0,0,"6\t",,terminal_output +13287,12216638,"TERMINAL",0,0,"611930",,terminal_output +13288,12217540,"TERMINAL",0,0,"7\t",,terminal_output +13289,12217657,"TERMINAL",0,0,"722301",,terminal_output +13290,12218578,"TERMINAL",0,0,"8\t",,terminal_output +13291,12218691,"TERMINAL",0,0,"83312",,terminal_output +13292,12219506,"TERMINAL",0,0,"9\t",,terminal_output +13293,12219719,"TERMINAL",0,0,"94423",,terminal_output +13294,12220575,"TERMINAL",0,0,"50\t",,terminal_output +13295,12220770,"TERMINAL",0,0,"505534",,terminal_output +13296,12221120,"TERMINAL",0,0,"bash",,terminal_focus +13297,12221578,"TERMINAL",0,0,"1\t",,terminal_output +13298,12221790,"TERMINAL",0,0,"16645",,terminal_output +13299,12222656,"TERMINAL",0,0,"2\t",,terminal_output +13300,12222822,"TERMINAL",0,0,"27756",,terminal_output +13301,12223681,"TERMINAL",0,0,"3\t",,terminal_output +13302,12223870,"TERMINAL",0,0,"38867",,terminal_output +13303,12224844,"TERMINAL",0,0,"4\t",,terminal_output +13304,12224904,"TERMINAL",0,0,"49978",,terminal_output +13305,12225728,"TERMINAL",0,0,"53",,terminal_output +13306,12225938,"TERMINAL",0,0,"54:004:0089",,terminal_output +13307,12226857,"TERMINAL",0,0,"6\t",,terminal_output +13308,12226999,"TERMINAL",0,0,"611940",,terminal_output +13309,12227839,"TERMINAL",0,0,"7\t",,terminal_output +13310,12228014,"TERMINAL",0,0,"722401",,terminal_output +13311,12228907,"TERMINAL",0,0,"8\t",,terminal_output +13312,12229053,"TERMINAL",0,0,"83312",,terminal_output +13313,12229943,"TERMINAL",0,0,"9\t",,terminal_output +13314,12230089,"TERMINAL",0,0,"94423",,terminal_output +13315,12230897,"TERMINAL",0,0,"50:00\t",,terminal_output +13316,12231157,"TERMINAL",0,0,"50:005534",,terminal_output +13317,12233835,"TERMINAL",0,0,"18867",,terminal_output +13318,12233835,"TERMINAL",0,0,"1\t",,terminal_output +13319,12234800,"TERMINAL",0,0,"4\t",,terminal_output +13320,12234800,"TERMINAL",0,0,"49978",,terminal_output +13321,12235866,"TERMINAL",0,0,"5\t",,terminal_output +13322,12235872,"TERMINAL",0,0,"5101089",,terminal_output +13323,12236890,"TERMINAL",0,0,"6\t",,terminal_output +13324,12236891,"TERMINAL",0,0,"611950",,terminal_output +13325,12237889,"TERMINAL",0,0,"7\t",,terminal_output +13326,12237900,"TERMINAL",0,0,"722501",,terminal_output +13327,12238924,"TERMINAL",0,0,"8\t",,terminal_output +13328,12238947,"TERMINAL",0,0,"83312",,terminal_output +13329,12239963,"TERMINAL",0,0,"9\t",,terminal_output +13330,12239974,"TERMINAL",0,0,"94423",,terminal_output +13331,12241023,"TERMINAL",0,0,"10\t",,terminal_output +13332,12241028,"TERMINAL",0,0,"105534",,terminal_output +13333,12242057,"TERMINAL",0,0,"1\t",,terminal_output +13334,12242063,"TERMINAL",0,0,"16645",,terminal_output +13335,12243147,"TERMINAL",0,0,"2\t",,terminal_output +13336,12243147,"TERMINAL",0,0,"27756",,terminal_output +13337,12244172,"TERMINAL",0,0,"3\t",,terminal_output +13338,12244173,"TERMINAL",0,0,"38867",,terminal_output +13339,12245188,"TERMINAL",0,0,"4\t",,terminal_output +13340,12245188,"TERMINAL",0,0,"49978",,terminal_output +13341,12246188,"TERMINAL",0,0,"5\t",,terminal_output +13342,12246200,"TERMINAL",0,0,"5202089",,terminal_output +13343,12247338,"TERMINAL",0,0,"6\t",,terminal_output +13344,12247339,"TERMINAL",0,0,"61198:00",,terminal_output +13345,12248299,"TERMINAL",0,0,"7\t",,terminal_output +13346,12248305,"TERMINAL",0,0,"7223:001",,terminal_output +13347,12249330,"TERMINAL",0,0,"8\t",,terminal_output 
+13348,12249373,"TERMINAL",0,0,"84423",,terminal_output +13349,12250411,"TERMINAL",0,0,"20\t",,terminal_output +13350,12250412,"TERMINAL",0,0,"205534",,terminal_output +13351,12251432,"TERMINAL",0,0,"1\t",,terminal_output +13352,12251432,"TERMINAL",0,0,"16645",,terminal_output +13353,12252402,"TERMINAL",0,0,"2\t",,terminal_output +13354,12252442,"TERMINAL",0,0,"27756",,terminal_output +13355,12253472,"TERMINAL",0,0,"3\t",,terminal_output +13356,12253484,"TERMINAL",0,0,"38867",,terminal_output +13357,12254486,"TERMINAL",0,0,"4\t",,terminal_output +13358,12254526,"TERMINAL",0,0,"49978",,terminal_output +13359,12255554,"TERMINAL",0,0,"5\t",,terminal_output +13360,12255597,"TERMINAL",0,0,"5303089",,terminal_output +13361,12256661,"TERMINAL",0,0,"6\t",,terminal_output +13362,12256663,"TERMINAL",0,0,"611910",,terminal_output +13363,12257650,"TERMINAL",0,0,"7\t",,terminal_output +13364,12257657,"TERMINAL",0,0,"722101",,terminal_output +13365,12258606,"TERMINAL",0,0,"8\t",,terminal_output +13366,12258703,"TERMINAL",0,0,"83312",,terminal_output +13367,12259673,"TERMINAL",0,0,"9\t",,terminal_output +13368,12259720,"TERMINAL",0,0,"94423",,terminal_output +13369,12260679,"TERMINAL",0,0,"30\t",,terminal_output +13370,12260800,"TERMINAL",0,0,"305534",,terminal_output +13371,12261714,"TERMINAL",0,0,"1\t",,terminal_output +13372,12261779,"TERMINAL",0,0,"16645",,terminal_output +13373,12262751,"TERMINAL",0,0,"2\t",,terminal_output +13374,12262815,"TERMINAL",0,0,"27756",,terminal_output +13375,12263864,"TERMINAL",0,0,"3\t",,terminal_output +13376,12263872,"TERMINAL",0,0,"38867",,terminal_output +13377,12264948,"TERMINAL",0,0,"4\t",,terminal_output +13378,12264948,"TERMINAL",0,0,"49978",,terminal_output +13379,12265870,"TERMINAL",0,0,"5\t",,terminal_output +13380,12265919,"TERMINAL",0,0,"5404089",,terminal_output +13381,12266953,"TERMINAL",0,0,"6\t",,terminal_output +13382,12266963,"TERMINAL",0,0,"611920",,terminal_output +13383,12267966,"TERMINAL",0,0,"7\t",,terminal_output +13384,12268004,"TERMINAL",0,0,"722201",,terminal_output +13385,12269012,"TERMINAL",0,0,"8\t",,terminal_output +13386,12269055,"TERMINAL",0,0,"83312",,terminal_output +13387,12270030,"TERMINAL",0,0,"9\t",,terminal_output +13388,12270075,"TERMINAL",0,0,"94423",,terminal_output +13389,12271044,"TERMINAL",0,0,"40\t",,terminal_output +13390,12271145,"TERMINAL",0,0,"405534",,terminal_output +13391,12272123,"TERMINAL",0,0,"1\t",,terminal_output +13392,12272147,"TERMINAL",0,0,"16645",,terminal_output +13393,12273122,"TERMINAL",0,0,"2\t",,terminal_output +13394,12273228,"TERMINAL",0,0,"27756",,terminal_output +13395,12274157,"TERMINAL",0,0,"3\t",,terminal_output +13396,12274219,"TERMINAL",0,0,"38867",,terminal_output +13397,12275194,"TERMINAL",0,0,"4\t",,terminal_output +13398,12275291,"TERMINAL",0,0,"49978",,terminal_output +13399,12276320,"TERMINAL",0,0,"5\t",,terminal_output +13400,12276320,"TERMINAL",0,0,"55151930",,terminal_output +13401,12277340,"TERMINAL",0,0,"6\t",,terminal_output +13402,12277354,"TERMINAL",0,0,"722301",,terminal_output +13403,12278370,"TERMINAL",0,0,"7\t",,terminal_output +13404,12278412,"TERMINAL",0,0,"83312",,terminal_output +13405,12279375,"TERMINAL",0,0,"9\t",,terminal_output +13406,12279440,"TERMINAL",0,0,"94423",,terminal_output +13407,12280377,"TERMINAL",0,0,"50\t",,terminal_output +13408,12280452,"TERMINAL",0,0,"505534",,terminal_output +13409,12281405,"TERMINAL",0,0,"1\t",,terminal_output +13410,12281477,"TERMINAL",0,0,"16645",,terminal_output +13411,12282567,"TERMINAL",0,0,"2\t",,terminal_output 
+13412,12282567,"TERMINAL",0,0,"27756",,terminal_output +13413,12283488,"TERMINAL",0,0,"3\t",,terminal_output +13414,12283623,"TERMINAL",0,0,"38867",,terminal_output +13415,12284532,"TERMINAL",0,0,"4\t",,terminal_output +13416,12284644,"TERMINAL",0,0,"49978",,terminal_output +13417,12285634,"TERMINAL",0,0,"5\t",,terminal_output +13418,12285639,"TERMINAL",0,0,"55:005:0089",,terminal_output +13419,12286608,"TERMINAL",0,0,"6\t",,terminal_output +13420,12286694,"TERMINAL",0,0,"611940",,terminal_output +13421,12287636,"TERMINAL",0,0,"7\t",,terminal_output +13422,12287737,"TERMINAL",0,0,"722401",,terminal_output +13423,12288717,"TERMINAL",0,0,"8\t",,terminal_output +13424,12288756,"TERMINAL",0,0,"83312",,terminal_output +13425,12289885,"TERMINAL",0,0,"9\t",,terminal_output +13426,12289885,"TERMINAL",0,0,"94423",,terminal_output +13427,12290861,"TERMINAL",0,0,"1:00\t",,terminal_output +13428,12290867,"TERMINAL",0,0,"1:005534",,terminal_output +13429,12291790,"TERMINAL",0,0,"1\t",,terminal_output +13430,12291894,"TERMINAL",0,0,"16645",,terminal_output +13431,12292850,"TERMINAL",0,0,"2\t",,terminal_output +13432,12292904,"TERMINAL",0,0,"27756",,terminal_output +13433,12293935,"TERMINAL",0,0,"3\t",,terminal_output +13434,12293941,"TERMINAL",0,0,"38867",,terminal_output +13435,12294970,"TERMINAL",0,0,"4\t",,terminal_output +13436,12294984,"TERMINAL",0,0,"49978",,terminal_output +13437,12295930,"TERMINAL",0,0,"56",,terminal_output +13438,12296020,"TERMINAL",0,0,"5101089",,terminal_output +13439,12296961,"TERMINAL",0,0,"6\t",,terminal_output +13440,12297146,"TERMINAL",0,0,"611950",,terminal_output +13441,12298000,"TERMINAL",0,0,"7\t",,terminal_output +13442,12298120,"TERMINAL",0,0,"722501",,terminal_output +13443,12299039,"TERMINAL",0,0,"8\t",,terminal_output +13444,12299132,"TERMINAL",0,0,"83312",,terminal_output +13445,12300134,"TERMINAL",0,0,"9\t",,terminal_output +13446,12300174,"TERMINAL",0,0,"94423",,terminal_output +13447,12301214,"TERMINAL",0,0,"10\t",,terminal_output +13448,12301215,"TERMINAL",0,0,"105534",,terminal_output +13449,12302237,"TERMINAL",0,0,"1\t",,terminal_output +13450,12302239,"TERMINAL",0,0,"16645",,terminal_output +13451,12303172,"TERMINAL",0,0,"2\t",,terminal_output +13452,12303323,"TERMINAL",0,0,"28867",,terminal_output +13453,12304286,"TERMINAL",0,0,"3\t",,terminal_output +13454,12304319,"TERMINAL",0,0,"49978",,terminal_output +13455,12305247,"TERMINAL",0,0,"4\t",,terminal_output +13456,12305387,"TERMINAL",0,0,"5202089",,terminal_output +13457,12306324,"TERMINAL",0,0,"5\t",,terminal_output +13458,12306394,"TERMINAL",0,0,"61199:00",,terminal_output +13459,12307447,"TERMINAL",0,0,"7\t",,terminal_output +13460,12307447,"TERMINAL",0,0,"7224:001",,terminal_output +13461,12308373,"TERMINAL",0,0,"8\t",,terminal_output +13462,12308535,"TERMINAL",0,0,"83312",,terminal_output +13463,12309397,"TERMINAL",0,0,"9\t",,terminal_output +13464,12309537,"TERMINAL",0,0,"94423",,terminal_output +13465,12310522,"TERMINAL",0,0,"20\t",,terminal_output +13466,12310556,"TERMINAL",0,0,"205534",,terminal_output +13467,12311540,"TERMINAL",0,0,"1\t",,terminal_output +13468,12311637,"TERMINAL",0,0,"16645",,terminal_output +13469,12312538,"TERMINAL",0,0,"2\t",,terminal_output +13470,12312653,"TERMINAL",0,0,"27756",,terminal_output +13471,12313596,"TERMINAL",0,0,"31",,terminal_output +13472,12313677,"TERMINAL",0,0,"38867",,terminal_output +13473,12314598,"TERMINAL",0,0,"4\t",,terminal_output +13474,12314738,"TERMINAL",0,0,"49978",,terminal_output +13475,12315632,"TERMINAL",0,0,"5\t",,terminal_output 
+13476,12315771,"TERMINAL",0,0,"5303089",,terminal_output +13477,12316708,"TERMINAL",0,0,"6\t",,terminal_output +13478,12317355,"TERMINAL",0,0,"6221011",,terminal_output +13479,12317807,"TERMINAL",0,0,"77",,terminal_output +13480,12318416,"TERMINAL",0,0,"83312",,terminal_output +13481,12318739,"TERMINAL",0,0,"8\t",,terminal_output +13482,12319360,"TERMINAL",0,0,"94423",,terminal_output +13483,12319782,"TERMINAL",0,0,"9\t",,terminal_output +13484,12320451,"TERMINAL",0,0,"305534",,terminal_output +13485,12320889,"TERMINAL",0,0,"30\t",,terminal_output +13486,12321439,"TERMINAL",0,0,"16645",,terminal_output +13487,12321883,"TERMINAL",0,0,"1\t",,terminal_output +13488,12322497,"TERMINAL",0,0,"27756",,terminal_output +13489,12322907,"TERMINAL",0,0,"2\t",,terminal_output +13490,12323515,"TERMINAL",0,0,"38867",,terminal_output +13491,12323958,"TERMINAL",0,0,"3\t",,terminal_output +13492,12324649,"TERMINAL",0,0,"49978",,terminal_output +13493,12324989,"TERMINAL",0,0,"4\t",,terminal_output +13494,12325672,"TERMINAL",0,0,"5404089",,terminal_output +13495,12326130,"TERMINAL",0,0,"5\t",,terminal_output +13496,12326635,"TERMINAL",0,0,"611920",,terminal_output +13497,12327122,"TERMINAL",0,0,"6\t",,terminal_output +13498,12327727,"TERMINAL",0,0,"722201",,terminal_output +13499,12328136,"TERMINAL",0,0,"7\t",,terminal_output +13500,12328744,"TERMINAL",0,0,"83312",,terminal_output +13501,12329146,"TERMINAL",0,0,"8\t",,terminal_output +13502,12329773,"TERMINAL",0,0,"94423",,terminal_output +13503,12330186,"TERMINAL",0,0,"9\t",,terminal_output +13504,12330894,"TERMINAL",0,0,"405534",,terminal_output +13505,12331304,"TERMINAL",0,0,"40\t",,terminal_output +13506,12331923,"TERMINAL",0,0,"16645",,terminal_output +13507,12332329,"TERMINAL",0,0,"1\t",,terminal_output +13508,12332889,"TERMINAL",0,0,"27756",,terminal_output +13509,12333358,"TERMINAL",0,0,"2\t",,terminal_output +13510,12333966,"TERMINAL",0,0,"38867",,terminal_output +13511,12334315,"TERMINAL",0,0,"4\t",,terminal_output +13512,12334990,"TERMINAL",0,0,"49978",,terminal_output +13513,12335400,"TERMINAL",0,0,"5\t",,terminal_output +13514,12335993,"TERMINAL",0,0,"5505089",,terminal_output +13515,12336396,"TERMINAL",0,0,"6\t",,terminal_output +13516,12337039,"TERMINAL",0,0,"611930",,terminal_output +13517,12337448,"TERMINAL",0,0,"7\t",,terminal_output +13518,12338180,"TERMINAL",0,0,"722301",,terminal_output +13519,12338577,"TERMINAL",0,0,"8\t",,terminal_output +13520,12339291,"TERMINAL",0,0,"83312",,terminal_output +13521,12339502,"TERMINAL",0,0,"9\t",,terminal_output +13522,12340215,"TERMINAL",0,0,"94423",,terminal_output +13523,12340689,"TERMINAL",0,0,"50\t",,terminal_output +13524,12341341,"TERMINAL",0,0,"505534",,terminal_output +13525,12341596,"TERMINAL",0,0,"1\t",,terminal_output +13526,12342363,"TERMINAL",0,0,"16645",,terminal_output +13527,12342738,"TERMINAL",0,0,"2\t",,terminal_output +13528,12343397,"TERMINAL",0,0,"28867",,terminal_output +13529,12343666,"TERMINAL",0,0,"3\t",,terminal_output +13530,12344346,"TERMINAL",0,0,"49978",,terminal_output +13531,12344746,"TERMINAL",0,0,"40",,terminal_output +13532,12345380,"TERMINAL",0,0,"56:006:0089",,terminal_output +13533,12345845,"TERMINAL",0,0,"5\t",,terminal_output +13534,12346461,"TERMINAL",0,0,"611940",,terminal_output +13535,12346869,"TERMINAL",0,0,"6\t",,terminal_output +13536,12347483,"TERMINAL",0,0,"722401",,terminal_output +13537,12347817,"TERMINAL",0,0,"7\t",,terminal_output +13538,12348609,"TERMINAL",0,0,"83312",,terminal_output +13539,12348853,"TERMINAL",0,0,"8\t",,terminal_output 
+13540,12349533,"TERMINAL",0,0,"94423",,terminal_output +13541,12349941,"TERMINAL",0,0,"9\t",,terminal_output +13542,12350652,"TERMINAL",0,0,"2:005534",,terminal_output +13543,12350978,"TERMINAL",0,0,"2:00\t",,terminal_output +13544,12351791,"TERMINAL",0,0,"16645",,terminal_output +13545,12351991,"TERMINAL",0,0,"1\t",,terminal_output +13546,12352712,"TERMINAL",0,0,"27756",,terminal_output +13547,12353004,"TERMINAL",0,0,"221",,terminal_output +13548,12353743,"TERMINAL",0,0,"38867",,terminal_output +13549,12354148,"TERMINAL",0,0,"3\t",,terminal_output +13550,12356592,"TERMINAL",0,0,"41111950",,terminal_output +13551,12356617,"TERMINAL",0,0,"40",,terminal_output +13552,12357732,"TERMINAL",0,0,"722501",,terminal_output +13553,12357744,"TERMINAL",0,0,"719",,terminal_output +13554,12358853,"TERMINAL",0,0,"83312",,terminal_output +13555,12358853,"TERMINAL",0,0,"8\t",,terminal_output +13556,12359834,"TERMINAL",0,0,"94423",,terminal_output +13557,12359856,"TERMINAL",0,0,"9\t",,terminal_output +13558,12360899,"TERMINAL",0,0,"105534",,terminal_output +13559,12360900,"TERMINAL",0,0,"10\t",,terminal_output +13560,12361923,"TERMINAL",0,0,"16645",,terminal_output +13561,12361923,"TERMINAL",0,0,"1\t",,terminal_output +13562,12363003,"TERMINAL",0,0,"2\t",,terminal_output +13563,12363004,"TERMINAL",0,0,"27756",,terminal_output +13564,12364089,"TERMINAL",0,0,"3\t",,terminal_output +13565,12364089,"TERMINAL",0,0,"38867",,terminal_output +13566,12365096,"TERMINAL",0,0,"4\t",,terminal_output +13567,12365133,"TERMINAL",0,0,"49978",,terminal_output +13568,12366124,"TERMINAL",0,0,"5\t",,terminal_output +13569,12366128,"TERMINAL",0,0,"5202089",,terminal_output +13570,12367114,"TERMINAL",0,0,"6\t",,terminal_output +13571,12367122,"TERMINAL",0,0,"611920:00",,terminal_output +13572,12368130,"TERMINAL",0,0,"724",,terminal_output +13573,12368158,"TERMINAL",0,0,"73469458 accelerat train_dy tum_cte0 CG20:01\t 8 hkn[0703,0706-0707,0711-0715]225:00",,terminal_output +13574,12369165,"TERMINAL",0,0,"8\t",,terminal_output +13575,12369205,"TERMINAL",0,0,"8331",,terminal_output +13576,12370228,"TERMINAL",0,0,"9\t",,terminal_output +13577,12370252,"TERMINAL",0,0,"9442",,terminal_output +13578,12371251,"TERMINAL",0,0,"20\t",,terminal_output +13579,12371279,"TERMINAL",0,0,"20664",,terminal_output +13580,12372368,"TERMINAL",0,0,"1\t",,terminal_output +13581,12372368,"TERMINAL",0,0,"2775",,terminal_output +13582,12373419,"TERMINAL",0,0,"3\t",,terminal_output +13583,12373423,"TERMINAL",0,0,"3886",,terminal_output +13584,12374415,"TERMINAL",0,0,"4\t",,terminal_output +13585,12374490,"TERMINAL",0,0,"4997",,terminal_output +13586,12375440,"TERMINAL",0,0,"5\t",,terminal_output +13587,12375440,"TERMINAL",0,0,"530308",,terminal_output +13588,12376464,"TERMINAL",0,0,"6\t",,terminal_output +13589,12376471,"TERMINAL",0,0,"6119",,terminal_output +13590,12377469,"TERMINAL",0,0,"7\t",,terminal_output +13591,12377512,"TERMINAL",0,0,"72210",,terminal_output +13592,12378609,"TERMINAL",0,0,"8\t",,terminal_output +13593,12378610,"TERMINAL",0,0,"\r81 hkn0703331",,terminal_output +13594,12379579,"TERMINAL",0,0,"9\t",,terminal_output +13595,12379619,"TERMINAL",0,0,"\r9442",,terminal_output +13596,12380673,"TERMINAL",0,0,"30\t",,terminal_output +13597,12380674,"TERMINAL",0,0,"30553",,terminal_output +13598,12381686,"TERMINAL",0,0,"1\t",,terminal_output +13599,12381686,"TERMINAL",0,0,"1664",,terminal_output +13600,12382701,"TERMINAL",0,0,"2\t",,terminal_output +13601,12382737,"TERMINAL",0,0,"2775",,terminal_output 
+13602,12383835,"TERMINAL",0,0,"3\t",,terminal_output +13603,12383836,"TERMINAL",0,0,"3886",,terminal_output +13604,12384749,"TERMINAL",0,0,"4\t",,terminal_output +13605,12384866,"TERMINAL",0,0,"4997",,terminal_output +13606,12385947,"TERMINAL",0,0,"5\t",,terminal_output +13607,12385948,"TERMINAL",0,0,"540408",,terminal_output +13608,12386805,"TERMINAL",0,0,"6\t",,terminal_output +13609,12386849,"TERMINAL",0,0,"6119",,terminal_output +13610,12387836,"TERMINAL",0,0,"7\t",,terminal_output +13611,12387922,"TERMINAL",0,0,"72220",,terminal_output +13612,12388967,"TERMINAL",0,0,"8\t",,terminal_output +13613,12388968,"TERMINAL",0,0,"8331",,terminal_output +13614,12389899,"TERMINAL",0,0,"9\t",,terminal_output +13615,12390005,"TERMINAL",0,0,"9442",,terminal_output +13616,12390935,"TERMINAL",0,0,"40\t",,terminal_output +13617,12390993,"TERMINAL",0,0,"40553",,terminal_output +13618,12391987,"TERMINAL",0,0,"1\t",,terminal_output +13619,12392029,"TERMINAL",0,0,"1664",,terminal_output +13620,12393006,"TERMINAL",0,0,"2\t",,terminal_output +13621,12393101,"TERMINAL",0,0,"2775",,terminal_output +13622,12394040,"TERMINAL",0,0,"3\t",,terminal_output +13623,12394134,"TERMINAL",0,0,"3886",,terminal_output +13624,12395083,"TERMINAL",0,0,"4\t",,terminal_output +13625,12395187,"TERMINAL",0,0,"4997",,terminal_output +13626,12396171,"TERMINAL",0,0,"5\t",,terminal_output +13627,12396272,"TERMINAL",0,0,"550508",,terminal_output +13628,12397190,"TERMINAL",0,0,"6\t",,terminal_output +13629,12397253,"TERMINAL",0,0,"6119",,terminal_output +13630,12398281,"TERMINAL",0,0,"7\t",,terminal_output +13631,12398289,"TERMINAL",0,0,"73331",,terminal_output +13632,12399299,"TERMINAL",0,0,"8\t",,terminal_output +13633,12399340,"TERMINAL",0,0,"9442",,terminal_output +13634,12400264,"TERMINAL",0,0,"9\t",,terminal_output +13635,12400364,"TERMINAL",0,0,"50553",,terminal_output +13636,12401399,"TERMINAL",0,0,"50\t",,terminal_output +13637,12401512,"TERMINAL",0,0,"1664",,terminal_output +13638,12402412,"TERMINAL",0,0,"2\t",,terminal_output +13639,12402452,"TERMINAL",0,0,"2775",,terminal_output +13640,12403511,"TERMINAL",0,0,"3\t",,terminal_output +13641,12403511,"TERMINAL",0,0,"3886",,terminal_output +13642,12404420,"TERMINAL",0,0,"42",,terminal_output +13643,12404569,"TERMINAL",0,0,"4997",,terminal_output +13644,12405545,"TERMINAL",0,0,"5\t",,terminal_output +13645,12405640,"TERMINAL",0,0,"57:007:008",,terminal_output +13646,12406581,"TERMINAL",0,0,"6\t",,terminal_output +13647,12406611,"TERMINAL",0,0,"6119",,terminal_output +13648,12407620,"TERMINAL",0,0,"7\t",,terminal_output +13649,12407640,"TERMINAL",0,0,"72240",,terminal_output +13650,12408574,"TERMINAL",0,0,"8\t",,terminal_output +13651,12408700,"TERMINAL",0,0,"8331",,terminal_output +13652,12409663,"TERMINAL",0,0,"9\t",,terminal_output +13653,12409702,"TERMINAL",0,0,"9442",,terminal_output +13654,12410645,"TERMINAL",0,0,"3:00\t",,terminal_output +13655,12410786,"TERMINAL",0,0,"3:00553",,terminal_output +13656,12411688,"TERMINAL",0,0,"1\t",,terminal_output +13657,12411795,"TERMINAL",0,0,"1664",,terminal_output +13658,12412757,"TERMINAL",0,0,"2\t",,terminal_output +13659,12412840,"TERMINAL",0,0,"2775",,terminal_output +13660,12413872,"TERMINAL",0,0,"3\t",,terminal_output +13661,12413911,"TERMINAL",0,0,"3886",,terminal_output +13662,12414920,"TERMINAL",0,0,"4\t",,terminal_output +13663,12414982,"TERMINAL",0,0,"4997",,terminal_output +13664,12415947,"TERMINAL",0,0,"5\t",,terminal_output +13665,12416356,"TERMINAL",0,0,"510108",,terminal_output 
+13666,12416917,"TERMINAL",0,0,"6\t",,terminal_output +13667,12417014,"TERMINAL",0,0,"6119",,terminal_output +13668,12418094,"TERMINAL",0,0,"7\t",,terminal_output +13669,12418096,"TERMINAL",0,0,"72250",,terminal_output +13670,12419124,"TERMINAL",0,0,"8\t",,terminal_output +13671,12419239,"TERMINAL",0,0,"8331",,terminal_output +13672,12420022,"TERMINAL",0,0,"9\t",,terminal_output +13673,12420110,"TERMINAL",0,0,"9442",,terminal_output +13674,12421134,"TERMINAL",0,0,"10\t",,terminal_output +13675,12421141,"TERMINAL",0,0,"10553",,terminal_output +13676,12422107,"TERMINAL",0,0,"1\t",,terminal_output +13677,12422189,"TERMINAL",0,0,"1664",,terminal_output +13678,12423157,"TERMINAL",0,0,"2\t",,terminal_output +13679,12423260,"TERMINAL",0,0,"2775",,terminal_output +13680,12424284,"TERMINAL",0,0,"3\t",,terminal_output +13681,12424285,"TERMINAL",0,0,"3886",,terminal_output +13682,12425212,"TERMINAL",0,0,"4\t",,terminal_output +13683,12425312,"TERMINAL",0,0,"420208",,terminal_output +13684,12426337,"TERMINAL",0,0,"5\t",,terminal_output +13685,12426355,"TERMINAL",0,0,"6119",,terminal_output +13686,12427368,"TERMINAL",0,0,"6\t",,terminal_output +13687,12427394,"TERMINAL",0,0,"7226:00",,terminal_output +13688,12428386,"TERMINAL",0,0,"8\t",,terminal_output +13689,12428507,"TERMINAL",0,0,"8331",,terminal_output +13690,12429379,"TERMINAL",0,0,"9\t",,terminal_output +13691,12429533,"TERMINAL",0,0,"9442",,terminal_output +13692,12430521,"TERMINAL",0,0,"20\t",,terminal_output +13693,12430530,"TERMINAL",0,0,"20553",,terminal_output +13694,12431466,"TERMINAL",0,0,"1\t",,terminal_output +13695,12431573,"TERMINAL",0,0,"1664",,terminal_output +13696,12432499,"TERMINAL",0,0,"2\t",,terminal_output +13697,12432621,"TERMINAL",0,0,"2775",,terminal_output +13698,12433612,"TERMINAL",0,0,"3\t",,terminal_output +13699,12433664,"TERMINAL",0,0,"3886",,terminal_output +13700,12434639,"TERMINAL",0,0,"4\t",,terminal_output +13701,12434731,"TERMINAL",0,0,"4997",,terminal_output +13702,12435655,"TERMINAL",0,0,"5\t",,terminal_output +13703,12435746,"TERMINAL",0,0,"530308",,terminal_output +13704,12436678,"TERMINAL",0,0,"6\t",,terminal_output +13705,12436819,"TERMINAL",0,0,"6119",,terminal_output +13706,12437681,"TERMINAL",0,0,"7\t",,terminal_output +13707,12437823,"TERMINAL",0,0,"72210",,terminal_output +13708,12438832,"TERMINAL",0,0,"8\t",,terminal_output +13709,12438873,"TERMINAL",0,0,"8331",,terminal_output +13710,12439776,"TERMINAL",0,0,"9\t",,terminal_output +13711,12439940,"TERMINAL",0,0,"9442",,terminal_output +13712,12440812,"TERMINAL",0,0,"30\t",,terminal_output +13713,12440956,"TERMINAL",0,0,"30553",,terminal_output +13714,12441826,"TERMINAL",0,0,"1\t",,terminal_output +13715,12441993,"TERMINAL",0,0,"1664",,terminal_output +13716,12442866,"TERMINAL",0,0,"2\t",,terminal_output +13717,12443041,"TERMINAL",0,0,"2775",,terminal_output +13718,12443913,"TERMINAL",0,0,"3\t",,terminal_output +13719,12444086,"TERMINAL",0,0,"3886",,terminal_output +13720,12444972,"TERMINAL",0,0,"4\t",,terminal_output +13721,12445119,"TERMINAL",0,0,"4997",,terminal_output +13722,12445998,"TERMINAL",0,0,"5\t",,terminal_output +13723,12446164,"TERMINAL",0,0,"540408",,terminal_output +13724,12447022,"TERMINAL",0,0,"6\t",,terminal_output +13725,12447221,"TERMINAL",0,0,"6119",,terminal_output +13726,12448058,"TERMINAL",0,0,"7\t",,terminal_output +13727,12448235,"TERMINAL",0,0,"72220",,terminal_output +13728,12449096,"TERMINAL",0,0,"8\t",,terminal_output +13729,12449269,"TERMINAL",0,0,"8331",,terminal_output 
+13730,12450133,"TERMINAL",0,0,"9\t",,terminal_output +13731,12450304,"TERMINAL",0,0,"40553",,terminal_output +13732,12451173,"TERMINAL",0,0,"40\t",,terminal_output +13733,12451356,"TERMINAL",0,0,"1664",,terminal_output +13734,12452246,"TERMINAL",0,0,"1\t",,terminal_output +13735,12452390,"TERMINAL",0,0,"2775",,terminal_output +13736,12453260,"TERMINAL",0,0,"2\t",,terminal_output +13737,12453422,"TERMINAL",0,0,"3886",,terminal_output +13738,12454374,"TERMINAL",0,0,"3\t",,terminal_output +13739,12454453,"TERMINAL",0,0,"4997",,terminal_output +13740,12455431,"TERMINAL",0,0,"5\t",,terminal_output +13741,12455532,"TERMINAL",0,0,"550508",,terminal_output +13742,12456368,"TERMINAL",0,0,"6\t",,terminal_output +13743,12456535,"TERMINAL",0,0,"6119",,terminal_output +13744,12457473,"TERMINAL",0,0,"7\t",,terminal_output +13745,12457565,"TERMINAL",0,0,"72230",,terminal_output +13746,12458497,"TERMINAL",0,0,"8\t",,terminal_output +13747,12458643,"TERMINAL",0,0,"8331",,terminal_output +13748,12459488,"TERMINAL",0,0,"9\t",,terminal_output +13749,12459650,"TERMINAL",0,0,"9442",,terminal_output +13750,12460689,"TERMINAL",0,0,"50\t",,terminal_output +13751,12460690,"TERMINAL",0,0,"50553",,terminal_output +13752,12461664,"TERMINAL",0,0,"1\t",,terminal_output +13753,12461735,"TERMINAL",0,0,"1664",,terminal_output +13754,12462699,"TERMINAL",0,0,"2\t",,terminal_output +13755,12462801,"TERMINAL",0,0,"2775",,terminal_output +13756,12463631,"TERMINAL",0,0,"3\t",,terminal_output +13757,12463810,"TERMINAL",0,0,"3886",,terminal_output +13758,12464686,"TERMINAL",0,0,"4\t",,terminal_output +13759,12464834,"TERMINAL",0,0,"4997",,terminal_output +13760,12465796,"TERMINAL",0,0,"5\t",,terminal_output +13761,12465869,"TERMINAL",0,0,"58:008:008",,terminal_output +13762,12466734,"TERMINAL",0,0,"6\t",,terminal_output +13763,12466905,"TERMINAL",0,0,"6119",,terminal_output +13764,12467770,"TERMINAL",0,0,"7\t",,terminal_output +13765,12467941,"TERMINAL",0,0,"72240",,terminal_output +13766,12468931,"TERMINAL",0,0,"8\t",,terminal_output +13767,12468985,"TERMINAL",0,0,"8331",,terminal_output +13768,12469845,"TERMINAL",0,0,"9\t",,terminal_output +13769,12470019,"TERMINAL",0,0,"9442",,terminal_output +13770,12470877,"TERMINAL",0,0,"4:00\t",,terminal_output +13771,12471055,"TERMINAL",0,0,"4:00553",,terminal_output +13772,12472020,"TERMINAL",0,0,"1\t",,terminal_output +13773,12472098,"TERMINAL",0,0,"1664",,terminal_output +13774,12472947,"TERMINAL",0,0,"2\t",,terminal_output +13775,12473130,"TERMINAL",0,0,"2775",,terminal_output +13776,12474079,"TERMINAL",0,0,"3\t",,terminal_output +13777,12474167,"TERMINAL",0,0,"3886",,terminal_output +13778,12475036,"TERMINAL",0,0,"4\t",,terminal_output +13779,12475203,"TERMINAL",0,0,"4997",,terminal_output +13780,12476124,"TERMINAL",0,0,"5\t",,terminal_output +13781,12476263,"TERMINAL",0,0,"510108",,terminal_output +13782,12477146,"TERMINAL",0,0,"6\t",,terminal_output +13783,12477283,"TERMINAL",0,0,"62250",,terminal_output +13784,12479740,"TERMINAL",0,0,"8442",,terminal_output +13785,12479746,"TERMINAL",0,0,"7\t",,terminal_output +13786,12480911,"TERMINAL",0,0,"10553",,terminal_output +13787,12480912,"TERMINAL",0,0,"10\t",,terminal_output +13788,12481939,"TERMINAL",0,0,"1664",,terminal_output +13789,12481939,"TERMINAL",0,0,"1\t",,terminal_output +13790,12482919,"TERMINAL",0,0,"2775",,terminal_output +13791,12482919,"TERMINAL",0,0,"2\t",,terminal_output +13792,12483949,"TERMINAL",0,0,"3\t",,terminal_output +13793,12483950,"TERMINAL",0,0,"3886",,terminal_output 
+13794,12484983,"TERMINAL",0,0,"4997",,terminal_output +13795,12484987,"TERMINAL",0,0,"4\t",,terminal_output +13796,12486039,"TERMINAL",0,0,"520208",,terminal_output +13797,12486040,"TERMINAL",0,0,"5\t",,terminal_output +13798,12487058,"TERMINAL",0,0,"6119",,terminal_output +13799,12487074,"TERMINAL",0,0,"6\t",,terminal_output +13800,12488190,"TERMINAL",0,0,"7227:00",,terminal_output +13801,12488190,"TERMINAL",0,0,"7\t",,terminal_output +13802,12489135,"TERMINAL",0,0,"8331",,terminal_output +13803,12489135,"TERMINAL",0,0,"8\t",,terminal_output +13804,12490239,"TERMINAL",0,0,"9442",,terminal_output +13805,12490240,"TERMINAL",0,0,"9\t",,terminal_output +13806,12491254,"TERMINAL",0,0,"20553",,terminal_output +13807,12491254,"TERMINAL",0,0,"20\t",,terminal_output +13808,12492241,"TERMINAL",0,0,"1\t",,terminal_output +13809,12492242,"TERMINAL",0,0,"1664",,terminal_output +13810,12493305,"TERMINAL",0,0,"2\t",,terminal_output +13811,12493305,"TERMINAL",0,0,"2886",,terminal_output +13812,12494376,"TERMINAL",0,0,"4997",,terminal_output +13813,12494378,"TERMINAL",0,0,"4\t",,terminal_output +13814,12495353,"TERMINAL",0,0,"5\t",,terminal_output +13815,12495353,"TERMINAL",0,0,"530308",,terminal_output +13816,12496476,"TERMINAL",0,0,"6\t",,terminal_output +13817,12496477,"TERMINAL",0,0,"6119",,terminal_output +13818,12497409,"TERMINAL",0,0,"7\t",,terminal_output +13819,12497423,"TERMINAL",0,0,"72210",,terminal_output +13820,12498526,"TERMINAL",0,0,"8\t",,terminal_output +13821,12498526,"TERMINAL",0,0,"8331",,terminal_output +13822,12499480,"TERMINAL",0,0,"9\t",,terminal_output +13823,12499497,"TERMINAL",0,0,"9442",,terminal_output +13824,12500522,"TERMINAL",0,0,"30\t",,terminal_output +13825,12500531,"TERMINAL",0,0,"30553",,terminal_output +13826,12501552,"TERMINAL",0,0,"1\t",,terminal_output +13827,12501572,"TERMINAL",0,0,"1664",,terminal_output +13828,12502589,"TERMINAL",0,0,"2\t",,terminal_output +13829,12502602,"TERMINAL",0,0,"2775",,terminal_output +13830,12503626,"TERMINAL",0,0,"3\t",,terminal_output +13831,12503639,"TERMINAL",0,0,"3886",,terminal_output +13832,12504765,"TERMINAL",0,0,"4\t",,terminal_output +13833,12504766,"TERMINAL",0,0,"4997",,terminal_output +13834,12505697,"TERMINAL",0,0,"5\t",,terminal_output +13835,12505712,"TERMINAL",0,0,"540408",,terminal_output +13836,12506751,"TERMINAL",0,0,"6\t",,terminal_output +13837,12506780,"TERMINAL",0,0,"6119",,terminal_output +13838,12507774,"TERMINAL",0,0,"7\t",,terminal_output +13839,12507811,"TERMINAL",0,0,"72220",,terminal_output +13840,12508864,"TERMINAL",0,0,"8\t",,terminal_output +13841,12508870,"TERMINAL",0,0,"8331",,terminal_output +13842,12509886,"TERMINAL",0,0,"9\t",,terminal_output +13843,12509929,"TERMINAL",0,0,"9442",,terminal_output +13844,12510951,"TERMINAL",0,0,"40\t",,terminal_output +13845,12510952,"TERMINAL",0,0,"40553",,terminal_output +13846,12511990,"TERMINAL",0,0,"1\t",,terminal_output +13847,12512036,"TERMINAL",0,0,"1664",,terminal_output +13848,12512954,"TERMINAL",0,0,"2\t",,terminal_output +13849,12512999,"TERMINAL",0,0,"2775",,terminal_output +13850,12513997,"TERMINAL",0,0,"3\t",,terminal_output +13851,12514046,"TERMINAL",0,0,"3886",,terminal_output +13852,12515022,"TERMINAL",0,0,"4\t",,terminal_output +13853,12515113,"TERMINAL",0,0,"4997",,terminal_output +13854,12516063,"TERMINAL",0,0,"5\t",,terminal_output +13855,12516118,"TERMINAL",0,0,"550508",,terminal_output +13856,12517103,"TERMINAL",0,0,"6\t",,terminal_output +13857,12517156,"TERMINAL",0,0,"6119",,terminal_output 
+13858,12518190,"TERMINAL",0,0,"7\t",,terminal_output +13859,12518193,"TERMINAL",0,0,"72230",,terminal_output +13860,12519171,"TERMINAL",0,0,"8\t",,terminal_output +13861,12519224,"TERMINAL",0,0,"8331",,terminal_output +13862,12520235,"TERMINAL",0,0,"9\t",,terminal_output +13863,12520264,"TERMINAL",0,0,"9442",,terminal_output +13864,12521316,"TERMINAL",0,0,"50\t",,terminal_output +13865,12521317,"TERMINAL",0,0,"50664",,terminal_output +13866,12522320,"TERMINAL",0,0,"1\t",,terminal_output +13867,12522340,"TERMINAL",0,0,"2775",,terminal_output +13868,12523404,"TERMINAL",0,0,"3\t",,terminal_output +13869,12523404,"TERMINAL",0,0,"3886",,terminal_output +13870,12524351,"TERMINAL",0,0,"4\t",,terminal_output +13871,12524407,"TERMINAL",0,0,"4997",,terminal_output +13872,12525459,"TERMINAL",0,0,"5\t",,terminal_output +13873,12525460,"TERMINAL",0,0,"59:009:008",,terminal_output +13874,12526476,"TERMINAL",0,0,"6\t",,terminal_output +13875,12526488,"TERMINAL",0,0,"6119",,terminal_output +13876,12527504,"TERMINAL",0,0,"7\t",,terminal_output +13877,12527520,"TERMINAL",0,0,"72240",,terminal_output +13878,12528528,"TERMINAL",0,0,"8\t",,terminal_output +13879,12528549,"TERMINAL",0,0,"8331",,terminal_output +13880,12529626,"TERMINAL",0,0,"9\t",,terminal_output +13881,12529627,"TERMINAL",0,0,"9442",,terminal_output +13882,12530688,"TERMINAL",0,0,"5:00\t",,terminal_output +13883,12530689,"TERMINAL",0,0,"5:00553",,terminal_output +13884,12531704,"TERMINAL",0,0,"1\t",,terminal_output +13885,12531707,"TERMINAL",0,0,"1664",,terminal_output +13886,12532726,"TERMINAL",0,0,"2\t",,terminal_output +13887,12532726,"TERMINAL",0,0,"2775",,terminal_output +13888,12533753,"TERMINAL",0,0,"3\t",,terminal_output +13889,12533754,"TERMINAL",0,0,"3886",,terminal_output +13890,12534711,"TERMINAL",0,0,"4\t",,terminal_output +13891,12534768,"TERMINAL",0,0,"4997",,terminal_output +13892,12535811,"TERMINAL",0,0,"5\t",,terminal_output +13893,12535843,"TERMINAL",0,0,"510108",,terminal_output +13894,12536776,"TERMINAL",0,0,"6\t",,terminal_output +13895,12536837,"TERMINAL",0,0,"6119",,terminal_output +13896,12537818,"TERMINAL",0,0,"7\t",,terminal_output +13897,12537873,"TERMINAL",0,0,"72250",,terminal_output +13898,12538860,"TERMINAL",0,0,"8\t",,terminal_output +13899,12538921,"TERMINAL",0,0,"8331",,terminal_output +13900,12539999,"TERMINAL",0,0,"9\t",,terminal_output +13901,12540000,"TERMINAL",0,0,"9442",,terminal_output +13902,12541021,"TERMINAL",0,0,"10\t",,terminal_output +13903,12541021,"TERMINAL",0,0,"10553",,terminal_output +13904,12542044,"TERMINAL",0,0,"1\t",,terminal_output +13905,12542046,"TERMINAL",0,0,"1664",,terminal_output +13906,12543009,"TERMINAL",0,0,"2\t",,terminal_output +13907,12543103,"TERMINAL",0,0,"2775",,terminal_output +13908,12544041,"TERMINAL",0,0,"3\t",,terminal_output +13909,12544274,"TERMINAL",0,0,"3886",,terminal_output +13910,12545084,"TERMINAL",0,0,"4\t",,terminal_output +13911,12545179,"TERMINAL",0,0,"4997",,terminal_output +13912,12546125,"TERMINAL",0,0,"5\t",,terminal_output +13913,12546232,"TERMINAL",0,0,"520208",,terminal_output +13914,12547268,"TERMINAL",0,0,"6\t",,terminal_output +13915,12547269,"TERMINAL",0,0,"6119",,terminal_output +13916,12548292,"TERMINAL",0,0,"7\t",,terminal_output +13917,12548293,"TERMINAL",0,0,"7338:01",,terminal_output +13918,12549312,"TERMINAL",0,0,"8\t",,terminal_output +13919,12549357,"TERMINAL",0,0,"9442",,terminal_output +13920,12550291,"TERMINAL",0,0,"9\t",,terminal_output +13921,12550458,"TERMINAL",0,0,"20553",,terminal_output 
+13922,12551360,"TERMINAL",0,0,"21\t",,terminal_output +13923,12551396,"TERMINAL",0,0,"1664",,terminal_output +13924,12552379,"TERMINAL",0,0,"2\t",,terminal_output +13925,12552431,"TERMINAL",0,0,"2775",,terminal_output +13926,12553371,"TERMINAL",0,0,"320",,terminal_output +13927,12553514,"TERMINAL",0,0,"3886",,terminal_output +13928,12554413,"TERMINAL",0,0,"4\t",,terminal_output +13929,12554553,"TERMINAL",0,0,"4997",,terminal_output +13930,12555460,"TERMINAL",0,0,"5\t",,terminal_output +13931,12555553,"TERMINAL",0,0,"530308",,terminal_output +13932,12556594,"TERMINAL",0,0,"6\t",,terminal_output +13933,12556594,"TERMINAL",0,0,"6119",,terminal_output +13934,12557525,"TERMINAL",0,0,"7\t",,terminal_output +13935,12557638,"TERMINAL",0,0,"72210",,terminal_output +13936,12558635,"TERMINAL",0,0,"8\t",,terminal_output +13937,12558678,"TERMINAL",0,0,"8331",,terminal_output +13938,12559732,"TERMINAL",0,0,"9\t",,terminal_output +13939,12559733,"TERMINAL",0,0,"9442",,terminal_output +13940,12560638,"TERMINAL",0,0,"30\t",,terminal_output +13941,12560755,"TERMINAL",0,0,"30553",,terminal_output +13942,12561735,"TERMINAL",0,0,"1\t",,terminal_output +13943,12561803,"TERMINAL",0,0,"1664",,terminal_output +13944,12562750,"TERMINAL",0,0,"2\t",,terminal_output +13945,12562860,"TERMINAL",0,0,"2775",,terminal_output +13946,12563756,"TERMINAL",0,0,"3\t",,terminal_output +13947,12563884,"TERMINAL",0,0,"3886",,terminal_output +13948,12564890,"TERMINAL",0,0,"4\t",,terminal_output +13949,12564922,"TERMINAL",0,0,"4997",,terminal_output +13950,12565912,"TERMINAL",0,0,"5\t",,terminal_output +13951,12565955,"TERMINAL",0,0,"540408",,terminal_output +13952,12566947,"TERMINAL",0,0,"6\t",,terminal_output +13953,12567098,"TERMINAL",0,0,"6119",,terminal_output +13954,12567953,"TERMINAL",0,0,"7\t",,terminal_output +13955,12568039,"TERMINAL",0,0,"72220",,terminal_output +13956,12568983,"TERMINAL",0,0,"8\t",,terminal_output +13957,12569121,"TERMINAL",0,0,"8331",,terminal_output +13958,12570003,"TERMINAL",0,0,"9\t",,terminal_output +13959,12570231,"TERMINAL",0,0,"9442",,terminal_output +13960,12571057,"TERMINAL",0,0,"40\t",,terminal_output +13961,12571202,"TERMINAL",0,0,"40553",,terminal_output +13962,12572079,"TERMINAL",0,0,"1\t",,terminal_output +13963,12572220,"TERMINAL",0,0,"1664",,terminal_output +13964,12573099,"TERMINAL",0,0,"2\t",,terminal_output +13965,12573260,"TERMINAL",0,0,"2775",,terminal_output +13966,12574140,"TERMINAL",0,0,"3\t",,terminal_output +13967,12574303,"TERMINAL",0,0,"3997",,terminal_output +13968,12575187,"TERMINAL",0,0,"4\t",,terminal_output +13969,12575346,"TERMINAL",0,0,"550508",,terminal_output +13970,12576290,"TERMINAL",0,0,"5\t",,terminal_output +13971,12576391,"TERMINAL",0,0,"6119",,terminal_output +13972,12577254,"TERMINAL",0,0,"6\t",,terminal_output +13973,12577430,"TERMINAL",0,0,"72230",,terminal_output +13974,12578358,"TERMINAL",0,0,"7\t",,terminal_output +13975,12578521,"TERMINAL",0,0,"8331",,terminal_output +13976,12579334,"TERMINAL",0,0,"9\t",,terminal_output +13977,12579512,"TERMINAL",0,0,"9442",,terminal_output +13978,12580373,"TERMINAL",0,0,"50\t",,terminal_output +13979,12580554,"TERMINAL",0,0,"50553",,terminal_output +13980,12581423,"TERMINAL",0,0,"1\t",,terminal_output +13981,12581646,"TERMINAL",0,0,"1664",,terminal_output +13982,12582521,"TERMINAL",0,0,"2\t",,terminal_output +13983,12582660,"TERMINAL",0,0,"2775",,terminal_output +13984,12583486,"TERMINAL",0,0,"3\t",,terminal_output +13985,12583675,"TERMINAL",0,0,"3886",,terminal_output 
+13986,12584643,"TERMINAL",0,0,"4\t",,terminal_output +13987,12584717,"TERMINAL",0,0,"4997",,terminal_output +13988,12585771,"TERMINAL",0,0,"51",,terminal_output +13989,12585771,"TERMINAL",0,0,"540:0040:008",,terminal_output +13990,12586666,"TERMINAL",0,0,"6\t",,terminal_output +13991,12586801,"TERMINAL",0,0,"6119",,terminal_output +13992,12587711,"TERMINAL",0,0,"7\t",,terminal_output +13993,12587849,"TERMINAL",0,0,"72240",,terminal_output +13994,12588715,"TERMINAL",0,0,"8\t",,terminal_output +13995,12588945,"TERMINAL",0,0,"8331",,terminal_output +13996,12589764,"TERMINAL",0,0,"9\t",,terminal_output +13997,12589944,"TERMINAL",0,0,"9442",,terminal_output +13998,12590760,"TERMINAL",0,0,"6:00\t",,terminal_output +13999,12590988,"TERMINAL",0,0,"6:00553",,terminal_output +14000,12591802,"TERMINAL",0,0,"1\t",,terminal_output +14001,12592022,"TERMINAL",0,0,"1664",,terminal_output +14002,12592840,"TERMINAL",0,0,"2\t",,terminal_output +14003,12593066,"TERMINAL",0,0,"2775",,terminal_output +14004,12593887,"TERMINAL",0,0,"3\t",,terminal_output +14005,12594119,"TERMINAL",0,0,"3886",,terminal_output +14006,12594975,"TERMINAL",0,0,"4\t",,terminal_output +14007,12595146,"TERMINAL",0,0,"4997",,terminal_output +14008,12595952,"TERMINAL",0,0,"5\t",,terminal_output +14009,12596180,"TERMINAL",0,0,"510108",,terminal_output +14010,12597035,"TERMINAL",0,0,"6\t",,terminal_output +14011,12597219,"TERMINAL",0,0,"6119",,terminal_output +14012,12598042,"TERMINAL",0,0,"7\t",,terminal_output +14013,12598263,"TERMINAL",0,0,"72250",,terminal_output +14014,12599074,"TERMINAL",0,0,"8\t",,terminal_output +14015,12599313,"TERMINAL",0,0,"8442",,terminal_output +14016,12600140,"TERMINAL",0,0,"9\t",,terminal_output +14017,12600345,"TERMINAL",0,0,"10553",,terminal_output +14018,12602846,"TERMINAL",0,0,"1019",,terminal_output +14019,12602846,"TERMINAL",0,0,"1775",,terminal_output +14020,12603997,"TERMINAL",0,0,"3886",,terminal_output +14021,12604041,"TERMINAL",0,0,"3\t",,terminal_output +14022,12605023,"TERMINAL",0,0,"4997",,terminal_output +14023,12605064,"TERMINAL",0,0,"4\t",,terminal_output +14024,12606046,"TERMINAL",0,0,"5\t",,terminal_output +14025,12606046,"TERMINAL",0,0,"520208",,terminal_output +14026,12607057,"TERMINAL",0,0,"6\t",,terminal_output +14027,12607098,"TERMINAL",0,0,"6119",,terminal_output +14028,12608104,"TERMINAL",0,0,"7\t",,terminal_output +14029,12608121,"TERMINAL",0,0,"7229:00",,terminal_output +14030,12609135,"TERMINAL",0,0,"8\t",,terminal_output +14031,12609172,"TERMINAL",0,0,"8331",,terminal_output +14032,12610189,"TERMINAL",0,0,"9\t",,terminal_output +14033,12610204,"TERMINAL",0,0,"9442",,terminal_output +14034,12611202,"TERMINAL",0,0,"2020",,terminal_output +14035,12611241,"TERMINAL",0,0,"20553",,terminal_output +14036,12612246,"TERMINAL",0,0,"1\t",,terminal_output +14037,12612279,"TERMINAL",0,0,"1775",,terminal_output +14038,12613288,"TERMINAL",0,0,"2\t",,terminal_output +14039,12613325,"TERMINAL",0,0,"3886",,terminal_output +14040,12614382,"TERMINAL",0,0,"4\t",,terminal_output +14041,12614383,"TERMINAL",0,0,"4997",,terminal_output +14042,12615371,"TERMINAL",0,0,"5\t",,terminal_output +14043,12615406,"TERMINAL",0,0,"530308",,terminal_output +14044,12616490,"TERMINAL",0,0,"6\t",,terminal_output +14045,12616491,"TERMINAL",0,0,"6119",,terminal_output +14046,12617455,"TERMINAL",0,0,"7\t",,terminal_output +14047,12617541,"TERMINAL",0,0,"72210",,terminal_output +14048,12618540,"TERMINAL",0,0,"8\t",,terminal_output +14049,12618547,"TERMINAL",0,0,"8331",,terminal_output 
+14050,12619568,"TERMINAL",0,0,"9\t",,terminal_output +14051,12619606,"TERMINAL",0,0,"9442",,terminal_output +14052,12620579,"TERMINAL",0,0,"30\t",,terminal_output +14053,12620654,"TERMINAL",0,0,"30553",,terminal_output +14054,12621712,"TERMINAL",0,0,"1\t",,terminal_output +14055,12621756,"TERMINAL",0,0,"1664",,terminal_output +14056,12622646,"TERMINAL",0,0,"2\t",,terminal_output +14057,12622770,"TERMINAL",0,0,"2775",,terminal_output +14058,12623683,"TERMINAL",0,0,"3\t",,terminal_output +14059,12623778,"TERMINAL",0,0,"3886",,terminal_output +14060,12624717,"TERMINAL",0,0,"4\t",,terminal_output +14061,12624810,"TERMINAL",0,0,"4997",,terminal_output +14062,12625804,"TERMINAL",0,0,"5\t",,terminal_output +14063,12625866,"TERMINAL",0,0,"540408",,terminal_output +14064,12626812,"TERMINAL",0,0,"6\t",,terminal_output +14065,12626956,"TERMINAL",0,0,"6119",,terminal_output +14066,12627818,"TERMINAL",0,0,"7\t",,terminal_output +14067,12627940,"TERMINAL",0,0,"72220",,terminal_output +14068,12628897,"TERMINAL",0,0,"8\t",,terminal_output +14069,12629024,"TERMINAL",0,0,"8331",,terminal_output +14070,12629896,"TERMINAL",0,0,"9\t",,terminal_output +14071,12630075,"TERMINAL",0,0,"9442",,terminal_output +14072,12631081,"TERMINAL",0,0,"40\t",,terminal_output +14073,12631125,"TERMINAL",0,0,"40553",,terminal_output +14074,12631978,"TERMINAL",0,0,"1\t",,terminal_output +14075,12632130,"TERMINAL",0,0,"1664",,terminal_output +14076,12633056,"TERMINAL",0,0,"2\t",,terminal_output +14077,12633192,"TERMINAL",0,0,"2775",,terminal_output +14078,12634060,"TERMINAL",0,0,"3\t",,terminal_output +14079,12634205,"TERMINAL",0,0,"3886",,terminal_output +14080,12635098,"TERMINAL",0,0,"4\t",,terminal_output +14081,12635247,"TERMINAL",0,0,"4997",,terminal_output +14082,12636134,"TERMINAL",0,0,"5\t",,terminal_output +14083,12636302,"TERMINAL",0,0,"551519",,terminal_output +14084,12637181,"TERMINAL",0,0,"6\t",,terminal_output +14085,12637353,"TERMINAL",0,0,"72230",,terminal_output +14086,12638217,"TERMINAL",0,0,"7\t",,terminal_output +14087,12638385,"TERMINAL",0,0,"8331",,terminal_output +14088,12639256,"TERMINAL",0,0,"8\t",,terminal_output +14089,12639439,"TERMINAL",0,0,"9442",,terminal_output +14090,12640297,"TERMINAL",0,0,"9\t",,terminal_output +14091,12640472,"TERMINAL",0,0,"50553",,terminal_output +14092,12641339,"TERMINAL",0,0,"51\t",,terminal_output +14093,12641507,"TERMINAL",0,0,"1664",,terminal_output +14094,12642376,"TERMINAL",0,0,"2\t",,terminal_output +14095,12642651,"TERMINAL",0,0,"2775",,terminal_output +14096,12643433,"TERMINAL",0,0,"3\t",,terminal_output +14097,12643594,"TERMINAL",0,0,"3886",,terminal_output +14098,12644452,"TERMINAL",0,0,"4\t",,terminal_output +14099,12644636,"TERMINAL",0,0,"4997",,terminal_output +14100,12645492,"TERMINAL",0,0,"5\t",,terminal_output +14101,12645680,"TERMINAL",0,0,"51:001:008",,terminal_output +14102,12646529,"TERMINAL",0,0,"6\t",,terminal_output +14103,12646723,"TERMINAL",0,0,"6119",,terminal_output +14104,12647591,"TERMINAL",0,0,"7\t",,terminal_output +14105,12647786,"TERMINAL",0,0,"72240",,terminal_output +14106,12648612,"TERMINAL",0,0,"8\t",,terminal_output +14107,12648803,"TERMINAL",0,0,"8331",,terminal_output +14108,12649651,"TERMINAL",0,0,"9\t",,terminal_output +14109,12649849,"TERMINAL",0,0,"9442",,terminal_output +14110,12650694,"TERMINAL",0,0,"7:00\t",,terminal_output +14111,12650886,"TERMINAL",0,0,"7:00553",,terminal_output +14112,12651728,"TERMINAL",0,0,"11",,terminal_output +14113,12651930,"TERMINAL",0,0,"1664",,terminal_output 
+14114,12652763,"TERMINAL",0,0,"2\t",,terminal_output +14115,12652970,"TERMINAL",0,0,"2775",,terminal_output +14116,12653839,"TERMINAL",0,0,"3\t",,terminal_output +14117,12654055,"TERMINAL",0,0,"3886",,terminal_output +14118,12654863,"TERMINAL",0,0,"4\t",,terminal_output +14119,12655050,"TERMINAL",0,0,"4997",,terminal_output +14120,12655880,"TERMINAL",0,0,"5\t",,terminal_output +14121,12656087,"TERMINAL",0,0,"510108",,terminal_output +14122,12656918,"TERMINAL",0,0,"6\t",,terminal_output +14123,12657140,"TERMINAL",0,0,"6119",,terminal_output +14124,12657946,"TERMINAL",0,0,"7\t",,terminal_output +14125,12658160,"TERMINAL",0,0,"72250",,terminal_output +14126,12658987,"TERMINAL",0,0,"8\t",,terminal_output +14127,12659198,"TERMINAL",0,0,"8331",,terminal_output +14128,12660120,"TERMINAL",0,0,"9\t",,terminal_output +14129,12660239,"TERMINAL",0,0,"9442",,terminal_output +14130,12661139,"TERMINAL",0,0,"10\t",,terminal_output +14131,12661287,"TERMINAL",0,0,"10664",,terminal_output +14132,12662170,"TERMINAL",0,0,"1\t",,terminal_output +14133,12662318,"TERMINAL",0,0,"2775",,terminal_output +14134,12663139,"TERMINAL",0,0,"2\t",,terminal_output +14135,12663397,"TERMINAL",0,0,"3886",,terminal_output +14136,12664174,"TERMINAL",0,0,"3\t",,terminal_output +14137,12664422,"TERMINAL",0,0,"4997",,terminal_output +14138,12665220,"TERMINAL",0,0,"4\t",,terminal_output +14139,12665431,"TERMINAL",0,0,"520208",,terminal_output +14140,12666241,"TERMINAL",0,0,"5\t",,terminal_output +14141,12666462,"TERMINAL",0,0,"6119",,terminal_output +14142,12667283,"TERMINAL",0,0,"6\t",,terminal_output +14143,12667498,"TERMINAL",0,0,"72220:00",,terminal_output +14144,12668407,"TERMINAL",0,0,"8\t",,terminal_output +14145,12668540,"TERMINAL",0,0,"8331",,terminal_output +14146,12669375,"TERMINAL",0,0,"9\t",,terminal_output +14147,12669571,"TERMINAL",0,0,"9442",,terminal_output +14148,12670398,"TERMINAL",0,0,"20\t",,terminal_output +14149,12670607,"TERMINAL",0,0,"20553",,terminal_output +14150,12671421,"TERMINAL",0,0,"1\t",,terminal_output +14151,12671643,"TERMINAL",0,0,"1664",,terminal_output +14152,12672499,"TERMINAL",0,0,"2\t",,terminal_output +14153,12672675,"TERMINAL",0,0,"2775",,terminal_output +14154,12673492,"TERMINAL",0,0,"3\t",,terminal_output +14155,12673713,"TERMINAL",0,0,"3886",,terminal_output +14156,12674526,"TERMINAL",0,0,"4\t",,terminal_output +14157,12674745,"TERMINAL",0,0,"4997",,terminal_output +14158,12675673,"TERMINAL",0,0,"5\t",,terminal_output +14159,12675814,"TERMINAL",0,0,"530308",,terminal_output +14160,12676709,"TERMINAL",0,0,"6\t",,terminal_output +14161,12676846,"TERMINAL",0,0,"6119",,terminal_output +14162,12677668,"TERMINAL",0,0,"7\t",,terminal_output +14163,12677854,"TERMINAL",0,0,"72210",,terminal_output +14164,12678750,"TERMINAL",0,0,"8\t",,terminal_output +14165,12678888,"TERMINAL",0,0,"8331",,terminal_output +14166,12679728,"TERMINAL",0,0,"9\t",,terminal_output +14167,12680076,"TERMINAL",0,0,"9442",,terminal_output +14168,12680798,"TERMINAL",0,0,"30\t",,terminal_output +14169,12680971,"TERMINAL",0,0,"30553",,terminal_output +14170,12681774,"TERMINAL",0,0,"1\t",,terminal_output +14171,12682130,"TERMINAL",0,0,"1664",,terminal_output +14172,12682808,"TERMINAL",0,0,"2\t",,terminal_output +14173,12683156,"TERMINAL",0,0,"2775",,terminal_output +14174,12683923,"TERMINAL",0,0,"3\t",,terminal_output +14175,12684104,"TERMINAL",0,0,"3886",,terminal_output +14176,12685000,"TERMINAL",0,0,"4\t",,terminal_output +14177,12685144,"TERMINAL",0,0,"4997",,terminal_output 
+14178,12685929,"TERMINAL",0,0,"5\t",,terminal_output +14179,12686175,"TERMINAL",0,0,"540408",,terminal_output +14180,12687059,"TERMINAL",0,0,"6\t",,terminal_output +14181,12687217,"TERMINAL",0,0,"6119",,terminal_output +14182,12687981,"TERMINAL",0,0,"7\t",,terminal_output +14183,12688268,"TERMINAL",0,0,"72220",,terminal_output +14184,12689093,"TERMINAL",0,0,"8\t",,terminal_output +14185,12689289,"TERMINAL",0,0,"8442",,terminal_output +14186,12690112,"TERMINAL",0,0,"9\t",,terminal_output +14187,12690321,"TERMINAL",0,0,"40553",,terminal_output +14188,12691140,"TERMINAL",0,0,"40\t",,terminal_output +14189,12691391,"TERMINAL",0,0,"1664",,terminal_output +14190,12692168,"TERMINAL",0,0,"1\t",,terminal_output +14191,12692479,"TERMINAL",0,0,"2775",,terminal_output +14192,12693192,"TERMINAL",0,0,"2\t",,terminal_output +14193,12693496,"TERMINAL",0,0,"3886",,terminal_output +14194,12694204,"TERMINAL",0,0,"3\t",,terminal_output +14195,12694475,"TERMINAL",0,0,"4997",,terminal_output +14196,12695242,"TERMINAL",0,0,"4\t",,terminal_output +14197,12695544,"TERMINAL",0,0,"550508",,terminal_output +14198,12696364,"TERMINAL",0,0,"5\t",,terminal_output +14199,12696552,"TERMINAL",0,0,"6119",,terminal_output +14200,12697311,"TERMINAL",0,0,"7\t",,terminal_output +14201,12697591,"TERMINAL",0,0,"72230",,terminal_output +14202,12698351,"TERMINAL",0,0,"8\t",,terminal_output +14203,12698714,"TERMINAL",0,0,"8331",,terminal_output +14204,12699394,"TERMINAL",0,0,"92",,terminal_output +14205,12699742,"TERMINAL",0,0,"9442",,terminal_output +14206,12700455,"TERMINAL",0,0,"50\t",,terminal_output +14207,12700785,"TERMINAL",0,0,"50553",,terminal_output +14208,12701590,"TERMINAL",0,0,"1\t",,terminal_output +14209,12701747,"TERMINAL",0,0,"1664",,terminal_output +14210,12702502,"TERMINAL",0,0,"2\t",,terminal_output +14211,12702780,"TERMINAL",0,0,"2775",,terminal_output +14212,12703542,"TERMINAL",0,0,"3\t",,terminal_output +14213,12703817,"TERMINAL",0,0,"3886",,terminal_output +14214,12704577,"TERMINAL",0,0,"4\t",,terminal_output +14215,12704961,"TERMINAL",0,0,"4997",,terminal_output +14216,12705682,"TERMINAL",0,0,"5\t",,terminal_output +14217,12705899,"TERMINAL",0,0,"52:002:008",,terminal_output +14218,12706705,"TERMINAL",0,0,"6\t",,terminal_output +14219,12707009,"TERMINAL",0,0,"6119",,terminal_output +14220,12707729,"TERMINAL",0,0,"7\t",,terminal_output +14221,12707980,"TERMINAL",0,0,"72240",,terminal_output +14222,12708754,"TERMINAL",0,0,"8\t",,terminal_output +14223,12709105,"TERMINAL",0,0,"8331",,terminal_output +14224,12709777,"TERMINAL",0,0,"9\t",,terminal_output +14225,12710084,"TERMINAL",0,0,"9442",,terminal_output +14226,12710906,"TERMINAL",0,0,"8:00\t",,terminal_output +14227,12711116,"TERMINAL",0,0,"8:00553",,terminal_output +14228,12711830,"TERMINAL",0,0,"13",,terminal_output +14229,12712126,"TERMINAL",0,0,"1664",,terminal_output +14230,12712859,"TERMINAL",0,0,"2\t",,terminal_output +14231,12713165,"TERMINAL",0,0,"2775",,terminal_output +14232,12713895,"TERMINAL",0,0,"3\t",,terminal_output +14233,12714196,"TERMINAL",0,0,"3886",,terminal_output +14234,12715022,"TERMINAL",0,0,"4\t",,terminal_output +14235,12715369,"TERMINAL",0,0,"43469465 accelerat train_dy tum_cte0 CG20:47\t 8 hkn[0521-0528]99",,terminal_output +14236,12716386,"TERMINAL",0,0,"532",,terminal_output +14237,12716386,"TERMINAL",0,0,"51111",,terminal_output +14238,12717103,"TERMINAL",0,0,"6\t",,terminal_output +14239,12717333,"TERMINAL",0,0,"722",,terminal_output +14240,12718082,"TERMINAL",0,0,"7\t",,terminal_output 
+14241,12718368,"TERMINAL",0,0,"833",,terminal_output +14242,12719072,"TERMINAL",0,0,"8\t",,terminal_output +14243,12719389,"TERMINAL",0,0,"944",,terminal_output +14244,12720280,"TERMINAL",0,0,"94",,terminal_output +14245,12720429,"TERMINAL",0,0,"1055",,terminal_output +14246,12721316,"TERMINAL",0,0,"112",,terminal_output +14247,12721459,"TERMINAL",0,0,"166",,terminal_output +14248,12722482,"TERMINAL",0,0,"2\t",,terminal_output +14249,12722517,"TERMINAL",0,0,"277",,terminal_output +14250,12723505,"TERMINAL",0,0,"3\t",,terminal_output +14251,12723558,"TERMINAL",0,0,"388",,terminal_output +14252,12725935,"TERMINAL",0,0,"42020",,terminal_output +14253,12725945,"TERMINAL",0,0,"42",,terminal_output +14254,12727083,"TERMINAL",0,0,"\r611",,terminal_output +14255,12727084,"TERMINAL",0,0,"60 0",,terminal_output +14256,12728097,"TERMINAL",0,0,"722",,terminal_output +14257,12728141,"TERMINAL",0,0,"7\t",,terminal_output +14258,12729142,"TERMINAL",0,0,"833",,terminal_output +14259,12729142,"TERMINAL",0,0,"8\t",,terminal_output +14260,12730169,"TERMINAL",0,0,"9\t",,terminal_output +14261,12730170,"TERMINAL",0,0,"944",,terminal_output +14262,12731210,"TERMINAL",0,0,"2055",,terminal_output +14263,12731211,"TERMINAL",0,0,"20\t",,terminal_output +14264,12732251,"TERMINAL",0,0,"166",,terminal_output +14265,12732299,"TERMINAL",0,0,"1\t",,terminal_output +14266,12733348,"TERMINAL",0,0,"288",,terminal_output +14267,12733356,"TERMINAL",0,0,"2\t",,terminal_output +14268,12734310,"TERMINAL",0,0,"4\t",,terminal_output +14269,12734398,"TERMINAL",0,0,"499",,terminal_output +14270,12735355,"TERMINAL",0,0,"5\t",,terminal_output +14271,12735410,"TERMINAL",0,0,"53030",,terminal_output +14272,12736396,"TERMINAL",0,0,"6\t",,terminal_output +14273,12736471,"TERMINAL",0,0,"611",,terminal_output +14274,12737418,"TERMINAL",0,0,"7\t",,terminal_output +14275,12737483,"TERMINAL",0,0,"722",,terminal_output +14276,12738450,"TERMINAL",0,0,"8\t",,terminal_output +14277,12738563,"TERMINAL",0,0,"833",,terminal_output +14278,12739487,"TERMINAL",0,0,"9\t",,terminal_output +14279,12739588,"TERMINAL",0,0,"944",,terminal_output +14280,12740601,"TERMINAL",0,0,"30\t",,terminal_output +14281,12740601,"TERMINAL",0,0,"3055",,terminal_output +14282,12741697,"TERMINAL",0,0,"1\t",,terminal_output +14283,12741703,"TERMINAL",0,0,"166",,terminal_output +14284,12742648,"TERMINAL",0,0,"2\t",,terminal_output +14285,12742691,"TERMINAL",0,0,"277",,terminal_output +14286,12743686,"TERMINAL",0,0,"3\t",,terminal_output +14287,12743705,"TERMINAL",0,0,"388",,terminal_output +14288,12744704,"TERMINAL",0,0,"4\t",,terminal_output +14289,12744757,"TERMINAL",0,0,"499",,terminal_output +14290,12745706,"TERMINAL",0,0,"5\t",,terminal_output +14291,12745807,"TERMINAL",0,0,"54040",,terminal_output +14292,12746745,"TERMINAL",0,0,"6\t",,terminal_output +14293,12746811,"TERMINAL",0,0,"611",,terminal_output +14294,12747774,"TERMINAL",0,0,"7\t",,terminal_output +14295,12747887,"TERMINAL",0,0,"722",,terminal_output +14296,12748809,"TERMINAL",0,0,"8\t",,terminal_output +14297,12748886,"TERMINAL",0,0,"833",,terminal_output +14298,12749863,"TERMINAL",0,0,"9\t",,terminal_output +14299,12749921,"TERMINAL",0,0,"944",,terminal_output +14300,12750964,"TERMINAL",0,0,"40\t",,terminal_output +14301,12750968,"TERMINAL",0,0,"4055",,terminal_output +14302,12751971,"TERMINAL",0,0,"1\t",,terminal_output +14303,12751990,"TERMINAL",0,0,"166",,terminal_output +14304,12752955,"TERMINAL",0,0,"2\t",,terminal_output +14305,12753033,"TERMINAL",0,0,"277",,terminal_output 
+14306,12754021,"TERMINAL",0,0,"31",,terminal_output +14307,12754104,"TERMINAL",0,0,"388",,terminal_output +14308,12755040,"TERMINAL",0,0,"4\t",,terminal_output +14309,12755100,"TERMINAL",0,0,"499",,terminal_output +14310,12756088,"TERMINAL",0,0,"5\t",,terminal_output +14311,12756136,"TERMINAL",0,0,"55050",,terminal_output +14312,12757093,"TERMINAL",0,0,"6\t",,terminal_output +14313,12757179,"TERMINAL",0,0,"611",,terminal_output +14314,12758123,"TERMINAL",0,0,"7\t",,terminal_output +14315,12758222,"TERMINAL",0,0,"722",,terminal_output +14316,12759160,"TERMINAL",0,0,"8\t",,terminal_output +14317,12759258,"TERMINAL",0,0,"833",,terminal_output +14318,12760206,"TERMINAL",0,0,"9\t",,terminal_output +14319,12760316,"TERMINAL",0,0,"955",,terminal_output +14320,12761250,"TERMINAL",0,0,"50\t",,terminal_output +14321,12761319,"TERMINAL",0,0,"5166",,terminal_output +14322,12762289,"TERMINAL",0,0,"1\t",,terminal_output +14323,12762382,"TERMINAL",0,0,"277",,terminal_output +14324,12763320,"TERMINAL",0,0,"3\t",,terminal_output +14325,12763431,"TERMINAL",0,0,"388",,terminal_output +14326,12764375,"TERMINAL",0,0,"4\t",,terminal_output +14327,12764487,"TERMINAL",0,0,"499",,terminal_output +14328,12765407,"TERMINAL",0,0,"5\t",,terminal_output +14329,12765471,"TERMINAL",0,0,"53:003:00",,terminal_output +14330,12766535,"TERMINAL",0,0,"6\t",,terminal_output +14331,12766550,"TERMINAL",0,0,"611",,terminal_output +14332,12767540,"TERMINAL",0,0,"7\t",,terminal_output +14333,12767543,"TERMINAL",0,0,"722",,terminal_output +14334,12768557,"TERMINAL",0,0,"8\t",,terminal_output +14335,12768585,"TERMINAL",0,0,"833",,terminal_output +14336,12769617,"TERMINAL",0,0,"9\t",,terminal_output +14337,12769623,"TERMINAL",0,0,"944",,terminal_output +14338,12770603,"TERMINAL",0,0,"9:00\t",,terminal_output +14339,12770655,"TERMINAL",0,0,"9:0055",,terminal_output +14340,12771737,"TERMINAL",0,0,"1\t",,terminal_output +14341,12771737,"TERMINAL",0,0,"166",,terminal_output +14342,12772771,"TERMINAL",0,0,"2\t",,terminal_output +14343,12772772,"TERMINAL",0,0,"277",,terminal_output +14344,12773781,"TERMINAL",0,0,"3\t",,terminal_output +14345,12773781,"TERMINAL",0,0,"388",,terminal_output +14346,12774754,"TERMINAL",0,0,"4\t",,terminal_output +14347,12774837,"TERMINAL",0,0,"499",,terminal_output +14348,12775755,"TERMINAL",0,0,"5\t",,terminal_output +14349,12775863,"TERMINAL",0,0,"51010",,terminal_output +14350,12776790,"TERMINAL",0,0,"6\t",,terminal_output +14351,12776905,"TERMINAL",0,0,"611",,terminal_output +14352,12777828,"TERMINAL",0,0,"7\t",,terminal_output +14353,12777910,"TERMINAL",0,0,"722",,terminal_output +14354,12778906,"TERMINAL",0,0,"8\t",,terminal_output +14355,12778958,"TERMINAL",0,0,"833",,terminal_output +14356,12779917,"TERMINAL",0,0,"9\t",,terminal_output +14357,12780004,"TERMINAL",0,0,"944",,terminal_output +14358,12781048,"TERMINAL",0,0,"10\t",,terminal_output +14359,12781056,"TERMINAL",0,0,"1055",,terminal_output +14360,12781976,"TERMINAL",0,0,"1\t",,terminal_output +14361,12782084,"TERMINAL",0,0,"166",,terminal_output +14362,12783019,"TERMINAL",0,0,"2\t",,terminal_output +14363,12783128,"TERMINAL",0,0,"277",,terminal_output +14364,12784133,"TERMINAL",0,0,"3\t",,terminal_output +14365,12784154,"TERMINAL",0,0,"388",,terminal_output +14366,12785155,"TERMINAL",0,0,"4\t",,terminal_output +14367,12785270,"TERMINAL",0,0,"499",,terminal_output +14368,12786121,"TERMINAL",0,0,"5\t",,terminal_output +14369,12786257,"TERMINAL",0,0,"52020",,terminal_output +14370,12787281,"TERMINAL",0,0,"6\t",,terminal_output 
+14371,12787294,"TERMINAL",0,0,"611",,terminal_output +14372,12788197,"TERMINAL",0,0,"7\t",,terminal_output +14373,12788333,"TERMINAL",0,0,"733",,terminal_output +14374,12789228,"TERMINAL",0,0,"8\t",,terminal_output +14375,12789367,"TERMINAL",0,0,"944",,terminal_output +14376,12790272,"TERMINAL",0,0,"9\t",,terminal_output +14377,12790366,"TERMINAL",0,0,"2055",,terminal_output +14378,12791333,"TERMINAL",0,0,"21\t",,terminal_output +14379,12791429,"TERMINAL",0,0,"166",,terminal_output +14380,12792352,"TERMINAL",0,0,"2\t",,terminal_output +14381,12792447,"TERMINAL",0,0,"277",,terminal_output +14382,12793384,"TERMINAL",0,0,"3\t",,terminal_output +14383,12793508,"TERMINAL",0,0,"388",,terminal_output +14384,12794422,"TERMINAL",0,0,"4\t",,terminal_output +14385,12794510,"TERMINAL",0,0,"499",,terminal_output +14386,12795466,"TERMINAL",0,0,"5\t",,terminal_output +14387,12795561,"TERMINAL",0,0,"53030",,terminal_output +14388,12796515,"TERMINAL",0,0,"6\t",,terminal_output +14389,12796608,"TERMINAL",0,0,"611",,terminal_output +14390,12797541,"TERMINAL",0,0,"7\t",,terminal_output +14391,12797637,"TERMINAL",0,0,"722",,terminal_output +14392,12798661,"TERMINAL",0,0,"8\t",,terminal_output +14393,12798668,"TERMINAL",0,0,"833",,terminal_output +14394,12799691,"TERMINAL",0,0,"9\t",,terminal_output +14395,12799734,"TERMINAL",0,0,"944",,terminal_output +14396,12800668,"TERMINAL",0,0,"30\t",,terminal_output +14397,12800766,"TERMINAL",0,0,"3055",,terminal_output +14398,12801739,"TERMINAL",0,0,"1\t",,terminal_output +14399,12801782,"TERMINAL",0,0,"166",,terminal_output +14400,12802722,"TERMINAL",0,0,"2\t",,terminal_output +14401,12802867,"TERMINAL",0,0,"277",,terminal_output +14402,12803745,"TERMINAL",0,0,"3\t",,terminal_output +14403,12803885,"TERMINAL",0,0,"388",,terminal_output +14404,12804785,"TERMINAL",0,0,"4\t",,terminal_output +14405,12804940,"TERMINAL",0,0,"499",,terminal_output +14406,12805935,"TERMINAL",0,0,"5\t",,terminal_output +14407,12805935,"TERMINAL",0,0,"54040",,terminal_output +14408,12806858,"TERMINAL",0,0,"6\t",,terminal_output +14409,12806998,"TERMINAL",0,0,"611",,terminal_output +14410,12807898,"TERMINAL",0,0,"7\t",,terminal_output +14411,12808044,"TERMINAL",0,0,"722",,terminal_output +14412,12808929,"TERMINAL",0,0,"8\t",,terminal_output +14413,12809039,"TERMINAL",0,0,"833",,terminal_output +14414,12810029,"TERMINAL",0,0,"9\t",,terminal_output +14415,12810083,"TERMINAL",0,0,"944",,terminal_output +14416,12810999,"TERMINAL",0,0,"40\t",,terminal_output +14417,12811139,"TERMINAL",0,0,"4055",,terminal_output +14418,12812100,"TERMINAL",0,0,"1\t",,terminal_output +14419,12812156,"TERMINAL",0,0,"166",,terminal_output +14420,12813116,"TERMINAL",0,0,"2\t",,terminal_output +14421,12813189,"TERMINAL",0,0,"277",,terminal_output +14422,12814233,"TERMINAL",0,0,"3\t",,terminal_output +14423,12814239,"TERMINAL",0,0,"388",,terminal_output +14424,12815156,"TERMINAL",0,0,"4\t",,terminal_output +14425,12815304,"TERMINAL",0,0,"499",,terminal_output +14426,12816188,"TERMINAL",0,0,"5\t",,terminal_output +14427,12816335,"TERMINAL",0,0,"55151",,terminal_output +14428,12817219,"TERMINAL",0,0,"6\t",,terminal_output +14429,12817372,"TERMINAL",0,0,"722",,terminal_output +14430,12818253,"TERMINAL",0,0,"7\t",,terminal_output +14431,12818389,"TERMINAL",0,0,"833",,terminal_output +14432,12819288,"TERMINAL",0,0,"8\t",,terminal_output +14433,12819421,"TERMINAL",0,0,"944",,terminal_output +14434,12820325,"TERMINAL",0,0,"50\t",,terminal_output +14435,12820469,"TERMINAL",0,0,"5055",,terminal_output 
+14436,12821371,"TERMINAL",0,0,"1\t",,terminal_output +14437,12821550,"TERMINAL",0,0,"166",,terminal_output +14438,12822395,"TERMINAL",0,0,"2\t",,terminal_output +14439,12822570,"TERMINAL",0,0,"277",,terminal_output +14440,12823447,"TERMINAL",0,0,"3\t",,terminal_output +14441,12823602,"TERMINAL",0,0,"388",,terminal_output +14442,12824467,"TERMINAL",0,0,"4\t",,terminal_output +14443,12824639,"TERMINAL",0,0,"499",,terminal_output +14444,12825501,"TERMINAL",0,0,"5\t",,terminal_output +14445,12825688,"TERMINAL",0,0,"54:004:00",,terminal_output +14446,12826618,"TERMINAL",0,0,"6\t",,terminal_output +14447,12826722,"TERMINAL",0,0,"611",,terminal_output +14448,12827581,"TERMINAL",0,0,"7\t",,terminal_output +14449,12827763,"TERMINAL",0,0,"722",,terminal_output +14450,12828665,"TERMINAL",0,0,"8\t",,terminal_output +14451,12828805,"TERMINAL",0,0,"833",,terminal_output +14452,12829690,"TERMINAL",0,0,"9\t",,terminal_output +14453,12829863,"TERMINAL",0,0,"944",,terminal_output +14454,12830815,"TERMINAL",0,0,"7:00:00\t",,terminal_output +14455,12830922,"TERMINAL",0,0,"7:00:0055",,terminal_output +14456,12831743,"TERMINAL",0,0,"1\t",,terminal_output +14457,12831917,"TERMINAL",0,0,"166",,terminal_output +14458,12832762,"TERMINAL",0,0,"2\t",,terminal_output +14459,12832951,"TERMINAL",0,0,"277",,terminal_output +14460,12833899,"TERMINAL",0,0,"3\t",,terminal_output +14461,12833990,"TERMINAL",0,0,"388",,terminal_output +14462,12834910,"TERMINAL",0,0,"42",,terminal_output +14463,12835051,"TERMINAL",0,0,"499",,terminal_output +14464,12835868,"TERMINAL",0,0,"5\t",,terminal_output +14465,12836083,"TERMINAL",0,0,"51010",,terminal_output +14466,12836917,"TERMINAL",0,0,"6\t",,terminal_output +14467,12837118,"TERMINAL",0,0,"611",,terminal_output +14468,12837938,"TERMINAL",0,0,"7\t",,terminal_output +14469,12838146,"TERMINAL",0,0,"722",,terminal_output +14470,12838981,"TERMINAL",0,0,"8\t",,terminal_output +14471,12839183,"TERMINAL",0,0,"833",,terminal_output +14472,12840011,"TERMINAL",0,0,"9\t",,terminal_output +14473,12840223,"TERMINAL",0,0,"944",,terminal_output +14474,12841050,"TERMINAL",0,0,"10\t",,terminal_output +14475,12841260,"TERMINAL",0,0,"1055",,terminal_output +14476,12842131,"TERMINAL",0,0,"1\t",,terminal_output +14477,12842296,"TERMINAL",0,0,"177",,terminal_output +14478,12843136,"TERMINAL",0,0,"2\t",,terminal_output +14479,12843337,"TERMINAL",0,0,"388",,terminal_output +14480,12844157,"TERMINAL",0,0,"310",,terminal_output +14481,12844375,"TERMINAL",0,0,"499",,terminal_output +14482,12845210,"TERMINAL",0,0,"4\t",,terminal_output +14483,12845419,"TERMINAL",0,0,"52020",,terminal_output +14484,12846230,"TERMINAL",0,0,"5\t",,terminal_output +14485,12846477,"TERMINAL",0,0,"611",,terminal_output +14486,12849069,"TERMINAL",0,0,"733",,terminal_output +14487,12849069,"TERMINAL",0,0,"60",,terminal_output +14488,12850205,"TERMINAL",0,0,"944",,terminal_output +14489,12850228,"TERMINAL",0,0,"9\t",,terminal_output +14490,12851246,"TERMINAL",0,0,"2055",,terminal_output +14491,12851254,"TERMINAL",0,0,"20\t",,terminal_output +14492,12852289,"TERMINAL",0,0,"177",,terminal_output +14493,12852324,"TERMINAL",0,0,"1\t",,terminal_output +14494,12853333,"TERMINAL",0,0,"388",,terminal_output +14495,12853335,"TERMINAL",0,0,"3\t",,terminal_output +14496,12854385,"TERMINAL",0,0,"499",,terminal_output +14497,12854386,"TERMINAL",0,0,"4\t",,terminal_output +14498,12855421,"TERMINAL",0,0,"5\t",,terminal_output +14499,12855421,"TERMINAL",0,0,"53030",,terminal_output +14500,12856439,"TERMINAL",0,0,"6\t",,terminal_output 
+14501,12856450,"TERMINAL",0,0,"611",,terminal_output +14502,12857473,"TERMINAL",0,0,"7\t",,terminal_output +14503,12857501,"TERMINAL",0,0,"722",,terminal_output +14504,12858585,"TERMINAL",0,0,"8\t",,terminal_output +14505,12858590,"TERMINAL",0,0,"833",,terminal_output +14506,12859558,"TERMINAL",0,0,"9\t",,terminal_output +14507,12859620,"TERMINAL",0,0,"944",,terminal_output +14508,12860596,"TERMINAL",0,0,"30\t",,terminal_output +14509,12860605,"TERMINAL",0,0,"3055",,terminal_output +14510,12861651,"TERMINAL",0,0,"1\t",,terminal_output +14511,12861668,"TERMINAL",0,0,"166",,terminal_output +14512,12862661,"TERMINAL",0,0,"2\t",,terminal_output +14513,12862684,"TERMINAL",0,0,"277",,terminal_output +14514,12863698,"TERMINAL",0,0,"3\t",,terminal_output +14515,12863734,"TERMINAL",0,0,"388",,terminal_output +14516,12864734,"TERMINAL",0,0,"4\t",,terminal_output +14517,12864770,"TERMINAL",0,0,"499",,terminal_output +14518,12865772,"TERMINAL",0,0,"5\t",,terminal_output +14519,12865823,"TERMINAL",0,0,"54040",,terminal_output +14520,12866814,"TERMINAL",0,0,"6\t",,terminal_output +14521,12866855,"TERMINAL",0,0,"611",,terminal_output +14522,12867856,"TERMINAL",0,0,"7\t",,terminal_output +14523,12867907,"TERMINAL",0,0,"722",,terminal_output +14524,12868892,"TERMINAL",0,0,"8\t",,terminal_output +14525,12868938,"TERMINAL",0,0,"833",,terminal_output +14526,12869970,"TERMINAL",0,0,"9\t",,terminal_output +14527,12869990,"TERMINAL",0,0,"944",,terminal_output +14528,12871063,"TERMINAL",0,0,"40\t",,terminal_output +14529,12871064,"TERMINAL",0,0,"4055",,terminal_output +14530,12872005,"TERMINAL",0,0,"1\t",,terminal_output +14531,12872055,"TERMINAL",0,0,"166",,terminal_output +14532,12873156,"TERMINAL",0,0,"2\t",,terminal_output +14533,12873156,"TERMINAL",0,0,"277",,terminal_output +14534,12874134,"TERMINAL",0,0,"3\t",,terminal_output +14535,12874153,"TERMINAL",0,0,"388",,terminal_output +14536,12875131,"TERMINAL",0,0,"4\t",,terminal_output +14537,12875186,"TERMINAL",0,0,"499",,terminal_output +14538,12876173,"TERMINAL",0,0,"5\t",,terminal_output +14539,12876222,"TERMINAL",0,0,"55050",,terminal_output +14540,12877355,"TERMINAL",0,0,"6\t",,terminal_output +14541,12877355,"TERMINAL",0,0,"611",,terminal_output +14542,12878336,"TERMINAL",0,0,"7\t",,terminal_output +14543,12878352,"TERMINAL",0,0,"733",,terminal_output +14544,12879287,"TERMINAL",0,0,"8\t",,terminal_output +14545,12879353,"TERMINAL",0,0,"944",,terminal_output +14546,12880323,"TERMINAL",0,0,"50\t",,terminal_output +14547,12880407,"TERMINAL",0,0,"5055",,terminal_output +14548,12881401,"TERMINAL",0,0,"1\t",,terminal_output +14549,12881436,"TERMINAL",0,0,"166",,terminal_output +14550,12882401,"TERMINAL",0,0,"2\t",,terminal_output +14551,12882493,"TERMINAL",0,0,"277",,terminal_output +14552,12883431,"TERMINAL",0,0,"3\t",,terminal_output +14553,12883528,"TERMINAL",0,0,"388",,terminal_output +14554,12884472,"TERMINAL",0,0,"4\t",,terminal_output +14555,12884575,"TERMINAL",0,0,"499",,terminal_output +14556,12885606,"TERMINAL",0,0,"5\t",,terminal_output +14557,12885655,"TERMINAL",0,0,"55:005:00",,terminal_output +14558,12886601,"TERMINAL",0,0,"6\t",,terminal_output +14559,12886638,"TERMINAL",0,0,"611",,terminal_output +14560,12887588,"TERMINAL",0,0,"7\t",,terminal_output +14561,12887700,"TERMINAL",0,0,"722",,terminal_output +14562,12888679,"TERMINAL",0,0,"8\t",,terminal_output +14563,12888729,"TERMINAL",0,0,"833",,terminal_output +14564,12889651,"TERMINAL",0,0,"9\t",,terminal_output +14565,12889788,"TERMINAL",0,0,"944",,terminal_output 
+14566,12890728,"TERMINAL",0,0,"1:00\t",,terminal_output +14567,12890822,"TERMINAL",0,0,"1:0055",,terminal_output +14568,12891749,"TERMINAL",0,0,"1\t",,terminal_output +14569,12891913,"TERMINAL",0,0,"166",,terminal_output +14570,12892768,"TERMINAL",0,0,"2\t",,terminal_output +14571,12892904,"TERMINAL",0,0,"277",,terminal_output +14572,12893800,"TERMINAL",0,0,"3\t",,terminal_output +14573,12893916,"TERMINAL",0,0,"388",,terminal_output +14574,12894845,"TERMINAL",0,0,"4\t",,terminal_output +14575,12894986,"TERMINAL",0,0,"499",,terminal_output +14576,12895882,"TERMINAL",0,0,"5\t",,terminal_output +14577,12896019,"TERMINAL",0,0,"51010",,terminal_output +14578,12896918,"TERMINAL",0,0,"6\t",,terminal_output +14579,12897062,"TERMINAL",0,0,"611",,terminal_output +14580,12897955,"TERMINAL",0,0,"7\t",,terminal_output +14581,12898136,"TERMINAL",0,0,"722",,terminal_output +14582,12899015,"TERMINAL",0,0,"8\t",,terminal_output +14583,12899161,"TERMINAL",0,0,"833",,terminal_output +14584,12900184,"TERMINAL",0,0,"9\t",,terminal_output +14585,12900230,"TERMINAL",0,0,"944",,terminal_output +14586,12901120,"TERMINAL",0,0,"10\t",,terminal_output +14587,12901282,"TERMINAL",0,0,"1055",,terminal_output +14588,12901413,"TERMINAL",0,0,"bash",,terminal_focus +14589,12902180,"TERMINAL",0,0,"1\t",,terminal_output +14590,12902278,"TERMINAL",0,0,"177",,terminal_output +14591,12903184,"TERMINAL",0,0,"2\t",,terminal_output +14592,12903351,"TERMINAL",0,0,"clear",,terminal_command +14593,12903444,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +14594,12903455,"TERMINAL",0,0,"388",,terminal_output +14595,12904237,"TERMINAL",0,0,"3\t",,terminal_output +14596,12904381,"TERMINAL",0,0,"499",,terminal_output +14597,12905230,"TERMINAL",0,0,"4\t",,terminal_output +14598,12905401,"TERMINAL",0,0,"52020",,terminal_output +14599,12906264,"TERMINAL",0,0,"5\t",,terminal_output +14600,12906485,"TERMINAL",0,0,"611",,terminal_output +14601,12907321,"TERMINAL",0,0,"6\t",,terminal_output +14602,12907489,"TERMINAL",0,0,"722",,terminal_output +14603,12908349,"TERMINAL",0,0,"8\t",,terminal_output +14604,12908543,"TERMINAL",0,0,"833",,terminal_output +14605,12909400,"TERMINAL",0,0,"9\t",,terminal_output +14606,12909592,"TERMINAL",0,0,"944",,terminal_output +14607,12910471,"TERMINAL",0,0,"20\t",,terminal_output +14608,12910622,"TERMINAL",0,0,"2055",,terminal_output +14609,12911471,"TERMINAL",0,0,"1\t",,terminal_output +14610,12911665,"TERMINAL",0,0,"166",,terminal_output +14611,12912508,"TERMINAL",0,0,"2\t",,terminal_output +14612,12912708,"TERMINAL",0,0,"277",,terminal_output +14613,12913664,"TERMINAL",0,0,"3\t",,terminal_output +14614,12913761,"TERMINAL",0,0,"388",,terminal_output +14615,12914682,"TERMINAL",0,0,"4\t",,terminal_output +14616,12914795,"TERMINAL",0,0,"499",,terminal_output +14617,12915709,"TERMINAL",0,0,"5\t",,terminal_output +14618,12915884,"TERMINAL",0,0,"53030",,terminal_output +14619,12916833,"TERMINAL",0,0,"6\t",,terminal_output +14620,12916883,"TERMINAL",0,0,"611",,terminal_output +14621,12917761,"TERMINAL",0,0,"7\t",,terminal_output +14622,12917928,"TERMINAL",0,0,"722",,terminal_output +14623,12918779,"TERMINAL",0,0,"8\t",,terminal_output +14624,12919031,"TERMINAL",0,0,"833",,terminal_output +14625,12919801,"TERMINAL",0,0,"9\t",,terminal_output +14626,12920130,"TERMINAL",0,0,"944",,terminal_output +14627,12920895,"TERMINAL",0,0,"30\t",,terminal_output +14628,12921081,"TERMINAL",0,0,"3055",,terminal_output +14629,12921862,"TERMINAL",0,0,"1\t",,terminal_output 
+14630,12922189,"TERMINAL",0,0,"166",,terminal_output +14631,12922920,"TERMINAL",0,0,"2\t",,terminal_output +14632,12923146,"TERMINAL",0,0,"277",,terminal_output +14633,12923945,"TERMINAL",0,0,"3\t",,terminal_output +14634,12924184,"TERMINAL",0,0,"388",,terminal_output +14635,12925004,"TERMINAL",0,0,"4\t",,terminal_output +14636,12925249,"TERMINAL",0,0,"499",,terminal_output +14637,12926023,"TERMINAL",0,0,"5\t",,terminal_output +14638,12926269,"TERMINAL",0,0,"54040",,terminal_output +14639,12927186,"TERMINAL",0,0,"62",,terminal_output +14640,12927353,"TERMINAL",0,0,"622",,terminal_output +14641,12928205,"TERMINAL",0,0,"7\t",,terminal_output +14642,12928357,"TERMINAL",0,0,"833",,terminal_output +14643,12929238,"TERMINAL",0,0,"8\t",,terminal_output +14644,12929397,"TERMINAL",0,0,"944",,terminal_output +14645,12930216,"TERMINAL",0,0,"93",,terminal_output +14646,12930443,"TERMINAL",0,0,"4055",,terminal_output +14647,12931240,"TERMINAL",0,0,"40\t",,terminal_output +14648,12931529,"TERMINAL",0,0,"166",,terminal_output +14649,12932343,"TERMINAL",0,0,"1\t",,terminal_output +14650,12932602,"TERMINAL",0,0,"277",,terminal_output +14651,12933292,"TERMINAL",0,0,"2\t",,terminal_output +14652,12933579,"TERMINAL",0,0,"388",,terminal_output +14653,12934329,"TERMINAL",0,0,"4\t",,terminal_output +14654,12934619,"TERMINAL",0,0,"499",,terminal_output +14655,12935372,"TERMINAL",0,0,"5\t",,terminal_output +14656,12935680,"TERMINAL",0,0,"55050",,terminal_output +14657,12936412,"TERMINAL",0,0,"6\t",,terminal_output +14658,12936764,"TERMINAL",0,0,"611",,terminal_output +14659,12937469,"TERMINAL",0,0,"7\t",,terminal_output +14660,12937772,"TERMINAL",0,0,"722",,terminal_output +14661,12938511,"TERMINAL",0,0,"8\t",,terminal_output +14662,12938847,"TERMINAL",0,0,"833",,terminal_output +14663,12939547,"TERMINAL",0,0,"9\t",,terminal_output +14664,12939872,"TERMINAL",0,0,"944",,terminal_output +14665,12940698,"TERMINAL",0,0,"50\t",,terminal_output +14666,12940887,"TERMINAL",0,0,"5055",,terminal_output +14667,12941715,"TERMINAL",0,0,"1\t",,terminal_output +14668,12942023,"TERMINAL",0,0,"166",,terminal_output +14669,12942741,"TERMINAL",0,0,"2\t",,terminal_output +14670,12942958,"TERMINAL",0,0,"277",,terminal_output +14671,12943703,"TERMINAL",0,0,"3\t",,terminal_output +14672,12944001,"TERMINAL",0,0,"388",,terminal_output +14673,12944752,"TERMINAL",0,0,"4\t",,terminal_output +14674,12945095,"TERMINAL",0,0,"499",,terminal_output +14675,12945817,"TERMINAL",0,0,"5\t",,terminal_output +14676,12946088,"TERMINAL",0,0,"56:006:00",,terminal_output +14677,12946938,"TERMINAL",0,0,"6\t",,terminal_output +14678,12947131,"TERMINAL",0,0,"611",,terminal_output +14679,12947903,"TERMINAL",0,0,"7\t",,terminal_output +14680,12948166,"TERMINAL",0,0,"722",,terminal_output +14681,12948985,"TERMINAL",0,0,"8\t",,terminal_output +14682,12949205,"TERMINAL",0,0,"833",,terminal_output +14683,12949950,"TERMINAL",0,0,"9\t",,terminal_output +14684,12950258,"TERMINAL",0,0,"944",,terminal_output +14685,12951035,"TERMINAL",0,0,"2:00\t",,terminal_output +14686,12951289,"TERMINAL",0,0,"2:0066",,terminal_output +14687,12952010,"TERMINAL",0,0,"1\t",,terminal_output +14688,12952351,"TERMINAL",0,0,"277",,terminal_output +14689,12953053,"TERMINAL",0,0,"2\t",,terminal_output +14690,12953377,"TERMINAL",0,0,"388",,terminal_output +14691,12954087,"TERMINAL",0,0,"3\t",,terminal_output +14692,12954423,"TERMINAL",0,0,"499",,terminal_output +14693,12955232,"TERMINAL",0,0,"4\t",,terminal_output +14694,12955467,"TERMINAL",0,0,"51010",,terminal_output 
+14695,12956202,"TERMINAL",0,0,"5\t",,terminal_output +14696,12956566,"TERMINAL",0,0,"611",,terminal_output +14697,12957286,"TERMINAL",0,0,"6\t",,terminal_output +14698,12957607,"TERMINAL",0,0,"722",,terminal_output +14699,12958254,"TERMINAL",0,0,"7\t",,terminal_output +14700,12958589,"TERMINAL",0,0,"833",,terminal_output +14701,12959288,"TERMINAL",0,0,"8\t",,terminal_output +14702,12959748,"TERMINAL",0,0,"944",,terminal_output +14703,12960370,"TERMINAL",0,0,"10\t",,terminal_output +14704,12960771,"TERMINAL",0,0,"1055",,terminal_output +14705,12961388,"TERMINAL",0,0,"1\t",,terminal_output +14706,12961801,"TERMINAL",0,0,"166",,terminal_output +14707,12962505,"TERMINAL",0,0,"2\t",,terminal_output +14708,12962774,"TERMINAL",0,0,"277",,terminal_output +14709,12963451,"TERMINAL",0,0,"3\t",,terminal_output +14710,12963891,"TERMINAL",0,0,"388",,terminal_output +14711,12964486,"TERMINAL",0,0,"4\t",,terminal_output +14712,12964971,"TERMINAL",0,0,"499",,terminal_output +14713,12965586,"TERMINAL",0,0,"5\t",,terminal_output +14714,12966018,"TERMINAL",0,0,"52020",,terminal_output +14715,12966712,"TERMINAL",0,0,"6\t",,terminal_output +14716,12967013,"TERMINAL",0,0,"611",,terminal_output +14717,12967731,"TERMINAL",0,0,"7\t",,terminal_output +14718,12968033,"TERMINAL",0,0,"722",,terminal_output +14719,12968749,"TERMINAL",0,0,"8\t",,terminal_output +14720,12969175,"TERMINAL",0,0,"833",,terminal_output +14721,12969773,"TERMINAL",0,0,"9\t",,terminal_output +14722,12970195,"TERMINAL",0,0,"944",,terminal_output +14723,12972238,"TERMINAL",0,0,"2066",,terminal_output +14724,12972239,"TERMINAL",0,0,"202",,terminal_output +14725,12973389,"TERMINAL",0,0,"277",,terminal_output +14726,12973389,"TERMINAL",0,0,"219",,terminal_output +14727,12974351,"TERMINAL",0,0,"399",,terminal_output +14728,12974352,"TERMINAL",0,0,"3\t",,terminal_output +14729,12975394,"TERMINAL",0,0,"5\t",,terminal_output +14730,12975398,"TERMINAL",0,0,"53030",,terminal_output +14731,12976423,"TERMINAL",0,0,"6\t",,terminal_output +14732,12976468,"TERMINAL",0,0,"611",,terminal_output +14733,12977555,"TERMINAL",0,0,"7\t",,terminal_output +14734,12977556,"TERMINAL",0,0,"722",,terminal_output +14735,12978538,"TERMINAL",0,0,"8\t",,terminal_output +14736,12978555,"TERMINAL",0,0,"833",,terminal_output +14737,12979510,"TERMINAL",0,0,"9\t",,terminal_output +14738,12979535,"TERMINAL",0,0,"944",,terminal_output +14739,12980738,"TERMINAL",0,0,"30\t",,terminal_output +14740,12980738,"TERMINAL",0,0,"3055",,terminal_output +14741,12981562,"TERMINAL",0,0,"1\t",,terminal_output +14742,12981638,"TERMINAL",0,0,"166",,terminal_output +14743,12982615,"TERMINAL",0,0,"2\t",,terminal_output +14744,12982690,"TERMINAL",0,0,"277",,terminal_output +14745,12983649,"TERMINAL",0,0,"3\t",,terminal_output +14746,12983755,"TERMINAL",0,0,"388",,terminal_output +14747,12984769,"TERMINAL",0,0,"4\t",,terminal_output +14748,12984769,"TERMINAL",0,0,"499",,terminal_output +14749,12985732,"TERMINAL",0,0,"5\t",,terminal_output +14750,12985870,"TERMINAL",0,0,"54040",,terminal_output +14751,12986771,"TERMINAL",0,0,"6\t",,terminal_output +14752,12986891,"TERMINAL",0,0,"611",,terminal_output +14753,12987795,"TERMINAL",0,0,"7\t",,terminal_output +14754,12987907,"TERMINAL",0,0,"722",,terminal_output +14755,12988838,"TERMINAL",0,0,"8\t",,terminal_output +14756,12988953,"TERMINAL",0,0,"833",,terminal_output +14757,12989969,"TERMINAL",0,0,"9\t",,terminal_output +14758,12989990,"TERMINAL",0,0,"944",,terminal_output +14759,12990890,"TERMINAL",0,0,"40\t",,terminal_output 
+14760,12991032,"TERMINAL",0,0,"4055",,terminal_output +14761,12991919,"TERMINAL",0,0,"1\t",,terminal_output +14762,12992118,"TERMINAL",0,0,"166",,terminal_output +14763,12992959,"TERMINAL",0,0,"2\t",,terminal_output +14764,12993166,"TERMINAL",0,0,"277",,terminal_output +14765,12993987,"TERMINAL",0,0,"3\t",,terminal_output +14766,12994167,"TERMINAL",0,0,"388",,terminal_output +14767,12995021,"TERMINAL",0,0,"4\t",,terminal_output +14768,12995216,"TERMINAL",0,0,"499",,terminal_output +14769,12996059,"TERMINAL",0,0,"5\t",,terminal_output +14770,12996251,"TERMINAL",0,0,"55050",,terminal_output +14771,12997219,"TERMINAL",0,0,"6\t",,terminal_output +14772,12997318,"TERMINAL",0,0,"622",,terminal_output +14773,12998158,"TERMINAL",0,0,"720",,terminal_output +14774,12998346,"TERMINAL",0,0,"833",,terminal_output +14775,12999380,"TERMINAL",0,0,"8\t",,terminal_output +14776,12999389,"TERMINAL",0,0,"944",,terminal_output +14777,13000254,"TERMINAL",0,0,"9\t",,terminal_output +14778,13000439,"TERMINAL",0,0,"5055",,terminal_output +14779,13001253,"TERMINAL",0,0,"50\t",,terminal_output +14780,13001517,"TERMINAL",0,0,"166",,terminal_output +14781,13002296,"TERMINAL",0,0,"1\t",,terminal_output +14782,13002530,"TERMINAL",0,0,"277",,terminal_output +14783,13003330,"TERMINAL",0,0,"3\t",,terminal_output +14784,13003641,"TERMINAL",0,0,"388",,terminal_output +14785,13004396,"TERMINAL",0,0,"4\t",,terminal_output +14786,13004693,"TERMINAL",0,0,"499",,terminal_output +14787,13005518,"TERMINAL",0,0,"5\t",,terminal_output +14788,13005662,"TERMINAL",0,0,"57:007:00",,terminal_output +14789,13006539,"TERMINAL",0,0,"6\t",,terminal_output +14790,13006700,"TERMINAL",0,0,"611",,terminal_output +14791,13007491,"TERMINAL",0,0,"7\t",,terminal_output +14792,13007763,"TERMINAL",0,0,"722",,terminal_output +14793,13008555,"TERMINAL",0,0,"8\t",,terminal_output +14794,13008895,"TERMINAL",0,0,"833",,terminal_output +14795,13009607,"TERMINAL",0,0,"9\t",,terminal_output +14796,13009913,"TERMINAL",0,0,"944",,terminal_output +14797,13010648,"TERMINAL",0,0,"3:00\t",,terminal_output +14798,13010939,"TERMINAL",0,0,"3:0055",,terminal_output +14799,13011764,"TERMINAL",0,0,"1\t",,terminal_output +14800,13011955,"TERMINAL",0,0,"166",,terminal_output +14801,13012685,"TERMINAL",0,0,"2\t",,terminal_output +14802,13013088,"TERMINAL",0,0,"277",,terminal_output +14803,13013805,"TERMINAL",0,0,"3\t",,terminal_output +14804,13014049,"TERMINAL",0,0,"388",,terminal_output +14805,13014837,"TERMINAL",0,0,"4\t",,terminal_output +14806,13015191,"TERMINAL",0,0,"499",,terminal_output +14807,13015862,"TERMINAL",0,0,"5\t",,terminal_output +14808,13016221,"TERMINAL",0,0,"51010",,terminal_output +14809,13016844,"TERMINAL",0,0,"6\t",,terminal_output +14810,13017204,"TERMINAL",0,0,"611",,terminal_output +14811,13017881,"TERMINAL",0,0,"7\t",,terminal_output +14812,13018228,"TERMINAL",0,0,"722",,terminal_output +14813,13019029,"TERMINAL",0,0,"8\t",,terminal_output +14814,13019271,"TERMINAL",0,0,"844",,terminal_output +14815,13019961,"TERMINAL",0,0,"9\t",,terminal_output +14816,13020325,"TERMINAL",0,0,"1055",,terminal_output +14817,13021075,"TERMINAL",0,0,"10\t",,terminal_output +14818,13021358,"TERMINAL",0,0,"166",,terminal_output +14819,13022046,"TERMINAL",0,0,"1\t",,terminal_output +14820,13022509,"TERMINAL",0,0,"277",,terminal_output +14821,13023163,"TERMINAL",0,0,"2\t",,terminal_output +14822,13023450,"TERMINAL",0,0,"388",,terminal_output +14823,13024113,"TERMINAL",0,0,"3\t",,terminal_output +14824,13024502,"TERMINAL",0,0,"499",,terminal_output 
+14825,13025275,"TERMINAL",0,0,"4\t",,terminal_output +14826,13025581,"TERMINAL",0,0,"52020",,terminal_output +14827,13026188,"TERMINAL",0,0,"5\t",,terminal_output +14828,13026601,"TERMINAL",0,0,"611",,terminal_output +14829,13027231,"TERMINAL",0,0,"6\t",,terminal_output +14830,13027629,"TERMINAL",0,0,"722",,terminal_output +14831,13028268,"TERMINAL",0,0,"7\t",,terminal_output +14832,13028761,"TERMINAL",0,0,"833",,terminal_output +14833,13029305,"TERMINAL",0,0,"9\t",,terminal_output +14834,13029785,"TERMINAL",0,0,"944",,terminal_output +14835,13030344,"TERMINAL",0,0,"20\t",,terminal_output +14836,13030806,"TERMINAL",0,0,"2055",,terminal_output +14837,13031423,"TERMINAL",0,0,"1\t",,terminal_output +14838,13031796,"TERMINAL",0,0,"166",,terminal_output +14839,13032441,"TERMINAL",0,0,"2\t",,terminal_output +14840,13032847,"TERMINAL",0,0,"277",,terminal_output +14841,13033450,"TERMINAL",0,0,"3\t",,terminal_output +14842,13033888,"TERMINAL",0,0,"388",,terminal_output +14843,13034486,"TERMINAL",0,0,"4\t",,terminal_output +14844,13035011,"TERMINAL",0,0,"499",,terminal_output +14845,13035522,"TERMINAL",0,0,"5\t",,terminal_output +14846,13036030,"TERMINAL",0,0,"53030",,terminal_output +14847,13036642,"TERMINAL",0,0,"6\t",,terminal_output +14848,13037050,"TERMINAL",0,0,"611",,terminal_output +14849,13037678,"TERMINAL",0,0,"7\t",,terminal_output +14850,13038074,"TERMINAL",0,0,"722",,terminal_output +14851,13038680,"TERMINAL",0,0,"8\t",,terminal_output +14852,13039205,"TERMINAL",0,0,"833",,terminal_output +14853,13039713,"TERMINAL",0,0,"9\t",,terminal_output +14854,13040192,"TERMINAL",0,0,"944",,terminal_output +14855,13040736,"TERMINAL",0,0,"30\t",,terminal_output +14856,13041250,"TERMINAL",0,0,"3055",,terminal_output +14857,13041748,"TERMINAL",0,0,"1\t",,terminal_output +14858,13042255,"TERMINAL",0,0,"166",,terminal_output +14859,13042835,"TERMINAL",0,0,"2\t",,terminal_output +14860,13043298,"TERMINAL",0,0,"288",,terminal_output +14861,13043886,"TERMINAL",0,0,"3\t",,terminal_output +14862,13044345,"TERMINAL",0,0,"499",,terminal_output +14863,13044944,"TERMINAL",0,0,"4\t",,terminal_output +14864,13045387,"TERMINAL",0,0,"54040",,terminal_output +14865,13045914,"TERMINAL",0,0,"5\t",,terminal_output +14866,13046470,"TERMINAL",0,0,"611",,terminal_output +14867,13046983,"TERMINAL",0,0,"6\t",,terminal_output +14868,13047622,"TERMINAL",0,0,"722",,terminal_output +14869,13048008,"TERMINAL",0,0,"7\t",,terminal_output +14870,13048622,"TERMINAL",0,0,"833",,terminal_output +14871,13049056,"TERMINAL",0,0,"8\t",,terminal_output +14872,13049571,"TERMINAL",0,0,"944",,terminal_output +14873,13050163,"TERMINAL",0,0,"9\t",,terminal_output +14874,13050611,"TERMINAL",0,0,"4055",,terminal_output +14875,13051111,"TERMINAL",0,0,"40\t",,terminal_output +14876,13051658,"TERMINAL",0,0,"166",,terminal_output +14877,13052196,"TERMINAL",0,0,"1\t",,terminal_output +14878,13052705,"TERMINAL",0,0,"277",,terminal_output +14879,13053201,"TERMINAL",0,0,"2\t",,terminal_output +14880,13053748,"TERMINAL",0,0,"388",,terminal_output +14881,13054219,"TERMINAL",0,0,"3\t",,terminal_output +14882,13054881,"TERMINAL",0,0,"499",,terminal_output +14883,13055287,"TERMINAL",0,0,"4\t",,terminal_output +14884,13055839,"TERMINAL",0,0,"55050",,terminal_output +14885,13056283,"TERMINAL",0,0,"5\t",,terminal_output +14886,13056888,"TERMINAL",0,0,"611",,terminal_output +14887,13057332,"TERMINAL",0,0,"7\t",,terminal_output +14888,13057933,"TERMINAL",0,0,"722",,terminal_output +14889,13058376,"TERMINAL",0,0,"8\t",,terminal_output 
+14890,13059070,"TERMINAL",0,0,"833",,terminal_output +14891,13059387,"TERMINAL",0,0,"9\t",,terminal_output +14892,13060094,"TERMINAL",0,0,"944",,terminal_output +14893,13060500,"TERMINAL",0,0,"50\t",,terminal_output +14894,13061114,"TERMINAL",0,0,"5055",,terminal_output +14895,13061481,"TERMINAL",0,0,"1\t",,terminal_output +14896,13062139,"TERMINAL",0,0,"166",,terminal_output +14897,13062555,"TERMINAL",0,0,"2\t",,terminal_output +14898,13063198,"TERMINAL",0,0,"277",,terminal_output +14899,13063583,"TERMINAL",0,0,"3\t",,terminal_output +14900,13064203,"TERMINAL",0,0,"388",,terminal_output +14901,13064567,"TERMINAL",0,0,"4\t",,terminal_output +14902,13065237,"TERMINAL",0,0,"499",,terminal_output +14903,13065722,"TERMINAL",0,0,"5\t",,terminal_output +14904,13066288,"TERMINAL",0,0,"58:018:01",,terminal_output +14905,13066648,"TERMINAL",0,0,"6\t",,terminal_output +14906,13067333,"TERMINAL",0,0,"722",,terminal_output +14907,13067672,"TERMINAL",0,0,"7\t",,terminal_output +14908,13068380,"TERMINAL",0,0,"833",,terminal_output +14909,13068697,"TERMINAL",0,0,"8\t",,terminal_output +14910,13069416,"TERMINAL",0,0,"944",,terminal_output +14911,13069748,"TERMINAL",0,0,"9\t",,terminal_output +14912,13070530,"TERMINAL",0,0,"4:0055",,terminal_output +14913,13070866,"TERMINAL",0,0,"4:00\t",,terminal_output +14914,13071667,"TERMINAL",0,0,"166",,terminal_output +14915,13071806,"TERMINAL",0,0,"1\t",,terminal_output +14916,13072701,"TERMINAL",0,0,"277",,terminal_output +14917,13072839,"TERMINAL",0,0,"2\t",,terminal_output +14918,13073678,"TERMINAL",0,0,"388",,terminal_output +14919,13073877,"TERMINAL",0,0,"3\t",,terminal_output +14920,13074745,"TERMINAL",0,0,"499",,terminal_output +14921,13074956,"TERMINAL",0,0,"4\t",,terminal_output +14922,13075765,"TERMINAL",0,0,"51010",,terminal_output +14923,13075951,"TERMINAL",0,0,"5\t",,terminal_output +14924,13076756,"TERMINAL",0,0,"611",,terminal_output +14925,13077106,"TERMINAL",0,0,"6\t",,terminal_output +14926,13077798,"TERMINAL",0,0,"722",,terminal_output +14927,13078016,"TERMINAL",0,0,"7\t",,terminal_output +14928,13078938,"TERMINAL",0,0,"833",,terminal_output +14929,13079070,"TERMINAL",0,0,"8\t",,terminal_output +14930,13079888,"TERMINAL",0,0,"944",,terminal_output +14931,13080278,"TERMINAL",0,0,"9\t",,terminal_output +14932,13081007,"TERMINAL",0,0,"1055",,terminal_output +14933,13081132,"TERMINAL",0,0,"10\t",,terminal_output +14934,13082020,"TERMINAL",0,0,"166",,terminal_output +14935,13082211,"TERMINAL",0,0,"1\t",,terminal_output +14936,13083019,"TERMINAL",0,0,"277",,terminal_output +14937,13083220,"TERMINAL",0,0,"2\t",,terminal_output +14938,13084087,"TERMINAL",0,0,"388",,terminal_output +14939,13084237,"TERMINAL",0,0,"3\t",,terminal_output +14940,13085188,"TERMINAL",0,0,"499",,terminal_output +14941,13085290,"TERMINAL",0,0,"4\t",,terminal_output +14942,13086197,"TERMINAL",0,0,"52020",,terminal_output +14943,13086334,"TERMINAL",0,0,"6\t",,terminal_output +14944,13087258,"TERMINAL",0,0,"611",,terminal_output +14945,13087390,"TERMINAL",0,0,"7\t",,terminal_output +14946,13088250,"TERMINAL",0,0,"722",,terminal_output +14947,13088402,"TERMINAL",0,0,"8\t",,terminal_output +14948,13089311,"TERMINAL",0,0,"844",,terminal_output +14949,13089444,"TERMINAL",0,0,"9\t",,terminal_output +14950,13090339,"TERMINAL",0,0,"2055",,terminal_output +14951,13090486,"TERMINAL",0,0,"20\t",,terminal_output +14952,13091384,"TERMINAL",0,0,"166",,terminal_output +14953,13091525,"TERMINAL",0,0,"1\t",,terminal_output +14954,13092421,"TERMINAL",0,0,"277",,terminal_output 
+14955,13092567,"TERMINAL",0,0,"2\t",,terminal_output +14956,13095137,"TERMINAL",0,0,"399",,terminal_output +14957,13095183,"TERMINAL",0,0,"319",,terminal_output +14958,13096182,"TERMINAL",0,0,"53030",,terminal_output +14959,13096183,"TERMINAL",0,0,"5\t",,terminal_output +14960,13097208,"TERMINAL",0,0,"611",,terminal_output +14961,13097249,"TERMINAL",0,0,"6\t",,terminal_output +14962,13098239,"TERMINAL",0,0,"722",,terminal_output +14963,13098250,"TERMINAL",0,0,"7\t",,terminal_output +14964,13099276,"TERMINAL",0,0,"844",,terminal_output +14965,13099285,"TERMINAL",0,0,"8\t",,terminal_output +14966,13100310,"TERMINAL",0,0,"3055",,terminal_output +14967,13100324,"TERMINAL",0,0,"30\t",,terminal_output +14968,13101347,"TERMINAL",0,0,"166",,terminal_output +14969,13101359,"TERMINAL",0,0,"1\t",,terminal_output +14970,13102487,"TERMINAL",0,0,"277",,terminal_output +14971,13102488,"TERMINAL",0,0,"2\t",,terminal_output +14972,13103431,"TERMINAL",0,0,"3\t",,terminal_output +14973,13103433,"TERMINAL",0,0,"388",,terminal_output +14974,13104481,"TERMINAL",0,0,"4\t",,terminal_output +14975,13104481,"TERMINAL",0,0,"499",,terminal_output +14976,13105557,"TERMINAL",0,0,"5\t",,terminal_output +14977,13105558,"TERMINAL",0,0,"54040",,terminal_output +14978,13106597,"TERMINAL",0,0,"6\t",,terminal_output +14979,13106597,"TERMINAL",0,0,"611",,terminal_output +14980,13107609,"TERMINAL",0,0,"7\t",,terminal_output +14981,13107609,"TERMINAL",0,0,"722",,terminal_output +14982,13108731,"TERMINAL",0,0,"8\t",,terminal_output +14983,13108731,"TERMINAL",0,0,"833",,terminal_output +14984,13109755,"TERMINAL",0,0,"9\t",,terminal_output +14985,13109755,"TERMINAL",0,0,"944",,terminal_output +14986,13110679,"TERMINAL",0,0,"40\t",,terminal_output +14987,13110706,"TERMINAL",0,0,"4055",,terminal_output +14988,13111743,"TERMINAL",0,0,"1\t",,terminal_output +14989,13111744,"TERMINAL",0,0,"166",,terminal_output +14990,13112747,"TERMINAL",0,0,"2\t",,terminal_output +14991,13112782,"TERMINAL",0,0,"277",,terminal_output +14992,13113851,"TERMINAL",0,0,"3\t",,terminal_output +14993,13113851,"TERMINAL",0,0,"388",,terminal_output +14994,13114875,"TERMINAL",0,0,"4\t",,terminal_output +14995,13114875,"TERMINAL",0,0,"499",,terminal_output +14996,13115899,"TERMINAL",0,0,"5\t",,terminal_output +14997,13115899,"TERMINAL",0,0,"55050",,terminal_output +14998,13117026,"TERMINAL",0,0,"6\t",,terminal_output +14999,13117034,"TERMINAL",0,0,"611",,terminal_output +15000,13117937,"TERMINAL",0,0,"7\t",,terminal_output +15001,13117955,"TERMINAL",0,0,"722",,terminal_output +15002,13119075,"TERMINAL",0,0,"8\t",,terminal_output +15003,13119078,"TERMINAL",0,0,"833",,terminal_output +15004,13120108,"TERMINAL",0,0,"9\t",,terminal_output +15005,13120108,"TERMINAL",0,0,"944",,terminal_output +15006,13121121,"TERMINAL",0,0,"50\t",,terminal_output +15007,13121122,"TERMINAL",0,0,"5055",,terminal_output +15008,13122180,"TERMINAL",0,0,"1\t",,terminal_output +15009,13122180,"TERMINAL",0,0,"166",,terminal_output +15010,13123184,"TERMINAL",0,0,"2\t",,terminal_output +15011,13123184,"TERMINAL",0,0,"277",,terminal_output +15012,13124164,"TERMINAL",0,0,"3\t",,terminal_output +15013,13124215,"TERMINAL",0,0,"388",,terminal_output +15014,13125232,"TERMINAL",0,0,"4\t",,terminal_output +15015,13125242,"TERMINAL",0,0,"499",,terminal_output +15016,13126297,"TERMINAL",0,0,"5\t",,terminal_output +15017,13126302,"TERMINAL",0,0,"59:019:01",,terminal_output +15018,13127271,"TERMINAL",0,0,"6\t",,terminal_output +15019,13127317,"TERMINAL",0,0,"722",,terminal_output 
+15020,13128308,"TERMINAL",0,0,"8\t",,terminal_output +15021,13128397,"TERMINAL",0,0,"833",,terminal_output +15022,13129346,"TERMINAL",0,0,"9\t",,terminal_output +15023,13129399,"TERMINAL",0,0,"944",,terminal_output +15024,13130401,"TERMINAL",0,0,"5:00\t",,terminal_output +15025,13130445,"TERMINAL",0,0,"5:0055",,terminal_output +15026,13131423,"TERMINAL",0,0,"1\t",,terminal_output +15027,13131486,"TERMINAL",0,0,"166",,terminal_output +15028,13132525,"TERMINAL",0,0,"2\t",,terminal_output +15029,13132561,"TERMINAL",0,0,"277",,terminal_output +15030,13133614,"TERMINAL",0,0,"3\t",,terminal_output +15031,13133615,"TERMINAL",0,0,"388",,terminal_output +15032,13134546,"TERMINAL",0,0,"4\t",,terminal_output +15033,13134615,"TERMINAL",0,0,"499",,terminal_output +15034,13135594,"TERMINAL",0,0,"5\t",,terminal_output +15035,13135670,"TERMINAL",0,0,"51010",,terminal_output +15036,13136633,"TERMINAL",0,0,"6\t",,terminal_output +15037,13136700,"TERMINAL",0,0,"611",,terminal_output +15038,13137664,"TERMINAL",0,0,"7\t",,terminal_output +15039,13137754,"TERMINAL",0,0,"722",,terminal_output +15040,13138793,"TERMINAL",0,0,"8\t",,terminal_output +15041,13138829,"TERMINAL",0,0,"833",,terminal_output +15042,13139738,"TERMINAL",0,0,"9\t",,terminal_output +15043,13139832,"TERMINAL",0,0,"944",,terminal_output +15044,13140884,"TERMINAL",0,0,"10\t",,terminal_output +15045,13140885,"TERMINAL",0,0,"1055",,terminal_output +15046,13141820,"TERMINAL",0,0,"1\t",,terminal_output +15047,13141915,"TERMINAL",0,0,"166",,terminal_output +15048,13142862,"TERMINAL",0,0,"2\t",,terminal_output +15049,13142952,"TERMINAL",0,0,"277",,terminal_output +15050,13143956,"TERMINAL",0,0,"3\t",,terminal_output +15051,13143995,"TERMINAL",0,0,"388",,terminal_output +15052,13144981,"TERMINAL",0,0,"4\t",,terminal_output +15053,13145036,"TERMINAL",0,0,"499",,terminal_output +15054,13146005,"TERMINAL",0,0,"5\t",,terminal_output +15055,13146078,"TERMINAL",0,0,"52020",,terminal_output +15056,13147183,"TERMINAL",0,0,"6\t",,terminal_output +15057,13147183,"TERMINAL",0,0,"611",,terminal_output +15058,13148052,"TERMINAL",0,0,"7\t",,terminal_output +15059,13148194,"TERMINAL",0,0,"722",,terminal_output +15060,13149222,"TERMINAL",0,0,"8\t",,terminal_output +15061,13149222,"TERMINAL",0,0,"833",,terminal_output +15062,13150208,"TERMINAL",0,0,"9\t",,terminal_output +15063,13150241,"TERMINAL",0,0,"944",,terminal_output +15064,13151348,"TERMINAL",0,0,"20\t",,terminal_output +15065,13151349,"TERMINAL",0,0,"2066",,terminal_output +15066,13152222,"TERMINAL",0,0,"1\t",,terminal_output +15067,13152326,"TERMINAL",0,0,"277",,terminal_output +15068,13153275,"TERMINAL",0,0,"2\t",,terminal_output +15069,13153529,"TERMINAL",0,0,"388",,terminal_output +15070,13154305,"TERMINAL",0,0,"3\t",,terminal_output +15071,13155369,"TERMINAL",0,0,"499",,terminal_output +15072,13155468,"TERMINAL",0,0,"5\t",,terminal_output +15073,13155469,"TERMINAL",0,0,"53030",,terminal_output +15074,13156437,"TERMINAL",0,0,"6\t",,terminal_output +15075,13156560,"TERMINAL",0,0,"611",,terminal_output +15076,13157380,"TERMINAL",0,0,"7\t",,terminal_output +15077,13157523,"TERMINAL",0,0,"722",,terminal_output +15078,13158420,"TERMINAL",0,0,"8\t",,terminal_output +15079,13158560,"TERMINAL",0,0,"833",,terminal_output +15080,13159455,"TERMINAL",0,0,"9\t",,terminal_output +15081,13159604,"TERMINAL",0,0,"944",,terminal_output +15082,13160546,"TERMINAL",0,0,"30\t",,terminal_output +15083,13160633,"TERMINAL",0,0,"3055",,terminal_output +15084,13161569,"TERMINAL",0,0,"1\t",,terminal_output 
+15085,13161707,"TERMINAL",0,0,"166",,terminal_output +15086,13162594,"TERMINAL",0,0,"2\t",,terminal_output +15087,13162736,"TERMINAL",0,0,"277",,terminal_output +15088,13163597,"TERMINAL",0,0,"3\t",,terminal_output +15089,13163742,"TERMINAL",0,0,"388",,terminal_output +15090,13164641,"TERMINAL",0,0,"4\t",,terminal_output +15091,13164783,"TERMINAL",0,0,"499",,terminal_output +15092,13165668,"TERMINAL",0,0,"5\t",,terminal_output +15093,13165811,"TERMINAL",0,0,"54040",,terminal_output +15094,13166753,"TERMINAL",0,0,"6\t",,terminal_output +15095,13166859,"TERMINAL",0,0,"611",,terminal_output +15096,13167787,"TERMINAL",0,0,"7\t",,terminal_output +15097,13168087,"TERMINAL",0,0,"722",,terminal_output +15098,13168851,"TERMINAL",0,0,"8\t",,terminal_output +15099,13169015,"TERMINAL",0,0,"833",,terminal_output +15100,13169870,"TERMINAL",0,0,"9\t",,terminal_output +15101,13169966,"TERMINAL",0,0,"944",,terminal_output +15102,13170890,"TERMINAL",0,0,"40\t",,terminal_output +15103,13171028,"TERMINAL",0,0,"4055",,terminal_output +15104,13171936,"TERMINAL",0,0,"1\t",,terminal_output +15105,13172073,"TERMINAL",0,0,"166",,terminal_output +15106,13172936,"TERMINAL",0,0,"2\t",,terminal_output +15107,13173078,"TERMINAL",0,0,"277",,terminal_output +15108,13174137,"TERMINAL",0,0,"3\t",,terminal_output +15109,13174146,"TERMINAL",0,0,"388",,terminal_output +15110,13174998,"TERMINAL",0,0,"4\t",,terminal_output +15111,13175162,"TERMINAL",0,0,"499",,terminal_output +15112,13176027,"TERMINAL",0,0,"5\t",,terminal_output +15113,13176185,"TERMINAL",0,0,"55050",,terminal_output +15114,13177182,"TERMINAL",0,0,"6\t",,terminal_output +15115,13177222,"TERMINAL",0,0,"611",,terminal_output +15116,13178216,"TERMINAL",0,0,"7\t",,terminal_output +15117,13178260,"TERMINAL",0,0,"722",,terminal_output +15118,13179138,"TERMINAL",0,0,"8\t",,terminal_output +15119,13179297,"TERMINAL",0,0,"844",,terminal_output +15120,13180233,"TERMINAL",0,0,"9\t",,terminal_output +15121,13180382,"TERMINAL",0,0,"5055",,terminal_output +15122,13181247,"TERMINAL",0,0,"50\t",,terminal_output +15123,13181415,"TERMINAL",0,0,"166",,terminal_output +15124,13182264,"TERMINAL",0,0,"1\t",,terminal_output +15125,13182417,"TERMINAL",0,0,"277",,terminal_output +15126,13183109,"slurm/jobs/mihir/horeka/lam/train_lam_minecraft_8node.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/train_lam_minecraft_8node_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/train_lam_minecraft_8node_%j.log\n#SBATCH --job-name=train_lam_minecraft_8node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $slurm_job_id \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=lam-minecraft-8-node-$slurm_job_id \\n --tags lam minecraft 8-node \\n --entity instant-uv \\n --project jafar \\n --num_latents=100 \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +15127,13183354,"TERMINAL",0,0,"2\t",,terminal_output +15128,13183504,"TERMINAL",0,0,"388",,terminal_output +15129,13184337,"TERMINAL",0,0,"4\t",,terminal_output +15130,13184520,"TERMINAL",0,0,"499",,terminal_output +15131,13185397,"TERMINAL",0,0,"5 9",,terminal_output +15132,13185608,"TERMINAL",0,0,"550:0050:00",,terminal_output +15133,13186481,"TERMINAL",0,0,"6\t",,terminal_output +15134,13186842,"TERMINAL",0,0,"611",,terminal_output +15135,13187477,"TERMINAL",0,0,"7\t",,terminal_output +15136,13187737,"TERMINAL",0,0,"722",,terminal_output +15137,13188485,"TERMINAL",0,0,"8\t",,terminal_output +15138,13188670,"TERMINAL",0,0,"833",,terminal_output +15139,13189456,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/train_lam_minecraft_8node_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/train_lam_minecraft_8node_%j.log\n#SBATCH --job-name=train_lam_minecraft_8node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $slurm_job_id \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=lam-minecraft-8-node-$slurm_job_id \\n --tags lam minecraft 8-node \\n --entity instant-uv \\n --project jafar \\n --num_latents=100 \\n --data_dir 
$array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +15140,13189588,"TERMINAL",0,0,"9\t",,terminal_output +15141,13189792,"TERMINAL",0,0,"944",,terminal_output +15142,13190565,"TERMINAL",0,0,"6:00\t",,terminal_output +15143,13190746,"TERMINAL",0,0,"6:0055",,terminal_output +15144,13191605,"TERMINAL",0,0,"1\t",,terminal_output +15145,13191965,"TERMINAL",0,0,"166",,terminal_output +15146,13192772,"TERMINAL",0,0,"2\t",,terminal_output +15147,13192858,"TERMINAL",0,0,"277",,terminal_output +15148,13193652,"TERMINAL",0,0,"3\t",,terminal_output +15149,13193863,"TERMINAL",0,0,"388",,terminal_output +15150,13194747,"TERMINAL",0,0,"4\t",,terminal_output +15151,13194893,"TERMINAL",0,0,"499",,terminal_output +15152,13195838,"TERMINAL",0,0,"5\t",,terminal_output +15153,13195967,"TERMINAL",0,0,"51010",,terminal_output +15154,13196801,"TERMINAL",0,0,"6\t",,terminal_output +15155,13197007,"TERMINAL",0,0,"611",,terminal_output +15156,13197806,"TERMINAL",0,0,"7\t",,terminal_output +15157,13198132,"TERMINAL",0,0,"722",,terminal_output +15158,13198830,"TERMINAL",0,0,"8\t",,terminal_output +15159,13199206,"TERMINAL",0,0,"833",,terminal_output +15160,13200141,"TERMINAL",0,0,"9\t",,terminal_output +15161,13200141,"TERMINAL",0,0,"944",,terminal_output +15162,13200895,"TERMINAL",0,0,"10\t",,terminal_output +15163,13201130,"TERMINAL",0,0,"1055",,terminal_output +15164,13202035,"TERMINAL",0,0,"1\t",,terminal_output +15165,13202174,"TERMINAL",0,0,"166",,terminal_output +15166,13202961,"TERMINAL",0,0,"2\t",,terminal_output +15167,13203247,"TERMINAL",0,0,"277",,terminal_output +15168,13203997,"TERMINAL",0,0,"3\t",,terminal_output +15169,13204242,"TERMINAL",0,0,"388",,terminal_output +15170,13205090,"TERMINAL",0,0,"4\t",,terminal_output +15171,13205282,"TERMINAL",0,0,"42020",,terminal_output +15172,13206152,"TERMINAL",0,0,"5\t",,terminal_output +15173,13206318,"TERMINAL",0,0,"611",,terminal_output +15174,13207149,"TERMINAL",0,0,"6\t",,terminal_output +15175,13207369,"TERMINAL",0,0,"722",,terminal_output +15176,13208168,"TERMINAL",0,0,"7\t",,terminal_output +15177,13208402,"TERMINAL",0,0,"833",,terminal_output +15178,13209485,"TERMINAL",0,0,"8\t",,terminal_output +15179,13209516,"TERMINAL",0,0,"944",,terminal_output +15180,13210388,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +15181,13210451,"TERMINAL",0,0,"9\t",,terminal_output +15182,13210553,"TERMINAL",0,0,"2055",,terminal_output +15183,13211243,"TERMINAL",0,0,"20\t",,terminal_output +15184,13211544,"TERMINAL",0,0,"166",,terminal_output +15185,13212307,"TERMINAL",0,0,"1\t",,terminal_output +15186,13212582,"TERMINAL",0,0,"277",,terminal_output +15187,13213354,"TERMINAL",0,0,"3\t",,terminal_output +15188,13213678,"TERMINAL",0,0,"388",,terminal_output +15189,13214350,"TERMINAL",0,0,"4\t",,terminal_output +15190,13214686,"TERMINAL",0,0,"499",,terminal_output +15191,13215349,"slurm/jobs/mihir/horeka/lam/coinrun/train_lam_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=00:20:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule 
unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=10 \\n --log_checkpoint_interval=100 \\n --log \\n --name=coinrun-lam-dev-$slurm_job_id \\n --tags lam coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 200 \\n --data_dir $array_records_dir_train\n\n",shellscript,tab +15192,13215465,"TERMINAL",0,0,"5\t",,terminal_output +15193,13215680,"TERMINAL",0,0,"53030",,terminal_output +15194,13217964,"slurm/jobs/mihir/horeka/lam/coinrun/train_lam_single_gpu.sh",38,0,"",shellscript,selection_mouse +15195,13217965,"slurm/jobs/mihir/horeka/lam/coinrun/train_lam_single_gpu.sh",37,0,"",shellscript,selection_command +15196,13218038,"TERMINAL",0,0,"622",,terminal_output +15197,13218051,"TERMINAL",0,0,"6\t",,terminal_output +15198,13219078,"TERMINAL",0,0,"833",,terminal_output +15199,13219087,"TERMINAL",0,0,"8\t",,terminal_output +15200,13220154,"TERMINAL",0,0,"944",,terminal_output +15201,13220154,"TERMINAL",0,0,"9\t",,terminal_output +15202,13221142,"TERMINAL",0,0,"3055",,terminal_output +15203,13221151,"TERMINAL",0,0,"30\t",,terminal_output +15204,13222182,"TERMINAL",0,0,"166",,terminal_output +15205,13222225,"TERMINAL",0,0,"1\t",,terminal_output +15206,13223317,"TERMINAL",0,0,"277",,terminal_output +15207,13223318,"TERMINAL",0,0,"21",,terminal_output +15208,13224250,"TERMINAL",0,0,"388",,terminal_output +15209,13224282,"TERMINAL",0,0,"3\t",,terminal_output +15210,13225302,"TERMINAL",0,0,"44040",,terminal_output +15211,13225343,"TERMINAL",0,0,"5\t",,terminal_output +15212,13226336,"TERMINAL",0,0,"611",,terminal_output +15213,13226346,"TERMINAL",0,0,"6\t",,terminal_output +15214,13227386,"TERMINAL",0,0,"722",,terminal_output +15215,13227387,"TERMINAL",0,0,"7\t",,terminal_output +15216,13228411,"TERMINAL",0,0,"833",,terminal_output +15217,13228413,"TERMINAL",0,0,"8\t",,terminal_output +15218,13229470,"TERMINAL",0,0,"9\t",,terminal_output +15219,13229471,"TERMINAL",0,0,"944",,terminal_output +15220,13230588,"TERMINAL",0,0,"40\t",,terminal_output +15221,13230588,"TERMINAL",0,0,"4055",,terminal_output +15222,13231612,"TERMINAL",0,0,"1\t",,terminal_output +15223,13231612,"TERMINAL",0,0,"166",,terminal_output +15224,13232636,"TERMINAL",0,0,"2\t",,terminal_output +15225,13232637,"TERMINAL",0,0,"277",,terminal_output +15226,13233609,"TERMINAL",0,0,"3\t",,terminal_output +15227,13233618,"TERMINAL",0,0,"388",,terminal_output +15228,13234703,"TERMINAL",0,0,"4\t",,terminal_output +15229,13234704,"TERMINAL",0,0,"499",,terminal_output +15230,13235685,"TERMINAL",0,0,"5\t",,terminal_output +15231,13235714,"TERMINAL",0,0,"55050",,terminal_output +15232,13236721,"TERMINAL",0,0,"6\t",,terminal_output +15233,13236724,"TERMINAL",0,0,"611",,terminal_output +15234,13237752,"TERMINAL",0,0,"7\t",,terminal_output +15235,13237804,"TERMINAL",0,0,"722",,terminal_output 
+15236,13238797,"TERMINAL",0,0,"8\t",,terminal_output +15237,13238817,"TERMINAL",0,0,"833",,terminal_output +15238,13239915,"TERMINAL",0,0,"9\t",,terminal_output +15239,13239921,"TERMINAL",0,0,"944",,terminal_output +15240,13240846,"TERMINAL",0,0,"50\t",,terminal_output +15241,13240873,"TERMINAL",0,0,"5055",,terminal_output +15242,13241878,"TERMINAL",0,0,"1\t",,terminal_output +15243,13241906,"TERMINAL",0,0,"166",,terminal_output +15244,13242917,"TERMINAL",0,0,"2\t",,terminal_output +15245,13242940,"TERMINAL",0,0,"277",,terminal_output +15246,13243949,"TERMINAL",0,0,"3\t",,terminal_output +15247,13243983,"TERMINAL",0,0,"388",,terminal_output +15248,13244980,"TERMINAL",0,0,"4\t",,terminal_output +15249,13245011,"TERMINAL",0,0,"499",,terminal_output +15250,13246055,"TERMINAL",0,0,"5\t",,terminal_output +15251,13246056,"TERMINAL",0,0,"51:001:00",,terminal_output +15252,13247195,"TERMINAL",0,0,"6\t",,terminal_output +15253,13247200,"TERMINAL",0,0,"611",,terminal_output +15254,13248085,"TERMINAL",0,0,"7\t",,terminal_output +15255,13248112,"TERMINAL",0,0,"722",,terminal_output +15256,13249138,"TERMINAL",0,0,"8\t",,terminal_output +15257,13249171,"TERMINAL",0,0,"833",,terminal_output +15258,13250185,"TERMINAL",0,0,"9\t",,terminal_output +15259,13250235,"TERMINAL",0,0,"944",,terminal_output +15260,13251278,"TERMINAL",0,0,"7:00\t",,terminal_output +15261,13251278,"TERMINAL",0,0,"7:0055",,terminal_output +15262,13252297,"TERMINAL",0,0,"1\t",,terminal_output +15263,13252298,"TERMINAL",0,0,"177",,terminal_output +15264,13253327,"TERMINAL",0,0,"2\t",,terminal_output +15265,13253370,"TERMINAL",0,0,"388",,terminal_output +15266,13254315,"TERMINAL",0,0,"4\t",,terminal_output +15267,13254400,"TERMINAL",0,0,"499",,terminal_output +15268,13255362,"TERMINAL",0,0,"5\t",,terminal_output +15269,13255404,"TERMINAL",0,0,"51010",,terminal_output +15270,13256393,"TERMINAL",0,0,"6\t",,terminal_output +15271,13256488,"TERMINAL",0,0,"611",,terminal_output +15272,13257429,"TERMINAL",0,0,"7\t",,terminal_output +15273,13257527,"TERMINAL",0,0,"722",,terminal_output +15274,13258462,"TERMINAL",0,0,"8\t",,terminal_output +15275,13258562,"TERMINAL",0,0,"833",,terminal_output +15276,13259497,"TERMINAL",0,0,"9\t",,terminal_output +15277,13259595,"TERMINAL",0,0,"944",,terminal_output +15278,13260592,"TERMINAL",0,0,"10\t",,terminal_output +15279,13260605,"TERMINAL",0,0,"1055",,terminal_output +15280,13261615,"TERMINAL",0,0,"1\t",,terminal_output +15281,13261647,"TERMINAL",0,0,"166",,terminal_output +15282,13262631,"TERMINAL",0,0,"2\t",,terminal_output +15283,13262723,"TERMINAL",0,0,"277",,terminal_output +15284,13263765,"TERMINAL",0,0,"3\t",,terminal_output +15285,13263766,"TERMINAL",0,0,"388",,terminal_output +15286,13264709,"TERMINAL",0,0,"4\t",,terminal_output +15287,13264780,"TERMINAL",0,0,"499",,terminal_output +15288,13265726,"TERMINAL",0,0,"5\t",,terminal_output +15289,13265817,"TERMINAL",0,0,"52020",,terminal_output +15290,13266784,"TERMINAL",0,0,"6\t",,terminal_output +15291,13266880,"TERMINAL",0,0,"611",,terminal_output +15292,13267810,"TERMINAL",0,0,"7\t",,terminal_output +15293,13267905,"TERMINAL",0,0,"722",,terminal_output +15294,13268927,"TERMINAL",0,0,"8\t",,terminal_output +15295,13268968,"TERMINAL",0,0,"833",,terminal_output +15296,13269910,"TERMINAL",0,0,"9\t",,terminal_output +15297,13269969,"TERMINAL",0,0,"944",,terminal_output +15298,13270939,"TERMINAL",0,0,"20\t",,terminal_output +15299,13271036,"TERMINAL",0,0,"2055",,terminal_output +15300,13271948,"TERMINAL",0,0,"1\t",,terminal_output 
+15301,13272036,"TERMINAL",0,0,"166",,terminal_output +15302,13272978,"TERMINAL",0,0,"2\t",,terminal_output +15303,13273078,"TERMINAL",0,0,"277",,terminal_output +15304,13274006,"TERMINAL",0,0,"3\t",,terminal_output +15305,13274146,"TERMINAL",0,0,"388",,terminal_output +15306,13275138,"TERMINAL",0,0,"4\t",,terminal_output +15307,13275139,"TERMINAL",0,0,"499",,terminal_output +15308,13276100,"TERMINAL",0,0,"5\t",,terminal_output +15309,13276251,"TERMINAL",0,0,"53030",,terminal_output +15310,13277118,"TERMINAL",0,0,"6\t",,terminal_output +15311,13277259,"TERMINAL",0,0,"611",,terminal_output +15312,13279283,"TERMINAL",0,0,"733",,terminal_output +15313,13279283,"TERMINAL",0,0,"720",,terminal_output +15314,13280252,"TERMINAL",0,0,"944",,terminal_output +15315,13280258,"TERMINAL",0,0,"9\t",,terminal_output +15316,13281289,"TERMINAL",0,0,"30\t",,terminal_output +15317,13281304,"TERMINAL",0,0,"3066",,terminal_output +15318,13282324,"TERMINAL",0,0,"2\t",,terminal_output +15319,13282332,"TERMINAL",0,0,"277",,terminal_output +15320,13283360,"TERMINAL",0,0,"3\t",,terminal_output +15321,13283400,"TERMINAL",0,0,"388",,terminal_output +15322,13284398,"TERMINAL",0,0,"4\t",,terminal_output +15323,13284413,"TERMINAL",0,0,"499",,terminal_output +15324,13285438,"TERMINAL",0,0,"5\t",,terminal_output +15325,13285461,"TERMINAL",0,0,"54040",,terminal_output +15326,13286479,"TERMINAL",0,0,"6\t",,terminal_output +15327,13286512,"TERMINAL",0,0,"611",,terminal_output +15328,13287514,"TERMINAL",0,0,"7\t",,terminal_output +15329,13287543,"TERMINAL",0,0,"722",,terminal_output +15330,13288663,"TERMINAL",0,0,"8\t",,terminal_output +15331,13288668,"TERMINAL",0,0,"833",,terminal_output +15332,13289620,"TERMINAL",0,0,"9\t",,terminal_output +15333,13289647,"TERMINAL",0,0,"944",,terminal_output +15334,13290698,"TERMINAL",0,0,"40\t",,terminal_output +15335,13290699,"TERMINAL",0,0,"4055",,terminal_output +15336,13291733,"TERMINAL",0,0,"1\t",,terminal_output +15337,13291734,"TERMINAL",0,0,"166",,terminal_output +15338,13292752,"TERMINAL",0,0,"2\t",,terminal_output +15339,13292767,"TERMINAL",0,0,"277",,terminal_output +15340,13293776,"TERMINAL",0,0,"3\t",,terminal_output +15341,13293802,"TERMINAL",0,0,"388",,terminal_output +15342,13294836,"TERMINAL",0,0,"4\t",,terminal_output +15343,13294847,"TERMINAL",0,0,"499",,terminal_output +15344,13295824,"TERMINAL",0,0,"5\t",,terminal_output +15345,13295884,"TERMINAL",0,0,"55050",,terminal_output +15346,13296943,"TERMINAL",0,0,"6\t",,terminal_output +15347,13296951,"TERMINAL",0,0,"611",,terminal_output +15348,13297909,"TERMINAL",0,0,"7\t",,terminal_output +15349,13298018,"TERMINAL",0,0,"722",,terminal_output +15350,13299026,"TERMINAL",0,0,"8\t",,terminal_output +15351,13299046,"TERMINAL",0,0,"833",,terminal_output +15352,13299467,"slurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +15353,13300306,"TERMINAL",0,0,"9\t",,terminal_output +15354,13300534,"TERMINAL",0,0,"944",,terminal_output +15355,13301058,"TERMINAL",0,0,"50\t",,terminal_output +15356,13301185,"TERMINAL",0,0,"5055",,terminal_output +15357,13302095,"TERMINAL",0,0,"1\t",,terminal_output +15358,13302201,"TERMINAL",0,0,"166",,terminal_output +15359,13303130,"TERMINAL",0,0,"2\t",,terminal_output +15360,13303266,"TERMINAL",0,0,"277",,terminal_output +15361,13304404,"TERMINAL",0,0,"399",,terminal_output +15362,13304404,"TERMINAL",0,0,"310",,terminal_output +15363,13304781,"TERMINAL",0,0,"logs",,terminal_command 
+15364,13304867,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir",,terminal_output +15365,13305401,"TERMINAL",0,0,"52:002:00",,terminal_output +15366,13305402,"TERMINAL",0,0,"5\t",,terminal_output +15367,13306563,"TERMINAL",0,0,"6\t",,terminal_output +15368,13306564,"TERMINAL",0,0,"611",,terminal_output +15369,13307645,"TERMINAL",0,0,"7\t",,terminal_output +15370,13307645,"TERMINAL",0,0,"722",,terminal_output +15371,13308610,"TERMINAL",0,0,"8\t",,terminal_output +15372,13308617,"TERMINAL",0,0,"833",,terminal_output +15373,13308706,"TERMINAL",0,0,"cd ..",,terminal_command +15374,13308811,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs",,terminal_output +15375,13309560,"TERMINAL",0,0,"9\t",,terminal_output +15376,13309568,"TERMINAL",0,0,"944",,terminal_output +15377,13310605,"TERMINAL",0,0,"8:00\t",,terminal_output +15378,13310611,"TERMINAL",0,0,"8:0055",,terminal_output +15379,13311064,"TERMINAL",0,0,"cd logs_alfred/",,terminal_command +15380,13311221,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_alfred",,terminal_output +15381,13311703,"TERMINAL",0,0,"1\t",,terminal_output +15382,13311727,"TERMINAL",0,0,"166",,terminal_output +15383,13311807,"TERMINAL",0,0,"ls",,terminal_command +15384,13311920,"TERMINAL",0,0,"]633;Catari dev dynamics dynamics_gt_actions_ablation dynamics-masked_lim hf input_pipeline logs_training_dyn logs_training_lam logs_training_tokenizer sampling tokenizer\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_alfred",,terminal_output +15385,13312673,"TERMINAL",0,0,"2\t",,terminal_output +15386,13312694,"TERMINAL",0,0,"277",,terminal_output +15387,13313746,"TERMINAL",0,0,"3\t",,terminal_output +15388,13313752,"TERMINAL",0,0,"388",,terminal_output +15389,13314844,"TERMINAL",0,0,"4\t",,terminal_output +15390,13314845,"TERMINAL",0,0,"499",,terminal_output +15391,13315900,"TERMINAL",0,0,"5\t",,terminal_output +15392,13315901,"TERMINAL",0,0,"51010",,terminal_output +15393,13316847,"TERMINAL",0,0,"6\t",,terminal_output +15394,13316876,"TERMINAL",0,0,"611",,terminal_output +15395,13317865,"TERMINAL",0,0,"7\t",,terminal_output +15396,13317895,"TERMINAL",0,0,"722",,terminal_output +15397,13318920,"TERMINAL",0,0,"8\t",,terminal_output +15398,13318945,"TERMINAL",0,0,"833",,terminal_output +15399,13319932,"TERMINAL",0,0,"9\t",,terminal_output +15400,13319976,"TERMINAL",0,0,"944",,terminal_output +15401,13321007,"TERMINAL",0,0,"10\t",,terminal_output +15402,13321008,"TERMINAL",0,0,"1055",,terminal_output +15403,13322004,"TERMINAL",0,0,"cd logs_training_lam/",,terminal_command +15404,13322076,"TERMINAL",0,0,"1\t",,terminal_output +15405,13322116,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_alfred/logs_training_lam",,terminal_output +15406,13322286,"TERMINAL",0,0,"166",,terminal_output +15407,13322578,"TERMINAL",0,0,"ls",,terminal_command +15408,13322670,"TERMINAL",0,0,"]633;Ctest_requeueing44gg_3333210.log train_lam_oai_dev_fast_partition_3327667.log train_lam_overfit_run_oai_ds_samples_49152_3313603.log\r\ntrain_lam_coinrun_baseline_3314560.log train_lam_oai_dev_fast_partition_3327700.log train_lam_overfit_run_oai_ds_samples_6144_3313598.log\r\ntrain_lam_dev_grain_3328548.log train_lam_overfit_run_oai_ds_samples_12288_3313601.log 
train_lam_overfit_run_oai_ds_samples_96_3312787.log\r\ntrain_lam_dev_grain_3328550.log train_lam_overfit_run_oai_ds_samples_12_3316680.log train_lam_requeueing_3332606.log\r\ntrain_lam_dev_grain_3328555.log train_lam_overfit_run_oai_ds_samples_12_checkpoint_test_baseline_3316684.log train_lam_requeueing_3332633.log\r\ntrain_lam_dev_grain_3328556.log train_lam_overfit_run_oai_ds_samples_1536_3312808.log train_lam_requeueing_3332736.log\r\ntrain_lam_dev_grain_3328557.log train_lam_overfit_run_oai_ds_samples_24576_3313602.log train_lam_requeueing_3332784.log\r\ntrain_lam_init_params_3318311.log train_lam_overfit_run_oai_ds_samples_3072_3313597.log train_lam_requeueing_3332989.log\r\ntrain_lam_init_params_3318320.log train_lam_overfit_run_oai_ds_samples_384_3313264.log train_lam_rqueue_dev_3333326.log\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_alfred/logs_training_lam",,terminal_output +15409,13323163,"TERMINAL",0,0,"2\t",,terminal_output +15410,13323164,"TERMINAL",0,0,"277",,terminal_output +15411,13324108,"TERMINAL",0,0,"3\t",,terminal_output +15412,13324125,"TERMINAL",0,0,"388",,terminal_output +15413,13325150,"TERMINAL",0,0,"4\t",,terminal_output +15414,13325190,"TERMINAL",0,0,"499",,terminal_output +15415,13326155,"TERMINAL",0,0,"5\t",,terminal_output +15416,13326199,"TERMINAL",0,0,"52020",,terminal_output +15417,13327187,"TERMINAL",0,0,"6\t",,terminal_output +15418,13327279,"TERMINAL",0,0,"611",,terminal_output +15419,13328278,"TERMINAL",0,0,"7\t",,terminal_output +15420,13328278,"TERMINAL",0,0,"733",,terminal_output +15421,13328798,"TERMINAL",0,0,"ls -l | grep 28747",,terminal_command +15422,13328913,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_alfred/logs_training_lam",,terminal_output +15423,13329311,"TERMINAL",0,0,"8\t",,terminal_output +15424,13329312,"TERMINAL",0,0,"944",,terminal_output +15425,13330292,"TERMINAL",0,0,"9\t",,terminal_output +15426,13330349,"TERMINAL",0,0,"2055",,terminal_output +15427,13331331,"TERMINAL",0,0,"21\t",,terminal_output +15428,13331430,"TERMINAL",0,0,"166",,terminal_output +15429,13332374,"TERMINAL",0,0,"2\t",,terminal_output +15430,13332428,"TERMINAL",0,0,"277",,terminal_output +15431,13333398,"TERMINAL",0,0,"3\t",,terminal_output +15432,13333465,"TERMINAL",0,0,"388",,terminal_output +15433,13334436,"TERMINAL",0,0,"4\t",,terminal_output +15434,13334536,"TERMINAL",0,0,"499",,terminal_output +15435,13335549,"TERMINAL",0,0,"5\t",,terminal_output +15436,13335549,"TERMINAL",0,0,"53030",,terminal_output +15437,13336513,"TERMINAL",0,0,"6\t",,terminal_output +15438,13336612,"TERMINAL",0,0,"611",,terminal_output +15439,13337546,"TERMINAL",0,0,"7\t",,terminal_output +15440,13337641,"TERMINAL",0,0,"722",,terminal_output +15441,13338622,"TERMINAL",0,0,"8\t",,terminal_output +15442,13338646,"TERMINAL",0,0,"833",,terminal_output +15443,13340976,"TERMINAL",0,0,"955",,terminal_output +15444,13340977,"TERMINAL",0,0,"90",,terminal_output +15445,13342718,"TERMINAL",0,0,"3177",,terminal_output +15446,13342718,"TERMINAL",0,0,"31\t",,terminal_output +15447,13343741,"TERMINAL",0,0,"388",,terminal_output +15448,13343747,"TERMINAL",0,0,"3\t",,terminal_output +15449,13344764,"TERMINAL",0,0,"499",,terminal_output +15450,13344765,"TERMINAL",0,0,"4\t",,terminal_output +15451,13345762,"TERMINAL",0,0,"54040",,terminal_output +15452,13345788,"TERMINAL",0,0,"5\t",,terminal_output +15453,13346789,"TERMINAL",0,0,"6\t",,terminal_output 
+15454,13346790,"TERMINAL",0,0,"611",,terminal_output +15455,13347828,"TERMINAL",0,0,"722",,terminal_output +15456,13347829,"TERMINAL",0,0,"7\t",,terminal_output +15457,13348868,"TERMINAL",0,0,"8\t",,terminal_output +15458,13348868,"TERMINAL",0,0,"833",,terminal_output +15459,13349898,"TERMINAL",0,0,"9\t",,terminal_output +15460,13349899,"TERMINAL",0,0,"944",,terminal_output +15461,13350939,"TERMINAL",0,0,"4055",,terminal_output +15462,13350939,"TERMINAL",0,0,"40\t",,terminal_output +15463,13351976,"TERMINAL",0,0,"166",,terminal_output +15464,13351976,"TERMINAL",0,0,"1\t",,terminal_output +15465,13353000,"TERMINAL",0,0,"2\t",,terminal_output +15466,13353015,"TERMINAL",0,0,"277",,terminal_output +15467,13354040,"TERMINAL",0,0,"3\t",,terminal_output +15468,13354052,"TERMINAL",0,0,"388",,terminal_output +15469,13355078,"TERMINAL",0,0,"4\t",,terminal_output +15470,13355080,"TERMINAL",0,0,"499",,terminal_output +15471,13356118,"TERMINAL",0,0,"5\t",,terminal_output +15472,13356119,"TERMINAL",0,0,"55050",,terminal_output +15473,13357155,"TERMINAL",0,0,"6\t",,terminal_output +15474,13357163,"TERMINAL",0,0,"611",,terminal_output +15475,13358185,"TERMINAL",0,0,"722",,terminal_output +15476,13358195,"TERMINAL",0,0,"7\t",,terminal_output +15477,13359306,"TERMINAL",0,0,"833",,terminal_output +15478,13359307,"TERMINAL",0,0,"8\t",,terminal_output +15479,13360271,"TERMINAL",0,0,"9\t",,terminal_output +15480,13360272,"TERMINAL",0,0,"955",,terminal_output +15481,13361371,"TERMINAL",0,0,"51\t",,terminal_output +15482,13361372,"TERMINAL",0,0,"5166",,terminal_output +15483,13362366,"TERMINAL",0,0,"2\t",,terminal_output +15484,13362373,"TERMINAL",0,0,"277",,terminal_output +15485,13363388,"TERMINAL",0,0,"3\t",,terminal_output +15486,13363424,"TERMINAL",0,0,"388",,terminal_output +15487,13364426,"TERMINAL",0,0,"4\t",,terminal_output +15488,13364439,"TERMINAL",0,0,"499",,terminal_output +15489,13365457,"TERMINAL",0,0,"5\t",,terminal_output +15490,13365486,"TERMINAL",0,0,"53:003:00",,terminal_output +15491,13366501,"TERMINAL",0,0,"6\t",,terminal_output +15492,13366534,"TERMINAL",0,0,"611",,terminal_output +15493,13367600,"TERMINAL",0,0,"7\t",,terminal_output +15494,13367600,"TERMINAL",0,0,"722",,terminal_output +15495,13368631,"TERMINAL",0,0,"8\t",,terminal_output +15496,13368636,"TERMINAL",0,0,"833",,terminal_output +15497,13369659,"TERMINAL",0,0,"9\t",,terminal_output +15498,13369659,"TERMINAL",0,0,"944",,terminal_output +15499,13370662,"TERMINAL",0,0,"9:00\t",,terminal_output +15500,13370704,"TERMINAL",0,0,"9:0055",,terminal_output +15501,13371749,"TERMINAL",0,0,"1\t",,terminal_output +15502,13371755,"TERMINAL",0,0,"166",,terminal_output +15503,13372735,"TERMINAL",0,0,"2\t",,terminal_output +15504,13372765,"TERMINAL",0,0,"277",,terminal_output +15505,13373963,"TERMINAL",0,0,"3\t",,terminal_output +15506,13373974,"TERMINAL",0,0,"388",,terminal_output +15507,13374879,"TERMINAL",0,0,"4\t",,terminal_output +15508,13374880,"TERMINAL",0,0,"499",,terminal_output +15509,13375852,"TERMINAL",0,0,"5\t",,terminal_output +15510,13375894,"TERMINAL",0,0,"51010",,terminal_output +15511,13376929,"TERMINAL",0,0,"6\t",,terminal_output +15512,13376931,"TERMINAL",0,0,"611",,terminal_output +15513,13377933,"TERMINAL",0,0,"7\t",,terminal_output +15514,13377980,"TERMINAL",0,0,"722",,terminal_output +15515,13379070,"TERMINAL",0,0,"8\t",,terminal_output +15516,13379071,"TERMINAL",0,0,"833",,terminal_output +15517,13380010,"TERMINAL",0,0,"9\t",,terminal_output +15518,13380067,"TERMINAL",0,0,"944",,terminal_output 
+15519,13381117,"TERMINAL",0,0,"10\t",,terminal_output +15520,13381117,"TERMINAL",0,0,"1055",,terminal_output +15521,13382148,"TERMINAL",0,0,"1\t",,terminal_output +15522,13382155,"TERMINAL",0,0,"166",,terminal_output +15523,13383171,"TERMINAL",0,0,"2\t",,terminal_output +15524,13383195,"TERMINAL",0,0,"277",,terminal_output +15525,13384190,"TERMINAL",0,0,"3\t",,terminal_output +15526,13384238,"TERMINAL",0,0,"388",,terminal_output +15527,13385327,"TERMINAL",0,0,"4\t",,terminal_output +15528,13385327,"TERMINAL",0,0,"42020",,terminal_output +15529,13386339,"TERMINAL",0,0,"5\t",,terminal_output +15530,13386340,"TERMINAL",0,0,"611",,terminal_output +15531,13387371,"TERMINAL",0,0,"6\t",,terminal_output +15532,13387387,"TERMINAL",0,0,"722",,terminal_output +15533,13388328,"TERMINAL",0,0,"8\t",,terminal_output +15534,13388425,"TERMINAL",0,0,"833",,terminal_output +15535,13389362,"TERMINAL",0,0,"9\t",,terminal_output +15536,13389461,"TERMINAL",0,0,"944",,terminal_output +15537,13390400,"TERMINAL",0,0,"20\t",,terminal_output +15538,13390483,"TERMINAL",0,0,"2055",,terminal_output +15539,13391443,"TERMINAL",0,0,"1\t",,terminal_output +15540,13391542,"TERMINAL",0,0,"166",,terminal_output +15541,13392488,"TERMINAL",0,0,"2\t",,terminal_output +15542,13392578,"TERMINAL",0,0,"277",,terminal_output +15543,13393516,"TERMINAL",0,0,"3\t",,terminal_output +15544,13393621,"TERMINAL",0,0,"388",,terminal_output +15545,13394635,"TERMINAL",0,0,"4\t",,terminal_output +15546,13394663,"TERMINAL",0,0,"499",,terminal_output +15547,13395666,"TERMINAL",0,0,"5\t",,terminal_output +15548,13395687,"TERMINAL",0,0,"53030",,terminal_output +15549,13396688,"TERMINAL",0,0,"6\t",,terminal_output +15550,13396732,"TERMINAL",0,0,"611",,terminal_output +15551,13397717,"TERMINAL",0,0,"7\t",,terminal_output +15552,13397776,"TERMINAL",0,0,"722",,terminal_output +15553,13398730,"TERMINAL",0,0,"8\t",,terminal_output +15554,13398804,"TERMINAL",0,0,"833",,terminal_output +15555,13399741,"TERMINAL",0,0,"9\t",,terminal_output +15556,13399837,"TERMINAL",0,0,"944",,terminal_output +15557,13400889,"TERMINAL",0,0,"301",,terminal_output +15558,13400889,"TERMINAL",0,0,"3055",,terminal_output +15559,13401851,"TERMINAL",0,0,"1\t",,terminal_output +15560,13401899,"TERMINAL",0,0,"166",,terminal_output +15561,13402848,"TERMINAL",0,0,"2\t",,terminal_output +15562,13402949,"TERMINAL",0,0,"277",,terminal_output +15563,13403922,"TERMINAL",0,0,"3\t",,terminal_output +15564,13403978,"TERMINAL",0,0,"388",,terminal_output +15565,13404987,"TERMINAL",0,0,"4\t",,terminal_output +15566,13405010,"TERMINAL",0,0,"499",,terminal_output +15567,13405971,"TERMINAL",0,0,"5\t",,terminal_output +15568,13406054,"TERMINAL",0,0,"54040",,terminal_output +15569,13406990,"TERMINAL",0,0,"6\t",,terminal_output +15570,13407100,"TERMINAL",0,0,"611",,terminal_output +15571,13408020,"TERMINAL",0,0,"7\t",,terminal_output +15572,13408159,"TERMINAL",0,0,"722",,terminal_output +15573,13409059,"TERMINAL",0,0,"8\t",,terminal_output +15574,13409159,"TERMINAL",0,0,"833",,terminal_output +15575,13410153,"TERMINAL",0,0,"9\t",,terminal_output +15576,13410255,"TERMINAL",0,0,"944",,terminal_output +15577,13411233,"TERMINAL",0,0,"40\t",,terminal_output +15578,13411255,"TERMINAL",0,0,"4055",,terminal_output +15579,13412259,"TERMINAL",0,0,"1\t",,terminal_output +15580,13412304,"TERMINAL",0,0,"177",,terminal_output +15581,13413289,"TERMINAL",0,0,"2\t",,terminal_output +15582,13413338,"TERMINAL",0,0,"388",,terminal_output +15583,13414235,"TERMINAL",0,0,"3\t",,terminal_output 
+15584,13414378,"TERMINAL",0,0,"499",,terminal_output +15585,13415311,"TERMINAL",0,0,"4\t",,terminal_output +15586,13415455,"TERMINAL",0,0,"55050",,terminal_output +15587,13416366,"TERMINAL",0,0,"63",,terminal_output +15588,13416465,"TERMINAL",0,0,"611",,terminal_output +15589,13417375,"TERMINAL",0,0,"7\t",,terminal_output +15590,13417494,"TERMINAL",0,0,"722",,terminal_output +15591,13418382,"TERMINAL",0,0,"8\t",,terminal_output +15592,13418533,"TERMINAL",0,0,"833",,terminal_output +15593,13419415,"TERMINAL",0,0,"9\t",,terminal_output +15594,13419570,"TERMINAL",0,0,"944",,terminal_output +15595,13420460,"TERMINAL",0,0,"503",,terminal_output +15596,13420612,"TERMINAL",0,0,"5055",,terminal_output +15597,13421503,"TERMINAL",0,0,"14",,terminal_output +15598,13421645,"TERMINAL",0,0,"166",,terminal_output +15599,13422537,"TERMINAL",0,0,"2\t",,terminal_output +15600,13422682,"TERMINAL",0,0,"277",,terminal_output +15601,13423575,"TERMINAL",0,0,"3\t",,terminal_output +15602,13423759,"TERMINAL",0,0,"388",,terminal_output +15603,13424631,"TERMINAL",0,0,"4\t",,terminal_output +15604,13424755,"TERMINAL",0,0,"499",,terminal_output +15605,13425763,"TERMINAL",0,0,"5\t",,terminal_output +15606,13425795,"TERMINAL",0,0,"54:004:00",,terminal_output +15607,13426789,"TERMINAL",0,0,"6\t",,terminal_output +15608,13426833,"TERMINAL",0,0,"611",,terminal_output +15609,13427753,"TERMINAL",0,0,"7\t",,terminal_output +15610,13427877,"TERMINAL",0,0,"722",,terminal_output +15611,13428758,"TERMINAL",0,0,"8\t",,terminal_output +15612,13428920,"TERMINAL",0,0,"833",,terminal_output +15613,13429788,"TERMINAL",0,0,"9\t",,terminal_output +15614,13429962,"TERMINAL",0,0,"944",,terminal_output +15615,13430825,"TERMINAL",0,0,"10:00\t",,terminal_output +15616,13431002,"TERMINAL",0,0,"10:0055",,terminal_output +15617,13431857,"TERMINAL",0,0,"1\t",,terminal_output +15618,13432034,"TERMINAL",0,0,"166",,terminal_output +15619,13432893,"TERMINAL",0,0,"2\t",,terminal_output +15620,13433077,"TERMINAL",0,0,"277",,terminal_output +15621,13433955,"TERMINAL",0,0,"3\t",,terminal_output +15622,13434135,"TERMINAL",0,0,"388",,terminal_output +15623,13434970,"TERMINAL",0,0,"4\t",,terminal_output +15624,13435147,"TERMINAL",0,0,"499",,terminal_output +15625,13436070,"TERMINAL",0,0,"54",,terminal_output +15626,13436215,"TERMINAL",0,0,"51010",,terminal_output +15627,13437138,"TERMINAL",0,0,"6\t",,terminal_output +15628,13437233,"TERMINAL",0,0,"611",,terminal_output +15629,13438161,"TERMINAL",0,0,"7\t",,terminal_output +15630,13438301,"TERMINAL",0,0,"722",,terminal_output +15631,13439324,"TERMINAL",0,0,"8\t",,terminal_output +15632,13439325,"TERMINAL",0,0,"844",,terminal_output +15633,13440202,"TERMINAL",0,0,"9\t",,terminal_output +15634,13440348,"TERMINAL",0,0,"1055",,terminal_output +15635,13441170,"slurm/jobs/alfred/berlin/coinrun/coinrun_ablation/coinrun_lam_wsd_3e-6.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1 \n#SBATCH --gres=gpu:1\n#SBATCH --time=1-00:00:00\n#SBATCH --cpus-per-task=5\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/ali/coinrun/lam/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/ali/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_ablation_wsd_3e-6\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch 
$checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun lam_ablation""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_10m""\n\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam_full_prec.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=36 \\n --image_height=64 \\n --image_width=64 \\n --init_lr=0 \\n --max_lr=3e-6 \\n --decay_end=0 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --ffn_dim=512 \\n --num_blocks=8 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --lr_schedule cos \\n --no_use_flash_attention \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n\n\n",shellscript,tab +15636,13441733,"TERMINAL",0,0,"10\t",,terminal_output +15637,13441803,"TERMINAL",0,0,"166",,terminal_output +15638,13442244,"TERMINAL",0,0,"1\t",,terminal_output +15639,13442428,"TERMINAL",0,0,"277",,terminal_output +15640,13443415,"TERMINAL",0,0,"2\t",,terminal_output +15641,13443486,"TERMINAL",0,0,"388",,terminal_output +15642,13444347,"TERMINAL",0,0,"3\t",,terminal_output +15643,13444490,"TERMINAL",0,0,"499",,terminal_output +15644,13445337,"TERMINAL",0,0,"5\t",,terminal_output +15645,13445548,"TERMINAL",0,0,"52020",,terminal_output +15646,13446386,"TERMINAL",0,0,"6\t",,terminal_output +15647,13446569,"TERMINAL",0,0,"611",,terminal_output +15648,13447405,"TERMINAL",0,0,"7\t",,terminal_output +15649,13447602,"TERMINAL",0,0,"722",,terminal_output +15650,13448451,"TERMINAL",0,0,"8\t",,terminal_output +15651,13448652,"TERMINAL",0,0,"833",,terminal_output +15652,13449479,"TERMINAL",0,0,"9\t",,terminal_output +15653,13449686,"TERMINAL",0,0,"944",,terminal_output +15654,13450545,"TERMINAL",0,0,"20\t",,terminal_output +15655,13450725,"TERMINAL",0,0,"2055",,terminal_output +15656,13451670,"TERMINAL",0,0,"1\t",,terminal_output +15657,13451773,"TERMINAL",0,0,"166",,terminal_output +15658,13452884,"TERMINAL",0,0,"2\t",,terminal_output +15659,13452884,"TERMINAL",0,0,"277",,terminal_output +15660,13453641,"TERMINAL",0,0,"3\t",,terminal_output +15661,13453834,"TERMINAL",0,0,"388",,terminal_output +15662,13454743,"TERMINAL",0,0,"4\t",,terminal_output +15663,13454880,"TERMINAL",0,0,"499",,terminal_output +15664,13455830,"TERMINAL",0,0,"5\t",,terminal_output +15665,13455921,"TERMINAL",0,0,"53030",,terminal_output +15666,13456811,"TERMINAL",0,0,"6\t",,terminal_output +15667,13456949,"TERMINAL",0,0,"611",,terminal_output +15668,13457773,"TERMINAL",0,0,"7\t",,terminal_output +15669,13458021,"TERMINAL",0,0,"722",,terminal_output +15670,13458573,"slurm/jobs/alfred/berlin/coinrun/coinrun_ablation/coinrun_lam_baseline.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1 \n#SBATCH --gres=gpu:1\n#SBATCH --time=1-00:00:00\n#SBATCH --cpus-per-task=5\n#SBATCH 
--output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/ali/coinrun/lam/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/ali/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_ablation_baseline\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun lam_ablation""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_10m""\n\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam_full_prec.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=36 \\n --image_height=64 \\n --image_width=64 \\n --max_lr=3e-5 \\n --init_lr=3e-6 \\n --decay_end=0 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --ffn_dim=512 \\n --num_blocks=8 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --lr_schedule cos \\n --no_use_flash_attention \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n\n\n",shellscript,tab +15671,13459137,"TERMINAL",0,0,"8\t",,terminal_output +15672,13459171,"TERMINAL",0,0,"833",,terminal_output +15673,13459917,"TERMINAL",0,0,"9\t",,terminal_output +15674,13460077,"TERMINAL",0,0,"944",,terminal_output +15675,13460880,"TERMINAL",0,0,"30\t",,terminal_output +15676,13461137,"TERMINAL",0,0,"3055",,terminal_output +15677,13461935,"TERMINAL",0,0,"1\t",,terminal_output +15678,13462142,"TERMINAL",0,0,"166",,terminal_output +15679,13462839,"slurm/jobs/alfred/berlin/coinrun/coinrun_ablation/coinrun_lam_wsd_3e-5_3e-6.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1 \n#SBATCH --gres=gpu:1\n#SBATCH --time=1-00:00:00\n#SBATCH --cpus-per-task=5\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/ali/coinrun/lam/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/ali/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_ablation_wsd_3e-5_3e-6\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat 
$0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun lam_ablation""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_10m""\n\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam_full_prec.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=36 \\n --image_height=64 \\n --image_width=64 \\n --init_lr=3e-5 \\n --max_lr=3e-6 \\n --decay_end=0 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --ffn_dim=512 \\n --num_blocks=8 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --lr_schedule cos \\n --no_use_flash_attention \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n\n\n",shellscript,tab +15680,13463903,"slurm/jobs/alfred/berlin/coinrun/coinrun_ablation/coinrun_lam_wsd_3e-6.sbatch",0,0,"",shellscript,tab +15681,13464726,"TERMINAL",0,0,"299",,terminal_output +15682,13464727,"TERMINAL",0,0,"20",,terminal_output +15683,13465682,"TERMINAL",0,0,"54040",,terminal_output +15684,13465714,"TERMINAL",0,0,"5\t",,terminal_output +15685,13466706,"TERMINAL",0,0,"6\t",,terminal_output +15686,13466707,"TERMINAL",0,0,"611",,terminal_output +15687,13467812,"TERMINAL",0,0,"722",,terminal_output +15688,13467812,"TERMINAL",0,0,"7\t",,terminal_output +15689,13468801,"TERMINAL",0,0,"833",,terminal_output +15690,13468805,"TERMINAL",0,0,"8\t",,terminal_output +15691,13469839,"TERMINAL",0,0,"9\t",,terminal_output +15692,13469839,"TERMINAL",0,0,"944",,terminal_output +15693,13470928,"TERMINAL",0,0,"40\t",,terminal_output +15694,13470928,"TERMINAL",0,0,"4055",,terminal_output +15695,13471863,"TERMINAL",0,0,"1\t",,terminal_output +15696,13471874,"TERMINAL",0,0,"166",,terminal_output +15697,13472897,"TERMINAL",0,0,"2\t",,terminal_output +15698,13472928,"TERMINAL",0,0,"277",,terminal_output +15699,13474034,"TERMINAL",0,0,"3\t",,terminal_output +15700,13474035,"TERMINAL",0,0,"388",,terminal_output +15701,13475022,"TERMINAL",0,0,"4\t",,terminal_output +15702,13475043,"TERMINAL",0,0,"499",,terminal_output +15703,13476057,"TERMINAL",0,0,"5\t",,terminal_output +15704,13476062,"TERMINAL",0,0,"55050",,terminal_output +15705,13477088,"TERMINAL",0,0,"6\t",,terminal_output +15706,13477101,"TERMINAL",0,0,"611",,terminal_output +15707,13478204,"TERMINAL",0,0,"7\t",,terminal_output +15708,13478205,"TERMINAL",0,0,"722",,terminal_output +15709,13479132,"TERMINAL",0,0,"8\t",,terminal_output +15710,13479158,"TERMINAL",0,0,"833",,terminal_output +15711,13480254,"TERMINAL",0,0,"9\t",,terminal_output +15712,13480294,"TERMINAL",0,0,"944",,terminal_output +15713,13481146,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1 \n#SBATCH --gres=gpu:1\n#SBATCH --time=1-00:00:00\n#SBATCH --cpus-per-task=5\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/ali/coinrun/lam/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/ali/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_ablation_wsd_3e-6\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # 
optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun lam_ablation""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_10m""\n\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam_full_prec.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=36 \\n --image_height=64 \\n --image_width=64 \\n --init_lr=0 \\n --max_lr=3e-6 \\n --decay_end=0 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --ffn_dim=512 \\n --num_blocks=8 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --lr_schedule cos \\n --no_use_flash_attention \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n\n\n",shellscript,tab +15714,13481267,"TERMINAL",0,0,"50\t",,terminal_output +15715,13481267,"TERMINAL",0,0,"5055",,terminal_output +15716,13482289,"TERMINAL",0,0,"1\t",,terminal_output +15717,13482351,"TERMINAL",0,0,"177",,terminal_output +15718,13483349,"TERMINAL",0,0,"2\t",,terminal_output +15719,13483349,"TERMINAL",0,0,"388",,terminal_output +15720,13483909,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2050,0,"",shellscript,selection_mouse +15721,13484253,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2049,1,"\n",shellscript,selection_mouse +15722,13484254,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2030,20,"\n\nwait $child_pid\n\n\n",shellscript,selection_mouse +15723,13484254,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1772,278," \\n --ffn_dim=512 \\n --num_blocks=8 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --lr_schedule cos \\n --no_use_flash_attention \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n\n\n",shellscript,selection_mouse +15724,13484254,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1503,547," --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=36 \\n --image_height=64 \\n --image_width=64 \\n --init_lr=0 \\n --max_lr=3e-6 \\n --decay_end=0 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --ffn_dim=512 \\n --num_blocks=8 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --lr_schedule cos \\n --no_use_flash_attention \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n\n\n",shellscript,selection_mouse +15725,13484255,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1404,646,"\nenv | grep SLURM\n\nsrun python train_lam_full_prec.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=36 \\n --image_height=64 \\n --image_width=64 \\n --init_lr=0 \\n --max_lr=3e-6 \\n 
--decay_end=0 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --ffn_dim=512 \\n --num_blocks=8 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --lr_schedule cos \\n --no_use_flash_attention \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n\n\n",shellscript,selection_mouse +15726,13484255,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1037,1013,"# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun lam_ablation""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_10m""\n\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam_full_prec.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=36 \\n --image_height=64 \\n --image_width=64 \\n --init_lr=0 \\n --max_lr=3e-6 \\n --decay_end=0 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --ffn_dim=512 \\n --num_blocks=8 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --lr_schedule cos \\n --no_use_flash_attention \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n\n\n",shellscript,selection_mouse +15727,13484255,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",649,1401," # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun lam_ablation""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_10m""\n\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam_full_prec.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=36 \\n --image_height=64 \\n --image_width=64 \\n --init_lr=0 \\n --max_lr=3e-6 \\n --decay_end=0 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --ffn_dim=512 \\n --num_blocks=8 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --lr_schedule cos \\n --no_use_flash_attention \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n\n\n",shellscript,selection_mouse +15728,13484256,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",20,2030,"\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1 \n#SBATCH --gres=gpu:1\n#SBATCH --time=1-00:00:00\n#SBATCH --cpus-per-task=5\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/ali/coinrun/lam/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/ali/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_ablation_wsd_3e-6\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# 
--- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun lam_ablation""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_10m""\n\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam_full_prec.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=36 \\n --image_height=64 \\n --image_width=64 \\n --init_lr=0 \\n --max_lr=3e-6 \\n --decay_end=0 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --ffn_dim=512 \\n --num_blocks=8 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --lr_schedule cos \\n --no_use_flash_attention \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n\n\n",shellscript,selection_mouse +15729,13484306,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2049,1,"\n",shellscript,selection_command +15730,13484307,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",0,2050,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1 \n#SBATCH --gres=gpu:1\n#SBATCH --time=1-00:00:00\n#SBATCH --cpus-per-task=5\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/ali/coinrun/lam/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/ali/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_ablation_wsd_3e-6\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun lam_ablation""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_10m""\n\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam_full_prec.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=36 \\n --image_height=64 \\n --image_width=64 \\n --init_lr=0 \\n --max_lr=3e-6 \\n 
--decay_end=0 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --ffn_dim=512 \\n --num_blocks=8 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --lr_schedule cos \\n --no_use_flash_attention \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n\n\n",shellscript,selection_mouse +15731,13484445,"TERMINAL",0,0,"4\t",,terminal_output +15732,13484446,"TERMINAL",0,0,"499",,terminal_output +15733,13485260,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",0,0,"",shellscript,selection_command +15734,13485455,"TERMINAL",0,0,"5\t",,terminal_output +15735,13485501,"TERMINAL",0,0,"55:005:00",,terminal_output +15736,13486435,"TERMINAL",0,0,"6\t",,terminal_output +15737,13486517,"TERMINAL",0,0,"611",,terminal_output +15738,13487445,"TERMINAL",0,0,"7\t",,terminal_output +15739,13487497,"TERMINAL",0,0,"722",,terminal_output +15740,13488512,"TERMINAL",0,0,"8\t",,terminal_output +15741,13488528,"TERMINAL",0,0,"833",,terminal_output +15742,13489546,"TERMINAL",0,0,"9\t",,terminal_output +15743,13489588,"TERMINAL",0,0,"944",,terminal_output +15744,13490589,"TERMINAL",0,0,"1:00\t",,terminal_output +15745,13490630,"TERMINAL",0,0,"1:0055",,terminal_output +15746,13491593,"TERMINAL",0,0,"11",,terminal_output +15747,13491658,"TERMINAL",0,0,"166",,terminal_output +15748,13492637,"TERMINAL",0,0,"2\t",,terminal_output +15749,13492709,"TERMINAL",0,0,"277",,terminal_output +15750,13493713,"TERMINAL",0,0,"3\t",,terminal_output +15751,13493737,"TERMINAL",0,0,"388",,terminal_output +15752,13494711,"TERMINAL",0,0,"4\t",,terminal_output +15753,13494782,"TERMINAL",0,0,"499",,terminal_output +15754,13495746,"TERMINAL",0,0,"5\t",,terminal_output +15755,13495850,"TERMINAL",0,0,"51010",,terminal_output +15756,13496780,"TERMINAL",0,0,"65",,terminal_output +15757,13496873,"TERMINAL",0,0,"611",,terminal_output +15758,13497812,"TERMINAL",0,0,"7\t",,terminal_output +15759,13497911,"TERMINAL",0,0,"722",,terminal_output +15760,13498885,"TERMINAL",0,0,"8\t",,terminal_output +15761,13498944,"TERMINAL",0,0,"833",,terminal_output +15762,13500142,"TERMINAL",0,0,"9\t",,terminal_output +15763,13500142,"TERMINAL",0,0,"944",,terminal_output +15764,13500927,"TERMINAL",0,0,"10\t",,terminal_output +15765,13501034,"TERMINAL",0,0,"1055",,terminal_output +15766,13502053,"TERMINAL",0,0,"1\t",,terminal_output +15767,13502063,"TERMINAL",0,0,"166",,terminal_output +15768,13502998,"TERMINAL",0,0,"2\t",,terminal_output +15769,13503137,"TERMINAL",0,0,"277",,terminal_output +15770,13504034,"TERMINAL",0,0,"3\t",,terminal_output +15771,13504196,"TERMINAL",0,0,"388",,terminal_output +15772,13505211,"TERMINAL",0,0,"4\t",,terminal_output +15773,13505211,"TERMINAL",0,0,"499",,terminal_output +15774,13506121,"TERMINAL",0,0,"5\t",,terminal_output +15775,13506260,"TERMINAL",0,0,"52020",,terminal_output +15776,13507195,"TERMINAL",0,0,"6\t",,terminal_output +15777,13507321,"TERMINAL",0,0,"622",,terminal_output +15778,13508256,"TERMINAL",0,0,"7\t",,terminal_output +15779,13508316,"TERMINAL",0,0,"833",,terminal_output +15780,13509260,"TERMINAL",0,0,"8\t",,terminal_output +15781,13509403,"TERMINAL",0,0,"944",,terminal_output +15782,13510261,"TERMINAL",0,0,"9\t",,terminal_output +15783,13510402,"TERMINAL",0,0,"2055",,terminal_output +15784,13511320,"TERMINAL",0,0,"20\t",,terminal_output +15785,13511478,"TERMINAL",0,0,"166",,terminal_output +15786,13511870,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",39,28,"#SBATCH 
--ntasks-per-node=1\n#SBATCH --ntasks-per-node=1 ",shellscript,content +15787,13512297,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",67,29,"",shellscript,content +15788,13512381,"TERMINAL",0,0,"2\t",,terminal_output +15789,13512479,"TERMINAL",0,0,"277",,terminal_output +15790,13513375,"TERMINAL",0,0,"3\t",,terminal_output +15791,13513514,"TERMINAL",0,0,"388",,terminal_output +15792,13514415,"TERMINAL",0,0,"4\t",,terminal_output +15793,13514553,"TERMINAL",0,0,"499",,terminal_output +15794,13515452,"TERMINAL",0,0,"5\t",,terminal_output +15795,13515598,"TERMINAL",0,0,"53030",,terminal_output +15796,13516509,"TERMINAL",0,0,"6\t",,terminal_output +15797,13516698,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",440,0,"# Log the sbatch script\n",shellscript,content +15798,13516762,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",464,0,"cat $0\n",shellscript,content +15799,13516763,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",471,0,"\n",shellscript,content +15800,13516802,"TERMINAL",0,0,"611",,terminal_output +15801,13517395,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",472,0,"source .venv/bin/activate\n",shellscript,content +15802,13517395,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",498,0,"\n",shellscript,content +15803,13517525,"TERMINAL",0,0,"7\t",,terminal_output +15804,13517766,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",804,0,"",shellscript,selection_command +15805,13517912,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",499,0,"job_name=$SLURM_JOB_NAME\n",shellscript,content +15806,13518011,"TERMINAL",0,0,"722",,terminal_output +15807,13518259,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",524,0,"slurm_job_id=$SLURM_JOB_ID\n",shellscript,content +15808,13518263,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",551,0,"\n",shellscript,content +15809,13518636,"TERMINAL",0,0,"8\t",,terminal_output +15810,13518721,"TERMINAL",0,0,"833",,terminal_output +15811,13519610,"TERMINAL",0,0,"9\t",,terminal_output +15812,13519760,"TERMINAL",0,0,"944",,terminal_output +15813,13520348,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",552,0,"CHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/${job_name}_${slurm_job_id}""\n",shellscript,content +15814,13520660,"TERMINAL",0,0,"30\t",,terminal_output +15815,13520811,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",552,938,"",shellscript,content +15816,13520845,"TERMINAL",0,0,"3055",,terminal_output +15817,13521918,"TERMINAL",0,0,"1\t",,terminal_output +15818,13522010,"TERMINAL",0,0,"166",,terminal_output +15819,13522515,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",687,0,"array_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_10m""\n\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\n",shellscript,content +15820,13522738,"TERMINAL",0,0,"21",,terminal_output +15821,13522946,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",779,0,"env | grep SLURM\n",shellscript,content +15822,13522947,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",796,152,"",shellscript,content +15823,13523072,"TERMINAL",0,0,"277",,terminal_output +15824,13523795,"TERMINAL",0,0,"3\t",,terminal_output +15825,13523931,"TERMINAL",0,0,"388",,terminal_output 
+15826,13524569,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",852,0," --restore_ckpt_flag $restore_ckpt_flag \\n",shellscript,content +15827,13524619,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",897,25,"",shellscript,content +15828,13524825,"TERMINAL",0,0,"4\t",,terminal_output +15829,13524971,"TERMINAL",0,0,"499",,terminal_output +15830,13526009,"TERMINAL",0,0,"5\t",,terminal_output +15831,13526194,"TERMINAL",0,0,"54040",,terminal_output +15832,13526970,"TERMINAL",0,0,"6\t",,terminal_output +15833,13527052,"TERMINAL",0,0,"611",,terminal_output +15834,13527438,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1253,0," --tags ""coinrun lam_ablation"" \\n",shellscript,content +15835,13527516,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1289,19,"",shellscript,content +15836,13527940,"TERMINAL",0,0,"7\t",,terminal_output +15837,13528093,"TERMINAL",0,0,"722",,terminal_output +15838,13528682,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1459,2,"",shellscript,content +15839,13528971,"TERMINAL",0,0,"86",,terminal_output +15840,13529159,"TERMINAL",0,0,"833",,terminal_output +15841,13530007,"TERMINAL",0,0,"9\t",,terminal_output +15842,13530176,"TERMINAL",0,0,"944",,terminal_output +15843,13531033,"TERMINAL",0,0,"40\t",,terminal_output +15844,13531212,"TERMINAL",0,0,"4055",,terminal_output +15845,13532171,"TERMINAL",0,0,"1\t",,terminal_output +15846,13532253,"TERMINAL",0,0,"166",,terminal_output +15847,13533197,"TERMINAL",0,0,"2\t",,terminal_output +15848,13533319,"TERMINAL",0,0,"288",,terminal_output +15849,13534214,"TERMINAL",0,0,"3\t",,terminal_output +15850,13534333,"TERMINAL",0,0,"499",,terminal_output +15851,13535240,"TERMINAL",0,0,"4\t",,terminal_output +15852,13535370,"TERMINAL",0,0,"55050",,terminal_output +15853,13536286,"TERMINAL",0,0,"5\t",,terminal_output +15854,13536453,"TERMINAL",0,0,"611",,terminal_output +15855,13537387,"TERMINAL",0,0,"6\t",,terminal_output +15856,13537496,"TERMINAL",0,0,"722",,terminal_output +15857,13538404,"TERMINAL",0,0,"7\t",,terminal_output +15858,13538541,"TERMINAL",0,0,"833",,terminal_output +15859,13539337,"TERMINAL",0,0,"9\t",,terminal_output +15860,13539545,"TERMINAL",0,0,"944",,terminal_output +15861,13540462,"TERMINAL",0,0,"50\t",,terminal_output +15862,13540673,"TERMINAL",0,0,"5055",,terminal_output +15863,13541404,"TERMINAL",0,0,"1\t",,terminal_output +15864,13541636,"TERMINAL",0,0,"166",,terminal_output +15865,13542464,"TERMINAL",0,0,"2\t",,terminal_output +15866,13542685,"TERMINAL",0,0,"277",,terminal_output +15867,13543489,"TERMINAL",0,0,"3\t",,terminal_output +15868,13543715,"TERMINAL",0,0,"388",,terminal_output +15869,13544524,"TERMINAL",0,0,"45",,terminal_output +15870,13544769,"TERMINAL",0,0,"499",,terminal_output +15871,13545582,"TERMINAL",0,0,"5\t",,terminal_output +15872,13545825,"TERMINAL",0,0,"56:006:00",,terminal_output +15873,13546666,"TERMINAL",0,0,"6\t",,terminal_output +15874,13546885,"TERMINAL",0,0,"611",,terminal_output +15875,13547644,"TERMINAL",0,0,"7\t",,terminal_output +15876,13547901,"TERMINAL",0,0,"722",,terminal_output +15877,13548688,"TERMINAL",0,0,"8\t",,terminal_output +15878,13548934,"TERMINAL",0,0,"833",,terminal_output +15879,13549806,"TERMINAL",0,0,"9\t",,terminal_output +15880,13549982,"TERMINAL",0,0,"944",,terminal_output +15881,13550759,"TERMINAL",0,0,"2:00\t",,terminal_output +15882,13551122,"TERMINAL",0,0,"2:0055",,terminal_output +15883,13551803,"TERMINAL",0,0,"1\t",,terminal_output 
+15884,13552063,"TERMINAL",0,0,"166",,terminal_output +15885,13552834,"TERMINAL",0,0,"2\t",,terminal_output +15886,13553110,"TERMINAL",0,0,"277",,terminal_output +15887,13553878,"TERMINAL",0,0,"3\t",,terminal_output +15888,13554144,"TERMINAL",0,0,"388",,terminal_output +15889,13554926,"TERMINAL",0,0,"4\t",,terminal_output +15890,13555192,"TERMINAL",0,0,"499",,terminal_output +15891,13555948,"TERMINAL",0,0,"52",,terminal_output +15892,13556227,"TERMINAL",0,0,"51010",,terminal_output +15893,13556989,"TERMINAL",0,0,"6\t",,terminal_output +15894,13557283,"TERMINAL",0,0,"622",,terminal_output +15895,13558035,"TERMINAL",0,0,"7\t",,terminal_output +15896,13558330,"TERMINAL",0,0,"833",,terminal_output +15897,13559081,"TERMINAL",0,0,"8\t",,terminal_output +15898,13559353,"TERMINAL",0,0,"944",,terminal_output +15899,13560109,"TERMINAL",0,0,"9\t",,terminal_output +15900,13560402,"TERMINAL",0,0,"1055",,terminal_output +15901,13561152,"TERMINAL",0,0,"10\t",,terminal_output +15902,13561450,"TERMINAL",0,0,"166",,terminal_output +15903,13562183,"TERMINAL",0,0,"1\t",,terminal_output +15904,13562480,"TERMINAL",0,0,"277",,terminal_output +15905,13563237,"TERMINAL",0,0,"2\t",,terminal_output +15906,13563543,"TERMINAL",0,0,"388",,terminal_output +15907,13564272,"TERMINAL",0,0,"350",,terminal_output +15908,13564615,"TERMINAL",0,0,"499",,terminal_output +15909,13565301,"TERMINAL",0,0,"4\t",,terminal_output +15910,13565643,"TERMINAL",0,0,"52020",,terminal_output +15911,13566508,"TERMINAL",0,0,"6\t",,terminal_output +15912,13566915,"TERMINAL",0,0,"611",,terminal_output +15913,13567433,"TERMINAL",0,0,"7\t",,terminal_output +15914,13567686,"TERMINAL",0,0,"722",,terminal_output +15915,13568417,"TERMINAL",0,0,"8\t",,terminal_output +15916,13568738,"TERMINAL",0,0,"833",,terminal_output +15917,13569454,"TERMINAL",0,0,"9\t",,terminal_output +15918,13569829,"TERMINAL",0,0,"944",,terminal_output +15919,13570673,"TERMINAL",0,0,"20\t",,terminal_output +15920,13570821,"TERMINAL",0,0,"2055",,terminal_output +15921,13571520,"TERMINAL",0,0,"1\t",,terminal_output +15922,13571944,"TERMINAL",0,0,"166",,terminal_output +15923,13572563,"TERMINAL",0,0,"2\t",,terminal_output +15924,13572722,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",0,0,"",shellscript,selection_command +15925,13572878,"TERMINAL",0,0,"277",,terminal_output +15926,13573698,"TERMINAL",0,0,"3\t",,terminal_output +15927,13573947,"TERMINAL",0,0,"388",,terminal_output +15928,13574230,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1459,0,"\n\n",shellscript,content +15929,13574231,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1253,35," --tags $tags \",shellscript,content +15930,13574231,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",852,44," $restore_ckpt_flag \",shellscript,content +15931,13574231,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",687,92,"",shellscript,content +15932,13574231,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",552,0,"tags=""coinrun lam_ablation""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_10m""\n\n",shellscript,content +15933,13574231,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",440,0,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue 
$SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,content +15934,13574231,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",39,27,"#SBATCH --ntasks-per-node=1 ",shellscript,content +15935,13574636,"TERMINAL",0,0,"4\t",,terminal_output +15936,13574963,"TERMINAL",0,0,"499",,terminal_output +15937,13575690,"TERMINAL",0,0,"5\t",,terminal_output +15938,13575987,"TERMINAL",0,0,"53030",,terminal_output +15939,13576740,"TERMINAL",0,0,"6\t",,terminal_output +15940,13577047,"TERMINAL",0,0,"611",,terminal_output +15941,13577743,"TERMINAL",0,0,"7\t",,terminal_output +15942,13578061,"TERMINAL",0,0,"722",,terminal_output +15943,13578778,"TERMINAL",0,0,"8\t",,terminal_output +15944,13579115,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",39,0,"#SBATCH --ntasks-per-node=4\n",shellscript,content +15945,13579285,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",67,50,"",shellscript,content +15946,13579286,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",93,0,"#SBATCH --partition=dev_accelerated\n",shellscript,content +15947,13579378,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",155,0,"#SBATCH --gres=gpu:1\n",shellscript,content +15948,13579379,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",176,0,"#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n",shellscript,content +15949,13579486,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",287,0,"#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n",shellscript,content +15950,13579487,"TERMINAL",0,0,"833",,terminal_output +15951,13579563,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",397,175,"",shellscript,content +15952,13579993,"TERMINAL",0,0,"9\t",,terminal_output +15953,13580268,"TERMINAL",0,0,"944",,terminal_output +15954,13580358,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1150,0,"module unload mpi/openmpi/5.0\n",shellscript,content +15955,13580456,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1180,0,"module unload devel/cuda/12.4\n",shellscript,content +15956,13580633,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1290,0,"CHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\n",shellscript,content +15957,13580696,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1410,0,"mkdir -p $CHECKPOINT_DIR\n",shellscript,content +15958,13580697,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1435,28,"",shellscript,content +15959,13580854,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1436,0,"array_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n",shellscript,content +15960,13580855,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1567,91,"",shellscript,content +15961,13580960,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1568,0,"env | grep SLURM\n",shellscript,content 
+15962,13580961,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1585,152,"",shellscript,content +15963,13581136,"TERMINAL",0,0,"30\t",,terminal_output +15964,13581418,"TERMINAL",0,0,"3055",,terminal_output +15965,13581935,"TERMINAL",0,0,"1\t",,terminal_output +15966,13582003,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2022,0," --tags ""coinrun lam_ablation"" \\n",shellscript,content +15967,13582071,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2058,19,"",shellscript,content +15968,13582676,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2228,2,"",shellscript,content +15969,13582689,"TERMINAL",0,0,"166",,terminal_output +15970,13582928,"TERMINAL",0,0,"2\t",,terminal_output +15971,13583243,"TERMINAL",0,0,"277",,terminal_output +15972,13583973,"TERMINAL",0,0,"3\t",,terminal_output +15973,13584299,"TERMINAL",0,0,"399",,terminal_output +15974,13585220,"TERMINAL",0,0,"4\t",,terminal_output +15975,13585360,"TERMINAL",0,0,"54040",,terminal_output +15976,13587600,"TERMINAL",0,0,"622",,terminal_output +15977,13587642,"TERMINAL",0,0,"521",,terminal_output +15978,13589069,"TERMINAL",0,0,"833",,terminal_output +15979,13589069,"TERMINAL",0,0,"80",,terminal_output +15980,13590117,"TERMINAL",0,0,"944",,terminal_output +15981,13590128,"TERMINAL",0,0,"9\t",,terminal_output +15982,13591147,"TERMINAL",0,0,"4055",,terminal_output +15983,13591148,"TERMINAL",0,0,"40\t",,terminal_output +15984,13592270,"TERMINAL",0,0,"166",,terminal_output +15985,13592277,"TERMINAL",0,0,"1\t",,terminal_output +15986,13593293,"TERMINAL",0,0,"277",,terminal_output +15987,13593293,"TERMINAL",0,0,"2\t",,terminal_output +15988,13594321,"TERMINAL",0,0,"3\t",,terminal_output +15989,13594321,"TERMINAL",0,0,"388",,terminal_output +15990,13595341,"TERMINAL",0,0,"4\t",,terminal_output +15991,13595341,"TERMINAL",0,0,"499",,terminal_output +15992,13596333,"TERMINAL",0,0,"5\t",,terminal_output +15993,13596386,"TERMINAL",0,0,"55151",,terminal_output +15994,13597392,"TERMINAL",0,0,"7\t",,terminal_output +15995,13597392,"TERMINAL",0,0,"722",,terminal_output +15996,13598411,"TERMINAL",0,0,"81",,terminal_output +15997,13598416,"TERMINAL",0,0,"833",,terminal_output +15998,13599437,"TERMINAL",0,0,"9\t",,terminal_output +15999,13599438,"TERMINAL",0,0,"944",,terminal_output +16000,13600430,"TERMINAL",0,0,"50\t",,terminal_output +16001,13600443,"TERMINAL",0,0,"5055",,terminal_output +16002,13601492,"TERMINAL",0,0,"1\t",,terminal_output +16003,13601493,"TERMINAL",0,0,"166",,terminal_output +16004,13602561,"TERMINAL",0,0,"2\t",,terminal_output +16005,13602561,"TERMINAL",0,0,"277",,terminal_output +16006,13603553,"TERMINAL",0,0,"3\t",,terminal_output +16007,13603568,"TERMINAL",0,0,"388",,terminal_output +16008,13604634,"TERMINAL",0,0,"4\t",,terminal_output +16009,13604637,"TERMINAL",0,0,"499",,terminal_output +16010,13605629,"TERMINAL",0,0,"5\t",,terminal_output +16011,13605682,"TERMINAL",0,0,"57:007:00",,terminal_output +16012,13606711,"TERMINAL",0,0,"6\t",,terminal_output +16013,13606711,"TERMINAL",0,0,"611",,terminal_output +16014,13607721,"TERMINAL",0,0,"7\t",,terminal_output +16015,13607751,"TERMINAL",0,0,"722",,terminal_output +16016,13608858,"TERMINAL",0,0,"8\t",,terminal_output +16017,13608858,"TERMINAL",0,0,"833",,terminal_output +16018,13609894,"TERMINAL",0,0,"9\t",,terminal_output +16019,13609895,"TERMINAL",0,0,"944",,terminal_output +16020,13610887,"TERMINAL",0,0,"3:00\t",,terminal_output +16021,13610908,"TERMINAL",0,0,"3:0055",,terminal_output 
+16022,13611872,"TERMINAL",0,0,"1\t",,terminal_output +16023,13611929,"TERMINAL",0,0,"166",,terminal_output +16024,13612917,"TERMINAL",0,0,"2\t",,terminal_output +16025,13612970,"TERMINAL",0,0,"277",,terminal_output +16026,13614153,"TERMINAL",0,0,"3\t",,terminal_output +16027,13614153,"TERMINAL",0,0,"388",,terminal_output +16028,13615273,"TERMINAL",0,0,"4\t",,terminal_output +16029,13615273,"TERMINAL",0,0,"499",,terminal_output +16030,13616135,"TERMINAL",0,0,"5\t",,terminal_output +16031,13616135,"TERMINAL",0,0,"51010",,terminal_output +16032,13616695,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2125,0,"",shellscript,selection_mouse +16033,13616837,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2124,3,"cos",shellscript,selection_mouse +16034,13617088,"TERMINAL",0,0,"6\t",,terminal_output +16035,13617144,"TERMINAL",0,0,"611",,terminal_output +16036,13617769,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2124,3,"",shellscript,content +16037,13618038,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2124,0,"w",shellscript,content +16038,13618039,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2125,0,"",shellscript,selection_keyboard +16039,13618153,"TERMINAL",0,0,"7\t",,terminal_output +16040,13618158,"TERMINAL",0,0,"722",,terminal_output +16041,13618278,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2125,0,"s",shellscript,content +16042,13618279,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2126,0,"",shellscript,selection_keyboard +16043,13618576,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2126,0,"d",shellscript,content +16044,13618577,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2127,0,"",shellscript,selection_keyboard +16045,13619304,"TERMINAL",0,0,"8\t",,terminal_output +16046,13619305,"TERMINAL",0,0,"833",,terminal_output +16047,13620176,"TERMINAL",0,0,"9\t",,terminal_output +16048,13620228,"TERMINAL",0,0,"944",,terminal_output +16049,13620823,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1998,0,"",shellscript,selection_mouse +16050,13621241,"TERMINAL",0,0,"10\t",,terminal_output +16051,13621259,"TERMINAL",0,0,"1055",,terminal_output +16052,13621555,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1992,0,"",shellscript,selection_mouse +16053,13621936,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2040,0,"",shellscript,selection_mouse +16054,13622079,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2034,7,"coinrun",shellscript,selection_mouse +16055,13622255,"TERMINAL",0,0,"1\t",,terminal_output +16056,13622304,"TERMINAL",0,0,"177",,terminal_output +16057,13622627,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1998,0,"",shellscript,selection_mouse +16058,13622770,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1993,8,"job_name",shellscript,selection_mouse +16059,13623553,"TERMINAL",0,0,"2\t",,terminal_output +16060,13623553,"TERMINAL",0,0,"388",,terminal_output +16061,13624385,"TERMINAL",0,0,"4\t",,terminal_output +16062,13624385,"TERMINAL",0,0,"499",,terminal_output +16063,13625451,"TERMINAL",0,0,"5\t",,terminal_output +16064,13625452,"TERMINAL",0,0,"52020",,terminal_output +16065,13626585,"TERMINAL",0,0,"6\t",,terminal_output +16066,13626585,"TERMINAL",0,0,"611",,terminal_output +16067,13627116,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",426,0,"",shellscript,selection_mouse 
+16068,13627294,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",426,1,"c",shellscript,selection_mouse +16069,13627295,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",426,2,"co",shellscript,selection_mouse +16070,13627320,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",426,3,"coi",shellscript,selection_mouse +16071,13627359,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",426,4,"coin",shellscript,selection_mouse +16072,13627465,"TERMINAL",0,0,"7\t",,terminal_output +16073,13627505,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",426,5,"coinr",shellscript,selection_mouse +16074,13627506,"TERMINAL",0,0,"722",,terminal_output +16075,13627722,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",426,6,"coinru",shellscript,selection_mouse +16076,13627930,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",426,7,"coinrun",shellscript,selection_mouse +16077,13628464,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",426,7,"",shellscript,content +16078,13628553,"TERMINAL",0,0,"8\t",,terminal_output +16079,13628553,"TERMINAL",0,0,"833",,terminal_output +16080,13629523,"TERMINAL",0,0,"9\t",,terminal_output +16081,13629620,"TERMINAL",0,0,"944",,terminal_output +16082,13629628,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",426,0,"c",shellscript,content +16083,13629629,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",427,0,"",shellscript,selection_keyboard +16084,13629726,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",427,0,"o",shellscript,content +16085,13629727,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",428,0,"",shellscript,selection_keyboard +16086,13629945,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",428,0,"i",shellscript,content +16087,13629946,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",429,0,"",shellscript,selection_keyboard +16088,13629980,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",429,0,"n",shellscript,content +16089,13629981,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",430,0,"",shellscript,selection_keyboard +16090,13630554,"TERMINAL",0,0,"20\t",,terminal_output +16091,13630597,"TERMINAL",0,0,"2055",,terminal_output +16092,13631071,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",430,0,"r",shellscript,content +16093,13631072,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",431,0,"",shellscript,selection_keyboard +16094,13631155,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",431,0,"u",shellscript,content +16095,13631156,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",432,0,"",shellscript,selection_keyboard +16096,13631196,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",432,0,"n",shellscript,content +16097,13631197,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",433,0,"",shellscript,selection_keyboard +16098,13631603,"TERMINAL",0,0,"1\t",,terminal_output +16099,13631638,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",434,0,"",shellscript,selection_command +16100,13631643,"TERMINAL",0,0,"166",,terminal_output +16101,13631786,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",435,0,"",shellscript,selection_command +16102,13632238,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",434,0,"",shellscript,selection_command 
+16103,13632679,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",434,15,"",shellscript,content +16104,13632715,"TERMINAL",0,0,"2\t",,terminal_output +16105,13632752,"TERMINAL",0,0,"277",,terminal_output +16106,13633220,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",434,1,"",shellscript,content +16107,13633393,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",434,1,"",shellscript,content +16108,13633680,"TERMINAL",0,0,"3\t",,terminal_output +16109,13633753,"TERMINAL",0,0,"388",,terminal_output +16110,13634172,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",433,1,"",shellscript,content +16111,13634696,"TERMINAL",0,0,"4\t",,terminal_output +16112,13634753,"TERMINAL",0,0,"499",,terminal_output +16113,13635729,"TERMINAL",0,0,"5\t",,terminal_output +16114,13635793,"TERMINAL",0,0,"53030",,terminal_output +16115,13636764,"TERMINAL",0,0,"6\t",,terminal_output +16116,13636864,"TERMINAL",0,0,"611",,terminal_output +16117,13637801,"TERMINAL",0,0,"7\t",,terminal_output +16118,13637863,"TERMINAL",0,0,"722",,terminal_output +16119,13638877,"TERMINAL",0,0,"8\t",,terminal_output +16120,13638903,"TERMINAL",0,0,"833",,terminal_output +16121,13639902,"TERMINAL",0,0,"9\t",,terminal_output +16122,13639989,"TERMINAL",0,0,"944",,terminal_output +16123,13641010,"TERMINAL",0,0,"30\t",,terminal_output +16124,13641011,"TERMINAL",0,0,"3055",,terminal_output +16125,13642037,"TERMINAL",0,0,"1\t",,terminal_output +16126,13642037,"TERMINAL",0,0,"166",,terminal_output +16127,13642983,"TERMINAL",0,0,"2\t",,terminal_output +16128,13643059,"TERMINAL",0,0,"277",,terminal_output +16129,13644019,"TERMINAL",0,0,"3\t",,terminal_output +16130,13644130,"TERMINAL",0,0,"388",,terminal_output +16131,13645054,"TERMINAL",0,0,"4\t",,terminal_output +16132,13645161,"TERMINAL",0,0,"499",,terminal_output +16133,13645602,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2124,0,"",shellscript,selection_mouse +16134,13646095,"TERMINAL",0,0,"5\t",,terminal_output +16135,13646180,"TERMINAL",0,0,"54040",,terminal_output +16136,13646674,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2123,0,"",shellscript,selection_command +16137,13647195,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2112,31,"",shellscript,content +16138,13647250,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2116,0,"",shellscript,selection_command +16139,13647335,"TERMINAL",0,0,"6\t",,terminal_output +16140,13647336,"TERMINAL",0,0,"611",,terminal_output +16141,13647655,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2092,0,"",shellscript,selection_command +16142,13647809,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2070,0,"",shellscript,selection_command +16143,13647973,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2044,0,"",shellscript,selection_command +16144,13648136,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2008,0,"",shellscript,selection_command +16145,13648356,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1965,0,"",shellscript,selection_command +16146,13648365,"TERMINAL",0,0,"733",,terminal_output +16147,13648365,"TERMINAL",0,0,"72",,terminal_output +16148,13649407,"TERMINAL",0,0,"9\t",,terminal_output +16149,13649444,"TERMINAL",0,0,"944",,terminal_output +16150,13650485,"TERMINAL",0,0,"40\t",,terminal_output +16151,13650485,"TERMINAL",0,0,"4055",,terminal_output +16152,13651470,"TERMINAL",0,0,"1\t",,terminal_output 
+16153,13651510,"TERMINAL",0,0,"166",,terminal_output +16154,13652511,"TERMINAL",0,0,"2\t",,terminal_output +16155,13652546,"TERMINAL",0,0,"277",,terminal_output +16156,13653553,"TERMINAL",0,0,"3\t",,terminal_output +16157,13653597,"TERMINAL",0,0,"388",,terminal_output +16158,13654601,"TERMINAL",0,0,"4\t",,terminal_output +16159,13654645,"TERMINAL",0,0,"499",,terminal_output +16160,13655627,"TERMINAL",0,0,"5\t",,terminal_output +16161,13655672,"TERMINAL",0,0,"55050",,terminal_output +16162,13656661,"TERMINAL",0,0,"6\t",,terminal_output +16163,13656752,"TERMINAL",0,0,"611",,terminal_output +16164,13657692,"TERMINAL",0,0,"7\t",,terminal_output +16165,13657745,"TERMINAL",0,0,"722",,terminal_output +16166,13658827,"TERMINAL",0,0,"8\t",,terminal_output +16167,13658827,"TERMINAL",0,0,"833",,terminal_output +16168,13659858,"TERMINAL",0,0,"9\t",,terminal_output +16169,13659858,"TERMINAL",0,0,"944",,terminal_output +16170,13660886,"TERMINAL",0,0,"50\t",,terminal_output +16171,13660886,"TERMINAL",0,0,"5055",,terminal_output +16172,13661841,"TERMINAL",0,0,"1\t",,terminal_output +16173,13661937,"TERMINAL",0,0,"166",,terminal_output +16174,13662882,"TERMINAL",0,0,"2\t",,terminal_output +16175,13663001,"TERMINAL",0,0,"277",,terminal_output +16176,13663951,"TERMINAL",0,0,"3\t",,terminal_output +16177,13663992,"TERMINAL",0,0,"388",,terminal_output +16178,13665074,"TERMINAL",0,0,"4\t",,terminal_output +16179,13665075,"TERMINAL",0,0,"499",,terminal_output +16180,13666068,"TERMINAL",0,0,"5\t",,terminal_output +16181,13666081,"TERMINAL",0,0,"58:008:00",,terminal_output +16182,13667054,"TERMINAL",0,0,"6\t",,terminal_output +16183,13667122,"TERMINAL",0,0,"611",,terminal_output +16184,13668080,"TERMINAL",0,0,"7\t",,terminal_output +16185,13668170,"TERMINAL",0,0,"722",,terminal_output +16186,13669121,"TERMINAL",0,0,"8\t",,terminal_output +16187,13669210,"TERMINAL",0,0,"833",,terminal_output +16188,13670168,"TERMINAL",0,0,"9\t",,terminal_output +16189,13670287,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2162,0,"",shellscript,selection_mouse +16190,13670325,"TERMINAL",0,0,"944",,terminal_output +16191,13670959,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2147,0,"",shellscript,selection_mouse +16192,13670966,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",2146,0,"",shellscript,selection_command +16193,13671229,"TERMINAL",0,0,"4:00\t",,terminal_output +16194,13671311,"TERMINAL",0,0,"4:0066",,terminal_output +16195,13672348,"TERMINAL",0,0,"1\t",,terminal_output +16196,13672348,"TERMINAL",0,0,"277",,terminal_output +16197,13673452,"TERMINAL",0,0,"2\t",,terminal_output +16198,13673453,"TERMINAL",0,0,"388",,terminal_output +16199,13674416,"TERMINAL",0,0,"4\t",,terminal_output +16200,13674454,"TERMINAL",0,0,"499",,terminal_output +16201,13675349,"TERMINAL",0,0,"5\t",,terminal_output +16202,13675485,"TERMINAL",0,0,"51010",,terminal_output +16203,13676443,"TERMINAL",0,0,"6\t",,terminal_output +16204,13676500,"TERMINAL",0,0,"611",,terminal_output +16205,13677467,"TERMINAL",0,0,"7\t",,terminal_output +16206,13677538,"TERMINAL",0,0,"722",,terminal_output +16207,13678496,"TERMINAL",0,0,"8\t",,terminal_output +16208,13678592,"TERMINAL",0,0,"833",,terminal_output +16209,13679522,"TERMINAL",0,0,"9\t",,terminal_output +16210,13679624,"TERMINAL",0,0,"944",,terminal_output +16211,13680552,"TERMINAL",0,0,"10\t",,terminal_output +16212,13680696,"TERMINAL",0,0,"1055",,terminal_output +16213,13681583,"TERMINAL",0,0,"1\t",,terminal_output 
+16214,13681706,"TERMINAL",0,0,"166",,terminal_output +16215,13682618,"TERMINAL",0,0,"2\t",,terminal_output +16216,13682754,"TERMINAL",0,0,"277",,terminal_output +16217,13683660,"TERMINAL",0,0,"3\t",,terminal_output +16218,13683806,"TERMINAL",0,0,"388",,terminal_output +16219,13684700,"TERMINAL",0,0,"4\t",,terminal_output +16220,13684848,"TERMINAL",0,0,"499",,terminal_output +16221,13685862,"TERMINAL",0,0,"5\t",,terminal_output +16222,13685876,"TERMINAL",0,0,"52020",,terminal_output +16223,13686802,"TERMINAL",0,0,"6\t",,terminal_output +16224,13686942,"TERMINAL",0,0,"611",,terminal_output +16225,13687818,"TERMINAL",0,0,"7\t",,terminal_output +16226,13687960,"TERMINAL",0,0,"722",,terminal_output +16227,13688319,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1589,0,"",shellscript,selection_mouse +16228,13688535,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1589,2,"_f",shellscript,selection_mouse +16229,13688605,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1589,3,"_fu",shellscript,selection_mouse +16230,13688606,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1589,4,"_ful",shellscript,selection_mouse +16231,13688606,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1589,5,"_full",shellscript,selection_mouse +16232,13688655,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1589,6,"_full_",shellscript,selection_mouse +16233,13688655,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1589,7,"_full_p",shellscript,selection_mouse +16234,13688693,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1589,8,"_full_pr",shellscript,selection_mouse +16235,13688783,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1589,9,"_full_pre",shellscript,selection_mouse +16236,13688816,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1589,11,"_full_prec.",shellscript,selection_mouse +16237,13688843,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1589,12,"_full_prec.p",shellscript,selection_mouse +16238,13688888,"TERMINAL",0,0,"8\t",,terminal_output +16239,13689030,"TERMINAL",0,0,"833",,terminal_output +16240,13689102,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1589,11,"_full_prec.",shellscript,selection_mouse +16241,13689193,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1589,10,"_full_prec",shellscript,selection_mouse +16242,13689960,"TERMINAL",0,0,"9\t",,terminal_output +16243,13690041,"TERMINAL",0,0,"944",,terminal_output +16244,13690253,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1589,10,"",shellscript,content +16245,13690992,"TERMINAL",0,0,"20\t",,terminal_output +16246,13691088,"TERMINAL",0,0,"2055",,terminal_output +16247,13691979,"TERMINAL",0,0,"1\t",,terminal_output +16248,13692131,"TERMINAL",0,0,"166",,terminal_output +16249,13693059,"TERMINAL",0,0,"2\t",,terminal_output +16250,13693170,"TERMINAL",0,0,"277",,terminal_output +16251,13694170,"TERMINAL",0,0,"3\t",,terminal_output +16252,13694212,"TERMINAL",0,0,"388",,terminal_output +16253,13695187,"TERMINAL",0,0,"4\t",,terminal_output +16254,13695305,"TERMINAL",0,0,"499",,terminal_output +16255,13696136,"TERMINAL",0,0,"5\t",,terminal_output +16256,13696313,"TERMINAL",0,0,"53131",,terminal_output +16257,13697172,"TERMINAL",0,0,"6\t",,terminal_output +16258,13697354,"TERMINAL",0,0,"722",,terminal_output +16259,13698231,"TERMINAL",0,0,"7\t",,terminal_output +16260,13698400,"TERMINAL",0,0,"833",,terminal_output 
+16261,13699282,"TERMINAL",0,0,"8\t",,terminal_output +16262,13699436,"TERMINAL",0,0,"944",,terminal_output +16263,13700300,"TERMINAL",0,0,"9\t",,terminal_output +16264,13700492,"TERMINAL",0,0,"3055",,terminal_output +16265,13701327,"TERMINAL",0,0,"31\t",,terminal_output +16266,13701522,"TERMINAL",0,0,"166",,terminal_output +16267,13702460,"TERMINAL",0,0,"2\t",,terminal_output +16268,13702596,"TERMINAL",0,0,"277",,terminal_output +16269,13703485,"TERMINAL",0,0,"3\t",,terminal_output +16270,13703621,"TERMINAL",0,0,"388",,terminal_output +16271,13704449,"TERMINAL",0,0,"4\t",,terminal_output +16272,13704650,"TERMINAL",0,0,"499",,terminal_output +16273,13705483,"TERMINAL",0,0,"5\t",,terminal_output +16274,13705693,"TERMINAL",0,0,"54040",,terminal_output +16275,13706515,"TERMINAL",0,0,"6\t",,terminal_output +16276,13706737,"TERMINAL",0,0,"611",,terminal_output +16277,13707557,"TERMINAL",0,0,"7\t",,terminal_output +16278,13707776,"TERMINAL",0,0,"722",,terminal_output +16279,13708594,"TERMINAL",0,0,"8\t",,terminal_output +16280,13708822,"TERMINAL",0,0,"833",,terminal_output +16281,13711264,"TERMINAL",0,0,"955",,terminal_output +16282,13711265,"TERMINAL",0,0,"90",,terminal_output +16283,13712188,"TERMINAL",0,0,"4166",,terminal_output +16284,13712190,"TERMINAL",0,0,"4119",,terminal_output +16285,13713126,"TERMINAL",0,0,"277",,terminal_output +16286,13713126,"TERMINAL",0,0,"2\t",,terminal_output +16287,13714178,"TERMINAL",0,0,"3\t",,terminal_output +16288,13714195,"TERMINAL",0,0,"388",,terminal_output +16289,13715255,"TERMINAL",0,0,"4\t",,terminal_output +16290,13715269,"TERMINAL",0,0,"499",,terminal_output +16291,13716248,"TERMINAL",0,0,"5\t",,terminal_output +16292,13716303,"TERMINAL",0,0,"55151",,terminal_output +16293,13717414,"TERMINAL",0,0,"6\t",,terminal_output +16294,13717414,"TERMINAL",0,0,"722",,terminal_output +16295,13718354,"TERMINAL",0,0,"8\t",,terminal_output +16296,13718354,"TERMINAL",0,0,"833",,terminal_output +16297,13719417,"TERMINAL",0,0,"9\t",,terminal_output +16298,13719419,"TERMINAL",0,0,"944",,terminal_output +16299,13720411,"TERMINAL",0,0,"50\t",,terminal_output +16300,13720439,"TERMINAL",0,0,"5055",,terminal_output +16301,13721467,"TERMINAL",0,0,"1\t",,terminal_output +16302,13721477,"TERMINAL",0,0,"166",,terminal_output +16303,13722510,"TERMINAL",0,0,"2\t",,terminal_output +16304,13722528,"TERMINAL",0,0,"277",,terminal_output +16305,13723521,"TERMINAL",0,0,"3\t",,terminal_output +16306,13723568,"TERMINAL",0,0,"388",,terminal_output +16307,13724562,"TERMINAL",0,0,"4\t",,terminal_output +16308,13724608,"TERMINAL",0,0,"499",,terminal_output +16309,13725624,"TERMINAL",0,0,"5\t",,terminal_output +16310,13725670,"TERMINAL",0,0,"59:009:00",,terminal_output +16311,13726639,"TERMINAL",0,0,"6\t",,terminal_output +16312,13726714,"TERMINAL",0,0,"611",,terminal_output +16313,13727678,"TERMINAL",0,0,"7\t",,terminal_output +16314,13727737,"TERMINAL",0,0,"722",,terminal_output +16315,13728767,"TERMINAL",0,0,"8\t",,terminal_output +16316,13728779,"TERMINAL",0,0,"833",,terminal_output +16317,13729797,"TERMINAL",0,0,"9\t",,terminal_output +16318,13729825,"TERMINAL",0,0,"944",,terminal_output +16319,13730911,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1130,0,"",shellscript,selection_mouse +16320,13730936,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_wsd_3e-6.sbatch",1129,0,"",shellscript,selection_command +16321,13730954,"TERMINAL",0,0,"5:00\t",,terminal_output +16322,13730955,"TERMINAL",0,0,"5:0055",,terminal_output 
+16323,13731846,"TERMINAL",0,0,"1\t",,terminal_output +16324,13731937,"TERMINAL",0,0,"166",,terminal_output +16325,13732887,"TERMINAL",0,0,"2\t",,terminal_output +16326,13732950,"TERMINAL",0,0,"277",,terminal_output +16327,13733992,"TERMINAL",0,0,"3\t",,terminal_output +16328,13734001,"TERMINAL",0,0,"388",,terminal_output +16329,13734956,"TERMINAL",0,0,"4\t",,terminal_output +16330,13735055,"TERMINAL",0,0,"499",,terminal_output +16331,13736004,"TERMINAL",0,0,"5\t",,terminal_output +16332,13736110,"TERMINAL",0,0,"51010",,terminal_output +16333,13737418,"TERMINAL",0,0,"6\t",,terminal_output +16334,13737436,"TERMINAL",0,0,"611",,terminal_output +16335,13738076,"TERMINAL",0,0,"7\t",,terminal_output +16336,13738170,"TERMINAL",0,0,"722",,terminal_output +16337,13739220,"TERMINAL",0,0,"8\t",,terminal_output +16338,13739221,"TERMINAL",0,0,"833",,terminal_output +16339,13740152,"TERMINAL",0,0,"9\t",,terminal_output +16340,13740250,"TERMINAL",0,0,"944",,terminal_output +16341,13741260,"TERMINAL",0,0,"10\t",,terminal_output +16342,13741296,"TERMINAL",0,0,"1066",,terminal_output +16343,13742227,"TERMINAL",0,0,"1\t",,terminal_output +16344,13742377,"TERMINAL",0,0,"277",,terminal_output +16345,13743177,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=1-00:00:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=36 \\n --image_height=64 \\n --image_width=64 \\n --init_lr=0 \\n --max_lr=3e-6 \\n --decay_end=0 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --ffn_dim=512 \\n --num_blocks=8 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ""coinrun lam_ablation"" \\n --entity instant-uv \\n --project jafar \\n --lr_schedule wsd \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +16346,13743285,"TERMINAL",0,0,"2\t",,terminal_output 
+16347,13743386,"TERMINAL",0,0,"388",,terminal_output +16348,13744387,"TERMINAL",0,0,"3\t",,terminal_output +16349,13744520,"TERMINAL",0,0,"499",,terminal_output +16350,13745370,"TERMINAL",0,0,"5\t",,terminal_output +16351,13745514,"TERMINAL",0,0,"52020",,terminal_output +16352,13746464,"TERMINAL",0,0,"6\t",,terminal_output +16353,13746511,"TERMINAL",0,0,"611",,terminal_output +16354,13747190,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",433,0,"",shellscript,selection_mouse +16355,13747256,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",432,0,"",shellscript,selection_command +16356,13747442,"TERMINAL",0,0,"7\t",,terminal_output +16357,13747597,"TERMINAL",0,0,"722",,terminal_output +16358,13748532,"TERMINAL",0,0,"8\t",,terminal_output +16359,13748586,"TERMINAL",0,0,"833",,terminal_output +16360,13749627,"TERMINAL",0,0,"9\t",,terminal_output +16361,13749634,"TERMINAL",0,0,"944",,terminal_output +16362,13750536,"TERMINAL",0,0,"20\t",,terminal_output +16363,13750670,"TERMINAL",0,0,"2055",,terminal_output +16364,13751621,"TERMINAL",0,0,"1\t",,terminal_output +16365,13751769,"TERMINAL",0,0,"166",,terminal_output +16366,13751979,"slurm/jobs/mihir/horeka/lam/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +16367,13752637,"TERMINAL",0,0,"2\t",,terminal_output +16368,13752914,"TERMINAL",0,0,"277",,terminal_output +16369,13754153,"TERMINAL",0,0,"3\t",,terminal_output +16370,13754154,"TERMINAL",0,0,"388",,terminal_output +16371,13754970,"TERMINAL",0,0,"4\t",,terminal_output +16372,13754970,"TERMINAL",0,0,"499",,terminal_output +16373,13755588,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-base.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\n#SBATCH --job-name=train_lam_minecraft_8node_darkness_filter_37M\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\n# slurm_job_id=$SLURM_JOB_ID\nslurm_job_id=3454953\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n --restore_ckpt \\n --wandb_id $slurm_job_id \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --darkness_threshold=50 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n 
--name=lam-minecraft-8-node-darkness-filter-37M-$slurm_job_id \\n --tags lam minecraft 8-node darkness-filter 37M \\n --entity instant-uv \\n --project jafar \\n --num_latents=100 \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +16374,13756066,"TERMINAL",0,0,"5\t",,terminal_output +16375,13756066,"TERMINAL",0,0,"53030",,terminal_output +16376,13756848,"TERMINAL",0,0,"6\t",,terminal_output +16377,13756963,"TERMINAL",0,0,"611",,terminal_output +16378,13757736,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-base.sbatch",436,0,"",shellscript,selection_mouse +16379,13757824,"TERMINAL",0,0,"7\t",,terminal_output +16380,13757871,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-base.sbatch",436,1,"3",shellscript,selection_mouse +16381,13757872,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-base.sbatch",436,2,"37",shellscript,selection_mouse +16382,13757913,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-base.sbatch",436,3,"37M",shellscript,selection_mouse +16383,13758033,"TERMINAL",0,0,"722",,terminal_output +16384,13758850,"TERMINAL",0,0,"8\t",,terminal_output +16385,13759050,"TERMINAL",0,0,"833",,terminal_output +16386,13760000,"TERMINAL",0,0,"9\t",,terminal_output +16387,13760066,"TERMINAL",0,0,"944",,terminal_output +16388,13760466,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-base.sbatch",438,0,"",shellscript,selection_mouse +16389,13760874,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-base.sbatch",439,0,"",shellscript,selection_mouse +16390,13760876,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-base.sbatch",438,0,"",shellscript,selection_command +16391,13760964,"TERMINAL",0,0,"30\t",,terminal_output +16392,13761117,"TERMINAL",0,0,"3055",,terminal_output +16393,13761403,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-base.sbatch",439,0,"",shellscript,selection_mouse +16394,13761418,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-base.sbatch",438,0,"",shellscript,selection_command +16395,13761532,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-base.sbatch",438,1,"M",shellscript,selection_mouse +16396,13761537,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-base.sbatch",439,0,"",shellscript,selection_command +16397,13761684,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-base.sbatch",438,1,"M",shellscript,selection_mouse +16398,13761685,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-base.sbatch",437,2,"7M",shellscript,selection_mouse +16399,13761826,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-base.sbatch",436,3,"37M",shellscript,selection_mouse +16400,13761969,"TERMINAL",0,0,"1\t",,terminal_output +16401,13762151,"TERMINAL",0,0,"166",,terminal_output +16402,13763100,"TERMINAL",0,0,"2\t",,terminal_output +16403,13763187,"TERMINAL",0,0,"277",,terminal_output +16404,13764045,"TERMINAL",0,0,"3\t",,terminal_output +16405,13764233,"TERMINAL",0,0,"388",,terminal_output +16406,13765670,"TERMINAL",0,0,"4\t",,terminal_output +16407,13765697,"TERMINAL",0,0,"44040",,terminal_output +16408,13766127,"TERMINAL",0,0,"5\t",,terminal_output 
+16409,13766368,"slurm/jobs/mihir/horeka/lam/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +16410,13766534,"TERMINAL",0,0,"611",,terminal_output +16411,13767162,"TERMINAL",0,0,"6\t",,terminal_output +16412,13767388,"TERMINAL",0,0,"722",,terminal_output +16413,13767770,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",0,0,"",shellscript,tab +16414,13768220,"TERMINAL",0,0,"7\t",,terminal_output +16415,13768488,"TERMINAL",0,0,"833",,terminal_output +16416,13768705,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",433,0,"",shellscript,selection_command +16417,13769270,"TERMINAL",0,0,"8\t",,terminal_output +16418,13769464,"TERMINAL",0,0,"944",,terminal_output +16419,13769549,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",433,0,"_",shellscript,content +16420,13769550,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",434,0,"",shellscript,selection_keyboard +16421,13769771,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",434,0,"37M",shellscript,content +16422,13770308,"TERMINAL",0,0,"9\t",,terminal_output +16423,13770518,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",436,0,"",shellscript,selection_command +16424,13770527,"TERMINAL",0,0,"4055",,terminal_output +16425,13771355,"TERMINAL",0,0,"41\t",,terminal_output +16426,13771607,"TERMINAL",0,0,"166",,terminal_output +16427,13772389,"TERMINAL",0,0,"2\t",,terminal_output +16428,13772624,"TERMINAL",0,0,"277",,terminal_output +16429,13773516,"TERMINAL",0,0,"3\t",,terminal_output +16430,13773652,"TERMINAL",0,0,"388",,terminal_output +16431,13774438,"TERMINAL",0,0,"4\t",,terminal_output +16432,13774670,"TERMINAL",0,0,"499",,terminal_output +16433,13775472,"TERMINAL",0,0,"5\t",,terminal_output +16434,13775780,"TERMINAL",0,0,"55050",,terminal_output +16435,13776514,"TERMINAL",0,0,"6\t",,terminal_output +16436,13776762,"TERMINAL",0,0,"611",,terminal_output +16437,13777555,"TERMINAL",0,0,"7\t",,terminal_output +16438,13777826,"TERMINAL",0,0,"722",,terminal_output +16439,13778604,"TERMINAL",0,0,"8\t",,terminal_output +16440,13778950,"TERMINAL",0,0,"833",,terminal_output +16441,13779602,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1950,0,"",shellscript,selection_mouse +16442,13779632,"TERMINAL",0,0,"9\t",,terminal_output +16443,13779903,"TERMINAL",0,0,"944",,terminal_output +16444,13780304,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1945,0,"",shellscript,selection_mouse +16445,13780689,"TERMINAL",0,0,"50\t",,terminal_output +16446,13780972,"TERMINAL",0,0,"5055",,terminal_output +16447,13781127,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1947,0,"",shellscript,selection_mouse +16448,13781811,"TERMINAL",0,0,"1\t",,terminal_output +16449,13782004,"TERMINAL",0,0,"166",,terminal_output +16450,13782477,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1914,41,"",shellscript,content +16451,13782502,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1918,0,"",shellscript,selection_command +16452,13782821,"TERMINAL",0,0,"2\t",,terminal_output +16453,13783065,"TERMINAL",0,0,"277",,terminal_output +16454,13783486,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1878,0,"",shellscript,selection_mouse +16455,13783865,"TERMINAL",0,0,"3\t",,terminal_output +16456,13784000,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1927,0,"",shellscript,selection_mouse +16457,13784086,"TERMINAL",0,0,"388",,terminal_output 
+16458,13784625,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1913,0,"",shellscript,selection_mouse +16459,13784637,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1912,0,"",shellscript,selection_command +16460,13784829,"TERMINAL",0,0,"4\t",,terminal_output +16461,13785136,"TERMINAL",0,0,"499",,terminal_output +16462,13785409,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1770,0,"",shellscript,selection_mouse +16463,13785872,"TERMINAL",0,0,"5\t",,terminal_output +16464,13785971,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1809,0,"",shellscript,selection_mouse +16465,13786173,"TERMINAL",0,0,"520:00:0020:00:00",,terminal_output +16466,13786512,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1792,0,"",shellscript,selection_mouse +16467,13786512,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1791,0,"",shellscript,selection_command +16468,13786918,"TERMINAL",0,0,"6\t",,terminal_output +16469,13787002,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1775,18,"",shellscript,content +16470,13787023,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1779,0,"",shellscript,selection_command +16471,13787127,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1799,0,"",shellscript,selection_command +16472,13787227,"TERMINAL",0,0,"611",,terminal_output +16473,13787595,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1795,20,"",shellscript,content +16474,13787619,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1799,0,"",shellscript,selection_command +16475,13787988,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1779,0,"",shellscript,selection_command +16476,13787988,"TERMINAL",0,0,"7\t",,terminal_output +16477,13788189,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1756,0,"",shellscript,selection_command +16478,13788267,"TERMINAL",0,0,"722",,terminal_output +16479,13788370,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1732,0,"",shellscript,selection_command +16480,13788672,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1710,0,"",shellscript,selection_command +16481,13788869,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1677,0,"",shellscript,selection_command +16482,13789001,"TERMINAL",0,0,"8\t",,terminal_output +16483,13789185,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1710,0,"",shellscript,selection_command +16484,13789330,"TERMINAL",0,0,"844",,terminal_output +16485,13789703,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1711,0,"",shellscript,selection_command +16486,13790067,"TERMINAL",0,0,"9\t",,terminal_output +16487,13790194,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1712,0,"",shellscript,selection_command +16488,13790249,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1713,0,"",shellscript,selection_command +16489,13790289,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1714,0,"",shellscript,selection_command +16490,13790289,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1715,0,"",shellscript,selection_command +16491,13790320,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1716,0,"",shellscript,selection_command +16492,13790376,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1717,0,"",shellscript,selection_command +16493,13790402,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1718,0,"",shellscript,selection_command 
+16494,13790403,"TERMINAL",0,0,"6:0055",,terminal_output +16495,13790425,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1719,0,"",shellscript,selection_command +16496,13790454,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1720,0,"",shellscript,selection_command +16497,13790479,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1721,0,"",shellscript,selection_command +16498,13790503,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1722,0,"",shellscript,selection_command +16499,13790630,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1723,0,"",shellscript,selection_command +16500,13791104,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1723,3,"",shellscript,content +16501,13791105,"TERMINAL",0,0,"6:00\t",,terminal_output +16502,13791395,"TERMINAL",0,0,"166",,terminal_output +16503,13792131,"TERMINAL",0,0,"1\t",,terminal_output +16504,13792486,"TERMINAL",0,0,"277",,terminal_output +16505,13793006,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1723,0,"2",shellscript,content +16506,13793007,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1724,0,"",shellscript,selection_keyboard +16507,13793135,"TERMINAL",0,0,"2\t",,terminal_output +16508,13793206,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1724,0,"4",shellscript,content +16509,13793206,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1725,0,"",shellscript,selection_keyboard +16510,13793330,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1725,0," ",shellscript,content +16511,13793331,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1726,0,"",shellscript,selection_keyboard +16512,13793484,"TERMINAL",0,0,"388",,terminal_output +16513,13794115,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1725,0,"",shellscript,selection_command +16514,13794201,"TERMINAL",0,0,"3\t",,terminal_output +16515,13794569,"TERMINAL",0,0,"499",,terminal_output +16516,13795226,"TERMINAL",0,0,"4\t",,terminal_output +16517,13795604,"TERMINAL",0,0,"51010",,terminal_output +16518,13796258,"TERMINAL",0,0,"5\t",,terminal_output +16519,13796617,"TERMINAL",0,0,"611",,terminal_output +16520,13797392,"TERMINAL",0,0,"6\t",,terminal_output +16521,13797664,"TERMINAL",0,0,"722",,terminal_output +16522,13798443,"TERMINAL",0,0,"8\t",,terminal_output +16523,13798810,"TERMINAL",0,0,"833",,terminal_output +16524,13799388,"TERMINAL",0,0,"9\t",,terminal_output +16525,13799749,"TERMINAL",0,0,"944",,terminal_output +16526,13800435,"TERMINAL",0,0,"10\t",,terminal_output +16527,13800798,"TERMINAL",0,0,"1055",,terminal_output +16528,13801478,"TERMINAL",0,0,"1\t",,terminal_output +16529,13801836,"TERMINAL",0,0,"166",,terminal_output +16530,13802534,"TERMINAL",0,0,"2\t",,terminal_output +16531,13802888,"TERMINAL",0,0,"277",,terminal_output +16532,13803541,"TERMINAL",0,0,"3\t",,terminal_output +16533,13803923,"TERMINAL",0,0,"388",,terminal_output +16534,13804579,"TERMINAL",0,0,"4\t",,terminal_output +16535,13805047,"TERMINAL",0,0,"499",,terminal_output +16536,13805619,"TERMINAL",0,0,"5\t",,terminal_output +16537,13806069,"TERMINAL",0,0,"52020",,terminal_output +16538,13806668,"TERMINAL",0,0,"6\t",,terminal_output +16539,13807108,"TERMINAL",0,0,"611",,terminal_output +16540,13807702,"TERMINAL",0,0,"7\t",,terminal_output +16541,13808129,"TERMINAL",0,0,"722",,terminal_output +16542,13808742,"TERMINAL",0,0,"8\t",,terminal_output +16543,13809256,"TERMINAL",0,0,"833",,terminal_output 
+16544,13809779,"TERMINAL",0,0,"9\t",,terminal_output +16545,13810278,"TERMINAL",0,0,"944",,terminal_output +16546,13810899,"TERMINAL",0,0,"20\t",,terminal_output +16547,13811216,"TERMINAL",0,0,"2055",,terminal_output +16548,13811857,"TERMINAL",0,0,"1\t",,terminal_output +16549,13812326,"TERMINAL",0,0,"166",,terminal_output +16550,13812882,"TERMINAL",0,0,"2\t",,terminal_output +16551,13813291,"TERMINAL",0,0,"288",,terminal_output +16552,13813970,"TERMINAL",0,0,"3\t",,terminal_output +16553,13814375,"TERMINAL",0,0,"499",,terminal_output +16554,13814989,"TERMINAL",0,0,"4\t",,terminal_output +16555,13815367,"TERMINAL",0,0,"53030",,terminal_output +16556,13815990,"TERMINAL",0,0,"5\t",,terminal_output +16557,13816436,"TERMINAL",0,0,"611",,terminal_output +16558,13817099,"TERMINAL",0,0,"6\t",,terminal_output +16559,13817486,"TERMINAL",0,0,"722",,terminal_output +16560,13818066,"TERMINAL",0,0,"7\t",,terminal_output +16561,13818470,"TERMINAL",0,0,"833",,terminal_output +16562,13819121,"TERMINAL",0,0,"8\t",,terminal_output +16563,13819518,"TERMINAL",0,0,"944",,terminal_output +16564,13820214,"TERMINAL",0,0,"9\t",,terminal_output +16565,13820538,"TERMINAL",0,0,"3055",,terminal_output +16566,13821245,"TERMINAL",0,0,"30\t",,terminal_output +16567,13821583,"TERMINAL",0,0,"166",,terminal_output +16568,13822235,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",83,0,"",shellscript,selection_mouse +16569,13822318,"TERMINAL",0,0,"1\t",,terminal_output +16570,13822616,"TERMINAL",0,0,"277",,terminal_output +16571,13823237,"TERMINAL",0,0,"2\t",,terminal_output +16572,13823704,"TERMINAL",0,0,"388",,terminal_output +16573,13823778,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",83,0,"2",shellscript,content +16574,13823779,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",84,0,"",shellscript,selection_keyboard +16575,13824278,"TERMINAL",0,0,"3\t",,terminal_output +16576,13824311,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",83,0,"",shellscript,selection_command +16577,13824700,"TERMINAL",0,0,"499",,terminal_output +16578,13825305,"TERMINAL",0,0,"5\t",,terminal_output +16579,13825603,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",82,0,"",shellscript,selection_command +16580,13825743,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",82,1,"",shellscript,content +16581,13825744,"TERMINAL",0,0,"54040",,terminal_output +16582,13826344,"TERMINAL",0,0,"6\t",,terminal_output +16583,13826765,"TERMINAL",0,0,"611",,terminal_output +16584,13827498,"TERMINAL",0,0,"7\t",,terminal_output +16585,13827840,"TERMINAL",0,0,"722",,terminal_output +16586,13828529,"TERMINAL",0,0,"820",,terminal_output +16587,13828970,"TERMINAL",0,0,"833",,terminal_output +16588,13829499,"TERMINAL",0,0,"9\t",,terminal_output +16589,13829886,"TERMINAL",0,0,"944",,terminal_output +16590,13830489,"TERMINAL",0,0,"40\t",,terminal_output +16591,13830957,"TERMINAL",0,0,"4055",,terminal_output +16592,13831535,"TERMINAL",0,0,"1\t",,terminal_output +16593,13831667,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1909,0,"",shellscript,selection_mouse +16594,13832635,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1695,0,"",shellscript,selection_mouse +16595,13832775,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1689,14,"CHECKPOINT_DIR",shellscript,selection_mouse +16596,13833380,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1665,0,"",shellscript,selection_mouse 
+16597,13833537,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1658,12,"SLURM_JOB_ID",shellscript,selection_mouse +16598,13833949,"TERMINAL",0,0,"188",,terminal_output +16599,13833987,"TERMINAL",0,0,"2\t",,terminal_output +16600,13835219,"TERMINAL",0,0,"499",,terminal_output +16601,13835219,"TERMINAL",0,0,"4\t",,terminal_output +16602,13836186,"TERMINAL",0,0,"55050",,terminal_output +16603,13836186,"TERMINAL",0,0,"5\t",,terminal_output +16604,13837337,"TERMINAL",0,0,"611",,terminal_output +16605,13837338,"TERMINAL",0,0,"6\t",,terminal_output +16606,13838339,"TERMINAL",0,0,"722",,terminal_output +16607,13838341,"TERMINAL",0,0,"7\t",,terminal_output +16608,13839360,"TERMINAL",0,0,"844",,terminal_output +16609,13839361,"TERMINAL",0,0,"9\t",,terminal_output +16610,13840336,"TERMINAL",0,0,"5055",,terminal_output +16611,13840348,"TERMINAL",0,0,"50\t",,terminal_output +16612,13841381,"TERMINAL",0,0,"1\t",,terminal_output +16613,13841382,"TERMINAL",0,0,"166",,terminal_output +16614,13842536,"TERMINAL",0,0,"2\t",,terminal_output +16615,13842537,"TERMINAL",0,0,"277",,terminal_output +16616,13843570,"TERMINAL",0,0,"3\t",,terminal_output +16617,13843570,"TERMINAL",0,0,"388",,terminal_output +16618,13844502,"TERMINAL",0,0,"4\t",,terminal_output +16619,13844549,"TERMINAL",0,0,"499",,terminal_output +16620,13845549,"TERMINAL",0,0,"5\t",,terminal_output +16621,13845551,"TERMINAL",0,0,"51:001:00",,terminal_output +16622,13846586,"TERMINAL",0,0,"6\t",,terminal_output +16623,13846601,"TERMINAL",0,0,"611",,terminal_output +16624,13847627,"TERMINAL",0,0,"7\t",,terminal_output +16625,13847654,"TERMINAL",0,0,"722",,terminal_output +16626,13848781,"TERMINAL",0,0,"8\t",,terminal_output +16627,13848786,"TERMINAL",0,0,"833",,terminal_output +16628,13849728,"TERMINAL",0,0,"9\t",,terminal_output +16629,13849757,"TERMINAL",0,0,"944",,terminal_output +16630,13850835,"TERMINAL",0,0,"7:00\t",,terminal_output +16631,13850835,"TERMINAL",0,0,"7:0055",,terminal_output +16632,13851037,"slurm/jobs/mihir/horeka/lam/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +16633,13851800,"TERMINAL",0,0,"1\t",,terminal_output +16634,13851825,"TERMINAL",0,0,"166",,terminal_output +16635,13852854,"TERMINAL",0,0,"2\t",,terminal_output +16636,13852864,"TERMINAL",0,0,"277",,terminal_output +16637,13853869,"TERMINAL",0,0,"3\t",,terminal_output +16638,13853932,"TERMINAL",0,0,"388",,terminal_output +16639,13854952,"TERMINAL",0,0,"4\t",,terminal_output +16640,13854985,"TERMINAL",0,0,"499",,terminal_output +16641,13855189,"slurm/jobs/mihir/horeka/lam/coinrun/train_lam_single_gpu.sh",782,0,"",shellscript,selection_mouse +16642,13855935,"TERMINAL",0,0,"5\t",,terminal_output +16643,13855965,"TERMINAL",0,0,"51010",,terminal_output +16644,13856974,"TERMINAL",0,0,"6\t",,terminal_output +16645,13857010,"TERMINAL",0,0,"611",,terminal_output +16646,13857240,"slurm/jobs/mihir/horeka/lam/coinrun/train_lam_single_gpu.sh",784,0,"",shellscript,selection_mouse +16647,13858115,"TERMINAL",0,0,"7\t",,terminal_output +16648,13858116,"TERMINAL",0,0,"722",,terminal_output +16649,13859053,"TERMINAL",0,0,"8\t",,terminal_output +16650,13859094,"TERMINAL",0,0,"833",,terminal_output +16651,13860160,"TERMINAL",0,0,"9\t",,terminal_output +16652,13860160,"TERMINAL",0,0,"944",,terminal_output +16653,13860440,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",0,0,"",shellscript,tab +16654,13861135,"TERMINAL",0,0,"10\t",,terminal_output +16655,13861173,"TERMINAL",0,0,"1055",,terminal_output 
+16656,13861768,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1553,0,"",shellscript,selection_mouse +16657,13862186,"TERMINAL",0,0,"1\t",,terminal_output +16658,13862258,"TERMINAL",0,0,"166",,terminal_output +16659,13863206,"TERMINAL",0,0,"2\t",,terminal_output +16660,13863263,"TERMINAL",0,0,"277",,terminal_output +16661,13863820,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1455,0,"",shellscript,selection_mouse +16662,13864276,"TERMINAL",0,0,"3\t",,terminal_output +16663,13864371,"TERMINAL",0,0,"399",,terminal_output +16664,13864618,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1552,0,"\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val",shellscript,content +16665,13864654,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1553,0,"",shellscript,selection_command +16666,13865310,"TERMINAL",0,0,"4\t",,terminal_output +16667,13865337,"TERMINAL",0,0,"52020",,terminal_output +16668,13866023,"slurm/jobs/mihir/horeka/lam/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +16669,13866387,"TERMINAL",0,0,"6\t",,terminal_output +16670,13866431,"TERMINAL",0,0,"611",,terminal_output +16671,13867428,"TERMINAL",0,0,"7\t",,terminal_output +16672,13867469,"TERMINAL",0,0,"722",,terminal_output +16673,13868418,"TERMINAL",0,0,"8\t",,terminal_output +16674,13868471,"TERMINAL",0,0,"833",,terminal_output +16675,13868525,"slurm/jobs/mihir/horeka/lam/coinrun/train_lam_single_gpu.sh",1391,0,"",shellscript,selection_mouse +16676,13869450,"TERMINAL",0,0,"9\t",,terminal_output +16677,13869520,"TERMINAL",0,0,"944",,terminal_output +16678,13870528,"TERMINAL",0,0,"20\t",,terminal_output +16679,13870534,"TERMINAL",0,0,"2055",,terminal_output +16680,13871532,"TERMINAL",0,0,"1\t",,terminal_output +16681,13871609,"TERMINAL",0,0,"166",,terminal_output +16682,13872041,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",0,0,"",shellscript,tab +16683,13872561,"TERMINAL",0,0,"2\t",,terminal_output +16684,13872650,"TERMINAL",0,0,"277",,terminal_output +16685,13873905,"TERMINAL",0,0,"3\t",,terminal_output +16686,13873953,"TERMINAL",0,0,"388",,terminal_output +16687,13874658,"TERMINAL",0,0,"4\t",,terminal_output +16688,13874733,"TERMINAL",0,0,"499",,terminal_output +16689,13875665,"TERMINAL",0,0,"5\t",,terminal_output +16690,13875736,"TERMINAL",0,0,"53030",,terminal_output +16691,13876712,"TERMINAL",0,0,"6\t",,terminal_output +16692,13876767,"TERMINAL",0,0,"611",,terminal_output +16693,13877736,"TERMINAL",0,0,"7\t",,terminal_output +16694,13877804,"TERMINAL",0,0,"722",,terminal_output +16695,13878899,"TERMINAL",0,0,"8\t",,terminal_output +16696,13878906,"TERMINAL",0,0,"833",,terminal_output +16697,13879276,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=00:20:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=10 \\n --log_checkpoint_interval=100 \\n --log \\n --name=coinrun-tokenizer-dev-$slurm_job_id \\n --tags tokenizer coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 200 \\n --val_data_dir $array_records_dir_val \\n --val_interval 10 \\n --val_steps 50 \\n --data_dir $array_records_dir_train\n",shellscript,tab +16698,13879839,"TERMINAL",0,0,"9\t",,terminal_output +16699,13879903,"TERMINAL",0,0,"944",,terminal_output +16700,13880844,"TERMINAL",0,0,"30\t",,terminal_output +16701,13880935,"TERMINAL",0,0,"3055",,terminal_output +16702,13881219,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",1613,0,"",shellscript,selection_mouse +16703,13881898,"TERMINAL",0,0,"1\t",,terminal_output +16704,13881949,"TERMINAL",0,0,"166",,terminal_output +16705,13882946,"TERMINAL",0,0,"2\t",,terminal_output +16706,13882995,"TERMINAL",0,0,"277",,terminal_output +16707,13884016,"TERMINAL",0,0,"3\t",,terminal_output +16708,13884035,"TERMINAL",0,0,"388",,terminal_output +16709,13885045,"TERMINAL",0,0,"4\t",,terminal_output +16710,13885069,"TERMINAL",0,0,"499",,terminal_output +16711,13886060,"TERMINAL",0,0,"5\t",,terminal_output +16712,13886104,"TERMINAL",0,0,"54040",,terminal_output +16713,13887192,"TERMINAL",0,0,"6\t",,terminal_output +16714,13887192,"TERMINAL",0,0,"611",,terminal_output +16715,13888208,"TERMINAL",0,0,"7\t",,terminal_output +16716,13888243,"TERMINAL",0,0,"722",,terminal_output +16717,13888771,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",0,0,"",shellscript,tab +16718,13889167,"TERMINAL",0,0,"8\t",,terminal_output +16719,13889261,"TERMINAL",0,0,"833",,terminal_output +16720,13890174,"TERMINAL",0,0,"9\t",,terminal_output +16721,13890320,"TERMINAL",0,0,"955",,terminal_output +16722,13890434,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2154,0,"",shellscript,selection_mouse +16723,13891215,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2159,0,"\n --val_data_dir $array_records_dir_val \",shellscript,content +16724,13891286,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2164,0,"",shellscript,selection_command +16725,13891367,"TERMINAL",0,0,"40\t",,terminal_output +16726,13891415,"TERMINAL",0,0,"4166",,terminal_output +16727,13892251,"TERMINAL",0,0,"1\t",,terminal_output +16728,13892406,"TERMINAL",0,0,"277",,terminal_output +16729,13893326,"TERMINAL",0,0,"2\t",,terminal_output +16730,13893390,"TERMINAL",0,0,"388",,terminal_output +16731,13894349,"TERMINAL",0,0,"41",,terminal_output +16732,13894454,"TERMINAL",0,0,"499",,terminal_output +16733,13895453,"TERMINAL",0,0,"5\t",,terminal_output +16734,13895487,"TERMINAL",0,0,"55050",,terminal_output +16735,13896416,"TERMINAL",0,0,"6\t",,terminal_output +16736,13896521,"TERMINAL",0,0,"611",,terminal_output 
+16737,13897531,"TERMINAL",0,0,"7\t",,terminal_output +16738,13897544,"TERMINAL",0,0,"722",,terminal_output +16739,13898548,"TERMINAL",0,0,"8\t",,terminal_output +16740,13898640,"TERMINAL",0,0,"833",,terminal_output +16741,13899532,"TERMINAL",0,0,"9\t",,terminal_output +16742,13899616,"TERMINAL",0,0,"944",,terminal_output +16743,13900566,"TERMINAL",0,0,"50\t",,terminal_output +16744,13900660,"TERMINAL",0,0,"5055",,terminal_output +16745,13901620,"TERMINAL",0,0,"1\t",,terminal_output +16746,13901712,"TERMINAL",0,0,"166",,terminal_output +16747,13902637,"TERMINAL",0,0,"2\t",,terminal_output +16748,13902715,"TERMINAL",0,0,"277",,terminal_output +16749,13903864,"TERMINAL",0,0,"3\t",,terminal_output +16750,13903865,"TERMINAL",0,0,"388",,terminal_output +16751,13904864,"TERMINAL",0,0,"499",,terminal_output +16752,13904865,"TERMINAL",0,0,"44",,terminal_output +16753,13905921,"TERMINAL",0,0,"52:002:00",,terminal_output +16754,13905921,"TERMINAL",0,0,"5\t",,terminal_output +16755,13906852,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",117,0,"",shellscript,selection_mouse +16756,13906933,"TERMINAL",0,0,"611",,terminal_output +16757,13906945,"TERMINAL",0,0,"6\t",,terminal_output +16758,13907898,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",116,1,"",shellscript,content +16759,13908006,"TERMINAL",0,0,"722",,terminal_output +16760,13908006,"TERMINAL",0,0,"7\t",,terminal_output +16761,13908019,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",115,1,"",shellscript,content +16762,13908286,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",114,1,"",shellscript,content +16763,13908689,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",113,1,"",shellscript,content +16764,13909011,"TERMINAL",0,0,"833",,terminal_output +16765,13909017,"TERMINAL",0,0,"8\t",,terminal_output +16766,13910085,"TERMINAL",0,0,"944",,terminal_output +16767,13910085,"TERMINAL",0,0,"9\t",,terminal_output +16768,13911167,"TERMINAL",0,0,"8:0055",,terminal_output +16769,13911167,"TERMINAL",0,0,"8:00\t",,terminal_output +16770,13912173,"TERMINAL",0,0,"1\t",,terminal_output +16771,13912181,"TERMINAL",0,0,"166",,terminal_output +16772,13913191,"TERMINAL",0,0,"2\t",,terminal_output +16773,13913192,"TERMINAL",0,0,"277",,terminal_output +16774,13914253,"TERMINAL",0,0,"388",,terminal_output +16775,13914253,"TERMINAL",0,0,"3\t",,terminal_output +16776,13915366,"TERMINAL",0,0,"499",,terminal_output +16777,13915366,"TERMINAL",0,0,"4\t",,terminal_output +16778,13916384,"TERMINAL",0,0,"5\t",,terminal_output +16779,13916385,"TERMINAL",0,0,"51111",,terminal_output +16780,13917313,"TERMINAL",0,0,"722",,terminal_output +16781,13917328,"TERMINAL",0,0,"7\t",,terminal_output +16782,13918417,"TERMINAL",0,0,"833",,terminal_output +16783,13918417,"TERMINAL",0,0,"8\t",,terminal_output +16784,13919403,"TERMINAL",0,0,"9\t",,terminal_output +16785,13919404,"TERMINAL",0,0,"944",,terminal_output +16786,13920440,"TERMINAL",0,0,"10\t",,terminal_output +16787,13920440,"TERMINAL",0,0,"1055",,terminal_output +16788,13921603,"TERMINAL",0,0,"1\t",,terminal_output +16789,13921604,"TERMINAL",0,0,"166",,terminal_output +16790,13922503,"TERMINAL",0,0,"2\t",,terminal_output +16791,13922545,"TERMINAL",0,0,"277",,terminal_output +16792,13923535,"TERMINAL",0,0,"3\t",,terminal_output +16793,13923548,"TERMINAL",0,0,"388",,terminal_output +16794,13924585,"TERMINAL",0,0,"4\t",,terminal_output +16795,13924593,"TERMINAL",0,0,"499",,terminal_output +16796,13925635,"TERMINAL",0,0,"5\t",,terminal_output 
+16797,13925650,"TERMINAL",0,0,"52020",,terminal_output +16798,13926650,"TERMINAL",0,0,"6\t",,terminal_output +16799,13926665,"TERMINAL",0,0,"611",,terminal_output +16800,13927679,"TERMINAL",0,0,"7\t",,terminal_output +16801,13927697,"TERMINAL",0,0,"722",,terminal_output +16802,13928717,"TERMINAL",0,0,"8\t",,terminal_output +16803,13928740,"TERMINAL",0,0,"833",,terminal_output +16804,13929750,"TERMINAL",0,0,"9\t",,terminal_output +16805,13929774,"TERMINAL",0,0,"944",,terminal_output +16806,13930804,"TERMINAL",0,0,"20\t",,terminal_output +16807,13930814,"TERMINAL",0,0,"2055",,terminal_output +16808,13931828,"TERMINAL",0,0,"1\t",,terminal_output +16809,13931862,"TERMINAL",0,0,"166",,terminal_output +16810,13933011,"TERMINAL",0,0,"2\t",,terminal_output +16811,13933019,"TERMINAL",0,0,"277",,terminal_output +16812,13933908,"TERMINAL",0,0,"3\t",,terminal_output +16813,13933948,"TERMINAL",0,0,"388",,terminal_output +16814,13934949,"TERMINAL",0,0,"4\t",,terminal_output +16815,13934978,"TERMINAL",0,0,"499",,terminal_output +16816,13936046,"TERMINAL",0,0,"5\t",,terminal_output +16817,13936086,"TERMINAL",0,0,"53030",,terminal_output +16818,13937055,"TERMINAL",0,0,"6\t",,terminal_output +16819,13937065,"TERMINAL",0,0,"611",,terminal_output +16820,13938047,"TERMINAL",0,0,"7\t",,terminal_output +16821,13938102,"TERMINAL",0,0,"722",,terminal_output +16822,13939085,"TERMINAL",0,0,"8\t",,terminal_output +16823,13939132,"TERMINAL",0,0,"833",,terminal_output +16824,13940233,"TERMINAL",0,0,"9\t",,terminal_output +16825,13940234,"TERMINAL",0,0,"944",,terminal_output +16826,13941202,"TERMINAL",0,0,"30\t",,terminal_output +16827,13941225,"TERMINAL",0,0,"3055",,terminal_output +16828,13942196,"TERMINAL",0,0,"1\t",,terminal_output +16829,13942299,"TERMINAL",0,0,"166",,terminal_output +16830,13943266,"TERMINAL",0,0,"2\t",,terminal_output +16831,13943310,"TERMINAL",0,0,"288",,terminal_output +16832,13944268,"TERMINAL",0,0,"3\t",,terminal_output +16833,13944380,"TERMINAL",0,0,"499",,terminal_output +16834,13945304,"TERMINAL",0,0,"5\t",,terminal_output +16835,13945402,"TERMINAL",0,0,"54040",,terminal_output +16836,13946372,"TERMINAL",0,0,"6\t",,terminal_output +16837,13946413,"TERMINAL",0,0,"611",,terminal_output +16838,13947393,"TERMINAL",0,0,"7\t",,terminal_output +16839,13947504,"TERMINAL",0,0,"722",,terminal_output +16840,13948525,"TERMINAL",0,0,"8\t",,terminal_output +16841,13948537,"TERMINAL",0,0,"833",,terminal_output +16842,13949467,"TERMINAL",0,0,"9\t",,terminal_output +16843,13949578,"TERMINAL",0,0,"944",,terminal_output +16844,13950573,"TERMINAL",0,0,"40\t",,terminal_output +16845,13950595,"TERMINAL",0,0,"4055",,terminal_output +16846,13951619,"TERMINAL",0,0,"1\t",,terminal_output +16847,13951654,"TERMINAL",0,0,"166",,terminal_output +16848,13952586,"TERMINAL",0,0,"2\t",,terminal_output +16849,13952687,"TERMINAL",0,0,"277",,terminal_output +16850,13953639,"TERMINAL",0,0,"3\t",,terminal_output +16851,13953745,"TERMINAL",0,0,"388",,terminal_output +16852,13954688,"TERMINAL",0,0,"4\t",,terminal_output +16853,13954744,"TERMINAL",0,0,"499",,terminal_output +16854,13957231,"TERMINAL",0,0,"55151",,terminal_output +16855,13957231,"TERMINAL",0,0,"5\t",,terminal_output +16856,13958569,"TERMINAL",0,0,"733",,terminal_output +16857,13958570,"TERMINAL",0,0,"70",,terminal_output +16858,13959536,"TERMINAL",0,0,"944",,terminal_output +16859,13959536,"TERMINAL",0,0,"9\t",,terminal_output +16860,13960570,"TERMINAL",0,0,"5055",,terminal_output +16861,13960575,"TERMINAL",0,0,"50\t",,terminal_output 
+16862,13961621,"TERMINAL",0,0,"166",,terminal_output +16863,13961630,"TERMINAL",0,0,"1\t",,terminal_output +16864,13962645,"TERMINAL",0,0,"277",,terminal_output +16865,13962652,"TERMINAL",0,0,"2\t",,terminal_output +16866,13963777,"TERMINAL",0,0,"388",,terminal_output +16867,13963778,"TERMINAL",0,0,"3\t",,terminal_output +16868,13965109,"TERMINAL",0,0,"499",,terminal_output +16869,13965109,"TERMINAL",0,0,"4\t",,terminal_output +16870,13966048,"TERMINAL",0,0,"53:003:00",,terminal_output +16871,13966054,"TERMINAL",0,0,"5\t",,terminal_output +16872,13967098,"TERMINAL",0,0,"611",,terminal_output +16873,13967100,"TERMINAL",0,0,"6\t",,terminal_output +16874,13968133,"TERMINAL",0,0,"7\t",,terminal_output +16875,13968133,"TERMINAL",0,0,"722",,terminal_output +16876,13969291,"TERMINAL",0,0,"8\t",,terminal_output +16877,13969291,"TERMINAL",0,0,"833",,terminal_output +16878,13970253,"TERMINAL",0,0,"9\t",,terminal_output +16879,13970254,"TERMINAL",0,0,"944",,terminal_output +16880,13971275,"TERMINAL",0,0,"9:00\t",,terminal_output +16881,13971278,"TERMINAL",0,0,"9:0055",,terminal_output +16882,13972381,"TERMINAL",0,0,"1\t",,terminal_output +16883,13972381,"TERMINAL",0,0,"177",,terminal_output +16884,13973331,"TERMINAL",0,0,"3\t",,terminal_output +16885,13973345,"TERMINAL",0,0,"388",,terminal_output +16886,13974385,"TERMINAL",0,0,"4\t",,terminal_output +16887,13974422,"TERMINAL",0,0,"499",,terminal_output +16888,13975475,"TERMINAL",0,0,"5\t",,terminal_output +16889,13975475,"TERMINAL",0,0,"51010",,terminal_output +16890,13976515,"TERMINAL",0,0,"6\t",,terminal_output +16891,13976516,"TERMINAL",0,0,"611",,terminal_output +16892,13977483,"TERMINAL",0,0,"7\t",,terminal_output +16893,13977517,"TERMINAL",0,0,"722",,terminal_output +16894,13978576,"TERMINAL",0,0,"8\t",,terminal_output +16895,13978576,"TERMINAL",0,0,"833",,terminal_output +16896,13979566,"TERMINAL",0,0,"9\t",,terminal_output +16897,13979608,"TERMINAL",0,0,"944",,terminal_output +16898,13980628,"TERMINAL",0,0,"10\t",,terminal_output +16899,13980673,"TERMINAL",0,0,"1055",,terminal_output +16900,13981649,"TERMINAL",0,0,"1\t",,terminal_output +16901,13981684,"TERMINAL",0,0,"166",,terminal_output +16902,13982686,"TERMINAL",0,0,"2\t",,terminal_output +16903,13982731,"TERMINAL",0,0,"277",,terminal_output +16904,13983717,"TERMINAL",0,0,"3\t",,terminal_output +16905,13983767,"TERMINAL",0,0,"388",,terminal_output +16906,13984798,"TERMINAL",0,0,"4\t",,terminal_output +16907,13984813,"TERMINAL",0,0,"499",,terminal_output +16908,13985896,"TERMINAL",0,0,"5\t",,terminal_output +16909,13985896,"TERMINAL",0,0,"52020",,terminal_output +16910,13986837,"TERMINAL",0,0,"6\t",,terminal_output +16911,13986899,"TERMINAL",0,0,"611",,terminal_output +16912,13987877,"TERMINAL",0,0,"7\t",,terminal_output +16913,13987943,"TERMINAL",0,0,"722",,terminal_output +16914,13988974,"TERMINAL",0,0,"8\t",,terminal_output +16915,13988986,"TERMINAL",0,0,"833",,terminal_output +16916,13989999,"TERMINAL",0,0,"9\t",,terminal_output +16917,13990035,"TERMINAL",0,0,"944",,terminal_output +16918,13991016,"TERMINAL",0,0,"20\t",,terminal_output +16919,13991074,"TERMINAL",0,0,"2055",,terminal_output +16920,13992152,"TERMINAL",0,0,"1\t",,terminal_output +16921,13992153,"TERMINAL",0,0,"166",,terminal_output +16922,13993171,"TERMINAL",0,0,"2\t",,terminal_output +16923,13993172,"TERMINAL",0,0,"277",,terminal_output +16924,13994208,"TERMINAL",0,0,"3\t",,terminal_output +16925,13994209,"TERMINAL",0,0,"388",,terminal_output +16926,13995233,"TERMINAL",0,0,"4\t",,terminal_output 
+16927,13995250,"TERMINAL",0,0,"499",,terminal_output +16928,13996206,"TERMINAL",0,0,"5\t",,terminal_output +16929,13996313,"TERMINAL",0,0,"53131",,terminal_output +16930,13997237,"TERMINAL",0,0,"6\t",,terminal_output +16931,13997330,"TERMINAL",0,0,"722",,terminal_output +16932,13998396,"TERMINAL",0,0,"7\t",,terminal_output +16933,13998396,"TERMINAL",0,0,"833",,terminal_output +16934,13999310,"TERMINAL",0,0,"9\t",,terminal_output +16935,13999413,"TERMINAL",0,0,"944",,terminal_output +16936,14000382,"TERMINAL",0,0,"30\t",,terminal_output +16937,14000461,"TERMINAL",0,0,"3055",,terminal_output +16938,14001393,"TERMINAL",0,0,"1\t",,terminal_output +16939,14001526,"TERMINAL",0,0,"166",,terminal_output +16940,14002487,"TERMINAL",0,0,"2\t",,terminal_output +16941,14002571,"TERMINAL",0,0,"277",,terminal_output +16942,14003509,"TERMINAL",0,0,"3\t",,terminal_output +16943,14003655,"TERMINAL",0,0,"388",,terminal_output +16944,14004508,"TERMINAL",0,0,"4\t",,terminal_output +16945,14004654,"TERMINAL",0,0,"499",,terminal_output +16946,14005563,"TERMINAL",0,0,"5\t",,terminal_output +16947,14005716,"TERMINAL",0,0,"54040",,terminal_output +16948,14006586,"TERMINAL",0,0,"6\t",,terminal_output +16949,14006742,"TERMINAL",0,0,"611",,terminal_output +16950,14007286,"TERMINAL",0,0,"watch",,terminal_focus +16951,14007627,"TERMINAL",0,0,"7\t",,terminal_output +16952,14007828,"TERMINAL",0,0,"722[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs_2",,terminal_output +16953,14008703,"TERMINAL",0,0,"8\t",,terminal_output +16954,14009758,"TERMINAL",0,0,"python",,terminal_command +16955,14009758,"TERMINAL",0,0,"9\t",,terminal_output +16956,14009796,"TERMINAL",0,0,"]633;C",,terminal_output +16957,14010068,"TERMINAL",0,0,"Python 3.10.18 (main, Jun 4 2025, 17:36:27) [Clang 20.1.4 ] on linux\r\nType ""help"", ""copyright"", ""credits"" or ""license"" for more information.\r\n",,terminal_output +16958,14010507,"TERMINAL",0,0,">>> ",,terminal_output +16959,14010732,"TERMINAL",0,0,"40\t",,terminal_output +16960,14011811,"TERMINAL",0,0,"1\t",,terminal_output +16961,14012827,"TERMINAL",0,0,"2\t",,terminal_output +16962,14012913,"TERMINAL",0,0,"6",,terminal_output +16963,14013026,"TERMINAL",0,0,"0",,terminal_output +16964,14013718,"TERMINAL",0,0,"*",,terminal_output +16965,14013864,"TERMINAL",0,0,"3\t",,terminal_output +16966,14014121,"TERMINAL",0,0,"0",,terminal_output +16967,14014268,"TERMINAL",0,0,".",,terminal_output +16968,14014660,"TERMINAL",0,0,"6",,terminal_output +16969,14014977,"TERMINAL",0,0,"4\t",,terminal_output +16970,14015333,"TERMINAL",0,0,"\r\n36.0\r\n>>> ",,terminal_output +16971,14015923,"TERMINAL",0,0,"5\t",,terminal_output +16972,14016955,"TERMINAL",0,0,"6\t",,terminal_output +16973,14018029,"TERMINAL",0,0,"7\t",,terminal_output +16974,14019073,"TERMINAL",0,0,"8\t",,terminal_output +16975,14020066,"TERMINAL",0,0,"9\t",,terminal_output +16976,14021233,"TERMINAL",0,0,"50\t",,terminal_output +16977,14022249,"TERMINAL",0,0,"1\t",,terminal_output +16978,14023236,"TERMINAL",0,0,"2\t",,terminal_output +16979,14024332,"TERMINAL",0,0,"3\t",,terminal_output +16980,14025320,"TERMINAL",0,0,"4\t",,terminal_output +16981,14026344,"TERMINAL",0,0,"5\t",,terminal_output +16982,14027368,"TERMINAL",0,0,"7\t",,terminal_output +16983,14028344,"TERMINAL",0,0,"8\t",,terminal_output +16984,14029382,"TERMINAL",0,0,"9\t",,terminal_output +16985,14030431,"TERMINAL",0,0,"20:00\t",,terminal_output +16986,14031583,"TERMINAL",0,0,"1\t",,terminal_output +16987,14032116,"TERMINAL",0,0,"\r>>> 60*0.6",,terminal_output 
+16988,14032597,"TERMINAL",0,0,"284",,terminal_output +16989,14033631,"TERMINAL",0,0,"3\t",,terminal_output +16990,14033972,"TERMINAL",0,0,")",,terminal_output +16991,14034366,"TERMINAL",0,0,"",,terminal_output +16992,14034572,"TERMINAL",0,0,"",,terminal_output +16993,14034576,"TERMINAL",0,0,"4\t",,terminal_output +16994,14035619,"TERMINAL",0,0,"5\t",,terminal_output +16995,14035703,"TERMINAL",0,0,"\r>>> [1@(6",,terminal_output +16996,14036291,"TERMINAL",0,0,"",,terminal_output +16997,14036534,"TERMINAL",0,0,")",,terminal_output +16998,14036670,"TERMINAL",0,0,"6\t",,terminal_output +16999,14037617,"TERMINAL",0,0,"*",,terminal_output +17000,14037686,"TERMINAL",0,0,"7\t",,terminal_output +17001,14038837,"TERMINAL",0,0,"8\t",,terminal_output +17002,14039280,"TERMINAL",0,0,"2",,terminal_output +17003,14039385,"TERMINAL",0,0,"0",,terminal_output +17004,14039571,"TERMINAL",0,0,"0",,terminal_output +17005,14039770,"TERMINAL",0,0,"9\t",,terminal_output +17006,14040731,"TERMINAL",0,0,"\r\n7200.0\r\n>>> ",,terminal_output +17007,14040832,"TERMINAL",0,0,"10\t",,terminal_output +17008,14041909,"TERMINAL",0,0,"1\t",,terminal_output +17009,14042562,"TERMINAL",0,0,"/",,terminal_output +17010,14042867,"TERMINAL",0,0,"2\t",,terminal_output +17011,14043958,"TERMINAL",0,0,"3\t",,terminal_output +17012,14044351,"TERMINAL",0,0,"\r>>> ",,terminal_output +17013,14044858,"TERMINAL",0,0,"\r>>> (60*0.6)*200",,terminal_output +17014,14044969,"TERMINAL",0,0,"4\t",,terminal_output +17015,14045391,"TERMINAL",0,0,"",,terminal_output +17016,14045876,"TERMINAL",0,0,"",,terminal_output +17017,14046005,"TERMINAL",0,0,"5\t",,terminal_output +17018,14046061,"TERMINAL",0,0,"",,terminal_output +17019,14046253,"TERMINAL",0,0,"",,terminal_output +17020,14046518,"TERMINAL",0,0,"",,terminal_output +17021,14046905,"TERMINAL",0,0,"\r0",,terminal_output +17022,14047029,"TERMINAL",0,0,"6\t",,terminal_output +17023,14047084,"TERMINAL",0,0,"\r.",,terminal_output +17024,14047299,"TERMINAL",0,0,"\r0",,terminal_output +17025,14047758,"TERMINAL",0,0,"\r\n120.0\r\n>>> ",,terminal_output +17026,14048164,"TERMINAL",0,0,"7\t",,terminal_output +17027,14049093,"TERMINAL",0,0,"8\t",,terminal_output +17028,14050209,"TERMINAL",0,0,"9\t",,terminal_output +17029,14051241,"TERMINAL",0,0,"20\t",,terminal_output +17030,14052258,"TERMINAL",0,0,"1\t",,terminal_output +17031,14053258,"TERMINAL",0,0,"2\t",,terminal_output +17032,14054308,"TERMINAL",0,0,"35",,terminal_output +17033,14055345,"TERMINAL",0,0,"5\t",,terminal_output +17034,14056450,"TERMINAL",0,0,"6\t",,terminal_output +17035,14057436,"TERMINAL",0,0,"76",,terminal_output +17036,14058500,"TERMINAL",0,0,"8\t",,terminal_output +17037,14059505,"TERMINAL",0,0,"9\t",,terminal_output +17038,14060540,"TERMINAL",0,0,"30\t",,terminal_output +17039,14061595,"TERMINAL",0,0,"1\t",,terminal_output +17040,14062612,"TERMINAL",0,0,"2\t",,terminal_output +17041,14063650,"TERMINAL",0,0,"3\t",,terminal_output +17042,14064744,"TERMINAL",0,0,"4\t",,terminal_output +17043,14065770,"TERMINAL",0,0,"5\t",,terminal_output +17044,14066761,"TERMINAL",0,0,"6\t",,terminal_output +17045,14067800,"TERMINAL",0,0,"7\t",,terminal_output +17046,14068943,"TERMINAL",0,0,"8\t",,terminal_output +17047,14069966,"TERMINAL",0,0,"9\t",,terminal_output +17048,14070992,"TERMINAL",0,0,"40\t",,terminal_output +17049,14072027,"TERMINAL",0,0,"1\t",,terminal_output +17050,14072995,"TERMINAL",0,0,"2\t",,terminal_output +17051,14074063,"TERMINAL",0,0,"3\t",,terminal_output +17052,14075198,"TERMINAL",0,0,"4\t",,terminal_output 
+17053,14076111,"TERMINAL",0,0,"5\t",,terminal_output +17054,14077186,"TERMINAL",0,0,"6\t",,terminal_output +17055,14078269,"TERMINAL",0,0,"7\t",,terminal_output +17056,14080535,"TERMINAL",0,0,"830",,terminal_output +17057,14081568,"TERMINAL",0,0,"5114",,terminal_output +17058,14082600,"TERMINAL",0,0,"2\t",,terminal_output +17059,14083692,"TERMINAL",0,0,"3\t",,terminal_output +17060,14084680,"TERMINAL",0,0,"4\t",,terminal_output +17061,14085769,"TERMINAL",0,0,"5\t",,terminal_output +17062,14086799,"TERMINAL",0,0,"6\t",,terminal_output +17063,14087828,"TERMINAL",0,0,"7\t",,terminal_output +17064,14088911,"TERMINAL",0,0,"8\t",,terminal_output +17065,14089936,"TERMINAL",0,0,"9\t",,terminal_output +17066,14090959,"TERMINAL",0,0,"1:00\t",,terminal_output +17067,14091988,"TERMINAL",0,0,"1\t",,terminal_output +17068,14092976,"TERMINAL",0,0,"2\t",,terminal_output +17069,14094016,"TERMINAL",0,0,"3\t",,terminal_output +17070,14095190,"TERMINAL",0,0,"4\t",,terminal_output +17071,14096092,"TERMINAL",0,0,"5\t",,terminal_output +17072,14097197,"TERMINAL",0,0,"6\t",,terminal_output +17073,14098240,"TERMINAL",0,0,"7\t",,terminal_output +17074,14099253,"TERMINAL",0,0,"8\t",,terminal_output +17075,14100278,"TERMINAL",0,0,"9\t",,terminal_output +17076,14101301,"TERMINAL",0,0,"10\t",,terminal_output +17077,14102428,"TERMINAL",0,0,"2\t",,terminal_output +17078,14103462,"TERMINAL",0,0,"3\t",,terminal_output +17079,14104411,"TERMINAL",0,0,"4\t",,terminal_output +17080,14105538,"TERMINAL",0,0,"5\t",,terminal_output +17081,14106524,"TERMINAL",0,0,"6\t",,terminal_output +17082,14107548,"TERMINAL",0,0,"7\t",,terminal_output +17083,14108572,"TERMINAL",0,0,"8\t",,terminal_output +17084,14109588,"TERMINAL",0,0,"9\t",,terminal_output +17085,14110629,"TERMINAL",0,0,"20\t",,terminal_output +17086,14111664,"TERMINAL",0,0,"1\t",,terminal_output +17087,14112704,"TERMINAL",0,0,"2\t",,terminal_output +17088,14113754,"TERMINAL",0,0,"3\t",,terminal_output +17089,14114781,"TERMINAL",0,0,"4\t",,terminal_output +17090,14115876,"TERMINAL",0,0,"5\t",,terminal_output +17091,14116856,"TERMINAL",0,0,"6\t",,terminal_output +17092,14117916,"TERMINAL",0,0,"7\t",,terminal_output +17093,14118927,"TERMINAL",0,0,"8\t",,terminal_output +17094,14120046,"TERMINAL",0,0,"9\t",,terminal_output +17095,14121064,"TERMINAL",0,0,"30\t",,terminal_output +17096,14122098,"TERMINAL",0,0,"1\t",,terminal_output +17097,14123113,"TERMINAL",0,0,"2\t",,terminal_output +17098,14124248,"TERMINAL",0,0,"3\t",,terminal_output +17099,14125205,"TERMINAL",0,0,"4\t",,terminal_output +17100,14126189,"TERMINAL",0,0,"5\t",,terminal_output +17101,14127312,"TERMINAL",0,0,"6\t",,terminal_output +17102,14128336,"TERMINAL",0,0,"7\t",,terminal_output +17103,14129339,"TERMINAL",0,0,"8\t",,terminal_output +17104,14130336,"TERMINAL",0,0,"40\t",,terminal_output +17105,14131408,"TERMINAL",0,0,"1\t",,terminal_output +17106,14132537,"TERMINAL",0,0,"2\t",,terminal_output +17107,14133558,"TERMINAL",0,0,"3\t",,terminal_output +17108,14134534,"TERMINAL",0,0,"4\t",,terminal_output +17109,14135606,"TERMINAL",0,0,"5\t",,terminal_output +17110,14136631,"TERMINAL",0,0,"6\t",,terminal_output +17111,14137610,"TERMINAL",0,0,"7\t",,terminal_output +17112,14138649,"TERMINAL",0,0,"8\t",,terminal_output +17113,14139804,"TERMINAL",0,0,"9\t",,terminal_output +17114,14140833,"TERMINAL",0,0,"50\t",,terminal_output +17115,14141769,"TERMINAL",0,0,"1\t",,terminal_output +17116,14142809,"TERMINAL",0,0,"2\t",,terminal_output +17117,14143902,"TERMINAL",0,0,"3\t",,terminal_output 
+17118,14144889,"TERMINAL",0,0,"4\t",,terminal_output +17119,14145926,"TERMINAL",0,0,"5\t",,terminal_output +17120,14147075,"TERMINAL",0,0,"6\t",,terminal_output +17121,14148014,"TERMINAL",0,0,"7\t",,terminal_output +17122,14149123,"TERMINAL",0,0,"8\t",,terminal_output +17123,14150157,"TERMINAL",0,0,"9\t",,terminal_output +17124,14151171,"TERMINAL",0,0,"2:00\t",,terminal_output +17125,14152157,"TERMINAL",0,0,"13",,terminal_output +17126,14153232,"TERMINAL",0,0,"2\t",,terminal_output +17127,14154248,"TERMINAL",0,0,"3\t",,terminal_output +17128,14155282,"TERMINAL",0,0,"4\t",,terminal_output +17129,14156394,"TERMINAL",0,0,"6\t",,terminal_output +17130,14157420,"TERMINAL",0,0,"7\t",,terminal_output +17131,14158441,"TERMINAL",0,0,"8\t",,terminal_output +17132,14159497,"TERMINAL",0,0,"9\t",,terminal_output +17133,14160475,"TERMINAL",0,0,"10\t",,terminal_output +17134,14161514,"TERMINAL",0,0,"1\t",,terminal_output +17135,14162548,"TERMINAL",0,0,"2\t",,terminal_output +17136,14163594,"TERMINAL",0,0,"3\t",,terminal_output +17137,14164624,"TERMINAL",0,0,"4\t",,terminal_output +17138,14165657,"TERMINAL",0,0,"5\t",,terminal_output +17139,14166838,"TERMINAL",0,0,"6\t",,terminal_output +17140,14167862,"TERMINAL",0,0,"7\t",,terminal_output +17141,14168791,"TERMINAL",0,0,"8\t",,terminal_output +17142,14169919,"TERMINAL",0,0,"9\t",,terminal_output +17143,14170868,"TERMINAL",0,0,"205",,terminal_output +17144,14171896,"TERMINAL",0,0,"1\t",,terminal_output +17145,14172954,"TERMINAL",0,0,"2\t",,terminal_output +17146,14173977,"TERMINAL",0,0,"3\t",,terminal_output +17147,14175150,"TERMINAL",0,0,"4\t",,terminal_output +17148,14176054,"TERMINAL",0,0,"5\t",,terminal_output +17149,14177099,"TERMINAL",0,0,"6\t",,terminal_output +17150,14178148,"TERMINAL",0,0,"7\t",,terminal_output +17151,14179236,"TERMINAL",0,0,"8\t",,terminal_output +17152,14180270,"TERMINAL",0,0,"9\t",,terminal_output +17153,14181254,"TERMINAL",0,0,"30\t",,terminal_output +17154,14182353,"TERMINAL",0,0,"1\t",,terminal_output +17155,14183349,"TERMINAL",0,0,"3\t",,terminal_output +17156,14184405,"TERMINAL",0,0,"4\t",,terminal_output +17157,14185475,"TERMINAL",0,0,"5\t",,terminal_output +17158,14186468,"TERMINAL",0,0,"6\t",,terminal_output +17159,14187523,"TERMINAL",0,0,"7\t",,terminal_output +17160,14188547,"TERMINAL",0,0,"8\t",,terminal_output +17161,14189571,"TERMINAL",0,0,"9\t",,terminal_output +17162,14190615,"TERMINAL",0,0,"40\t",,terminal_output +17163,14191644,"TERMINAL",0,0,"1\t",,terminal_output +17164,14192699,"TERMINAL",0,0,"2\t",,terminal_output +17165,14193747,"TERMINAL",0,0,"3\t",,terminal_output +17166,14194179,"TERMINAL",0,0,"^D\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs_2",,terminal_output +17167,14194764,"TERMINAL",0,0,"4\t",,terminal_output +17168,14195813,"TERMINAL",0,0,"5\t",,terminal_output +17169,14196841,"TERMINAL",0,0,"6\t",,terminal_output +17170,14197866,"TERMINAL",0,0,"queue",,terminal_command +17171,14197988,"TERMINAL",0,0,"7\t",,terminal_output +17172,14198003,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 17:22:47 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3469360 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3466286 accelerat train_to tum_cte0 R 20:06:52\t 1 hkn07363466287 accelerat train_la tum_cte0 R 20:06:52\t 1 hkn0736\t",,terminal_output +17173,14198913,"TERMINAL",0,0,"8\t",,terminal_output +17174,14199008,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1131,0,"",shellscript,selection_mouse 
+17175,14199041,"TERMINAL",0,0,"833",,terminal_output +17176,14199957,"TERMINAL",0,0,"9\t",,terminal_output +17177,14199997,"TERMINAL",0,0,"944",,terminal_output +17178,14200246,"TERMINAL",0,0,"bash",,terminal_focus +17179,14201011,"TERMINAL",0,0,"50\t",,terminal_output +17180,14201066,"TERMINAL",0,0,"5055",,terminal_output +17181,14203167,"TERMINAL",0,0,"dev",,terminal_command +17182,14203271,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +17183,14203486,"TERMINAL",0,0,"188",,terminal_output +17184,14203570,"TERMINAL",0,0,"1\t",,terminal_output +17185,14204679,"TERMINAL",0,0,"499",,terminal_output +17186,14204679,"TERMINAL",0,0,"44",,terminal_output +17187,14205675,"TERMINAL",0,0,"57:007:00",,terminal_output +17188,14205675,"TERMINAL",0,0,"5\t",,terminal_output +17189,14206704,"TERMINAL",0,0,"611",,terminal_output +17190,14206706,"TERMINAL",0,0,"6\t",,terminal_output +17191,14207761,"TERMINAL",0,0,"722",,terminal_output +17192,14207761,"TERMINAL",0,0,"7\t",,terminal_output +17193,14208489,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",0,0,"",shellscript,tab +17194,14208864,"TERMINAL",0,0,"8\t",,terminal_output +17195,14208864,"TERMINAL",0,0,"833",,terminal_output +17196,14209902,"TERMINAL",0,0,"9\t",,terminal_output +17197,14209902,"TERMINAL",0,0,"944",,terminal_output +17198,14210900,"TERMINAL",0,0,"3:00\t",,terminal_output +17199,14210960,"TERMINAL",0,0,"3:0055",,terminal_output +17200,14211947,"TERMINAL",0,0,"1\t",,terminal_output +17201,14211982,"TERMINAL",0,0,"166",,terminal_output +17202,14212949,"TERMINAL",0,0,"2\t",,terminal_output +17203,14212967,"TERMINAL",0,0,"277",,terminal_output +17204,14214047,"TERMINAL",0,0,"3\t",,terminal_output +17205,14214047,"TERMINAL",0,0,"388",,terminal_output +17206,14215132,"TERMINAL",0,0,"4\t",,terminal_output +17207,14215133,"TERMINAL",0,0,"499",,terminal_output +17208,14216155,"TERMINAL",0,0,"5\t",,terminal_output +17209,14216211,"TERMINAL",0,0,"51010",,terminal_output +17210,14217097,"TERMINAL",0,0,"6\t",,terminal_output +17211,14217140,"TERMINAL",0,0,"611",,terminal_output +17212,14218146,"TERMINAL",0,0,"7\t",,terminal_output +17213,14218169,"TERMINAL",0,0,"722",,terminal_output +17214,14219200,"TERMINAL",0,0,"8\t",,terminal_output +17215,14219210,"TERMINAL",0,0,"833",,terminal_output +17216,14220211,"TERMINAL",0,0,"9\t",,terminal_output +17217,14220270,"TERMINAL",0,0,"944",,terminal_output +17218,14221250,"TERMINAL",0,0,"10\t",,terminal_output +17219,14221296,"TERMINAL",0,0,"1066",,terminal_output +17220,14222349,"TERMINAL",0,0,"1\t",,terminal_output +17221,14222352,"TERMINAL",0,0,"277",,terminal_output +17222,14223401,"TERMINAL",0,0,"3\t",,terminal_output +17223,14223407,"TERMINAL",0,0,"388",,terminal_output +17224,14224376,"TERMINAL",0,0,"4\t",,terminal_output +17225,14224432,"TERMINAL",0,0,"499",,terminal_output +17226,14225407,"TERMINAL",0,0,"5\t",,terminal_output +17227,14225558,"TERMINAL",0,0,"52020",,terminal_output +17228,14226565,"TERMINAL",0,0,"6\t",,terminal_output +17229,14226566,"TERMINAL",0,0,"611",,terminal_output +17230,14227564,"TERMINAL",0,0,"7\t",,terminal_output +17231,14227568,"TERMINAL",0,0,"722",,terminal_output +17232,14228604,"TERMINAL",0,0,"842",,terminal_output +17233,14228604,"TERMINAL",0,0,"833",,terminal_output +17234,14229569,"TERMINAL",0,0,"9\t",,terminal_output +17235,14229665,"TERMINAL",0,0,"944",,terminal_output +17236,14230751,"TERMINAL",0,0,"20\t",,terminal_output +17237,14230751,"TERMINAL",0,0,"2055",,terminal_output 
+17238,14231682,"TERMINAL",0,0,"1\t",,terminal_output +17239,14231770,"TERMINAL",0,0,"166",,terminal_output +17240,14232688,"TERMINAL",0,0,"2\t",,terminal_output +17241,14232800,"TERMINAL",0,0,"277",,terminal_output +17242,14233732,"TERMINAL",0,0,"3\t",,terminal_output +17243,14233809,"TERMINAL",0,0,"388",,terminal_output +17244,14234836,"TERMINAL",0,0,"4\t",,terminal_output +17245,14234853,"TERMINAL",0,0,"499",,terminal_output +17246,14235821,"TERMINAL",0,0,"5\t",,terminal_output +17247,14235920,"TERMINAL",0,0,"53030",,terminal_output +17248,14236846,"TERMINAL",0,0,"6\t",,terminal_output +17249,14236953,"TERMINAL",0,0,"611",,terminal_output +17250,14237890,"TERMINAL",0,0,"7\t",,terminal_output +17251,14237990,"TERMINAL",0,0,"722",,terminal_output +17252,14239030,"TERMINAL",0,0,"8\t",,terminal_output +17253,14239031,"TERMINAL",0,0,"833",,terminal_output +17254,14240062,"TERMINAL",0,0,"9\t",,terminal_output +17255,14240070,"TERMINAL",0,0,"944",,terminal_output +17256,14241087,"TERMINAL",0,0,"30\t",,terminal_output +17257,14241116,"TERMINAL",0,0,"3055",,terminal_output +17258,14242105,"TERMINAL",0,0,"16",,terminal_output +17259,14242154,"TERMINAL",0,0,"166",,terminal_output +17260,14243127,"TERMINAL",0,0,"2\t",,terminal_output +17261,14243202,"TERMINAL",0,0,"277",,terminal_output +17262,14244203,"TERMINAL",0,0,"3\t",,terminal_output +17263,14244238,"TERMINAL",0,0,"388",,terminal_output +17264,14245169,"TERMINAL",0,0,"4\t",,terminal_output +17265,14245333,"TERMINAL",0,0,"44040",,terminal_output +17266,14246226,"TERMINAL",0,0,"5\t",,terminal_output +17267,14246364,"TERMINAL",0,0,"611",,terminal_output +17268,14247245,"TERMINAL",0,0,"6\t",,terminal_output +17269,14247389,"TERMINAL",0,0,"722",,terminal_output +17270,14248484,"TERMINAL",0,0,"7\t",,terminal_output +17271,14248527,"TERMINAL",0,0,"833",,terminal_output +17272,14249377,"TERMINAL",0,0,"9\t",,terminal_output +17273,14249505,"TERMINAL",0,0,"944",,terminal_output +17274,14250351,"TERMINAL",0,0,"40\t",,terminal_output +17275,14250564,"TERMINAL",0,0,"4055",,terminal_output +17276,14251561,"TERMINAL",0,0,"1\t",,terminal_output +17277,14251562,"TERMINAL",0,0,"166",,terminal_output +17278,14252463,"TERMINAL",0,0,"2\t",,terminal_output +17279,14252669,"TERMINAL",0,0,"277",,terminal_output +17280,14253576,"TERMINAL",0,0,"3\t",,terminal_output +17281,14253650,"TERMINAL",0,0,"388",,terminal_output +17282,14254556,"TERMINAL",0,0,"4\t",,terminal_output +17283,14254698,"TERMINAL",0,0,"499",,terminal_output +17284,14255621,"TERMINAL",0,0,"5\t",,terminal_output +17285,14255761,"TERMINAL",0,0,"55050",,terminal_output +17286,14256669,"TERMINAL",0,0,"6\t",,terminal_output +17287,14256811,"TERMINAL",0,0,"611",,terminal_output +17288,14257632,"TERMINAL",0,0,"7\t",,terminal_output +17289,14257825,"TERMINAL",0,0,"722",,terminal_output +17290,14258675,"TERMINAL",0,0,"8\t",,terminal_output +17291,14258867,"TERMINAL",0,0,"833",,terminal_output +17292,14259723,"TERMINAL",0,0,"9\t",,terminal_output +17293,14259908,"TERMINAL",0,0,"944",,terminal_output +17294,14260753,"TERMINAL",0,0,"50\t",,terminal_output +17295,14260947,"TERMINAL",0,0,"5055",,terminal_output +17296,14261794,"TERMINAL",0,0,"1\t",,terminal_output +17297,14261993,"TERMINAL",0,0,"166",,terminal_output +17298,14262831,"TERMINAL",0,0,"2\t",,terminal_output +17299,14263109,"TERMINAL",0,0,"277",,terminal_output +17300,14263637,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1363,0,"",shellscript,selection_mouse 
+17301,14263806,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1362,1,"s",shellscript,selection_mouse +17302,14263807,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1361,2,"ns",shellscript,selection_mouse +17303,14263807,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1360,3,"uns",shellscript,selection_mouse +17304,14263861,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1359,4,"runs",shellscript,selection_mouse +17305,14263910,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1358,5,"-runs",shellscript,selection_mouse +17306,14263958,"TERMINAL",0,0,"3\t",,terminal_output +17307,14263990,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1357,6,"g-runs",shellscript,selection_mouse +17308,14264037,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1356,7,"ig-runs",shellscript,selection_mouse +17309,14264133,"TERMINAL",0,0,"388",,terminal_output +17310,14264265,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1355,8,"big-runs",shellscript,selection_mouse +17311,14264948,"TERMINAL",0,0,"4\t",,terminal_output +17312,14265132,"TERMINAL",0,0,"499",,terminal_output +17313,14266068,"TERMINAL",0,0,"5\t",,terminal_output +17314,14266183,"TERMINAL",0,0,"58:008:00",,terminal_output +17315,14267004,"TERMINAL",0,0,"6\t",,terminal_output +17316,14267212,"TERMINAL",0,0,"611",,terminal_output +17317,14268113,"TERMINAL",0,0,"73",,terminal_output +17318,14268251,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1355,8,"c",shellscript,content +17319,14268252,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1356,0,"",shellscript,selection_keyboard +17320,14268272,"TERMINAL",0,0,"722",,terminal_output +17321,14268299,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1356,0,"o",shellscript,content +17322,14268300,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1357,0,"",shellscript,selection_keyboard +17323,14268523,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1357,0,"i",shellscript,content +17324,14268524,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1358,0,"",shellscript,selection_keyboard +17325,14268610,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1358,0,"n",shellscript,content +17326,14268611,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1359,0,"",shellscript,selection_keyboard +17327,14268804,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1359,0,"r",shellscript,content +17328,14268805,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1360,0,"",shellscript,selection_keyboard +17329,14268880,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1360,0,"u",shellscript,content +17330,14268881,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1361,0,"",shellscript,selection_keyboard +17331,14268967,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1361,0,"n",shellscript,content +17332,14268968,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1362,0,"",shellscript,selection_keyboard +17333,14269103,"TERMINAL",0,0,"84",,terminal_output +17334,14269347,"TERMINAL",0,0,"844",,terminal_output +17335,14270111,"TERMINAL",0,0,"9\t",,terminal_output +17336,14270359,"TERMINAL",0,0,"4:0055",,terminal_output +17337,14271200,"TERMINAL",0,0,"4:00\t",,terminal_output +17338,14271436,"TERMINAL",0,0,"166",,terminal_output +17339,14272209,"TERMINAL",0,0,"1\t",,terminal_output +17340,14272424,"TERMINAL",0,0,"277",,terminal_output 
+17341,14273335,"TERMINAL",0,0,"2\t",,terminal_output +17342,14273474,"TERMINAL",0,0,"388",,terminal_output +17343,14274360,"TERMINAL",0,0,"3\t",,terminal_output +17344,14274507,"TERMINAL",0,0,"499",,terminal_output +17345,14275310,"TERMINAL",0,0,"5\t",,terminal_output +17346,14275590,"TERMINAL",0,0,"51010",,terminal_output +17347,14276382,"TERMINAL",0,0,"6\t",,terminal_output +17348,14276608,"TERMINAL",0,0,"611",,terminal_output +17349,14277398,"TERMINAL",0,0,"7\t",,terminal_output +17350,14277635,"TERMINAL",0,0,"722",,terminal_output +17351,14278524,"TERMINAL",0,0,"8\t",,terminal_output +17352,14278689,"TERMINAL",0,0,"833",,terminal_output +17353,14279470,"TERMINAL",0,0,"9\t",,terminal_output +17354,14279724,"TERMINAL",0,0,"944",,terminal_output +17355,14280610,"TERMINAL",0,0,"10\t",,terminal_output +17356,14280801,"TERMINAL",0,0,"1055",,terminal_output +17357,14281652,"TERMINAL",0,0,"1\t",,terminal_output +17358,14281807,"TERMINAL",0,0,"166",,terminal_output +17359,14282653,"TERMINAL",0,0,"2\t",,terminal_output +17360,14282852,"TERMINAL",0,0,"277",,terminal_output +17361,14283140,"TERMINAL",0,0,"watch",,terminal_focus +17362,14283710,"TERMINAL",0,0,"3\t",,terminal_output +17363,14283894,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs_2",,terminal_output +17364,14284663,"TERMINAL",0,0,"4\t",,terminal_output +17365,14285286,"TERMINAL",0,0,"queue",,terminal_command +17366,14285352,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 17:24:15 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3469360 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3466286 accelerat train_to tum_cte0 R 20:08:20\t 1 hkn07363466287 accelerat train_la tum_cte0 R 20:08:20\t 1 hkn0736\t",,terminal_output +17367,14285701,"TERMINAL",0,0,"5\t",,terminal_output +17368,14286305,"TERMINAL",0,0,"bash",,terminal_focus +17369,14286562,"TERMINAL",0,0,"611",,terminal_output +17370,14286780,"TERMINAL",0,0,"6\t",,terminal_output +17371,14287433,"TERMINAL",0,0,"722",,terminal_output +17372,14287799,"TERMINAL",0,0,"7\t",,terminal_output +17373,14288464,"TERMINAL",0,0,"833",,terminal_output +17374,14288909,"TERMINAL",0,0,"8\t",,terminal_output +17375,14289091,"TERMINAL",0,0,"cd $ws_dir",,terminal_command +17376,14289201,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared",,terminal_output +17377,14289505,"TERMINAL",0,0,"944",,terminal_output +17378,14289885,"TERMINAL",0,0,"9\t",,terminal_output +17379,14290558,"TERMINAL",0,0,"2055",,terminal_output +17380,14290661,"TERMINAL",0,0,"cd checkpoints/",,terminal_command +17381,14290766,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints",,terminal_output +17382,14290898,"TERMINAL",0,0,"20\t",,terminal_output +17383,14290995,"TERMINAL",0,0,"ls",,terminal_command +17384,14291053,"TERMINAL",0,0,"]633;C",,terminal_output +17385,14291182,"TERMINAL",0,0,"0000 3292221 3292339 3297576 3299065 3301025 3310437 causal maskgit-maskprob-fix train_dyn_new_arch-bugfixed-temporal-shift\r\n3290283 3292258 3294600 3297577 3299066 3301026 3311671 causal-fr maskgit-speedrun train_dyn_yolorun_new_arch\r\n3290284 3292328 3294601 3297578 3299068 3301027 3311672 causal-fr-flashattn mihir train_lam_minecraft_overfit_sample\r\n3290295 3292329 3294602 3297582 3299069 3301029 3313562 checkpoints_alfred tokenizer train_tokenizer_batch_size_scaling_16_node\r\n3290296 3292330 3294603 3297586 3299258 3301030 3313563 coinrun 
tokenizer_ckpt_dir train_tokenizer_minecraft_overfit_sample\r\n3290366 3292331 3296502 3297606 3299259 3301031 3313564 debug train_dynamics_lr_schedule_const wrap\r\n3290367 3292332 3296540 3297671 3299272 3306801 3313565 dyn train_dynamics_lr_schedule_cos\r\n3290391 3292333 3296571 3297693 3299579 3307618 3313570 dynamics_ckpt_dir train_dynamics_lr_schedule_wsd\r\n3290392 3292334 3296573 3297706 3300233 3307619 3313571 interactive train_dyn_causal_180M\r\n3290439 3292335 3296574 3297727 3300290 3309662 3313572 lam train_dyn_causal_255M\r\n3290440 3292336 3296575 3299016 3300658 3309663 3316022 lam-1-action train_dyn_causal_356M\r\n3291405 3292337 3297569 3299062 3300663 3309699 ali lam_ckpt_dir train_dyn_causal_500M\r\n3292213 3292338 3297575 3299063 3300672 3310436 big-runs lam_main_test train_dyn_new_arch-bugfixed-spatial-shift\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints",,terminal_output +17386,14291598,"TERMINAL",0,0,"166",,terminal_output +17387,14291939,"TERMINAL",0,0,"1\t",,terminal_output +17388,14292679,"TERMINAL",0,0,"277",,terminal_output +17389,14293016,"TERMINAL",0,0,"2\t",,terminal_output +17390,14293675,"TERMINAL",0,0,"388",,terminal_output +17391,14294010,"TERMINAL",0,0,"3\t",,terminal_output +17392,14294723,"TERMINAL",0,0,"499",,terminal_output +17393,14295149,"TERMINAL",0,0,"4\t",,terminal_output +17394,14295820,"TERMINAL",0,0,"53030",,terminal_output +17395,14295970,"TERMINAL",0,0,"cd coinrun/",,terminal_command +17396,14296061,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun",,terminal_output +17397,14296126,"TERMINAL",0,0,"5\t",,terminal_output +17398,14296360,"TERMINAL",0,0,"ls",,terminal_command +17399,14296440,"TERMINAL",0,0,"]633;Ccausal dynamics_ckpt_dir maskgit\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun",,terminal_output +17400,14296816,"TERMINAL",0,0,"611",,terminal_output +17401,14297130,"TERMINAL",0,0,"6\t",,terminal_output +17402,14297852,"TERMINAL",0,0,"722",,terminal_output +17403,14298228,"TERMINAL",0,0,"7\t",,terminal_output +17404,14298958,"TERMINAL",0,0,"833",,terminal_output +17405,14299262,"TERMINAL",0,0,"8\t",,terminal_output +17406,14300061,"TERMINAL",0,0,"944",,terminal_output +17407,14300241,"TERMINAL",0,0,"9\t",,terminal_output +17408,14300978,"TERMINAL",0,0,"3055",,terminal_output +17409,14301285,"TERMINAL",0,0,"30\t",,terminal_output +17410,14302036,"TERMINAL",0,0,"166",,terminal_output +17411,14302346,"TERMINAL",0,0,"2\t",,terminal_output +17412,14303116,"TERMINAL",0,0,"277",,terminal_output +17413,14303492,"TERMINAL",0,0,"3\t",,terminal_output +17414,14304163,"TERMINAL",0,0,"388",,terminal_output +17415,14304401,"TERMINAL",0,0,"4\t",,terminal_output +17416,14305150,"TERMINAL",0,0,"499",,terminal_output +17417,14305488,"TERMINAL",0,0,"5\t",,terminal_output +17418,14306186,"TERMINAL",0,0,"54040",,terminal_output +17419,14306513,"TERMINAL",0,0,"6\t",,terminal_output +17420,14307338,"TERMINAL",0,0,"611",,terminal_output +17421,14307518,"TERMINAL",0,0,"7\t",,terminal_output +17422,14308276,"TERMINAL",0,0,"733",,terminal_output +17423,14308623,"TERMINAL",0,0,"8\t",,terminal_output +17424,14309145,"TERMINAL",0,0,"cd ..",,terminal_command +17425,14309246,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints",,terminal_output +17426,14309318,"TERMINAL",0,0,"944",,terminal_output 
+17427,14309618,"TERMINAL",0,0,"9\t",,terminal_output +17428,14310310,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",0,0,"",shellscript,tab +17429,14310555,"TERMINAL",0,0,"4055",,terminal_output +17430,14310663,"TERMINAL",0,0,"40\t",,terminal_output +17431,14311407,"TERMINAL",0,0,"166",,terminal_output +17432,14311683,"TERMINAL",0,0,"1\t",,terminal_output +17433,14312008,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1355,0,"",shellscript,selection_mouse +17434,14312476,"TERMINAL",0,0,"277",,terminal_output +17435,14312564,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1355,0,"m",shellscript,content +17436,14312566,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1356,0,"",shellscript,selection_keyboard +17437,14312762,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1356,0,"i",shellscript,content +17438,14312763,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1357,0,"",shellscript,selection_keyboard +17439,14312763,"TERMINAL",0,0,"2\t",,terminal_output +17440,14312865,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1357,0,"h",shellscript,content +17441,14312866,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1358,0,"",shellscript,selection_keyboard +17442,14312990,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1358,0,"i",shellscript,content +17443,14312991,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1359,0,"",shellscript,selection_keyboard +17444,14313114,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1359,0,"r",shellscript,content +17445,14313115,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1360,0,"",shellscript,selection_keyboard +17446,14313491,"TERMINAL",0,0,"388",,terminal_output +17447,14313789,"TERMINAL",0,0,"3\t",,terminal_output +17448,14313893,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1360,0,"/",shellscript,content +17449,14313893,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1361,0,"",shellscript,selection_keyboard +17450,14314086,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",1360,0,"",shellscript,selection_command +17451,14314527,"TERMINAL",0,0,"499",,terminal_output +17452,14314803,"TERMINAL",0,0,"4\t",,terminal_output +17453,14315626,"TERMINAL",0,0,"55050",,terminal_output +17454,14315934,"TERMINAL",0,0,"5\t",,terminal_output +17455,14316621,"TERMINAL",0,0,"611",,terminal_output +17456,14316874,"TERMINAL",0,0,"6\t",,terminal_output +17457,14317675,"TERMINAL",0,0,"722",,terminal_output +17458,14317916,"TERMINAL",0,0,"7\t",,terminal_output +17459,14318702,"TERMINAL",0,0,"833",,terminal_output +17460,14318948,"TERMINAL",0,0,"8\t",,terminal_output +17461,14319732,"TERMINAL",0,0,"944",,terminal_output +17462,14319986,"TERMINAL",0,0,"9\t",,terminal_output +17463,14320774,"TERMINAL",0,0,"5055",,terminal_output +17464,14321102,"TERMINAL",0,0,"50\t",,terminal_output +17465,14321814,"TERMINAL",0,0,"166",,terminal_output +17466,14322062,"TERMINAL",0,0,"1\t",,terminal_output +17467,14322888,"TERMINAL",0,0,"277",,terminal_output +17468,14323194,"TERMINAL",0,0,"2\t",,terminal_output +17469,14323926,"TERMINAL",0,0,"388",,terminal_output +17470,14324149,"TERMINAL",0,0,"3\t",,terminal_output +17471,14326689,"TERMINAL",0,0,"49:019:01",,terminal_output +17472,14326689,"TERMINAL",0,0,"416",,terminal_output +17473,14327668,"TERMINAL",0,0,"722",,terminal_output +17474,14327680,"TERMINAL",0,0,"74",,terminal_output +17475,14328717,"TERMINAL",0,0,"833",,terminal_output 
+17476,14328720,"TERMINAL",0,0,"8\t",,terminal_output +17477,14329763,"TERMINAL",0,0,"9\t",,terminal_output +17478,14329799,"TERMINAL",0,0,"944",,terminal_output +17479,14330884,"TERMINAL",0,0,"5:00\t",,terminal_output +17480,14330885,"TERMINAL",0,0,"5:0055",,terminal_output +17481,14331835,"TERMINAL",0,0,"1\t",,terminal_output +17482,14331874,"TERMINAL",0,0,"166",,terminal_output +17483,14332879,"TERMINAL",0,0,"2\t",,terminal_output +17484,14332887,"TERMINAL",0,0,"277",,terminal_output +17485,14333990,"TERMINAL",0,0,"3\t",,terminal_output +17486,14333990,"TERMINAL",0,0,"388",,terminal_output +17487,14334988,"TERMINAL",0,0,"4\t",,terminal_output +17488,14334991,"TERMINAL",0,0,"499",,terminal_output +17489,14335993,"TERMINAL",0,0,"5\t",,terminal_output +17490,14336019,"TERMINAL",0,0,"51010",,terminal_output +17491,14337075,"TERMINAL",0,0,"6\t",,terminal_output +17492,14337075,"TERMINAL",0,0,"611",,terminal_output +17493,14338075,"TERMINAL",0,0,"7\t",,terminal_output +17494,14338101,"TERMINAL",0,0,"722",,terminal_output +17495,14339110,"TERMINAL",0,0,"8\t",,terminal_output +17496,14339142,"TERMINAL",0,0,"833",,terminal_output +17497,14340214,"TERMINAL",0,0,"9\t",,terminal_output +17498,14340218,"TERMINAL",0,0,"944",,terminal_output +17499,14341257,"TERMINAL",0,0,"10\t",,terminal_output +17500,14341259,"TERMINAL",0,0,"1055",,terminal_output +17501,14342237,"TERMINAL",0,0,"1\t",,terminal_output +17502,14342269,"TERMINAL",0,0,"166",,terminal_output +17503,14343261,"TERMINAL",0,0,"2\t",,terminal_output +17504,14343308,"TERMINAL",0,0,"288",,terminal_output +17505,14344382,"TERMINAL",0,0,"499",,terminal_output +17506,14344383,"TERMINAL",0,0,"35",,terminal_output +17507,14344637,"TERMINAL",0,0,"watch",,terminal_focus +17508,14345428,"TERMINAL",0,0,"52020",,terminal_output +17509,14345428,"TERMINAL",0,0,"5\t",,terminal_output +17510,14346552,"TERMINAL",0,0,"611",,terminal_output +17511,14346552,"TERMINAL",0,0,"6\t",,terminal_output +17512,14347576,"TERMINAL",0,0,"7\t",,terminal_output +17513,14347623,"TERMINAL",0,0,"722",,terminal_output +17514,14348608,"TERMINAL",0,0,"8\t",,terminal_output +17515,14348608,"TERMINAL",0,0,"833",,terminal_output +17516,14349550,"TERMINAL",0,0,"9\t",,terminal_output +17517,14349592,"TERMINAL",0,0,"944",,terminal_output +17518,14350655,"TERMINAL",0,0,"20\t",,terminal_output +17519,14350661,"TERMINAL",0,0,"2055",,terminal_output +17520,14351698,"TERMINAL",0,0,"1\t",,terminal_output +17521,14351728,"TERMINAL",0,0,"166",,terminal_output +17522,14352708,"TERMINAL",0,0,"2\t",,terminal_output +17523,14352709,"TERMINAL",0,0,"277",,terminal_output +17524,14353704,"TERMINAL",0,0,"3\t",,terminal_output +17525,14353744,"TERMINAL",0,0,"388",,terminal_output +17526,14354732,"TERMINAL",0,0,"4\t",,terminal_output +17527,14354753,"TERMINAL",0,0,"499",,terminal_output +17528,14355780,"TERMINAL",0,0,"5\t",,terminal_output +17529,14355797,"TERMINAL",0,0,"53030",,terminal_output +17530,14357121,"TERMINAL",0,0,"6\t",,terminal_output +17531,14357121,"TERMINAL",0,0,"611",,terminal_output +17532,14357845,"TERMINAL",0,0,"7\t",,terminal_output +17533,14357886,"TERMINAL",0,0,"722",,terminal_output +17534,14358884,"TERMINAL",0,0,"8\t",,terminal_output +17535,14358969,"TERMINAL",0,0,"833",,terminal_output +17536,14359427,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base copy.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=2-00:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH 
--output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_37M\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/coinrun/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=24 \\n --image_height=64 \\n --image_width=64 \\n --max_lr=3e-6 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ""coinrun lam_ablation"" \\n --entity instant-uv \\n --project jafar \\n --lr_schedule wsd \\n --val_data_dir $array_records_dir_val \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +17537,14359945,"TERMINAL",0,0,"9\t",,terminal_output +17538,14359966,"TERMINAL",0,0,"944",,terminal_output +17539,14360967,"TERMINAL",0,0,"30\t",,terminal_output +17540,14361019,"TERMINAL",0,0,"3055",,terminal_output +17541,14362000,"TERMINAL",0,0,"1\t",,terminal_output +17542,14362084,"TERMINAL",0,0,"166",,terminal_output +17543,14363052,"TERMINAL",0,0,"2\t",,terminal_output +17544,14363133,"TERMINAL",0,0,"277",,terminal_output +17545,14364062,"TERMINAL",0,0,"3\t",,terminal_output +17546,14364119,"TERMINAL",0,0,"388",,terminal_output +17547,14365133,"TERMINAL",0,0,"4\t",,terminal_output +17548,14365542,"TERMINAL",0,0,"499",,terminal_output +17549,14366145,"TERMINAL",0,0,"5\t",,terminal_output +17550,14366198,"TERMINAL",0,0,"54040",,terminal_output +17551,14367281,"TERMINAL",0,0,"6\t",,terminal_output +17552,14367287,"TERMINAL",0,0,"611",,terminal_output +17553,14368230,"TERMINAL",0,0,"7\t",,terminal_output +17554,14368282,"TERMINAL",0,0,"733",,terminal_output +17555,14369290,"TERMINAL",0,0,"8\t",,terminal_output +17556,14369333,"TERMINAL",0,0,"944",,terminal_output +17557,14370302,"TERMINAL",0,0,"9\t",,terminal_output +17558,14370362,"TERMINAL",0,0,"4055",,terminal_output +17559,14371444,"TERMINAL",0,0,"41\t",,terminal_output +17560,14371444,"TERMINAL",0,0,"166",,terminal_output +17561,14372468,"TERMINAL",0,0,"2\t",,terminal_output +17562,14372469,"TERMINAL",0,0,"277",,terminal_output 
+17563,14373421,"TERMINAL",0,0,"3\t",,terminal_output +17564,14373519,"TERMINAL",0,0,"388",,terminal_output +17565,14374582,"TERMINAL",0,0,"4\t",,terminal_output +17566,14374582,"TERMINAL",0,0,"499",,terminal_output +17567,14374779,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=2-00:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_37M\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/coinrun/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=24 \\n --image_height=64 \\n --image_width=64 \\n --max_lr=3e-6 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ""coinrun lam_ablation"" \\n --entity instant-uv \\n --project jafar \\n --lr_schedule wsd \\n --val_data_dir $array_records_dir_val \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +17568,14375523,"TERMINAL",0,0,"5\t",,terminal_output +17569,14375576,"TERMINAL",0,0,"55050",,terminal_output +17570,14376572,"TERMINAL",0,0,"6\t",,terminal_output +17571,14376634,"TERMINAL",0,0,"611",,terminal_output +17572,14377631,"TERMINAL",0,0,"7\t",,terminal_output +17573,14377725,"TERMINAL",0,0,"722",,terminal_output +17574,14378687,"TERMINAL",0,0,"8\t",,terminal_output +17575,14378712,"TERMINAL",0,0,"833",,terminal_output +17576,14379543,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\n#SBATCH 
--job-name=train_lam_minecraft_8node_darkness_filter_133M\n#SBATCH --reservation=llmtum\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\n# slurm_job_id=$SLURM_JOB_ID\nslurm_job_id=3454956\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n --restore_ckpt \\n --wandb_id $slurm_job_id \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --darkness_threshold=50 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=lam-minecraft-8-node-darkness-filter-133M-$slurm_job_id \\n --tags lam minecraft 8-node darkness-filter 133M \\n --entity instant-uv \\n --project jafar \\n --num_latents=100 \\n --model_dim=640 \\n --num_blocks=10 \\n --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +17577,14380043,"TERMINAL",0,0,"9\t",,terminal_output +17578,14380043,"TERMINAL",0,0,"944",,terminal_output +17579,14380757,"TERMINAL",0,0,"50\t",,terminal_output +17580,14380869,"TERMINAL",0,0,"5055",,terminal_output +17581,14381875,"TERMINAL",0,0,"1\t",,terminal_output +17582,14381901,"TERMINAL",0,0,"166",,terminal_output +17583,14382805,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",0,0,"",shellscript,tab +17584,14383180,"TERMINAL",0,0,"2\t",,terminal_output +17585,14383180,"TERMINAL",0,0,"277",,terminal_output +17586,14383865,"TERMINAL",0,0,"3\t",,terminal_output +17587,14383947,"TERMINAL",0,0,"388",,terminal_output +17588,14384900,"TERMINAL",0,0,"4\t",,terminal_output +17589,14384986,"TERMINAL",0,0,"499",,terminal_output +17590,14386048,"TERMINAL",0,0,"53",,terminal_output +17591,14386089,"TERMINAL",0,0,"510:0010:00",,terminal_output +17592,14386974,"TERMINAL",0,0,"6\t",,terminal_output +17593,14387026,"TERMINAL",0,0,"611",,terminal_output +17594,14387981,"TERMINAL",0,0,"7\t",,terminal_output +17595,14388068,"TERMINAL",0,0,"722",,terminal_output +17596,14389072,"TERMINAL",0,0,"8\t",,terminal_output +17597,14389113,"TERMINAL",0,0,"833",,terminal_output +17598,14390180,"TERMINAL",0,0,"9\t",,terminal_output +17599,14390181,"TERMINAL",0,0,"944",,terminal_output +17600,14391198,"TERMINAL",0,0,"6:00\t",,terminal_output +17601,14391198,"TERMINAL",0,0,"6:0055",,terminal_output +17602,14392226,"TERMINAL",0,0,"1\t",,terminal_output +17603,14392252,"TERMINAL",0,0,"166",,terminal_output +17604,14393246,"TERMINAL",0,0,"2\t",,terminal_output +17605,14393277,"TERMINAL",0,0,"288",,terminal_output +17606,14394270,"TERMINAL",0,0,"3\t",,terminal_output 
+17607,14394368,"TERMINAL",0,0,"499",,terminal_output +17608,14395270,"TERMINAL",0,0,"4\t",,terminal_output +17609,14395372,"TERMINAL",0,0,"51010",,terminal_output +17610,14396297,"TERMINAL",0,0,"5\t",,terminal_output +17611,14396431,"TERMINAL",0,0,"611",,terminal_output +17612,14397445,"TERMINAL",0,0,"7\t",,terminal_output +17613,14397446,"TERMINAL",0,0,"722",,terminal_output +17614,14398438,"TERMINAL",0,0,"8\t",,terminal_output +17615,14398495,"TERMINAL",0,0,"833",,terminal_output +17616,14399408,"TERMINAL",0,0,"9\t",,terminal_output +17617,14399543,"TERMINAL",0,0,"944",,terminal_output +17618,14400517,"TERMINAL",0,0,"10\t",,terminal_output +17619,14400573,"TERMINAL",0,0,"1055",,terminal_output +17620,14401493,"TERMINAL",0,0,"1\t",,terminal_output +17621,14401645,"TERMINAL",0,0,"166",,terminal_output +17622,14402526,"TERMINAL",0,0,"2\t",,terminal_output +17623,14402675,"TERMINAL",0,0,"277",,terminal_output +17624,14403707,"TERMINAL",0,0,"3\t",,terminal_output +17625,14403714,"TERMINAL",0,0,"388",,terminal_output +17626,14403971,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2123,0,"",shellscript,selection_mouse +17627,14404003,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2122,0,"",shellscript,selection_command +17628,14404657,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2123,0,"",shellscript,selection_mouse +17629,14404668,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2122,0,"",shellscript,selection_command +17630,14404669,"TERMINAL",0,0,"4\t",,terminal_output +17631,14404794,"TERMINAL",0,0,"499",,terminal_output +17632,14404839,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2122,1,"\",shellscript,selection_mouse +17633,14404844,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2123,0,"",shellscript,selection_command +17634,14404947,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2120,3,"0 \",shellscript,selection_mouse +17635,14404947,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2119,4,"60 \",shellscript,selection_mouse +17636,14404947,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2117,6,"2560 \",shellscript,selection_mouse +17637,14404948,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2116,7,"=2560 \",shellscript,selection_mouse +17638,14404948,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2114,9,"im=2560 \",shellscript,selection_mouse +17639,14405000,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2113,10,"dim=2560 \",shellscript,selection_mouse +17640,14405034,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2112,11,"_dim=2560 \",shellscript,selection_mouse +17641,14405038,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2111,12,"n_dim=2560 \",shellscript,selection_mouse +17642,14405057,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2110,13,"fn_dim=2560 \",shellscript,selection_mouse 
+17643,14405078,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2109,14,"ffn_dim=2560 \",shellscript,selection_mouse +17644,14405095,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2086,37,"-latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17645,14405117,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2084,39," --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17646,14405143,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2083,40," --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17647,14405165,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2082,41," --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17648,14405183,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2081,42," --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17649,14405209,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2060,63," --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17650,14405310,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2038,85," --num_blocks=10 \\n --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17651,14405352,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2016,107," --model_dim=640 \\n --num_blocks=10 \\n --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17652,14405494,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",1992,131," --num_latents=100 \\n --model_dim=640 \\n --num_blocks=10 \\n --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17653,14405642,"TERMINAL",0,0,"5\t",,terminal_output +17654,14405791,"TERMINAL",0,0,"52020",,terminal_output +17655,14405915,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2016,107," --model_dim=640 \\n --num_blocks=10 \\n --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17656,14406681,"TERMINAL",0,0,"6\t",,terminal_output +17657,14406841,"TERMINAL",0,0,"611",,terminal_output +17658,14407724,"TERMINAL",0,0,"7\t",,terminal_output +17659,14407878,"TERMINAL",0,0,"722",,terminal_output +17660,14408753,"TERMINAL",0,0,"8\t",,terminal_output +17661,14408929,"TERMINAL",0,0,"833",,terminal_output +17662,14409201,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2119,0,"",shellscript,selection_mouse +17663,14409576,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2123,0,"",shellscript,selection_mouse +17664,14409578,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2122,0,"",shellscript,selection_command +17665,14409735,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2122,1,"\",shellscript,selection_mouse +17666,14409736,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2121,1," ",shellscript,selection_mouse 
+17667,14409736,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2093,29,"_dim=32 \\n --ffn_dim=2560 ",shellscript,selection_mouse +17668,14409736,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2090,32,"ent_dim=32 \\n --ffn_dim=2560 ",shellscript,selection_mouse +17669,14409737,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2088,34,"atent_dim=32 \\n --ffn_dim=2560 ",shellscript,selection_mouse +17670,14409737,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2087,35,"latent_dim=32 \\n --ffn_dim=2560 ",shellscript,selection_mouse +17671,14409753,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2085,37,"--latent_dim=32 \\n --ffn_dim=2560 ",shellscript,selection_mouse +17672,14409754,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2123,0,"",shellscript,selection_command +17673,14409791,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2084,39," --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17674,14409877,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2062,61," --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17675,14409932,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2040,83," --num_blocks=10 \\n --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17676,14409933,"TERMINAL",0,0,"9\t",,terminal_output +17677,14409997,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2018,105," --model_dim=640 \\n --num_blocks=10 \\n --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17678,14410027,"TERMINAL",0,0,"944",,terminal_output +17679,14410510,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2017,106," --model_dim=640 \\n --num_blocks=10 \\n --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17680,14410596,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch",2016,107," --model_dim=640 \\n --num_blocks=10 \\n --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17681,14410832,"TERMINAL",0,0,"20\t",,terminal_output +17682,14411013,"TERMINAL",0,0,"2055",,terminal_output +17683,14411876,"TERMINAL",0,0,"1\t",,terminal_output +17684,14412048,"TERMINAL",0,0,"166",,terminal_output +17685,14412903,"TERMINAL",0,0,"2\t",,terminal_output +17686,14413185,"TERMINAL",0,0,"277",,terminal_output +17687,14414038,"TERMINAL",0,0,"3\t",,terminal_output +17688,14414143,"TERMINAL",0,0,"388",,terminal_output +17689,14414991,"TERMINAL",0,0,"46",,terminal_output +17690,14415175,"TERMINAL",0,0,"499",,terminal_output +17691,14416015,"TERMINAL",0,0,"5\t",,terminal_output +17692,14416213,"TERMINAL",0,0,"53030",,terminal_output +17693,14417097,"TERMINAL",0,0,"6\t",,terminal_output +17694,14417252,"TERMINAL",0,0,"611",,terminal_output +17695,14418129,"TERMINAL",0,0,"7\t",,terminal_output +17696,14418294,"TERMINAL",0,0,"733",,terminal_output +17697,14419161,"TERMINAL",0,0,"8\t",,terminal_output +17698,14419336,"TERMINAL",0,0,"944",,terminal_output +17699,14420282,"TERMINAL",0,0,"9\t",,terminal_output 
+17700,14420456,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",0,0,"",shellscript,tab +17701,14420457,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2160,0,"",shellscript,selection_mouse +17702,14420519,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2159,0,"",shellscript,selection_command +17703,14420520,"TERMINAL",0,0,"3055",,terminal_output +17704,14421218,"TERMINAL",0,0,"30\t",,terminal_output +17705,14421542,"TERMINAL",0,0,"166",,terminal_output +17706,14422259,"TERMINAL",0,0,"1\t",,terminal_output +17707,14422581,"TERMINAL",0,0,"277",,terminal_output +17708,14422682,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2160,0,"\n --val_data_dir $array_records_dir_val \",shellscript,content +17709,14422728,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2165,0,"",shellscript,selection_command +17710,14423289,"TERMINAL",0,0,"2\t",,terminal_output +17711,14423679,"TERMINAL",0,0,"388",,terminal_output +17712,14423998,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2211,44,"",shellscript,content +17713,14424033,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2159,0,"",shellscript,selection_command +17714,14424282,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2160,0,"\n ",shellscript,content +17715,14424402,"TERMINAL",0,0,"4\t",,terminal_output +17716,14424653,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2165,0," --model_dim=640 \\n --num_blocks=10 \\n --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,content +17717,14424701,"TERMINAL",0,0,"499",,terminal_output +17718,14425370,"TERMINAL",0,0,"5\t",,terminal_output +17719,14425701,"TERMINAL",0,0,"54040",,terminal_output +17720,14425820,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2169,0,"",shellscript,selection_mouse +17721,14426209,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2165,4,"",shellscript,content +17722,14426440,"TERMINAL",0,0,"6\t",,terminal_output +17723,14426769,"TERMINAL",0,0,"611",,terminal_output +17724,14427482,"TERMINAL",0,0,"7\t",,terminal_output +17725,14427788,"TERMINAL",0,0,"722",,terminal_output +17726,14428506,"TERMINAL",0,0,"8\t",,terminal_output +17727,14428881,"TERMINAL",0,0,"833",,terminal_output +17728,14429515,"TERMINAL",0,0,"9\t",,terminal_output +17729,14429932,"TERMINAL",0,0,"944",,terminal_output +17730,14430579,"TERMINAL",0,0,"40\t",,terminal_output +17731,14430933,"TERMINAL",0,0,"4055",,terminal_output +17732,14431629,"TERMINAL",0,0,"1\t",,terminal_output +17733,14432119,"TERMINAL",0,0,"166",,terminal_output +17734,14432672,"TERMINAL",0,0,"2\t",,terminal_output +17735,14432980,"TERMINAL",0,0,"277",,terminal_output +17736,14433696,"TERMINAL",0,0,"3\t",,terminal_output +17737,14434109,"TERMINAL",0,0,"388",,terminal_output +17738,14434718,"TERMINAL",0,0,"4\t",,terminal_output +17739,14435128,"TERMINAL",0,0,"499",,terminal_output +17740,14435735,"TERMINAL",0,0,"5\t",,terminal_output +17741,14436103,"TERMINAL",0,0,"55050",,terminal_output +17742,14436778,"TERMINAL",0,0,"6\t",,terminal_output +17743,14437145,"TERMINAL",0,0,"611",,terminal_output +17744,14437824,"TERMINAL",0,0,"7\t",,terminal_output +17745,14438209,"TERMINAL",0,0,"722",,terminal_output +17746,14438850,"TERMINAL",0,0,"8\t",,terminal_output +17747,14439326,"TERMINAL",0,0,"833",,terminal_output +17748,14439899,"TERMINAL",0,0,"9\t",,terminal_output +17749,14440284,"TERMINAL",0,0,"955",,terminal_output +17750,14440935,"TERMINAL",0,0,"50\t",,terminal_output 
+17751,14441337,"TERMINAL",0,0,"5166",,terminal_output +17752,14442091,"TERMINAL",0,0,"1\t",,terminal_output +17753,14442378,"TERMINAL",0,0,"277",,terminal_output +17754,14443115,"TERMINAL",0,0,"2\t",,terminal_output +17755,14443526,"TERMINAL",0,0,"388",,terminal_output +17756,14444152,"TERMINAL",0,0,"3\t",,terminal_output +17757,14444469,"TERMINAL",0,0,"499",,terminal_output +17758,14445083,"TERMINAL",0,0,"4\t",,terminal_output +17759,14445610,"TERMINAL",0,0,"51:001:00",,terminal_output +17760,14446131,"TERMINAL",0,0,"5\t",,terminal_output +17761,14446602,"TERMINAL",0,0,"611",,terminal_output +17762,14447227,"TERMINAL",0,0,"6\t",,terminal_output +17763,14449559,"TERMINAL",0,0,"744",,terminal_output +17764,14449570,"TERMINAL",0,0,"75",,terminal_output +17765,14450603,"TERMINAL",0,0,"7:0055",,terminal_output +17766,14450610,"TERMINAL",0,0,"7:004",,terminal_output +17767,14451726,"TERMINAL",0,0,"1\t",,terminal_output +17768,14451726,"TERMINAL",0,0,"166",,terminal_output +17769,14452683,"TERMINAL",0,0,"2\t",,terminal_output +17770,14452723,"TERMINAL",0,0,"277",,terminal_output +17771,14453760,"TERMINAL",0,0,"3\t",,terminal_output +17772,14453800,"TERMINAL",0,0,"388",,terminal_output +17773,14454760,"TERMINAL",0,0,"4\t",,terminal_output +17774,14454772,"TERMINAL",0,0,"499",,terminal_output +17775,14455834,"TERMINAL",0,0,"5\t",,terminal_output +17776,14455835,"TERMINAL",0,0,"51010",,terminal_output +17777,14456837,"TERMINAL",0,0,"6\t",,terminal_output +17778,14456855,"TERMINAL",0,0,"611",,terminal_output +17779,14457876,"TERMINAL",0,0,"7\t",,terminal_output +17780,14457897,"TERMINAL",0,0,"722",,terminal_output +17781,14458910,"TERMINAL",0,0,"8\t",,terminal_output +17782,14458939,"TERMINAL",0,0,"833",,terminal_output +17783,14459948,"TERMINAL",0,0,"9\t",,terminal_output +17784,14459976,"TERMINAL",0,0,"944",,terminal_output +17785,14461036,"TERMINAL",0,0,"10\t",,terminal_output +17786,14461038,"TERMINAL",0,0,"1055",,terminal_output +17787,14462059,"TERMINAL",0,0,"1\t",,terminal_output +17788,14462060,"TERMINAL",0,0,"166",,terminal_output +17789,14463060,"TERMINAL",0,0,"2\t",,terminal_output +17790,14463098,"TERMINAL",0,0,"277",,terminal_output +17791,14464210,"TERMINAL",0,0,"3\t",,terminal_output +17792,14464210,"TERMINAL",0,0,"388",,terminal_output +17793,14465213,"TERMINAL",0,0,"4\t",,terminal_output +17794,14465213,"TERMINAL",0,0,"499",,terminal_output +17795,14466179,"TERMINAL",0,0,"5\t",,terminal_output +17796,14466214,"TERMINAL",0,0,"52020",,terminal_output +17797,14467282,"TERMINAL",0,0,"6\t",,terminal_output +17798,14467323,"TERMINAL",0,0,"611",,terminal_output +17799,14468303,"TERMINAL",0,0,"7\t",,terminal_output +17800,14468312,"TERMINAL",0,0,"733",,terminal_output +17801,14469339,"TERMINAL",0,0,"8\t",,terminal_output +17802,14469382,"TERMINAL",0,0,"944",,terminal_output +17803,14470354,"TERMINAL",0,0,"20\t",,terminal_output +17804,14470379,"TERMINAL",0,0,"2055",,terminal_output +17805,14471480,"TERMINAL",0,0,"1\t",,terminal_output +17806,14471481,"TERMINAL",0,0,"166",,terminal_output +17807,14472509,"TERMINAL",0,0,"2\t",,terminal_output +17808,14472510,"TERMINAL",0,0,"277",,terminal_output +17809,14473528,"TERMINAL",0,0,"3\t",,terminal_output +17810,14473528,"TERMINAL",0,0,"388",,terminal_output +17811,14474472,"TERMINAL",0,0,"4\t",,terminal_output +17812,14474612,"TERMINAL",0,0,"499",,terminal_output +17813,14475602,"TERMINAL",0,0,"5\t",,terminal_output +17814,14475645,"TERMINAL",0,0,"53030",,terminal_output +17815,14476542,"TERMINAL",0,0,"6\t",,terminal_output 
+17816,14476706,"TERMINAL",0,0,"611",,terminal_output +17817,14477616,"TERMINAL",0,0,"7\t",,terminal_output +17818,14477686,"TERMINAL",0,0,"722",,terminal_output +17819,14478656,"TERMINAL",0,0,"8\t",,terminal_output +17820,14478824,"TERMINAL",0,0,"833",,terminal_output +17821,14479672,"TERMINAL",0,0,"9\t",,terminal_output +17822,14479824,"TERMINAL",0,0,"944",,terminal_output +17823,14480702,"TERMINAL",0,0,"30\t",,terminal_output +17824,14480840,"TERMINAL",0,0,"3055",,terminal_output +17825,14481731,"TERMINAL",0,0,"1\t",,terminal_output +17826,14481876,"TERMINAL",0,0,"166",,terminal_output +17827,14482771,"TERMINAL",0,0,"2\t",,terminal_output +17828,14482949,"TERMINAL",0,0,"277",,terminal_output +17829,14483809,"TERMINAL",0,0,"3\t",,terminal_output +17830,14483987,"TERMINAL",0,0,"388",,terminal_output +17831,14484618,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=2-00:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_37M\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/coinrun/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=24 \\n --image_height=64 \\n --image_width=64 \\n --max_lr=3e-6 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ""coinrun lam_ablation"" \\n --entity instant-uv \\n --project jafar \\n --lr_schedule wsd \\n --model_dim=640 \\n --num_blocks=10 \\n --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \\n --val_data_dir $array_records_dir_val \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +17832,14484939,"TERMINAL",0,0,"4\t",,terminal_output +17833,14485013,"TERMINAL",0,0,"499",,terminal_output +17834,14486082,"TERMINAL",0,0,"5\t",,terminal_output +17835,14486087,"TERMINAL",0,0,"54040",,terminal_output 
+17836,14486929,"TERMINAL",0,0,"6\t",,terminal_output +17837,14487147,"TERMINAL",0,0,"611",,terminal_output +17838,14487968,"TERMINAL",0,0,"7\t",,terminal_output +17839,14488163,"TERMINAL",0,0,"722",,terminal_output +17840,14489258,"TERMINAL",0,0,"8\t",,terminal_output +17841,14489279,"TERMINAL",0,0,"833",,terminal_output +17842,14490120,"TERMINAL",0,0,"9\t",,terminal_output +17843,14490204,"TERMINAL",0,0,"944",,terminal_output +17844,14491092,"TERMINAL",0,0,"40\t",,terminal_output +17845,14491249,"TERMINAL",0,0,"4055",,terminal_output +17846,14492168,"TERMINAL",0,0,"1\t",,terminal_output +17847,14492307,"TERMINAL",0,0,"177",,terminal_output +17848,14493205,"TERMINAL",0,0,"2\t",,terminal_output +17849,14493351,"TERMINAL",0,0,"388",,terminal_output +17850,14493803,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=2-00:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_37M\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/coinrun/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=24 \\n --image_height=64 \\n --image_width=64 \\n --max_lr=3e-6 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ""coinrun lam_ablation"" \\n --entity instant-uv \\n --project jafar \\n --lr_schedule wsd \\n --model_dim=640 \\n --num_blocks=10 \\n --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \\n --val_data_dir $array_records_dir_val \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +17851,14494331,"TERMINAL",0,0,"3\t",,terminal_output +17852,14494385,"TERMINAL",0,0,"499",,terminal_output +17853,14495099,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",433,0,"",shellscript,selection_mouse 
+17854,14495130,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",432,0,"",shellscript,selection_command +17855,14495260,"TERMINAL",0,0,"4\t",,terminal_output +17856,14495475,"TERMINAL",0,0,"55050",,terminal_output +17857,14496279,"TERMINAL",0,0,"5\t",,terminal_output +17858,14496479,"TERMINAL",0,0,"611",,terminal_output +17859,14497111,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",431,1,"",shellscript,content +17860,14497317,"TERMINAL",0,0,"7\t",,terminal_output +17861,14497492,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",431,0,"1",shellscript,content +17862,14497492,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",432,0,"",shellscript,selection_keyboard +17863,14497576,"TERMINAL",0,0,"722",,terminal_output +17864,14497631,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",432,0,"1",shellscript,content +17865,14497632,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",433,0,"",shellscript,selection_keyboard +17866,14498367,"TERMINAL",0,0,"8\t",,terminal_output +17867,14498567,"TERMINAL",0,0,"833",,terminal_output +17868,14499113,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",0,0,"",shellscript,tab +17869,14499450,"TERMINAL",0,0,"9\t",,terminal_output +17870,14499652,"TERMINAL",0,0,"944",,terminal_output +17871,14500473,"TERMINAL",0,0,"50\t",,terminal_output +17872,14500684,"TERMINAL",0,0,"5055",,terminal_output +17873,14501227,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",432,0,"",shellscript,selection_mouse +17874,14501476,"TERMINAL",0,0,"1\t",,terminal_output +17875,14501713,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",431,1,"",shellscript,content +17876,14501761,"TERMINAL",0,0,"166",,terminal_output +17877,14501841,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",430,1,"",shellscript,content +17878,14501934,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",430,0,"1",shellscript,content +17879,14501935,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",431,0,"",shellscript,selection_keyboard +17880,14502284,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",431,0,"1",shellscript,content +17881,14502285,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",432,0,"",shellscript,selection_keyboard +17882,14502517,"TERMINAL",0,0,"2\t",,terminal_output +17883,14502742,"TERMINAL",0,0,"277",,terminal_output +17884,14502799,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",432,0,"3",shellscript,content +17885,14502800,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",433,0,"",shellscript,selection_keyboard +17886,14503639,"TERMINAL",0,0,"3\t",,terminal_output +17887,14503791,"TERMINAL",0,0,"388",,terminal_output +17888,14504688,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",0,0,"",shellscript,tab +17889,14504831,"TERMINAL",0,0,"4\t",,terminal_output +17890,14504870,"TERMINAL",0,0,"499",,terminal_output +17891,14505700,"TERMINAL",0,0,"5\t",,terminal_output +17892,14505910,"TERMINAL",0,0,"52:002:00",,terminal_output +17893,14506702,"TERMINAL",0,0,"6\t",,terminal_output +17894,14506981,"TERMINAL",0,0,"611",,terminal_output +17895,14507714,"TERMINAL",0,0,"7\t",,terminal_output +17896,14507970,"TERMINAL",0,0,"722",,terminal_output +17897,14508745,"TERMINAL",0,0,"8\t",,terminal_output +17898,14509032,"TERMINAL",0,0,"833",,terminal_output +17899,14509818,"TERMINAL",0,0,"9\t",,terminal_output +17900,14510060,"TERMINAL",0,0,"944",,terminal_output 
+17901,14511007,"TERMINAL",0,0,"8:00\t",,terminal_output +17902,14511385,"TERMINAL",0,0,"8:0055",,terminal_output +17903,14511871,"TERMINAL",0,0,"1\t",,terminal_output +17904,14512245,"TERMINAL",0,0,"166",,terminal_output +17905,14513191,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\n#SBATCH --job-name=train_lam_minecraft_8node_darkness_filter_311M\n#SBATCH --reservation=llmtum\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\n# slurm_job_id=$SLURM_JOB_ID\nslurm_job_id=3454955\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n --restore_ckpt \\n --wandb_id $slurm_job_id \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --darkness_threshold=50 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=lam-minecraft-8-node-darkness-filter-311M-$slurm_job_id \\n --tags lam minecraft 8-node darkness-filter 311M \\n --entity instant-uv \\n --project jafar \\n --num_latents=100 \\n --model_dim=896 \\n --num_blocks=12 \\n --num_heads=14 \\n --latent_dim=48 \\n --ffn_dim=3584 \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +17906,14513359,"TERMINAL",0,0,"2\t",,terminal_output +17907,14513359,"TERMINAL",0,0,"277",,terminal_output +17908,14514015,"TERMINAL",0,0,"3\t",,terminal_output +17909,14514330,"TERMINAL",0,0,"388",,terminal_output +17910,14514758,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",0,0,"",shellscript,tab +17911,14515029,"TERMINAL",0,0,"4\t",,terminal_output +17912,14515570,"TERMINAL",0,0,"41010",,terminal_output +17913,14516183,"TERMINAL",0,0,"5\t",,terminal_output +17914,14516493,"TERMINAL",0,0,"611",,terminal_output +17915,14516925,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2123,0,"",shellscript,selection_mouse +17916,14516941,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2122,0,"",shellscript,selection_command +17917,14517080,"TERMINAL",0,0,"6\t",,terminal_output 
+17918,14517139,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2122,1,"\",shellscript,selection_mouse +17919,14517230,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2123,0,"",shellscript,selection_command +17920,14517231,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2121,2," \",shellscript,selection_mouse +17921,14517231,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2096,27,"m=48 \\n --ffn_dim=3584 \",shellscript,selection_mouse +17922,14517232,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2094,29,"dim=48 \\n --ffn_dim=3584 \",shellscript,selection_mouse +17923,14517342,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2093,30,"_dim=48 \\n --ffn_dim=3584 \",shellscript,selection_mouse +17924,14517342,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2070,53,"heads=14 \\n --latent_dim=48 \\n --ffn_dim=3584 \",shellscript,selection_mouse +17925,14517343,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2069,54,"_heads=14 \\n --latent_dim=48 \\n --ffn_dim=3584 \",shellscript,selection_mouse +17926,14517343,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2068,55,"m_heads=14 \\n --latent_dim=48 \\n --ffn_dim=3584 \",shellscript,selection_mouse +17927,14517343,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2067,56,"um_heads=14 \\n --latent_dim=48 \\n --ffn_dim=3584 \",shellscript,selection_mouse +17928,14517377,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2044,79,"num_blocks=12 \\n --num_heads=14 \\n --latent_dim=48 \\n --ffn_dim=3584 \",shellscript,selection_mouse +17929,14517402,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2043,80,"-num_blocks=12 \\n --num_heads=14 \\n --latent_dim=48 \\n --ffn_dim=3584 \",shellscript,selection_mouse +17930,14517427,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2042,81,"--num_blocks=12 \\n --num_heads=14 \\n --latent_dim=48 \\n --ffn_dim=3584 \",shellscript,selection_mouse +17931,14517462,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2041,82," --num_blocks=12 \\n --num_heads=14 \\n --latent_dim=48 \\n --ffn_dim=3584 \",shellscript,selection_mouse +17932,14517509,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2040,83," --num_blocks=12 \\n --num_heads=14 \\n --latent_dim=48 \\n --ffn_dim=3584 \",shellscript,selection_mouse +17933,14517510,"TERMINAL",0,0,"722",,terminal_output +17934,14517584,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2039,84," --num_blocks=12 \\n --num_heads=14 \\n --latent_dim=48 \\n --ffn_dim=3584 \",shellscript,selection_mouse +17935,14517621,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2038,85," --num_blocks=12 \\n --num_heads=14 \\n --latent_dim=48 \\n --ffn_dim=3584 \",shellscript,selection_mouse +17936,14517631,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch",2016,107," --model_dim=896 \\n --num_blocks=12 
\\n --num_heads=14 \\n --latent_dim=48 \\n --ffn_dim=3584 \",shellscript,selection_mouse +17937,14518100,"TERMINAL",0,0,"7\t",,terminal_output +17938,14518436,"TERMINAL",0,0,"833",,terminal_output +17939,14519153,"TERMINAL",0,0,"8\t",,terminal_output +17940,14519509,"TERMINAL",0,0,"944",,terminal_output +17941,14520180,"TERMINAL",0,0,"9\t",,terminal_output +17942,14520552,"TERMINAL",0,0,"1055",,terminal_output +17943,14521021,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",0,0,"",shellscript,tab +17944,14521022,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2269,0,"",shellscript,selection_mouse +17945,14521125,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2266,3,"0 \",shellscript,selection_mouse +17946,14521203,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2262,7,"=2560 \",shellscript,selection_mouse +17947,14521203,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2261,8,"m=2560 \",shellscript,selection_mouse +17948,14521204,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2260,9,"im=2560 \",shellscript,selection_mouse +17949,14521204,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2235,34,"tent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17950,14521247,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2234,35,"atent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17951,14521265,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2233,36,"latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17952,14521292,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2231,38,"--latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17953,14521303,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2230,39," --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17954,14521304,"TERMINAL",0,0,"10\t",,terminal_output +17955,14521339,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2229,40," --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17956,14521344,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2228,41," --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17957,14521386,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2227,42," --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17958,14521433,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2206,63," --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17959,14521502,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2184,85," --num_blocks=10 \\n --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17960,14521662,"TERMINAL",0,0,"166",,terminal_output +17961,14521881,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2162,107," --model_dim=640 \\n --num_blocks=10 \\n --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +17962,14522265,"TERMINAL",0,0,"1\t",,terminal_output +17963,14522587,"TERMINAL",0,0,"277",,terminal_output +17964,14523034,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2162,107,"",shellscript,content +17965,14523353,"TERMINAL",0,0,"2\t",,terminal_output +17966,14523659,"TERMINAL",0,0,"388",,terminal_output +17967,14523739,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2162,0," --model_dim=896 \\n --num_blocks=12 \\n --num_heads=14 \\n --latent_dim=48 \\n --ffn_dim=3584 \",shellscript,content 
+17968,14524392,"TERMINAL",0,0,"4\t",,terminal_output +17969,14524663,"TERMINAL",0,0,"499",,terminal_output +17970,14525375,"TERMINAL",0,0,"5\t",,terminal_output +17971,14526223,"TERMINAL",0,0,"52020",,terminal_output +17972,14526414,"TERMINAL",0,0,"6\t",,terminal_output +17973,14526791,"TERMINAL",0,0,"611",,terminal_output +17974,14527451,"TERMINAL",0,0,"7\t",,terminal_output +17975,14527796,"TERMINAL",0,0,"722",,terminal_output +17976,14528482,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2076,0,"",shellscript,selection_mouse +17977,14528518,"TERMINAL",0,0,"8\t",,terminal_output +17978,14528659,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2074,12,"lam_ablation",shellscript,selection_mouse +17979,14528846,"TERMINAL",0,0,"833",,terminal_output +17980,14529651,"TERMINAL",0,0,"9\t",,terminal_output +17981,14529883,"TERMINAL",0,0,"944",,terminal_output +17982,14530246,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2074,12,"l",shellscript,content +17983,14530247,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2075,0,"",shellscript,selection_keyboard +17984,14530426,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2075,0,"a",shellscript,content +17985,14530427,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2076,0,"",shellscript,selection_keyboard +17986,14530485,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2076,0,"m",shellscript,content +17987,14530486,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2077,0,"",shellscript,selection_keyboard +17988,14530576,"TERMINAL",0,0,"20\t",,terminal_output +17989,14530911,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2077,0,"_",shellscript,content +17990,14530912,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2078,0,"",shellscript,selection_keyboard +17991,14530931,"TERMINAL",0,0,"2055",,terminal_output +17992,14531144,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2078,0,"s",shellscript,content +17993,14531145,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2079,0,"",shellscript,selection_keyboard +17994,14531277,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2079,0,"i",shellscript,content +17995,14531278,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2080,0,"",shellscript,selection_keyboard +17996,14531398,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2080,0,"z",shellscript,content +17997,14531399,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2081,0,"",shellscript,selection_keyboard +17998,14531503,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2081,0,"e",shellscript,content +17999,14531504,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2082,0,"",shellscript,selection_keyboard +18000,14531577,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2082,0,"d",shellscript,content +18001,14531577,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2083,0,"",shellscript,selection_keyboard +18002,14531662,"TERMINAL",0,0,"1\t",,terminal_output +18003,14531975,"TERMINAL",0,0,"166",,terminal_output +18004,14532170,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2083,0,"s",shellscript,content +18005,14532170,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2084,0,"",shellscript,selection_keyboard +18006,14532571,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2083,1,"",shellscript,content +18007,14532668,"TERMINAL",0,0,"2\t",,terminal_output 
+18008,14532692,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2082,1,"",shellscript,content +18009,14533026,"TERMINAL",0,0,"277",,terminal_output +18010,14533108,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2074,8,"",shellscript,content +18011,14533692,"TERMINAL",0,0,"3\t",,terminal_output +18012,14533769,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2074,0,"lam_size",shellscript,content +18013,14534014,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2082,0,"ds",shellscript,content +18014,14534079,"TERMINAL",0,0,"388",,terminal_output +18015,14534390,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2074,10,"lam_ablation",shellscript,content +18016,14534766,"TERMINAL",0,0,"4\t",,terminal_output +18017,14535111,"TERMINAL",0,0,"499",,terminal_output +18018,14535663,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2205,0,"",shellscript,selection_mouse +18019,14535769,"TERMINAL",0,0,"5\t",,terminal_output +18020,14536152,"TERMINAL",0,0,"53030",,terminal_output +18021,14536444,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2086,0,"",shellscript,selection_mouse +18022,14536813,"TERMINAL",0,0,"6\t",,terminal_output +18023,14536886,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2086,0," ",shellscript,content +18024,14536887,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2087,0,"",shellscript,selection_keyboard +18025,14537127,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2087,0,"m",shellscript,content +18026,14537128,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2088,0,"",shellscript,selection_keyboard +18027,14537223,"TERMINAL",0,0,"611",,terminal_output +18028,14537307,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2088,0,"o",shellscript,content +18029,14537308,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2089,0,"",shellscript,selection_keyboard +18030,14537386,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2089,0,"d",shellscript,content +18031,14537387,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2090,0,"",shellscript,selection_keyboard +18032,14537504,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2090,0,"e",shellscript,content +18033,14537505,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2091,0,"",shellscript,selection_keyboard +18034,14537561,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2091,0,"l",shellscript,content +18035,14537562,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2092,0,"",shellscript,selection_keyboard +18036,14537846,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2092,0,"s",shellscript,content +18037,14537847,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2093,0,"",shellscript,selection_keyboard +18038,14537850,"TERMINAL",0,0,"7\t",,terminal_output +18039,14537924,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2093,0,"i",shellscript,content +18040,14537925,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2094,0,"",shellscript,selection_keyboard +18041,14538041,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2094,0,"z",shellscript,content +18042,14538042,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2095,0,"",shellscript,selection_keyboard +18043,14538118,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2095,0,"e",shellscript,content 
+18044,14538119,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",2096,0,"",shellscript,selection_keyboard +18045,14538235,"TERMINAL",0,0,"722",,terminal_output +18046,14538888,"TERMINAL",0,0,"8\t",,terminal_output +18047,14539460,"TERMINAL",0,0,"844",,terminal_output +18048,14540051,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",0,0,"",shellscript,tab +18049,14540504,"TERMINAL",0,0,"9\t",,terminal_output +18050,14540570,"TERMINAL",0,0,"3055",,terminal_output +18051,14541092,"TERMINAL",0,0,"30\t",,terminal_output +18052,14541571,"TERMINAL",0,0,"166",,terminal_output +18053,14542034,"TERMINAL",0,0,"1\t",,terminal_output +18054,14542403,"TERMINAL",0,0,"277",,terminal_output +18055,14542869,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2086,0,"",shellscript,selection_mouse +18056,14543163,"TERMINAL",0,0,"2\t",,terminal_output +18057,14543425,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2086,0," ",shellscript,content +18058,14543426,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2087,0,"",shellscript,selection_keyboard +18059,14543512,"TERMINAL",0,0,"388",,terminal_output +18060,14543620,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2087,0,"l",shellscript,content +18061,14543621,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2088,0,"",shellscript,selection_keyboard +18062,14543712,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2088,0,"a",shellscript,content +18063,14543713,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2089,0,"",shellscript,selection_keyboard +18064,14543836,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2089,0,"m",shellscript,content +18065,14543837,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2090,0,"",shellscript,selection_keyboard +18066,14544093,"TERMINAL",0,0,"3\t",,terminal_output +18067,14544604,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2089,1,"",shellscript,content +18068,14544648,"TERMINAL",0,0,"499",,terminal_output +18069,14544733,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2088,1,"",shellscript,content +18070,14545115,"TERMINAL",0,0,"4\t",,terminal_output +18071,14545155,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2087,1,"",shellscript,content +18072,14545317,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2087,0,"m",shellscript,content +18073,14545318,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2088,0,"",shellscript,selection_keyboard +18074,14545465,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2088,0,"o",shellscript,content +18075,14545466,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2089,0,"",shellscript,selection_keyboard +18076,14545538,"TERMINAL",0,0,"54040",,terminal_output +18077,14545578,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2089,0,"d",shellscript,content +18078,14545579,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2090,0,"",shellscript,selection_keyboard +18079,14545730,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2090,0,"e",shellscript,content +18080,14545731,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2091,0,"",shellscript,selection_keyboard +18081,14545785,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2091,0,"l",shellscript,content +18082,14545786,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2092,0,"",shellscript,selection_keyboard 
+18083,14545991,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2092,0,"s",shellscript,content +18084,14545992,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2093,0,"",shellscript,selection_keyboard +18085,14546074,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2093,0,"i",shellscript,content +18086,14546075,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2094,0,"",shellscript,selection_keyboard +18087,14546226,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2094,0,"z",shellscript,content +18088,14546227,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2095,0,"",shellscript,selection_keyboard +18089,14546227,"TERMINAL",0,0,"5\t",,terminal_output +18090,14546306,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2095,0,"e",shellscript,content +18091,14546306,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",2096,0,"",shellscript,selection_keyboard +18092,14546592,"TERMINAL",0,0,"611",,terminal_output +18093,14547276,"TERMINAL",0,0,"6\t",,terminal_output +18094,14547742,"TERMINAL",0,0,"722",,terminal_output +18095,14548029,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",0,0,"",shellscript,tab +18096,14548606,"TERMINAL",0,0,"7\t",,terminal_output +18097,14548811,"TERMINAL",0,0,"833",,terminal_output +18098,14549321,"TERMINAL",0,0,"8\t",,terminal_output +18099,14549715,"TERMINAL",0,0,"944",,terminal_output +18100,14549911,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2085,0,"",shellscript,selection_mouse +18101,14550359,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2086,0,"",shellscript,selection_command +18102,14550360,"TERMINAL",0,0,"40\t",,terminal_output +18103,14550850,"TERMINAL",0,0,"4055",,terminal_output +18104,14551143,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2085,0,"",shellscript,selection_command +18105,14551353,"TERMINAL",0,0,"1\t",,terminal_output +18106,14551651,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2085,0," ",shellscript,content +18107,14551652,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2086,0,"",shellscript,selection_keyboard +18108,14551840,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2086,0,"m",shellscript,content +18109,14551841,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2087,0,"",shellscript,selection_keyboard +18110,14551841,"TERMINAL",0,0,"166",,terminal_output +18111,14552034,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2087,0,"o",shellscript,content +18112,14552035,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2088,0,"",shellscript,selection_keyboard +18113,14552099,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2088,0,"d",shellscript,content +18114,14552100,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2089,0,"",shellscript,selection_keyboard +18115,14552243,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2089,0,"e",shellscript,content +18116,14552244,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2090,0,"",shellscript,selection_keyboard +18117,14552410,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2090,0,"l",shellscript,content +18118,14552411,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2091,0,"",shellscript,selection_keyboard +18119,14552411,"TERMINAL",0,0,"2\t",,terminal_output +18120,14552562,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2091,0,"s",shellscript,content 
+18121,14552563,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2092,0,"",shellscript,selection_keyboard +18122,14552800,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2091,1,"",shellscript,content +18123,14552881,"TERMINAL",0,0,"277",,terminal_output +18124,14553266,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2091,0,"s",shellscript,content +18125,14553267,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2092,0,"",shellscript,selection_keyboard +18126,14553402,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2092,0,"i",shellscript,content +18127,14553403,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2093,0,"",shellscript,selection_keyboard +18128,14553426,"TERMINAL",0,0,"3\t",,terminal_output +18129,14553455,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2093,0,"z",shellscript,content +18130,14553456,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2094,0,"",shellscript,selection_keyboard +18131,14553574,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2094,0,"e",shellscript,content +18132,14553575,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2095,0,"",shellscript,selection_keyboard +18133,14553913,"TERMINAL",0,0,"388",,terminal_output +18134,14553944,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2095,0," ",shellscript,content +18135,14553945,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2096,0,"",shellscript,selection_keyboard +18136,14554459,"TERMINAL",0,0,"4\t",,terminal_output +18137,14554828,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2096,0,"l",shellscript,content +18138,14554829,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2097,0,"",shellscript,selection_keyboard +18139,14554937,"TERMINAL",0,0,"499",,terminal_output +18140,14555557,"TERMINAL",0,0,"5\t",,terminal_output +18141,14555645,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2096,1,"",shellscript,content +18142,14555957,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2096,0,"a",shellscript,content +18143,14555957,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2097,0,"",shellscript,selection_keyboard +18144,14556044,"TERMINAL",0,0,"55050",,terminal_output +18145,14556142,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2097,0,"c",shellscript,content +18146,14556142,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2098,0,"",shellscript,selection_keyboard +18147,14556305,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2098,0,"t",shellscript,content +18148,14556305,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2099,0,"",shellscript,selection_keyboard +18149,14556391,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2099,0,"i",shellscript,content +18150,14556392,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2100,0,"",shellscript,selection_keyboard +18151,14556480,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2100,0,"o",shellscript,content +18152,14556480,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2101,0,"",shellscript,selection_keyboard +18153,14556552,"TERMINAL",0,0,"6\t",,terminal_output +18154,14556626,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2101,0,"n",shellscript,content +18155,14556626,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2102,0,"",shellscript,selection_keyboard 
+18156,14556774,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2102,0,"s",shellscript,content +18157,14556775,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2103,0,"",shellscript,selection_keyboard +18158,14556893,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2103,0,"p",shellscript,content +18159,14556894,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2104,0,"",shellscript,selection_keyboard +18160,14557021,"TERMINAL",0,0,"611",,terminal_output +18161,14557099,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2104,0,"a",shellscript,content +18162,14557099,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2105,0,"",shellscript,selection_keyboard +18163,14557244,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2105,0,"c",shellscript,content +18164,14557244,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2106,0,"",shellscript,selection_keyboard +18165,14557337,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2106,0,"e",shellscript,content +18166,14557337,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",2107,0,"",shellscript,selection_keyboard +18167,14557575,"TERMINAL",0,0,"7\t",,terminal_output +18168,14558111,"TERMINAL",0,0,"722",,terminal_output +18169,14558665,"TERMINAL",0,0,"8\t",,terminal_output +18170,14559119,"TERMINAL",0,0,"833",,terminal_output +18171,14559799,"TERMINAL",0,0,"9\t",,terminal_output +18172,14560160,"TERMINAL",0,0,"944",,terminal_output +18173,14560697,"TERMINAL",0,0,"50\t",,terminal_output +18174,14561286,"TERMINAL",0,0,"5055",,terminal_output +18175,14561772,"TERMINAL",0,0,"1\t",,terminal_output +18176,14562313,"TERMINAL",0,0,"166",,terminal_output +18177,14562716,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",0,0,"",shellscript,tab +18178,14562860,"TERMINAL",0,0,"2\t",,terminal_output +18179,14563402,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",0,0,"",shellscript,tab +18180,14563491,"TERMINAL",0,0,"288",,terminal_output +18181,14563863,"TERMINAL",0,0,"3\t",,terminal_output +18182,14564376,"TERMINAL",0,0,"499",,terminal_output +18183,14564860,"TERMINAL",0,0,"40",,terminal_output +18184,14565456,"TERMINAL",0,0,"53:003:00",,terminal_output +18185,14565576,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",0,0,"",shellscript,tab +18186,14565900,"TERMINAL",0,0,"5\t",,terminal_output +18187,14566510,"TERMINAL",0,0,"611",,terminal_output +18188,14566993,"TERMINAL",0,0,"6\t",,terminal_output +18189,14567300,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",433,0,"",shellscript,selection_mouse +18190,14567470,"TERMINAL",0,0,"722",,terminal_output +18191,14568016,"TERMINAL",0,0,"7\t",,terminal_output +18192,14568235,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",432,1,"",shellscript,content +18193,14568343,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",431,1,"",shellscript,content +18194,14568534,"TERMINAL",0,0,"833",,terminal_output +18195,14568716,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",430,1,"",shellscript,content +18196,14569022,"TERMINAL",0,0,"8\t",,terminal_output +18197,14569238,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",430,0,"4",shellscript,content +18198,14569239,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",431,0,"",shellscript,selection_keyboard +18199,14569298,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",431,0,"0",shellscript,content 
+18200,14569299,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",432,0,"",shellscript,selection_keyboard +18201,14569420,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",432,0,"0",shellscript,content +18202,14569421,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",433,0,"",shellscript,selection_keyboard +18203,14569563,"TERMINAL",0,0,"944",,terminal_output +18204,14570094,"TERMINAL",0,0,"9\t",,terminal_output +18205,14572750,"TERMINAL",0,0,"9:0077",,terminal_output +18206,14572751,"TERMINAL",0,0,"9:00\t",,terminal_output +18207,14573639,"TERMINAL",0,0,"3\t",,terminal_output +18208,14573639,"TERMINAL",0,0,"388",,terminal_output +18209,14574714,"TERMINAL",0,0,"4\t",,terminal_output +18210,14574722,"TERMINAL",0,0,"499",,terminal_output +18211,14575388,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\n#SBATCH --job-name=train_lam_minecraft_8node_darkness_filter_400M\n#SBATCH --reservation=llmtum\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\n# slurm_job_id=$SLURM_JOB_ID\nslurm_job_id=3454954\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n --restore_ckpt \\n --wandb_id $slurm_job_id \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --darkness_threshold=50 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=lam-minecraft-8-node-darkness-filter-400M-$slurm_job_id \\n --tags lam minecraft 8-node darkness-filter 400M \\n --entity instant-uv \\n --project jafar \\n --num_latents=100 \\n --model_dim=1024 \\n --num_blocks=12 \\n --num_heads=16 \\n --latent_dim=64 \\n --ffn_dim=4096 \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +18212,14575935,"TERMINAL",0,0,"5\t",,terminal_output +18213,14575935,"TERMINAL",0,0,"51010",,terminal_output +18214,14576967,"TERMINAL",0,0,"6\t",,terminal_output +18215,14576979,"TERMINAL",0,0,"611",,terminal_output +18216,14577794,"TERMINAL",0,0,"7\t",,terminal_output +18217,14577991,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",0,0,"",shellscript,tab 
+18218,14578229,"TERMINAL",0,0,"722",,terminal_output +18219,14578845,"TERMINAL",0,0,"8\t",,terminal_output +18220,14578862,"TERMINAL",0,0,"833",,terminal_output +18221,14579835,"TERMINAL",0,0,"9\t",,terminal_output +18222,14579974,"TERMINAL",0,0,"944",,terminal_output +18223,14580611,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2124,0,"",shellscript,selection_mouse +18224,14580656,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2123,0,"",shellscript,selection_command +18225,14580753,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2123,1,"\",shellscript,selection_mouse +18226,14580791,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2124,0,"",shellscript,selection_command +18227,14580948,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2121,3,"6 \",shellscript,selection_mouse +18228,14580948,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2118,6,"4096 \",shellscript,selection_mouse +18229,14580949,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2116,8,"m=4096 \",shellscript,selection_mouse +18230,14580951,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2115,9,"im=4096 \",shellscript,selection_mouse +18231,14580951,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2113,11,"_dim=4096 \",shellscript,selection_mouse +18232,14580951,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2089,35,"atent_dim=64 \\n --ffn_dim=4096 \",shellscript,selection_mouse +18233,14580952,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2087,37,"-latent_dim=64 \\n --ffn_dim=4096 \",shellscript,selection_mouse +18234,14580952,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2086,38,"--latent_dim=64 \\n --ffn_dim=4096 \",shellscript,selection_mouse +18235,14580952,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2085,39," --latent_dim=64 \\n --ffn_dim=4096 \",shellscript,selection_mouse +18236,14580952,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2084,40," --latent_dim=64 \\n --ffn_dim=4096 \",shellscript,selection_mouse +18237,14580992,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2062,62," --num_heads=16 \\n --latent_dim=64 \\n --ffn_dim=4096 \",shellscript,selection_mouse +18238,14581043,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2061,63," --num_heads=16 \\n --latent_dim=64 \\n --ffn_dim=4096 \",shellscript,selection_mouse +18239,14581086,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2039,85," --num_blocks=12 \\n --num_heads=16 \\n --latent_dim=64 \\n --ffn_dim=4096 \",shellscript,selection_mouse +18240,14581086,"TERMINAL",0,0,"10\t",,terminal_output +18241,14581086,"TERMINAL",0,0,"1055",,terminal_output +18242,14581191,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2016,108," --model_dim=1024 \\n --num_blocks=12 \\n --num_heads=16 \\n --latent_dim=64 \\n --ffn_dim=4096 
\",shellscript,selection_mouse +18243,14581449,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",1992,132," --num_latents=100 \\n --model_dim=1024 \\n --num_blocks=12 \\n --num_heads=16 \\n --latent_dim=64 \\n --ffn_dim=4096 \",shellscript,selection_mouse +18244,14581893,"TERMINAL",0,0,"1\t",,terminal_output +18245,14581984,"TERMINAL",0,0,"166",,terminal_output +18246,14582534,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",1995,0,"",shellscript,selection_mouse +18247,14582913,"TERMINAL",0,0,"2\t",,terminal_output +18248,14582960,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2017,0,"",shellscript,selection_mouse +18249,14582985,"TERMINAL",0,0,"277",,terminal_output +18250,14583108,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2016,4," ",shellscript,selection_mouse +18251,14583277,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2016,27," --model_dim=1024 \\n ",shellscript,selection_mouse +18252,14583318,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2016,39," --model_dim=1024 \\n --num_blocks",shellscript,selection_mouse +18253,14583336,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2016,60," --model_dim=1024 \\n --num_blocks=12 \\n --num_heads",shellscript,selection_mouse +18254,14583382,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2016,63," --model_dim=1024 \\n --num_blocks=12 \\n --num_heads=16",shellscript,selection_mouse +18255,14583431,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2016,86," --model_dim=1024 \\n --num_blocks=12 \\n --num_heads=16 \\n --latent_dim=64 ",shellscript,selection_mouse +18256,14583431,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2016,87," --model_dim=1024 \\n --num_blocks=12 \\n --num_heads=16 \\n --latent_dim=64 \",shellscript,selection_mouse +18257,14583756,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2016,108," --model_dim=1024 \\n --num_blocks=12 \\n --num_heads=16 \\n --latent_dim=64 \\n --ffn_dim=4096 \",shellscript,selection_mouse +18258,14583949,"TERMINAL",0,0,"3\t",,terminal_output +18259,14584032,"TERMINAL",0,0,"388",,terminal_output +18260,14585066,"TERMINAL",0,0,"4\t",,terminal_output +18261,14585101,"TERMINAL",0,0,"499",,terminal_output +18262,14586102,"TERMINAL",0,0,"5\t",,terminal_output +18263,14586141,"TERMINAL",0,0,"52020",,terminal_output +18264,14586922,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",0,0,"",shellscript,tab +18265,14586923,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",2279,0,"",shellscript,selection_mouse +18266,14587011,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",2278,1,"\",shellscript,selection_mouse +18267,14587012,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",2275,4,"60 \",shellscript,selection_mouse +18268,14587012,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",2270,9,"im=2560 \",shellscript,selection_mouse +18269,14587060,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",2245,34,"tent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse 
+18270,14587094,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",2241,38,"--latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +18271,14587094,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",2240,39," --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +18272,14587118,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",2238,41," --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +18273,14587163,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",2237,42," --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +18274,14587163,"TERMINAL",0,0,"6\t",,terminal_output +18275,14587178,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",2216,63," --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +18276,14587249,"TERMINAL",0,0,"611",,terminal_output +18277,14587292,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",2194,85," --num_blocks=10 \\n --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +18278,14587485,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",2172,107," --model_dim=640 \\n --num_blocks=10 \\n --num_heads=10 \\n --latent_dim=32 \\n --ffn_dim=2560 \",shellscript,selection_mouse +18279,14588107,"TERMINAL",0,0,"7\t",,terminal_output +18280,14588210,"TERMINAL",0,0,"722",,terminal_output +18281,14588469,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",2172,107,"",shellscript,content +18282,14588862,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M copy.sbatch",2172,0," --model_dim=1024 \\n --num_blocks=12 \\n --num_heads=16 \\n --latent_dim=64 \\n --ffn_dim=4096 \",shellscript,content +18283,14589164,"TERMINAL",0,0,"8\t",,terminal_output +18284,14589291,"TERMINAL",0,0,"833",,terminal_output +18285,14590194,"TERMINAL",0,0,"9\t",,terminal_output +18286,14590332,"TERMINAL",0,0,"955",,terminal_output +18287,14591296,"TERMINAL",0,0,"20\t",,terminal_output +18288,14591334,"TERMINAL",0,0,"2166",,terminal_output +18289,14592252,"TERMINAL",0,0,"1\t",,terminal_output +18290,14592405,"TERMINAL",0,0,"277",,terminal_output +18291,14593343,"TERMINAL",0,0,"2\t",,terminal_output +18292,14593592,"TERMINAL",0,0,"388",,terminal_output +18293,14594368,"TERMINAL",0,0,"4\t",,terminal_output +18294,14594537,"TERMINAL",0,0,"499",,terminal_output +18295,14595359,"TERMINAL",0,0,"5\t",,terminal_output +18296,14595526,"TERMINAL",0,0,"53030",,terminal_output +18297,14596516,"TERMINAL",0,0,"6\t",,terminal_output +18298,14596556,"TERMINAL",0,0,"611",,terminal_output +18299,14597762,"TERMINAL",0,0,"7\t",,terminal_output +18300,14597762,"TERMINAL",0,0,"722",,terminal_output +18301,14598559,"TERMINAL",0,0,"8\t",,terminal_output +18302,14598686,"TERMINAL",0,0,"833",,terminal_output +18303,14599510,"TERMINAL",0,0,"9\t",,terminal_output +18304,14599687,"TERMINAL",0,0,"944",,terminal_output +18305,14600608,"TERMINAL",0,0,"30\t",,terminal_output +18306,14600747,"TERMINAL",0,0,"3055",,terminal_output +18307,14601635,"TERMINAL",0,0,"1\t",,terminal_output +18308,14601781,"TERMINAL",0,0,"166",,terminal_output +18309,14602656,"TERMINAL",0,0,"2\t",,terminal_output +18310,14602822,"TERMINAL",0,0,"277",,terminal_output +18311,14603942,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_400M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=2-00:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH 
--gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_400M\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/coinrun/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=24 \\n --image_height=64 \\n --image_width=64 \\n --max_lr=3e-6 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ""coinrun lam_ablation modelsize"" \\n --entity instant-uv \\n --project jafar \\n --lr_schedule wsd \\n --model_dim=1024 \\n --num_blocks=12 \\n --num_heads=16 \\n --latent_dim=64 \\n --ffn_dim=4096 \\n --val_data_dir $array_records_dir_val \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +18312,14603968,"TERMINAL",0,0,"3\t",,terminal_output +18313,14604041,"TERMINAL",0,0,"388",,terminal_output +18314,14604740,"TERMINAL",0,0,"4\t",,terminal_output +18315,14604959,"TERMINAL",0,0,"499",,terminal_output +18316,14605734,"TERMINAL",0,0,"5\t",,terminal_output +18317,14605959,"TERMINAL",0,0,"54040",,terminal_output +18318,14606769,"TERMINAL",0,0,"6\t",,terminal_output +18319,14607009,"TERMINAL",0,0,"611",,terminal_output +18320,14607793,"TERMINAL",0,0,"7\t",,terminal_output +18321,14608219,"TERMINAL",0,0,"722",,terminal_output +18322,14608824,"TERMINAL",0,0,"8\t",,terminal_output +18323,14609219,"TERMINAL",0,0,"833",,terminal_output +18324,14609859,"TERMINAL",0,0,"9\t",,terminal_output +18325,14610248,"TERMINAL",0,0,"944",,terminal_output +18326,14610894,"TERMINAL",0,0,"40\t",,terminal_output +18327,14611264,"TERMINAL",0,0,"4055",,terminal_output +18328,14611986,"TERMINAL",0,0,"1\t",,terminal_output +18329,14612220,"TERMINAL",0,0,"166",,terminal_output +18330,14613092,"TERMINAL",0,0,"2\t",,terminal_output +18331,14613267,"TERMINAL",0,0,"277",,terminal_output +18332,14614010,"TERMINAL",0,0,"3\t",,terminal_output +18333,14614353,"TERMINAL",0,0,"399",,terminal_output +18334,14615183,"TERMINAL",0,0,"4\t",,terminal_output +18335,14615431,"TERMINAL",0,0,"55050",,terminal_output 
+18336,14616092,"TERMINAL",0,0,"5\t",,terminal_output +18337,14616486,"TERMINAL",0,0,"611",,terminal_output +18338,14617196,"TERMINAL",0,0,"6\t",,terminal_output +18339,14617606,"TERMINAL",0,0,"722",,terminal_output +18340,14618182,"TERMINAL",0,0,"7\t",,terminal_output +18341,14618527,"TERMINAL",0,0,"833",,terminal_output +18342,14619247,"TERMINAL",0,0,"8\t",,terminal_output +18343,14619657,"TERMINAL",0,0,"944",,terminal_output +18344,14620268,"TERMINAL",0,0,"9\t",,terminal_output +18345,14620691,"TERMINAL",0,0,"5055",,terminal_output +18346,14621293,"TERMINAL",0,0,"50\t",,terminal_output +18347,14621701,"TERMINAL",0,0,"166",,terminal_output +18348,14622307,"TERMINAL",0,0,"2\t",,terminal_output +18349,14622736,"TERMINAL",0,0,"277",,terminal_output +18350,14623446,"TERMINAL",0,0,"3\t",,terminal_output +18351,14623709,"TERMINAL",0,0,"388",,terminal_output +18352,14624386,"TERMINAL",0,0,"4\t",,terminal_output +18353,14624781,"TERMINAL",0,0,"499",,terminal_output +18354,14625434,"TERMINAL",0,0,"5\t",,terminal_output +18355,14625788,"TERMINAL",0,0,"54:004:00",,terminal_output +18356,14626517,"TERMINAL",0,0,"61",,terminal_output +18357,14626834,"TERMINAL",0,0,"611",,terminal_output +18358,14627539,"TERMINAL",0,0,"7\t",,terminal_output +18359,14627877,"TERMINAL",0,0,"722",,terminal_output +18360,14628286,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_400M.sbatch",66,0,"",shellscript,selection_mouse +18361,14628320,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_400M.sbatch",65,0,"",shellscript,selection_command +18362,14628547,"TERMINAL",0,0,"8\t",,terminal_output +18363,14628925,"TERMINAL",0,0,"833",,terminal_output +18364,14629309,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_400M.sbatch",66,0,"",shellscript,selection_command +18365,14629408,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_400M.sbatch",65,1,"",shellscript,content +18366,14629532,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_400M.sbatch",65,0,"1",shellscript,content +18367,14629533,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_400M.sbatch",66,0,"",shellscript,selection_keyboard +18368,14629613,"TERMINAL",0,0,"9\t",,terminal_output +18369,14629962,"TERMINAL",0,0,"944",,terminal_output +18370,14630642,"TERMINAL",0,0,"30:00\t",,terminal_output +18371,14631127,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",0,0,"",shellscript,tab +18372,14631264,"TERMINAL",0,0,"30:0055",,terminal_output +18373,14631739,"TERMINAL",0,0,"1\t",,terminal_output +18374,14632090,"TERMINAL",0,0,"166",,terminal_output +18375,14632373,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",92,0,"",shellscript,selection_mouse +18376,14632741,"TERMINAL",0,0,"2\t",,terminal_output +18377,14632952,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",38,0,"",shellscript,selection_mouse +18378,14633096,"TERMINAL",0,0,"277",,terminal_output +18379,14633524,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",66,0,"",shellscript,selection_mouse +18380,14633830,"TERMINAL",0,0,"3\t",,terminal_output +18381,14634153,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",65,1,"",shellscript,content +18382,14634155,"TERMINAL",0,0,"388",,terminal_output +18383,14634195,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",65,0,"1",shellscript,content +18384,14634195,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",66,0,"",shellscript,selection_keyboard +18385,14634782,"TERMINAL",0,0,"4\t",,terminal_output +18386,14635237,"TERMINAL",0,0,"499",,terminal_output 
+18387,14636271,"TERMINAL",0,0,"5\t",,terminal_output +18388,14636288,"TERMINAL",0,0,"51010",,terminal_output +18389,14636864,"TERMINAL",0,0,"6\t",,terminal_output +18390,14637648,"TERMINAL",0,0,"611",,terminal_output +18391,14637926,"TERMINAL",0,0,"7\t",,terminal_output +18392,14638874,"TERMINAL",0,0,"733",,terminal_output +18393,14639010,"TERMINAL",0,0,"8\t",,terminal_output +18394,14639392,"TERMINAL",0,0,"944",,terminal_output +18395,14640060,"TERMINAL",0,0,"9\t",,terminal_output +18396,14640501,"TERMINAL",0,0,"1055",,terminal_output +18397,14641056,"TERMINAL",0,0,"10\t",,terminal_output +18398,14641441,"TERMINAL",0,0,"166",,terminal_output +18399,14642075,"TERMINAL",0,0,"1\t",,terminal_output +18400,14642293,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",0,0,"",shellscript,tab +18401,14642511,"TERMINAL",0,0,"277",,terminal_output +18402,14643362,"TERMINAL",0,0,"2\t",,terminal_output +18403,14643582,"TERMINAL",0,0,"388",,terminal_output +18404,14644135,"TERMINAL",0,0,"3\t",,terminal_output +18405,14644202,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",66,0,"",shellscript,selection_mouse +18406,14644598,"TERMINAL",0,0,"499",,terminal_output +18407,14644809,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",65,1,"",shellscript,content +18408,14645119,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",65,0,"1",shellscript,content +18409,14645120,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",66,0,"",shellscript,selection_keyboard +18410,14645260,"TERMINAL",0,0,"4\t",,terminal_output +18411,14645653,"TERMINAL",0,0,"52020",,terminal_output +18412,14646457,"TERMINAL",0,0,"5\t",,terminal_output +18413,14646677,"TERMINAL",0,0,"611",,terminal_output +18414,14647107,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",0,0,"",shellscript,tab +18415,14647297,"TERMINAL",0,0,"6\t",,terminal_output +18416,14647891,"TERMINAL",0,0,"722",,terminal_output +18417,14648446,"TERMINAL",0,0,"7\t",,terminal_output +18418,14649532,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",66,0,"",shellscript,selection_mouse +18419,14649600,"TERMINAL",0,0,"833",,terminal_output +18420,14649696,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",65,1,"",shellscript,content +18421,14649699,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",65,0,"1",shellscript,content +18422,14649699,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",66,0,"",shellscript,selection_keyboard +18423,14649700,"TERMINAL",0,0,"9\t",,terminal_output +18424,14649850,"TERMINAL",0,0,"944",,terminal_output +18425,14650481,"TERMINAL",0,0,"20\t",,terminal_output +18426,14650887,"TERMINAL",0,0,"2055",,terminal_output +18427,14651525,"TERMINAL",0,0,"1\t",,terminal_output +18428,14651918,"TERMINAL",0,0,"166",,terminal_output +18429,14652452,"TERMINAL",0,0,"22",,terminal_output +18430,14652939,"TERMINAL",0,0,"277",,terminal_output +18431,14653812,"TERMINAL",0,0,"35",,terminal_output +18432,14653959,"TERMINAL",0,0,"388",,terminal_output +18433,14654854,"TERMINAL",0,0,"4\t",,terminal_output +18434,14654997,"TERMINAL",0,0,"499",,terminal_output +18435,14656043,"TERMINAL",0,0,"5\t",,terminal_output +18436,14656110,"TERMINAL",0,0,"53030",,terminal_output +18437,14656946,"TERMINAL",0,0,"61",,terminal_output +18438,14657152,"TERMINAL",0,0,"611",,terminal_output +18439,14657970,"TERMINAL",0,0,"7\t",,terminal_output +18440,14658246,"TERMINAL",0,0,"722",,terminal_output 
+18441,14658956,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",0,0,"",shellscript,tab +18442,14658956,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2124,0,"",shellscript,selection_mouse +18443,14659066,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",2123,0,"",shellscript,selection_command +18444,14659154,"TERMINAL",0,0,"8\t",,terminal_output +18445,14659268,"TERMINAL",0,0,"833",,terminal_output +18446,14660170,"TERMINAL",0,0,"9\t",,terminal_output +18447,14660388,"TERMINAL",0,0,"944",,terminal_output +18448,14661196,"TERMINAL",0,0,"30\t",,terminal_output +18449,14661400,"TERMINAL",0,0,"3055",,terminal_output +18450,14662200,"TERMINAL",0,0,"1\t",,terminal_output +18451,14662395,"TERMINAL",0,0,"177",,terminal_output +18452,14663292,"TERMINAL",0,0,"2\t",,terminal_output +18453,14663436,"TERMINAL",0,0,"388",,terminal_output +18454,14664200,"TERMINAL",0,0,"3\t",,terminal_output +18455,14664389,"TERMINAL",0,0,"499",,terminal_output +18456,14665257,"TERMINAL",0,0,"4\t",,terminal_output +18457,14665434,"TERMINAL",0,0,"54040",,terminal_output +18458,14666469,"TERMINAL",0,0,"5\t",,terminal_output +18459,14666495,"TERMINAL",0,0,"611",,terminal_output +18460,14667375,"TERMINAL",0,0,"7\t",,terminal_output +18461,14667531,"TERMINAL",0,0,"722",,terminal_output +18462,14667829,"slurm/jobs/mihir/horeka/lam/coinrun/train_lam_single_gpu.sh",0,0,"",shellscript,tab +18463,14668396,"TERMINAL",0,0,"8\t",,terminal_output +18464,14668670,"TERMINAL",0,0,"833",,terminal_output +18465,14669415,"TERMINAL",0,0,"9\t",,terminal_output +18466,14669605,"TERMINAL",0,0,"944",,terminal_output +18467,14670423,"TERMINAL",0,0,"406",,terminal_output +18468,14671024,"TERMINAL",0,0,"4055",,terminal_output +18469,14671459,"TERMINAL",0,0,"1\t",,terminal_output +18470,14671610,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",0,0,"",shellscript,tab +18471,14671722,"TERMINAL",0,0,"166",,terminal_output +18472,14672502,"TERMINAL",0,0,"2\t",,terminal_output +18473,14672758,"TERMINAL",0,0,"277",,terminal_output +18474,14673616,"TERMINAL",0,0,"3\t",,terminal_output +18475,14673778,"TERMINAL",0,0,"388",,terminal_output +18476,14674607,"TERMINAL",0,0,"4\t",,terminal_output +18477,14674804,"TERMINAL",0,0,"499",,terminal_output +18478,14675618,"TERMINAL",0,0,"5\t",,terminal_output +18479,14675843,"TERMINAL",0,0,"55050",,terminal_output +18480,14676713,"TERMINAL",0,0,"6\t",,terminal_output +18481,14676994,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",0,0,"",shellscript,tab +18482,14677052,"TERMINAL",0,0,"611",,terminal_output +18483,14678034,"TERMINAL",0,0,"7\t",,terminal_output +18484,14678074,"TERMINAL",0,0,"722",,terminal_output +18485,14678751,"TERMINAL",0,0,"8\t",,terminal_output +18486,14678997,"TERMINAL",0,0,"833",,terminal_output +18487,14679370,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",0,0,"",shellscript,tab +18488,14679805,"TERMINAL",0,0,"9\t",,terminal_output +18489,14680012,"TERMINAL",0,0,"944",,terminal_output +18490,14680919,"TERMINAL",0,0,"50\t",,terminal_output +18491,14681077,"TERMINAL",0,0,"5055",,terminal_output +18492,14681871,"TERMINAL",0,0,"1\t",,terminal_output +18493,14682141,"TERMINAL",0,0,"166",,terminal_output +18494,14682661,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_400M.sbatch",0,0,"",shellscript,tab +18495,14682939,"TERMINAL",0,0,"2\t",,terminal_output 
+18496,14683135,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",0,0,"",shellscript,tab +18497,14683298,"TERMINAL",0,0,"277",,terminal_output +18498,14683974,"TERMINAL",0,0,"3\t",,terminal_output +18499,14684191,"TERMINAL",0,0,"388",,terminal_output +18500,14684976,"TERMINAL",0,0,"4\t",,terminal_output +18501,14685260,"TERMINAL",0,0,"499",,terminal_output +18502,14686041,"TERMINAL",0,0,"5\t",,terminal_output +18503,14686368,"TERMINAL",0,0,"55:005:00",,terminal_output +18504,14687049,"TERMINAL",0,0,"6\t",,terminal_output +18505,14687301,"TERMINAL",0,0,"622",,terminal_output +18506,14688169,"TERMINAL",0,0,"7\t",,terminal_output +18507,14688343,"TERMINAL",0,0,"833",,terminal_output +18508,14689186,"TERMINAL",0,0,"8\t",,terminal_output +18509,14689375,"TERMINAL",0,0,"944",,terminal_output +18510,14690216,"TERMINAL",0,0,"9\t",,terminal_output +18511,14690419,"TERMINAL",0,0,"1:0055",,terminal_output +18512,14691199,"TERMINAL",0,0,"1:00\t",,terminal_output +18513,14691541,"TERMINAL",0,0,"166",,terminal_output +18514,14692266,"TERMINAL",0,0,"1\t",,terminal_output +18515,14692503,"TERMINAL",0,0,"277",,terminal_output +18516,14693391,"TERMINAL",0,0,"2\t",,terminal_output +18517,14695520,"TERMINAL",0,0,"31010",,terminal_output +18518,14695529,"TERMINAL",0,0,"4\t",,terminal_output +18519,14696408,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",433,0,"",shellscript,selection_mouse +18520,14696586,"TERMINAL",0,0,"643",,terminal_output +18521,14696625,"TERMINAL",0,0,"611",,terminal_output +18522,14697662,"TERMINAL",0,0,"7\t",,terminal_output +18523,14697662,"TERMINAL",0,0,"722",,terminal_output +18524,14697893,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",433,0,"_",shellscript,content +18525,14697894,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",434,0,"",shellscript,selection_keyboard +18526,14698711,"TERMINAL",0,0,"8\t",,terminal_output +18527,14698718,"TERMINAL",0,0,"833",,terminal_output +18528,14699736,"TERMINAL",0,0,"9\t",,terminal_output +18529,14699742,"TERMINAL",0,0,"944",,terminal_output +18530,14699931,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",434,0,"A",shellscript,content +18531,14699932,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",435,0,"",shellscript,selection_keyboard +18532,14700163,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",435,0,"S",shellscript,content +18533,14700164,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",436,0,"",shellscript,selection_keyboard +18534,14700721,"TERMINAL",0,0,"10\t",,terminal_output +18535,14700748,"TERMINAL",0,0,"1055",,terminal_output +18536,14701102,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",436,0,"_",shellscript,content +18537,14701103,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",437,0,"",shellscript,selection_keyboard +18538,14701781,"TERMINAL",0,0,"1\t",,terminal_output +18539,14701791,"TERMINAL",0,0,"166",,terminal_output +18540,14702768,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",437,0,"6",shellscript,content +18541,14702769,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",438,0,"",shellscript,selection_keyboard +18542,14702781,"TERMINAL",0,0,"2\t",,terminal_output +18543,14702863,"TERMINAL",0,0,"277",,terminal_output +18544,14703842,"TERMINAL",0,0,"3\t",,terminal_output +18545,14703853,"TERMINAL",0,0,"388",,terminal_output +18546,14704878,"TERMINAL",0,0,"4\t",,terminal_output +18547,14704916,"TERMINAL",0,0,"499",,terminal_output 
+18548,14705921,"TERMINAL",0,0,"5\t",,terminal_output +18549,14705923,"TERMINAL",0,0,"52020",,terminal_output +18550,14706595,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",436,0,"",shellscript,selection_mouse +18551,14707002,"TERMINAL",0,0,"6\t",,terminal_output +18552,14707002,"TERMINAL",0,0,"611",,terminal_output +18553,14707193,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",435,1,"",shellscript,content +18554,14707671,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",434,1,"",shellscript,content +18555,14707765,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",434,0,"a",shellscript,content +18556,14707765,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",435,0,"",shellscript,selection_keyboard +18557,14707903,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",435,0,"c",shellscript,content +18558,14707903,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",436,0,"",shellscript,selection_keyboard +18559,14707964,"TERMINAL",0,0,"7\t",,terminal_output +18560,14708019,"TERMINAL",0,0,"722",,terminal_output +18561,14708128,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",436,0,"t",shellscript,content +18562,14708129,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",437,0,"",shellscript,selection_keyboard +18563,14708278,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",437,0,"i",shellscript,content +18564,14708279,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",438,0,"",shellscript,selection_keyboard +18565,14708388,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",438,0,"o",shellscript,content +18566,14708388,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",439,0,"",shellscript,selection_keyboard +18567,14708551,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",439,0,"n",shellscript,content +18568,14708551,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",440,0,"",shellscript,selection_keyboard +18569,14708693,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",440,0,"s",shellscript,content +18570,14708693,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",441,0,"",shellscript,selection_keyboard +18571,14708775,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",441,0,"p",shellscript,content +18572,14708776,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",442,0,"",shellscript,selection_keyboard +18573,14708862,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",442,0,"a",shellscript,content +18574,14708862,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",443,0,"",shellscript,selection_keyboard +18575,14708995,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",443,0,"c",shellscript,content +18576,14708996,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",444,0,"",shellscript,selection_keyboard +18577,14709034,"TERMINAL",0,0,"8\t",,terminal_output +18578,14709079,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",444,0,"e",shellscript,content +18579,14709080,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",445,0,"",shellscript,selection_keyboard +18580,14709115,"TERMINAL",0,0,"833",,terminal_output +18581,14710093,"TERMINAL",0,0,"9\t",,terminal_output +18582,14710094,"TERMINAL",0,0,"944",,terminal_output +18583,14711101,"TERMINAL",0,0,"20\t",,terminal_output +18584,14711135,"TERMINAL",0,0,"2055",,terminal_output +18585,14712102,"TERMINAL",0,0,"1\t",,terminal_output 
+18586,14712570,"TERMINAL",0,0,"166",,terminal_output +18587,14713251,"TERMINAL",0,0,"2\t",,terminal_output +18588,14713257,"TERMINAL",0,0,"277",,terminal_output +18589,14714279,"TERMINAL",0,0,"3\t",,terminal_output +18590,14714279,"TERMINAL",0,0,"388",,terminal_output +18591,14715215,"TERMINAL",0,0,"4\t",,terminal_output +18592,14715358,"TERMINAL",0,0,"43030",,terminal_output +18593,14716265,"TERMINAL",0,0,"5\t",,terminal_output +18594,14716335,"TERMINAL",0,0,"611",,terminal_output +18595,14716975,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base copy.sbatch",0,0,"",shellscript,tab +18596,14717301,"TERMINAL",0,0,"6\t",,terminal_output +18597,14717359,"TERMINAL",0,0,"722",,terminal_output +18598,14718599,"TERMINAL",0,0,"8\t",,terminal_output +18599,14718599,"TERMINAL",0,0,"833",,terminal_output +18600,14719365,"TERMINAL",0,0,"9\t",,terminal_output +18601,14719496,"TERMINAL",0,0,"944",,terminal_output +18602,14720394,"TERMINAL",0,0,"30\t",,terminal_output +18603,14720520,"TERMINAL",0,0,"3055",,terminal_output +18604,14721449,"TERMINAL",0,0,"1\t",,terminal_output +18605,14721505,"TERMINAL",0,0,"166",,terminal_output +18606,14722468,"TERMINAL",0,0,"2\t",,terminal_output +18607,14722576,"TERMINAL",0,0,"277",,terminal_output +18608,14723602,"TERMINAL",0,0,"3\t",,terminal_output +18609,14723602,"TERMINAL",0,0,"388",,terminal_output +18610,14724530,"TERMINAL",0,0,"4\t",,terminal_output +18611,14724676,"TERMINAL",0,0,"499",,terminal_output +18612,14725569,"TERMINAL",0,0,"5\t",,terminal_output +18613,14725710,"TERMINAL",0,0,"54040",,terminal_output +18614,14726701,"TERMINAL",0,0,"6\t",,terminal_output +18615,14726740,"TERMINAL",0,0,"611",,terminal_output +18616,14727713,"TERMINAL",0,0,"7\t",,terminal_output +18617,14727722,"TERMINAL",0,0,"722",,terminal_output +18618,14728711,"TERMINAL",0,0,"8\t",,terminal_output +18619,14728765,"TERMINAL",0,0,"833",,terminal_output +18620,14729741,"TERMINAL",0,0,"9\t",,terminal_output +18621,14729808,"TERMINAL",0,0,"944",,terminal_output +18622,14730752,"TERMINAL",0,0,"40\t",,terminal_output +18623,14730848,"TERMINAL",0,0,"4055",,terminal_output +18624,14731794,"TERMINAL",0,0,"1\t",,terminal_output +18625,14731885,"TERMINAL",0,0,"166",,terminal_output +18626,14732825,"TERMINAL",0,0,"2\t",,terminal_output +18627,14732919,"TERMINAL",0,0,"277",,terminal_output +18628,14733959,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=2-00:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_37M_actionspace_6\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the 
sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/coinrun/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=24 \\n --image_height=64 \\n --image_width=64 \\n --max_lr=3e-6 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ""coinrun lam_ablation modelsize actionspace"" \\n --entity instant-uv \\n --project jafar \\n --lr_schedule wsd \\n --val_data_dir $array_records_dir_val \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +18629,14734166,"TERMINAL",0,0,"3\t",,terminal_output +18630,14734166,"TERMINAL",0,0,"388",,terminal_output +18631,14734920,"TERMINAL",0,0,"4\t",,terminal_output +18632,14735011,"TERMINAL",0,0,"499",,terminal_output +18633,14735952,"TERMINAL",0,0,"5\t",,terminal_output +18634,14736048,"TERMINAL",0,0,"55050",,terminal_output +18635,14736846,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",447,0,"",shellscript,selection_mouse +18636,14736891,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",446,0,"",shellscript,selection_command +18637,14736983,"TERMINAL",0,0,"6\t",,terminal_output +18638,14737109,"TERMINAL",0,0,"611",,terminal_output +18639,14738115,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",446,0,"1",shellscript,content +18640,14738116,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",447,0,"",shellscript,selection_keyboard +18641,14738156,"TERMINAL",0,0,"7\t",,terminal_output +18642,14738157,"TERMINAL",0,0,"722",,terminal_output +18643,14739066,"TERMINAL",0,0,"8\t",,terminal_output +18644,14739159,"TERMINAL",0,0,"833",,terminal_output +18645,14740101,"TERMINAL",0,0,"9\t",,terminal_output +18646,14740194,"TERMINAL",0,0,"944",,terminal_output +18647,14741122,"TERMINAL",0,0,"50\t",,terminal_output +18648,14741223,"TERMINAL",0,0,"5055",,terminal_output +18649,14741751,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2024,0,"",shellscript,selection_mouse +18650,14742156,"TERMINAL",0,0,"1\t",,terminal_output +18651,14742252,"TERMINAL",0,0,"166",,terminal_output +18652,14742620,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2024,0,"\n ",shellscript,content +18653,14742962,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2029,0,"-",shellscript,content +18654,14742963,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2030,0,"",shellscript,selection_keyboard +18655,14743095,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2030,0,"-",shellscript,content +18656,14743095,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2031,0,"",shellscript,selection_keyboard +18657,14743184,"TERMINAL",0,0,"2\t",,terminal_output +18658,14743339,"TERMINAL",0,0,"288",,terminal_output +18659,14743983,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2031,0,"n",shellscript,content 
+18660,14743984,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2032,0,"",shellscript,selection_keyboard +18661,14744131,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2032,0,"u",shellscript,content +18662,14744131,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2033,0,"",shellscript,selection_keyboard +18663,14744224,"TERMINAL",0,0,"3\t",,terminal_output +18664,14744365,"TERMINAL",0,0,"499",,terminal_output +18665,14744732,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2033,0,"m",shellscript,content +18666,14744733,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2034,0,"",shellscript,selection_keyboard +18667,14745107,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2034,0,"_",shellscript,content +18668,14745108,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2035,0,"",shellscript,selection_keyboard +18669,14745285,"TERMINAL",0,0,"4\t",,terminal_output +18670,14745398,"TERMINAL",0,0,"56:006:00",,terminal_output +18671,14745409,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2035,0,"l",shellscript,content +18672,14745410,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2036,0,"",shellscript,selection_keyboard +18673,14745842,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2036,0,"a",shellscript,content +18674,14745842,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2037,0,"",shellscript,selection_keyboard +18675,14746036,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2037,0,"t",shellscript,content +18676,14746037,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2038,0,"",shellscript,selection_keyboard +18677,14746227,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2038,0,"e",shellscript,content +18678,14746228,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2039,0,"",shellscript,selection_keyboard +18679,14746309,"TERMINAL",0,0,"5\t",,terminal_output +18680,14746369,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2039,0,"n",shellscript,content +18681,14746370,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2040,0,"",shellscript,selection_keyboard +18682,14746443,"TERMINAL",0,0,"611",,terminal_output +18683,14746529,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2040,0,"t",shellscript,content +18684,14746530,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2041,0,"",shellscript,selection_keyboard +18685,14746719,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2041,0,"s",shellscript,content +18686,14746720,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2042,0,"",shellscript,selection_keyboard +18687,14747465,"TERMINAL",0,0,"7\t",,terminal_output +18688,14747465,"TERMINAL",0,0,"722",,terminal_output +18689,14748372,"TERMINAL",0,0,"8\t",,terminal_output +18690,14748486,"TERMINAL",0,0,"833",,terminal_output +18691,14748736,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2042,0,"=",shellscript,content +18692,14748737,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2043,0,"",shellscript,selection_keyboard +18693,14749006,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2043,0,"1",shellscript,content +18694,14749007,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2044,0,"",shellscript,selection_keyboard +18695,14749100,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2044,0,"6",shellscript,content 
+18696,14749101,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2045,0,"",shellscript,selection_keyboard +18697,14749411,"TERMINAL",0,0,"9\t",,terminal_output +18698,14749610,"TERMINAL",0,0,"944",,terminal_output +18699,14749900,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2045,0," ",shellscript,content +18700,14749901,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2046,0,"",shellscript,selection_keyboard +18701,14750107,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2046,0,"\",shellscript,content +18702,14750108,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2047,0,"",shellscript,selection_keyboard +18703,14750457,"TERMINAL",0,0,"2:00\t",,terminal_output +18704,14750609,"TERMINAL",0,0,"2:0055",,terminal_output +18705,14751191,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",2046,0,"",shellscript,selection_command +18706,14751495,"TERMINAL",0,0,"1\t",,terminal_output +18707,14751971,"TERMINAL",0,0,"166",,terminal_output +18708,14752850,"TERMINAL",0,0,"2\t",,terminal_output +18709,14752851,"TERMINAL",0,0,"277",,terminal_output +18710,14753613,"TERMINAL",0,0,"3\t",,terminal_output +18711,14753713,"TERMINAL",0,0,"388",,terminal_output +18712,14754625,"TERMINAL",0,0,"4\t",,terminal_output +18713,14754733,"TERMINAL",0,0,"499",,terminal_output +18714,14755657,"TERMINAL",0,0,"5\t",,terminal_output +18715,14755799,"TERMINAL",0,0,"51010",,terminal_output +18716,14756768,"TERMINAL",0,0,"6\t",,terminal_output +18717,14756796,"TERMINAL",0,0,"611",,terminal_output +18718,14757703,"TERMINAL",0,0,"7\t",,terminal_output +18719,14757901,"TERMINAL",0,0,"722",,terminal_output +18720,14758736,"TERMINAL",0,0,"8\t",,terminal_output +18721,14758871,"TERMINAL",0,0,"833",,terminal_output +18722,14759788,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16 copy.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=2-00:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_37M_actionspace_16\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/coinrun/lam/$job_name/$slurm_job_id\nmkdir -p 
$CHECKPOINT_DIR\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=24 \\n --image_height=64 \\n --image_width=64 \\n --max_lr=3e-6 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --num_latents=16 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ""coinrun lam_ablation modelsize actionspace"" \\n --entity instant-uv \\n --project jafar \\n --lr_schedule wsd \\n --val_data_dir $array_records_dir_val \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +18723,14759941,"TERMINAL",0,0,"9\t",,terminal_output +18724,14759978,"TERMINAL",0,0,"944",,terminal_output +18725,14760900,"TERMINAL",0,0,"10\t",,terminal_output +18726,14760989,"TERMINAL",0,0,"1055",,terminal_output +18727,14761881,"TERMINAL",0,0,"1\t",,terminal_output +18728,14762017,"TERMINAL",0,0,"166",,terminal_output +18729,14762886,"TERMINAL",0,0,"2\t",,terminal_output +18730,14763036,"TERMINAL",0,0,"277",,terminal_output +18731,14763948,"TERMINAL",0,0,"3\t",,terminal_output +18732,14764105,"TERMINAL",0,0,"388",,terminal_output +18733,14764993,"TERMINAL",0,0,"4\t",,terminal_output +18734,14765134,"TERMINAL",0,0,"499",,terminal_output +18735,14766076,"TERMINAL",0,0,"5\t",,terminal_output +18736,14766138,"TERMINAL",0,0,"52020",,terminal_output +18737,14767113,"TERMINAL",0,0,"6\t",,terminal_output +18738,14767208,"TERMINAL",0,0,"611",,terminal_output +18739,14768048,"TERMINAL",0,0,"7\t",,terminal_output +18740,14768540,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=2-00:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_37M_actionspace_16\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/coinrun/lam/$job_name/$slurm_job_id\nmkdir -p 
$CHECKPOINT_DIR\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=24 \\n --image_height=64 \\n --image_width=64 \\n --max_lr=3e-6 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --num_latents=16 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ""coinrun lam_ablation modelsize actionspace"" \\n --entity instant-uv \\n --project jafar \\n --lr_schedule wsd \\n --val_data_dir $array_records_dir_val \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +18741,14768755,"TERMINAL",0,0,"722",,terminal_output +18742,14769141,"TERMINAL",0,0,"8\t",,terminal_output +18743,14769480,"TERMINAL",0,0,"833",,terminal_output +18744,14769664,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",340,0,"",shellscript,selection_mouse +18745,14770062,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",448,0,"",shellscript,selection_mouse +18746,14770082,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",447,0,"",shellscript,selection_command +18747,14770128,"TERMINAL",0,0,"9\t",,terminal_output +18748,14770313,"TERMINAL",0,0,"955",,terminal_output +18749,14770485,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",448,0,"",shellscript,selection_command +18750,14770624,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",447,1,"",shellscript,content +18751,14770793,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",446,1,"",shellscript,content +18752,14771160,"TERMINAL",0,0,"20\t",,terminal_output +18753,14771313,"TERMINAL",0,0,"2166",,terminal_output +18754,14771438,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",446,0,"3",shellscript,content +18755,14771439,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",447,0,"",shellscript,selection_keyboard +18756,14771485,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",447,0,"2",shellscript,content +18757,14771486,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",448,0,"",shellscript,selection_keyboard +18758,14772186,"TERMINAL",0,0,"1\t",,terminal_output +18759,14772482,"TERMINAL",0,0,"277",,terminal_output +18760,14773225,"TERMINAL",0,0,"2\t",,terminal_output +18761,14773393,"TERMINAL",0,0,"388",,terminal_output +18762,14774001,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",2067,0,"",shellscript,selection_mouse +18763,14774291,"TERMINAL",0,0,"3\t",,terminal_output +18764,14774484,"TERMINAL",0,0,"499",,terminal_output +18765,14774876,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",2045,0,"",shellscript,selection_mouse +18766,14775247,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",2044,1,"",shellscript,content +18767,14775305,"TERMINAL",0,0,"4\t",,terminal_output +18768,14775349,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",2043,1,"",shellscript,content +18769,14775407,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",2043,0,"3",shellscript,content +18770,14775407,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",2044,0,"",shellscript,selection_keyboard 
+18771,14775534,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",2044,0,"2",shellscript,content +18772,14775535,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",2045,0,"",shellscript,selection_keyboard +18773,14775546,"TERMINAL",0,0,"53030",,terminal_output +18774,14776429,"TERMINAL",0,0,"6\t",,terminal_output +18775,14776819,"TERMINAL",0,0,"611",,terminal_output +18776,14777404,"TERMINAL",0,0,"7\t",,terminal_output +18777,14777632,"TERMINAL",0,0,"722",,terminal_output +18778,14778420,"TERMINAL",0,0,"8\t",,terminal_output +18779,14778785,"TERMINAL",0,0,"833",,terminal_output +18780,14779504,"TERMINAL",0,0,"9\t",,terminal_output +18781,14779733,"TERMINAL",0,0,"944",,terminal_output +18782,14780339,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32 copy.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=2-00:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_37M_actionspace_32\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/coinrun/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=24 \\n --image_height=64 \\n --image_width=64 \\n --max_lr=3e-6 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --num_latents=32 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ""coinrun lam_ablation modelsize actionspace"" \\n --entity instant-uv \\n --project jafar \\n --lr_schedule wsd \\n --val_data_dir $array_records_dir_val \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +18783,14780527,"TERMINAL",0,0,"30\t",,terminal_output +18784,14780761,"TERMINAL",0,0,"3055",,terminal_output +18785,14781877,"TERMINAL",0,0,"1\t",,terminal_output +18786,14781902,"TERMINAL",0,0,"166",,terminal_output +18787,14782557,"TERMINAL",0,0,"2\t",,terminal_output +18788,14783329,"TERMINAL",0,0,"277",,terminal_output +18789,14783597,"TERMINAL",0,0,"3\t",,terminal_output 
+18790,14783919,"TERMINAL",0,0,"388",,terminal_output +18791,14784649,"TERMINAL",0,0,"4\t",,terminal_output +18792,14784921,"TERMINAL",0,0,"499",,terminal_output +18793,14785758,"TERMINAL",0,0,"5\t",,terminal_output +18794,14785966,"TERMINAL",0,0,"54040",,terminal_output +18795,14787136,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=2-00:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_37M_actionspace_32\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/coinrun/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=24 \\n --image_height=64 \\n --image_width=64 \\n --max_lr=3e-6 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --num_latents=32 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ""coinrun lam_ablation modelsize actionspace"" \\n --entity instant-uv \\n --project jafar \\n --lr_schedule wsd \\n --val_data_dir $array_records_dir_val \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +18796,14787137,"TERMINAL",0,0,"6\t",,terminal_output +18797,14787182,"TERMINAL",0,0,"611",,terminal_output +18798,14787746,"TERMINAL",0,0,"7\t",,terminal_output +18799,14788160,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch",825,0,"",shellscript,selection_mouse +18800,14788172,"TERMINAL",0,0,"722",,terminal_output +18801,14788814,"TERMINAL",0,0,"8\t",,terminal_output +18802,14789111,"TERMINAL",0,0,"833",,terminal_output +18803,14789641,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch",2045,0,"",shellscript,selection_mouse +18804,14789818,"TERMINAL",0,0,"9\t",,terminal_output +18805,14790153,"TERMINAL",0,0,"944",,terminal_output +18806,14790664,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch",2044,1,"",shellscript,content 
+18807,14790755,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch",2043,1,"",shellscript,content +18808,14790893,"TERMINAL",0,0,"40\t",,terminal_output +18809,14791143,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch",2043,0,"6",shellscript,content +18810,14791143,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch",2044,0,"",shellscript,selection_keyboard +18811,14791176,"TERMINAL",0,0,"4055",,terminal_output +18812,14791368,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch",2044,0,"4",shellscript,content +18813,14791369,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch",2045,0,"",shellscript,selection_keyboard +18814,14791958,"TERMINAL",0,0,"1\t",,terminal_output +18815,14792284,"TERMINAL",0,0,"166",,terminal_output +18816,14792933,"TERMINAL",0,0,"2\t",,terminal_output +18817,14793245,"TERMINAL",0,0,"277",,terminal_output +18818,14793422,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch",448,0,"",shellscript,selection_mouse +18819,14793718,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch",447,1,"",shellscript,content +18820,14793918,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch",446,1,"",shellscript,content +18821,14793984,"TERMINAL",0,0,"3\t",,terminal_output +18822,14794291,"TERMINAL",0,0,"399",,terminal_output +18823,14794605,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch",446,0,"6",shellscript,content +18824,14794605,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch",447,0,"",shellscript,selection_keyboard +18825,14794729,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch",447,0,"4",shellscript,content +18826,14794730,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch",448,0,"",shellscript,selection_keyboard +18827,14795049,"TERMINAL",0,0,"4\t",,terminal_output +18828,14795335,"TERMINAL",0,0,"55050",,terminal_output +18829,14796084,"TERMINAL",0,0,"5\t",,terminal_output +18830,14796835,"TERMINAL",0,0,"611",,terminal_output +18831,14797092,"TERMINAL",0,0,"6\t",,terminal_output +18832,14797531,"TERMINAL",0,0,"722",,terminal_output +18833,14798737,"TERMINAL",0,0,"7\t",,terminal_output +18834,14798787,"TERMINAL",0,0,"833",,terminal_output +18835,14799163,"TERMINAL",0,0,"8\t",,terminal_output +18836,14799868,"TERMINAL",0,0,"944",,terminal_output +18837,14800466,"TERMINAL",0,0,"9\t",,terminal_output +18838,14800596,"TERMINAL",0,0,"5055",,terminal_output +18839,14801314,"TERMINAL",0,0,"50\t",,terminal_output +18840,14801859,"TERMINAL",0,0,"166",,terminal_output +18841,14802341,"TERMINAL",0,0,"1\t",,terminal_output +18842,14802653,"TERMINAL",0,0,"277",,terminal_output +18843,14803341,"TERMINAL",0,0,"3\t",,terminal_output +18844,14803792,"TERMINAL",0,0,"388",,terminal_output +18845,14803847,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64 copy.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=2-00:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_37M_actionspace_64\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # 
optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/coinrun/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=24 \\n --image_height=64 \\n --image_width=64 \\n --max_lr=3e-6 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --num_latents=64 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ""coinrun lam_ablation modelsize actionspace"" \\n --entity instant-uv \\n --project jafar \\n --lr_schedule wsd \\n --val_data_dir $array_records_dir_val \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +18846,14804377,"TERMINAL",0,0,"4\t",,terminal_output +18847,14804802,"TERMINAL",0,0,"499",,terminal_output +18848,14805425,"TERMINAL",0,0,"5\t",,terminal_output +18849,14805737,"TERMINAL",0,0,"57:007:00",,terminal_output +18850,14806491,"TERMINAL",0,0,"6\t",,terminal_output +18851,14806831,"TERMINAL",0,0,"611",,terminal_output +18852,14807479,"TERMINAL",0,0,"7\t",,terminal_output +18853,14807824,"TERMINAL",0,0,"722",,terminal_output +18854,14808584,"TERMINAL",0,0,"8\t",,terminal_output +18855,14808873,"TERMINAL",0,0,"833",,terminal_output +18856,14809558,"TERMINAL",0,0,"9\t",,terminal_output +18857,14810018,"TERMINAL",0,0,"944",,terminal_output +18858,14810632,"TERMINAL",0,0,"3:00\t",,terminal_output +18859,14811041,"TERMINAL",0,0,"3:0055",,terminal_output +18860,14811758,"TERMINAL",0,0,"1\t",,terminal_output +18861,14812001,"TERMINAL",0,0,"166",,terminal_output +18862,14812802,"TERMINAL",0,0,"25",,terminal_output +18863,14813053,"TERMINAL",0,0,"277",,terminal_output +18864,14813834,"TERMINAL",0,0,"3\t",,terminal_output +18865,14814159,"TERMINAL",0,0,"388",,terminal_output +18866,14814990,"TERMINAL",0,0,"4\t",,terminal_output +18867,14815201,"TERMINAL",0,0,"499",,terminal_output +18868,14816013,"TERMINAL",0,0,"5\t",,terminal_output +18869,14816273,"TERMINAL",0,0,"51010",,terminal_output +18870,14818484,"TERMINAL",0,0,"633",,terminal_output +18871,14818494,"TERMINAL",0,0,"6\t",,terminal_output +18872,14819517,"TERMINAL",0,0,"944",,terminal_output +18873,14819526,"TERMINAL",0,0,"9\t",,terminal_output +18874,14820565,"TERMINAL",0,0,"1055",,terminal_output +18875,14820568,"TERMINAL",0,0,"10\t",,terminal_output +18876,14821598,"TERMINAL",0,0,"166",,terminal_output +18877,14821598,"TERMINAL",0,0,"1\t",,terminal_output +18878,14822507,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH 
--ntasks-per-node=1\n#SBATCH --time=2-00:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_37M_actionspace_64\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/coinrun/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=24 \\n --image_height=64 \\n --image_width=64 \\n --max_lr=3e-6 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --num_latents=64 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ""coinrun lam_ablation modelsize actionspace"" \\n --entity instant-uv \\n --project jafar \\n --lr_schedule wsd \\n --val_data_dir $array_records_dir_val \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +18879,14822702,"TERMINAL",0,0,"2\t",,terminal_output +18880,14822703,"TERMINAL",0,0,"277",,terminal_output +18881,14823711,"TERMINAL",0,0,"3\t",,terminal_output +18882,14823711,"TERMINAL",0,0,"388",,terminal_output +18883,14824243,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",448,0,"",shellscript,selection_mouse +18884,14824281,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",447,0,"",shellscript,selection_command +18885,14824755,"TERMINAL",0,0,"4\t",,terminal_output +18886,14824798,"TERMINAL",0,0,"499",,terminal_output +18887,14824896,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",448,0,"",shellscript,selection_command +18888,14824968,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",447,1,"",shellscript,content +18889,14825331,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",446,1,"",shellscript,content +18890,14825753,"TERMINAL",0,0,"5\t",,terminal_output +18891,14825761,"TERMINAL",0,0,"52020",,terminal_output +18892,14825796,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",446,0,"1",shellscript,content +18893,14825796,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",447,0,"",shellscript,selection_keyboard 
+18894,14826085,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",447,0,"2",shellscript,content +18895,14826085,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",448,0,"",shellscript,selection_keyboard +18896,14826271,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",448,0,"8",shellscript,content +18897,14826272,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",449,0,"",shellscript,selection_keyboard +18898,14826601,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",448,0,"",shellscript,selection_command +18899,14826790,"TERMINAL",0,0,"6\t",,terminal_output +18900,14826836,"TERMINAL",0,0,"611",,terminal_output +18901,14827847,"TERMINAL",0,0,"7\t",,terminal_output +18902,14827869,"TERMINAL",0,0,"722",,terminal_output +18903,14828859,"TERMINAL",0,0,"8\t",,terminal_output +18904,14828945,"TERMINAL",0,0,"833",,terminal_output +18905,14829906,"TERMINAL",0,0,"9\t",,terminal_output +18906,14829931,"TERMINAL",0,0,"944",,terminal_output +18907,14830126,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",2046,0,"",shellscript,selection_mouse +18908,14830949,"TERMINAL",0,0,"20\t",,terminal_output +18909,14830983,"TERMINAL",0,0,"2055",,terminal_output +18910,14831325,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",2045,1,"",shellscript,content +18911,14831457,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",2044,1,"",shellscript,content +18912,14831669,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",2044,0,"1",shellscript,content +18913,14831669,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",2045,0,"",shellscript,selection_keyboard +18914,14831793,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",2045,0,"2",shellscript,content +18915,14831794,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",2046,0,"",shellscript,selection_keyboard +18916,14831986,"TERMINAL",0,0,"1\t",,terminal_output +18917,14832027,"TERMINAL",0,0,"166",,terminal_output +18918,14832396,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",2046,0,"8",shellscript,content +18919,14832397,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",2047,0,"",shellscript,selection_keyboard +18920,14832516,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",2046,0,"",shellscript,selection_command +18921,14833023,"TERMINAL",0,0,"2\t",,terminal_output +18922,14833072,"TERMINAL",0,0,"277",,terminal_output +18923,14834060,"TERMINAL",0,0,"3\t",,terminal_output +18924,14834100,"TERMINAL",0,0,"388",,terminal_output +18925,14835118,"TERMINAL",0,0,"4\t",,terminal_output +18926,14835156,"TERMINAL",0,0,"499",,terminal_output +18927,14836145,"TERMINAL",0,0,"5\t",,terminal_output +18928,14836168,"TERMINAL",0,0,"53030",,terminal_output +18929,14837198,"TERMINAL",0,0,"6\t",,terminal_output +18930,14837198,"TERMINAL",0,0,"611",,terminal_output +18931,14838223,"TERMINAL",0,0,"7\t",,terminal_output +18932,14838244,"TERMINAL",0,0,"722",,terminal_output +18933,14839255,"TERMINAL",0,0,"8\t",,terminal_output +18934,14839271,"TERMINAL",0,0,"844",,terminal_output +18935,14839870,"TERMINAL",0,0,"bash",,terminal_focus +18936,14840314,"TERMINAL",0,0,"9\t",,terminal_output +18937,14840336,"TERMINAL",0,0,"3055",,terminal_output +18938,14841346,"TERMINAL",0,0,"31\t",,terminal_output +18939,14841389,"TERMINAL",0,0,"166",,terminal_output +18940,14842376,"TERMINAL",0,0,"2\t",,terminal_output +18941,14842505,"TERMINAL",0,0,"277",,terminal_output 
+18942,14843445,"TERMINAL",0,0,"3\t",,terminal_output +18943,14843477,"TERMINAL",0,0,"388",,terminal_output +18944,14843625,"TERMINAL",0,0,"git branch",,terminal_command +18945,14843724,"TERMINAL",0,0,"]633;Cfatal: not a git repository (or any parent up to mount point /hkfs)\r\nStopping at filesystem boundary (GIT_DISCOVERY_ACROSS_FILESYSTEM not set).\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints",,terminal_output +18946,14844441,"TERMINAL",0,0,"4\t",,terminal_output +18947,14844483,"TERMINAL",0,0,"499",,terminal_output +18948,14845482,"TERMINAL",0,0,"5\t",,terminal_output +18949,14845558,"TERMINAL",0,0,"54040",,terminal_output +18950,14845892,"TERMINAL",0,0,"de",,terminal_command +18951,14845987,"TERMINAL",0,0,"]633;Cbash: de: command not found...\r\nSimilar command is: 'ed'\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints",,terminal_output +18952,14846518,"TERMINAL",0,0,"6\t",,terminal_output +18953,14846575,"TERMINAL",0,0,"611",,terminal_output +18954,14847153,"TERMINAL",0,0,"dev",,terminal_command +18955,14847263,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +18956,14847619,"TERMINAL",0,0,"7\t",,terminal_output +18957,14847619,"TERMINAL",0,0,"722",,terminal_output +18958,14848590,"TERMINAL",0,0,"86",,terminal_output +18959,14848729,"TERMINAL",0,0,"833",,terminal_output +18960,14849757,"TERMINAL",0,0,"9\t",,terminal_output +18961,14849757,"TERMINAL",0,0,"944",,terminal_output +18962,14850773,"TERMINAL",0,0,"40\t",,terminal_output +18963,14850775,"TERMINAL",0,0,"4055",,terminal_output +18964,14851762,"TERMINAL",0,0,"1\t",,terminal_output +18965,14851791,"TERMINAL",0,0,"166",,terminal_output +18966,14851867,"TERMINAL",0,0,"git branch",,terminal_command +18967,14851982,"TERMINAL",0,0,"]633;C[?1h=\r add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n* input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n maskgit-sampling-iterative-unmasking-fix\r\n metrics-logging-for-dynamics-model\r\n monkey-patch\r\n new-arch-sampling\r\n preprocess_video\r\n refactor-tmp\r\n revised-dataloader\r\n:",,terminal_output +18968,14852736,"TERMINAL",0,0,"2\t",,terminal_output +18969,14852835,"TERMINAL",0,0,"277",,terminal_output +18970,14853776,"TERMINAL",0,0,"3\t",,terminal_output +18971,14853869,"TERMINAL",0,0,"388",,terminal_output +18972,14854811,"TERMINAL",0,0,"4\t",,terminal_output +18973,14854898,"TERMINAL",0,0,"499",,terminal_output +18974,14855515,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +18975,14855876,"TERMINAL",0,0,"5\t",,terminal_output +18976,14855959,"TERMINAL",0,0,"55050",,terminal_output +18977,14856957,"TERMINAL",0,0,"6\t",,terminal_output +18978,14857022,"TERMINAL",0,0,"611",,terminal_output +18979,14857943,"TERMINAL",0,0,"7\t",,terminal_output +18980,14858065,"TERMINAL",0,0,"722",,terminal_output 
+18981,14858972,"TERMINAL",0,0,"8\t",,terminal_output +18982,14859126,"TERMINAL",0,0,"833",,terminal_output +18983,14860011,"TERMINAL",0,0,"9\t",,terminal_output +18984,14860124,"TERMINAL",0,0,"944",,terminal_output +18985,14860808,"TERMINAL",0,0,"git checkout validation-loss",,terminal_command +18986,14860893,"TERMINAL",0,0,"]633;C",,terminal_output +18987,14860987,"TERMINAL",0,0,"Switched to branch 'validation-loss'\r\nYour branch is up to date with 'origin/validation-loss'.\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +18988,14861095,"TERMINAL",0,0,"50\t",,terminal_output +18989,14861191,"TERMINAL",0,0,"5055",,terminal_output +18990,14861343,"",0,0,"Switched from branch 'input_pipeline/add-npy2array_record' to 'validation-loss'",,git_branch_checkout +18991,14862086,"TERMINAL",0,0,"1\t",,terminal_output +18992,14862276,"TERMINAL",0,0,"166",,terminal_output +18993,14863170,"TERMINAL",0,0,"2\t",,terminal_output +18994,14863238,"TERMINAL",0,0,"277",,terminal_output +18995,14864195,"TERMINAL",0,0,"3\t",,terminal_output +18996,14864374,"TERMINAL",0,0,"399",,terminal_output +18997,14865220,"TERMINAL",0,0,"4\t",,terminal_output +18998,14865351,"TERMINAL",0,0,"58:008:00",,terminal_output +18999,14866229,"TERMINAL",0,0,"5\t",,terminal_output +19000,14866387,"TERMINAL",0,0,"611",,terminal_output +19001,14867374,"TERMINAL",0,0,"6\t",,terminal_output +19002,14867421,"TERMINAL",0,0,"722",,terminal_output +19003,14868306,"TERMINAL",0,0,"7\t",,terminal_output +19004,14868463,"TERMINAL",0,0,"833",,terminal_output +19005,14869342,"TERMINAL",0,0,"9\t",,terminal_output +19006,14869504,"TERMINAL",0,0,"944",,terminal_output +19007,14870402,"TERMINAL",0,0,"4:00\t",,terminal_output +19008,14870549,"TERMINAL",0,0,"4:0055",,terminal_output +19009,14871486,"TERMINAL",0,0,"1\t",,terminal_output +19010,14871577,"TERMINAL",0,0,"166",,terminal_output +19011,14872464,"TERMINAL",0,0,"2\t",,terminal_output +19012,14872621,"TERMINAL",0,0,"277",,terminal_output +19013,14873506,"TERMINAL",0,0,"3\t",,terminal_output +19014,14873668,"TERMINAL",0,0,"388",,terminal_output +19015,14874520,"TERMINAL",0,0,"4\t",,terminal_output +19016,14874745,"TERMINAL",0,0,"499",,terminal_output +19017,14875558,"TERMINAL",0,0,"5\t",,terminal_output +19018,14875746,"TERMINAL",0,0,"51010",,terminal_output +19019,14876684,"TERMINAL",0,0,"6\t",,terminal_output +19020,14876819,"TERMINAL",0,0,"611",,terminal_output +19021,14877636,"TERMINAL",0,0,"7\t",,terminal_output +19022,14877830,"TERMINAL",0,0,"722",,terminal_output +19023,14878729,"TERMINAL",0,0,"8\t",,terminal_output +19024,14878870,"TERMINAL",0,0,"833",,terminal_output +19025,14879752,"TERMINAL",0,0,"9\t",,terminal_output +19026,14879911,"TERMINAL",0,0,"944",,terminal_output +19027,14880777,"TERMINAL",0,0,"107",,terminal_output +19028,14880934,"TERMINAL",0,0,"1055",,terminal_output +19029,14881815,"TERMINAL",0,0,"1\t",,terminal_output +19030,14881982,"TERMINAL",0,0,"166",,terminal_output +19031,14882821,"TERMINAL",0,0,"2\t",,terminal_output +19032,14883017,"TERMINAL",0,0,"277",,terminal_output +19033,14883857,"TERMINAL",0,0,"3\t",,terminal_output +19034,14884056,"TERMINAL",0,0,"388",,terminal_output +19035,14884893,"TERMINAL",0,0,"4\t",,terminal_output +19036,14885096,"TERMINAL",0,0,"499",,terminal_output +19037,14886001,"TERMINAL",0,0,"5\t",,terminal_output +19038,14886141,"TERMINAL",0,0,"52020",,terminal_output +19039,14886965,"TERMINAL",0,0,"6\t",,terminal_output +19040,14887227,"TERMINAL",0,0,"611",,terminal_output +19041,14888052,"TERMINAL",0,0,"7\t",,terminal_output 
+19042,14888218,"TERMINAL",0,0,"722",,terminal_output +19043,14889074,"TERMINAL",0,0,"8\t",,terminal_output +19044,14889251,"TERMINAL",0,0,"833",,terminal_output +19045,14890202,"TERMINAL",0,0,"9\t",,terminal_output +19046,14890338,"TERMINAL",0,0,"955",,terminal_output +19047,14891229,"TERMINAL",0,0,"20\t",,terminal_output +19048,14891374,"TERMINAL",0,0,"2166",,terminal_output +19049,14892165,"TERMINAL",0,0,"1\t",,terminal_output +19050,14892367,"TERMINAL",0,0,"277",,terminal_output +19051,14893233,"TERMINAL",0,0,"2\t",,terminal_output +19052,14893445,"TERMINAL",0,0,"388",,terminal_output +19053,14894352,"TERMINAL",0,0,"sync-runner",,terminal_command +19054,14894353,"TERMINAL",0,0,"3\t",,terminal_output +19055,14894396,"TERMINAL",0,0,"]633;Csending incremental file list\r\n",,terminal_output +19056,14894451,"TERMINAL",0,0,"499",,terminal_output +19057,14894595,"TERMINAL",0,0,"./\r\ndiff.diff\r\ngenerate_dataset.py\r\ngenie.py\r\nsample.py\r\ntrain_dynamics.py\r\ntrain_lam.py\r\ntrain_tokenizer.py\r\n",,terminal_output +19058,14895296,"TERMINAL",0,0,"4\t",,terminal_output +19059,14895491,"TERMINAL",0,0,"53030",,terminal_output +19060,14895941,"TERMINAL",0,0,"input_pipeline/preprocess/\r\ninput_pipeline/preprocess/pngs_to_array_records.py\r\ninput_pipeline/preprocess/video_to_array_records.py\r\nmodels/\r\nmodels/dynamics.py\r\nmodels/lam.py\r\nmodels/tokenizer.py\r\nslurm/dev/alfred/berlin/topology/\r\nslurm/dev/alfred/berlin/topology/train_tokenizer_overfit_1gpu.sbatch\r\nslurm/dev/alfred/berlin/topology/train_tokenizer_overfit_2_gpu.sbatch\r\nslurm/dev/alfred/berlin/topology/train_tokenizer_restore_1gpu_to_1gpu.sbatch\r\nslurm/dev/alfred/berlin/topology/train_tokenizer_restore_1gpu_to_2gpu.sbatch\r\nslurm/dev/alfred/berlin/topology/train_tokenizer_restore_2gpu_to_1gpu.sbatch\r\nslurm/dev/alfred/berlin/topology/train_tokenizer_restore_2gpu_to_2gpu.sbatch\r\nslurm/dev/alfred/berlin/topology/nnx/\r\nslurm/dev/alfred/berlin/topology/nnx/train_tokenizer_overfit_1.sbatch\r\nslurm/dev/alfred/berlin/topology/nnx/train_tokenizer_overfit_2_gpu.sbatch\r\nslurm/dev/alfred/berlin/topology/nnx/train_tokenizer_restore_2gpu_to_1gpu.sbatch\r\nslurm/dev/alfred/berlin/topology/nnx/train_tokenizer_restore_2gpu_to_2gpu.sbatch\r\nslurm/dev/alfred/berlin/topology/prennx/\r\nslurm/dev/alfred/berlin/topology/prennx/train_tokenizer_overfit_2_gpu.sbatch\r\nslurm/dev/alfred/berlin/topology/prennx/train_tokenizer_restore_2gpu_to_1gpu.sbatch\r\nslurm/dev/alfred/berlin/topology/prennx/train_tokenizer_restore_2gpu_to_2gpu.sbatch\r\nslurm/dev/mihir/horeka/\r\nslurm/dev/mihir/horeka/generate_dataset_10m.sh\r\nslurm/dev/mihir/horeka/chunked_ablation/\r\nslurm/dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch\r\nslurm/dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_ablation/\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_ablation/coinrun_lam_baseline.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_ablation/coinrun_lam_mixed_prec.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_ablation/coinrun_lam_wsd.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_ablation/coinrun_lam_wsd_3e-5_3e-6.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_ablation/coinrun_lam_wsd_3e-6.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_ablation/coinrun_lam_wsd_8e-6.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/coinrun_dynamics_reproduction.sbatch\r\nslur
m/jobs/alfred/berlin/coinrun/coinrun_bigrun/coinrun_dynamics_reproduction_cotrain.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/coinrun_lam_big_run.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/coinrun_tokenizer_repoduction_ffn_512_n_blocks_8_full_prec.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/generate_data.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/sample.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_bigrun/sample_cotrain.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_reproduction_10k/\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_reproduction_10k/coinrun_lam_reproduction_ffn_512_num_blocks_8_full_prec.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_reproduction_10k/coinrun_tokenizer_repoduction_ffn_512_n_blocks_8_full_prec.sbatch\r\nslurm/jobs/alfred/helmholtz_cluster/jafar_og_reproduction/\r\nslurm/jobs/alfred/helmholtz_cluster/jafar_og_reproduction/generate_dataset_10m.sbatch\r\nslurm/jobs/mihir/horeka/\r\nslurm/jobs/mihir/horeka/coinrun/\r\nslurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh\r\nslurm/jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh\r\nslurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh\r\nslurm/jobs/mihir/horeka/jafar_og_reproduction/\r\nslurm/jobs/mihir/horeka/jafar_og_reproduction/generate_dataset.sbatch\r\nslurm/jobs/mihir/horeka/jafar_og_reproduction/generate_dataset_10m.sbatch\r\nslurm/jobs/mihir/horeka/jafar_og_reproduction/generate_dataset_10m_gt_actions.sbatch\r\nslurm/jobs/mihir/horeka/jafar_og_reproduction/generate_dataset_10m_gt_actions_train_val_test.sbatch\r\nslurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction.sbatch\r\nslurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_lam_reproduction.sbatch\r\nslurm/jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_tokenizer_repoduction.sbatch\r\nslurm/jobs/mihir/horeka/lam/\r\nslurm/jobs/mihir/horeka/lam/coinrun/\r\nslurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch\r\nslurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch\r\nslurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_400M.sbatch\r\nslurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch\r\nslurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch\r\nslurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch\r\nslurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch\r\nslurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch\r\nslurm/jobs/mihir/horeka/lam/minecraft/\r\nslurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_1node_dev.sbatch\r\nslurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch\r\nslurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch\r\nslurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch\r\nslurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-base.sbatch\r\nslurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node.sbatch\r\nutils/\r\nutils/dataloader.py\r\nutils/lr_utils.py\r\nutils/nn.py\r\nutils/parameter_utils.py\r\n",,terminal_output +19061,14896341,"TERMINAL",0,0,"5\t",,terminal_output +19062,14896520,"TERMINAL",0,0,"\r\nsent 382,345 bytes received 1,655 bytes 109,714.29 bytes/sec\r\ntotal size is 128,549,810 speedup is 334.77\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +19063,14896591,"TERMINAL",0,0,"611",,terminal_output +19064,14897373,"TERMINAL",0,0,"7\t",,terminal_output +19065,14897570,"TERMINAL",0,0,"722",,terminal_output 
+19066,14898493,"TERMINAL",0,0,"8\t",,terminal_output +19067,14898602,"TERMINAL",0,0,"833",,terminal_output +19068,14899414,"TERMINAL",0,0,"9\t",,terminal_output +19069,14899644,"TERMINAL",0,0,"944",,terminal_output +19070,14900518,"TERMINAL",0,0,"308",,terminal_output +19071,14900775,"TERMINAL",0,0,"3055",,terminal_output +19072,14901209,"TERMINAL",0,0,"runner",,terminal_command +19073,14901312,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +19074,14901483,"TERMINAL",0,0,"1\t",,terminal_output +19075,14901791,"TERMINAL",0,0,"166",,terminal_output +19076,14902597,"TERMINAL",0,0,"2\t",,terminal_output +19077,14902769,"TERMINAL",0,0,"277",,terminal_output +19078,14903554,"TERMINAL",0,0,"3\t",,terminal_output +19079,14903805,"TERMINAL",0,0,"388",,terminal_output +19080,14904591,"TERMINAL",0,0,"4\t",,terminal_output +19081,14904848,"TERMINAL",0,0,"499",,terminal_output +19082,14905686,"TERMINAL",0,0,"5\t",,terminal_output +19083,14905893,"TERMINAL",0,0,"54040",,terminal_output +19084,14906725,"TERMINAL",0,0,"6\t",,terminal_output +19085,14906926,"TERMINAL",0,0,"611",,terminal_output +19086,14907745,"TERMINAL",0,0,"7\t",,terminal_output +19087,14907962,"TERMINAL",0,0,"722",,terminal_output +19088,14908843,"TERMINAL",0,0,"8\t",,terminal_output +19089,14909026,"TERMINAL",0,0,"833",,terminal_output +19090,14909858,"TERMINAL",0,0,"9\t",,terminal_output +19091,14910051,"TERMINAL",0,0,"944",,terminal_output +19092,14910810,"TERMINAL",0,0,"40\t",,terminal_output +19093,14911203,"TERMINAL",0,0,"4055",,terminal_output +19094,14911848,"TERMINAL",0,0,"1\t",,terminal_output +19095,14912316,"TERMINAL",0,0,"166",,terminal_output +19096,14912881,"TERMINAL",0,0,"2\t",,terminal_output +19097,14913268,"TERMINAL",0,0,"277",,terminal_output +19098,14913924,"TERMINAL",0,0,"3\t",,terminal_output +19099,14914338,"TERMINAL",0,0,"388",,terminal_output +19100,14915023,"TERMINAL",0,0,"4\t",,terminal_output +19101,14915285,"TERMINAL",0,0,"499",,terminal_output +19102,14916068,"TERMINAL",0,0,"5\t",,terminal_output +19103,14916311,"TERMINAL",0,0,"55151",,terminal_output +19104,14917151,"TERMINAL",0,0,"6\t",,terminal_output +19105,14917318,"TERMINAL",0,0,"722",,terminal_output +19106,14918159,"TERMINAL",0,0,"7\t",,terminal_output +19107,14918404,"TERMINAL",0,0,"833",,terminal_output +19108,14919108,"TERMINAL",0,0,"8\t",,terminal_output +19109,14919395,"TERMINAL",0,0,"944",,terminal_output +19110,14920151,"TERMINAL",0,0,"9\t",,terminal_output +19111,14920611,"TERMINAL",0,0,"5055",,terminal_output +19112,14921230,"TERMINAL",0,0,"50\t",,terminal_output +19113,14921744,"TERMINAL",0,0,"166",,terminal_output +19114,14922250,"TERMINAL",0,0,"1\t",,terminal_output +19115,14922742,"TERMINAL",0,0,"277",,terminal_output +19116,14923267,"TERMINAL",0,0,"2\t",,terminal_output +19117,14923785,"TERMINAL",0,0,"388",,terminal_output +19118,14924558,"TERMINAL",0,0,"32",,terminal_output +19119,14924814,"TERMINAL",0,0,"499",,terminal_output +19120,14925860,"TERMINAL",0,0,"5\t",,terminal_output +19121,14925861,"TERMINAL",0,0,"59:009:00",,terminal_output +19122,14926642,"TERMINAL",0,0,"6\t",,terminal_output +19123,14926876,"TERMINAL",0,0,"611",,terminal_output +19124,14927743,"TERMINAL",0,0,"7\t",,terminal_output +19125,14927907,"TERMINAL",0,0,"722",,terminal_output +19126,14928804,"TERMINAL",0,0,"8\t",,terminal_output +19127,14928943,"TERMINAL",0,0,"833",,terminal_output +19128,14929749,"TERMINAL",0,0,"9\t",,terminal_output +19129,14929992,"TERMINAL",0,0,"944",,terminal_output 
+19130,14930784,"TERMINAL",0,0,"5:00\t",,terminal_output +19131,14931082,"TERMINAL",0,0,"5:0055",,terminal_output +19132,14931827,"TERMINAL",0,0,"1\t",,terminal_output +19133,14932186,"TERMINAL",0,0,"166",,terminal_output +19134,14932868,"TERMINAL",0,0,"2\t",,terminal_output +19135,14933211,"TERMINAL",0,0,"277",,terminal_output +19136,14933879,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=2-00:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_37M_actionspace_6\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/coinrun/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=24 \\n --image_height=64 \\n --image_width=64 \\n --max_lr=3e-6 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ""coinrun lam_ablation modelsize actionspace"" \\n --entity instant-uv \\n --project jafar \\n --lr_schedule wsd \\n --val_data_dir $array_records_dir_val \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +19137,14934152,"TERMINAL",0,0,"3\t",,terminal_output +19138,14934184,"TERMINAL",0,0,"388",,terminal_output +19139,14934952,"TERMINAL",0,0,"4\t",,terminal_output +19140,14935195,"TERMINAL",0,0,"499",,terminal_output +19141,14936243,"TERMINAL",0,0,"5\t",,terminal_output +19142,14936442,"TERMINAL",0,0,"51010",,terminal_output +19143,14937057,"TERMINAL",0,0,"6\t",,terminal_output +19144,14937293,"TERMINAL",0,0,"622",,terminal_output +19145,14938076,"TERMINAL",0,0,"7\t",,terminal_output +19146,14938487,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",0,0,"",shellscript,tab +19147,14938668,"TERMINAL",0,0,"833",,terminal_output +19148,14939131,"TERMINAL",0,0,"8\t",,terminal_output +19149,14939284,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",393,0," ",shellscript,content 
+19150,14940962,"slurm/jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_8_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/holiday/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3412401\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --dyna_type=maskgit \\n --num_latent_actions=100 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskgit-8-node-$slurm_job_id \\n --tags dynamics maskgit 8-node post-launch-main \\n --entity instant-uv \\n --project jafar \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +19151,14941373,"TERMINAL",0,0,"966",,terminal_output +19152,14941424,"TERMINAL",0,0,"94",,terminal_output +19153,14942481,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",0,0,"",shellscript,tab +19154,14942660,"TERMINAL",0,0,"1277",,terminal_output +19155,14942661,"TERMINAL",0,0,"12\t",,terminal_output +19156,14943490,"TERMINAL",0,0,"3\t",,terminal_output +19157,14943490,"TERMINAL",0,0,"388",,terminal_output +19158,14944518,"TERMINAL",0,0,"4\t",,terminal_output +19159,14944545,"TERMINAL",0,0,"499",,terminal_output +19160,14945564,"TERMINAL",0,0,"5\t",,terminal_output +19161,14945604,"TERMINAL",0,0,"52020",,terminal_output +19162,14945646,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",448,0,"",shellscript,selection_command +19163,14946565,"TERMINAL",0,0,"6\t",,terminal_output +19164,14946597,"TERMINAL",0,0,"611",,terminal_output +19165,14946785,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH 
--nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=2-00:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_coinrun_37M_actionspace_16\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/coinrun/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=24 \\n --image_height=64 \\n --image_width=64 \\n --max_lr=3e-6 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --log \\n --num_latents=16 \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ""coinrun lam_ablation modelsize actionspace"" \\n --entity instant-uv \\n --project jafar \\n --lr_schedule wsd \\n --val_data_dir $array_records_dir_val \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +19166,14947599,"TERMINAL",0,0,"7\t",,terminal_output +19167,14947611,"TERMINAL",0,0,"722",,terminal_output +19168,14948680,"TERMINAL",0,0,"8\t",,terminal_output +19169,14948680,"TERMINAL",0,0,"833",,terminal_output +19170,14949724,"TERMINAL",0,0,"9\t",,terminal_output +19171,14949724,"TERMINAL",0,0,"944",,terminal_output +19172,14950761,"TERMINAL",0,0,"2055",,terminal_output +19173,14950761,"TERMINAL",0,0,"20\t",,terminal_output +19174,14951765,"TERMINAL",0,0,"166",,terminal_output +19175,14951766,"TERMINAL",0,0,"1\t",,terminal_output +19176,14953148,"TERMINAL",0,0,"2\t",,terminal_output +19177,14953148,"TERMINAL",0,0,"277",,terminal_output +19178,14953877,"TERMINAL",0,0,"3\t",,terminal_output +19179,14953877,"TERMINAL",0,0,"388",,terminal_output +19180,14954882,"TERMINAL",0,0,"4\t",,terminal_output +19181,14954885,"TERMINAL",0,0,"499",,terminal_output +19182,14955810,"slurm/jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH 
--output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/lam/%x_%j.log\n#SBATCH --job-name=train_lam_minecraft_8node_darkness_filter_400M\n#SBATCH --reservation=llmtum\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\n# slurm_job_id=$SLURM_JOB_ID\nslurm_job_id=3454954\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --save_ckpt \\n --restore_ckpt \\n --wandb_id $slurm_job_id \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=160 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --darkness_threshold=50 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=lam-minecraft-8-node-darkness-filter-400M-$slurm_job_id \\n --tags lam minecraft 8-node darkness-filter 400M \\n --entity instant-uv \\n --project jafar \\n --num_latents=100 \\n --model_dim=1024 \\n --num_blocks=12 \\n --num_heads=16 \\n --latent_dim=64 \\n --ffn_dim=4096 \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +19183,14956022,"TERMINAL",0,0,"5\t",,terminal_output +19184,14956022,"TERMINAL",0,0,"53030",,terminal_output +19185,14957141,"TERMINAL",0,0,"6\t",,terminal_output +19186,14957141,"TERMINAL",0,0,"611",,terminal_output +19187,14958219,"TERMINAL",0,0,"7\t",,terminal_output +19188,14958219,"TERMINAL",0,0,"722",,terminal_output +19189,14959061,"TERMINAL",0,0,"8\t",,terminal_output +19190,14959098,"TERMINAL",0,0,"833",,terminal_output +19191,14960066,"TERMINAL",0,0,"9\t",,terminal_output +19192,14960110,"TERMINAL",0,0,"944",,terminal_output +19193,14961706,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,0,"",plaintext,tab +19194,14961751,"TERMINAL",0,0,"30\t",,terminal_output +19195,14961752,"TERMINAL",0,0,"3055",,terminal_output +19196,14962230,"TERMINAL",0,0,"1\t",,terminal_output +19197,14962230,"TERMINAL",0,0,"166",,terminal_output +19198,14963201,"TERMINAL",0,0,"2\t",,terminal_output +19199,14963234,"TERMINAL",0,0,"277",,terminal_output +19200,14964239,"TERMINAL",0,0,"3\t",,terminal_output +19201,14964319,"TERMINAL",0,0,"388",,terminal_output +19202,14965306,"TERMINAL",0,0,"4\t",,terminal_output +19203,14965310,"TERMINAL",0,0,"54040",,terminal_output +19204,14966287,"TERMINAL",0,0,"5\t",,terminal_output +19205,14966357,"TERMINAL",0,0,"611",,terminal_output +19206,14967407,"TERMINAL",0,0,"7\t",,terminal_output +19207,14967408,"TERMINAL",0,0,"722",,terminal_output +19208,14968431,"TERMINAL",0,0,"8\t",,terminal_output +19209,14968456,"TERMINAL",0,0,"833",,terminal_output 
+19210,14969999,"TERMINAL",0,0,"9\t",,terminal_output +19211,14970064,"TERMINAL",0,0,"944",,terminal_output +19212,14970526,"TERMINAL",0,0,"40\t",,terminal_output +19213,14970560,"TERMINAL",0,0,"4055",,terminal_output +19214,14971233,"TERMINAL",0,0,"ls -l slurm/jobs/mihir/horeka/lam/coinrun",,terminal_command +19215,14971369,"TERMINAL",0,0,"]633;Ctotal 8\r\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_133M.sbatch\r\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_311.sbatch\r\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2392 Sep 5 17:30 coinrun_lam_400M.sbatch\r\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2334 Sep 5 17:33 coinrun_lam_as_128.sbatch\r\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_16.sbatch\r\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_32.sbatch\r\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_64.sbatch\r\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2308 Sep 5 17:31 coinrun_lam_base.sbatch\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +19216,14971509,"TERMINAL",0,0,"1\t",,terminal_output +19217,14971607,"TERMINAL",0,0,"166",,terminal_output +19218,14972636,"TERMINAL",0,0,"2\t",,terminal_output +19219,14972636,"TERMINAL",0,0,"277",,terminal_output +19220,14973566,"TERMINAL",0,0,"3\t",,terminal_output +19221,14973646,"TERMINAL",0,0,"388",,terminal_output +19222,14974825,"TERMINAL",0,0,"4\t",,terminal_output +19223,14974826,"TERMINAL",0,0,"499",,terminal_output +19224,14975204,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,0,"",plaintext,tab +19225,14975656,"TERMINAL",0,0,"5\t",,terminal_output +19226,14975744,"TERMINAL",0,0,"55050",,terminal_output +19227,14976615,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,0,"-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_133M.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_311.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2392 Sep 5 17:30 coinrun_lam_400M.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2334 Sep 5 17:33 coinrun_lam_as_128.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_16.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_32.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_64.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2308 Sep 5 17:31 coinrun_lam_base.sbatch",plaintext,content +19228,14976701,"TERMINAL",0,0,"6\t",,terminal_output +19229,14976814,"TERMINAL",0,0,"611",,terminal_output +19230,14977393,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",698,0,"",plaintext,selection_command +19231,14977795,"TERMINAL",0,0,"7\t",,terminal_output +19232,14977865,"TERMINAL",0,0,"722",,terminal_output +19233,14978292,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",613,86,"-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2308 Sep 5 17:31 coinrun_lam_base.sbatch",shellscript,selection_command +19234,14978473,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",525,174,"-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_64.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2308 Sep 5 17:31 coinrun_lam_base.sbatch",shellscript,selection_command +19235,14978624,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",437,262,"-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_32.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 
coinrun_lam_as_64.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2308 Sep 5 17:31 coinrun_lam_base.sbatch",shellscript,selection_command +19236,14978784,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",349,350,"-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_16.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_32.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_64.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2308 Sep 5 17:31 coinrun_lam_base.sbatch",shellscript,selection_command +19237,14978820,"TERMINAL",0,0,"8\t",,terminal_output +19238,14978904,"TERMINAL",0,0,"833",,terminal_output +19239,14978911,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",260,439,"-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2334 Sep 5 17:33 coinrun_lam_as_128.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_16.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_32.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_64.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2308 Sep 5 17:31 coinrun_lam_base.sbatch",shellscript,selection_command +19240,14979045,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",173,526,"-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2392 Sep 5 17:30 coinrun_lam_400M.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2334 Sep 5 17:33 coinrun_lam_as_128.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_16.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_32.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_64.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2308 Sep 5 17:31 coinrun_lam_base.sbatch",shellscript,selection_command +19241,14979192,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",87,612,"-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_311.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2392 Sep 5 17:30 coinrun_lam_400M.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2334 Sep 5 17:33 coinrun_lam_as_128.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_16.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_32.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_64.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2308 Sep 5 17:31 coinrun_lam_base.sbatch",shellscript,selection_command +19242,14979411,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,699,"-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_133M.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_311.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2392 Sep 5 17:30 coinrun_lam_400M.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2334 Sep 5 17:33 coinrun_lam_as_128.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_16.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_32.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_64.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2308 Sep 5 17:31 coinrun_lam_base.sbatch",shellscript,selection_command +19243,14979651,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,0,"",shellscript,selection_command +19244,14979807,"TERMINAL",0,0,"9\t",,terminal_output 
+19245,14979945,"TERMINAL",0,0,"944",,terminal_output +19246,14980546,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,0,"",shellscript,selection_command +19247,14980873,"TERMINAL",0,0,"50\t",,terminal_output +19248,14980945,"TERMINAL",0,0,"5055",,terminal_output +19249,14981413,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,86,"-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_133M.sbatch",shellscript,selection_command +19250,14981892,"TERMINAL",0,0,"1\t",,terminal_output +19251,14981989,"TERMINAL",0,0,"166",,terminal_output +19252,14982314,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,172,"-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_133M.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_311.sbatch",shellscript,selection_command +19253,14982443,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,259,"-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_133M.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_311.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2392 Sep 5 17:30 coinrun_lam_400M.sbatch",shellscript,selection_command +19254,14982617,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,348,"-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_133M.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_311.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2392 Sep 5 17:30 coinrun_lam_400M.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2334 Sep 5 17:33 coinrun_lam_as_128.sbatch",shellscript,selection_command +19255,14982767,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,436,"-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_133M.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_311.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2392 Sep 5 17:30 coinrun_lam_400M.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2334 Sep 5 17:33 coinrun_lam_as_128.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_16.sbatch",shellscript,selection_command +19256,14982880,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,524,"-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_133M.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_311.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2392 Sep 5 17:30 coinrun_lam_400M.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2334 Sep 5 17:33 coinrun_lam_as_128.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_16.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_32.sbatch",shellscript,selection_command +19257,14983026,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,612,"-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_133M.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_311.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2392 Sep 5 17:30 coinrun_lam_400M.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2334 Sep 5 17:33 coinrun_lam_as_128.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_16.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_32.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_64.sbatch",shellscript,selection_command +19258,14983026,"TERMINAL",0,0,"2\t",,terminal_output 
+19259,14983101,"TERMINAL",0,0,"277",,terminal_output +19260,14983153,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,699,"-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_133M.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2391 Sep 5 17:30 coinrun_lam_311.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2392 Sep 5 17:30 coinrun_lam_400M.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2334 Sep 5 17:33 coinrun_lam_as_128.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_16.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_32.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2332 Sep 5 17:32 coinrun_lam_as_64.sbatch\n-rw-r--r-- 1 tum_cte0515 hk-project-p0023960 2308 Sep 5 17:31 coinrun_lam_base.sbatch",shellscript,selection_command +19261,14983826,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",86,0,"",shellscript,selection_command +19262,14983973,"TERMINAL",0,0,"3\t",,terminal_output +19263,14984112,"TERMINAL",0,0,"388",,terminal_output +19264,14985023,"TERMINAL",0,0,"4\t",,terminal_output +19265,14985127,"TERMINAL",0,0,"499",,terminal_output +19266,14985994,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",673,3,"",shellscript,content +19267,14985995,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",585,3,"",shellscript,content +19268,14985995,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",497,3,"",shellscript,content +19269,14985995,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",409,3,"",shellscript,content +19270,14985995,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",320,3,"",shellscript,content +19271,14985995,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",233,3,"",shellscript,content +19272,14985995,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",147,3,"",shellscript,content +19273,14985995,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",60,3,"",shellscript,content +19274,14986079,"TERMINAL",0,0,"5\t",,terminal_output +19275,14986264,"TERMINAL",0,0,"520:0020:00",,terminal_output +19276,14986479,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",651,1,"",shellscript,content +19277,14986480,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",566,1,"",shellscript,content +19278,14986480,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",481,1,"",shellscript,content +19279,14986480,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",396,1,"",shellscript,content +19280,14986480,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",310,1,"",shellscript,content +19281,14986480,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",226,1,"",shellscript,content +19282,14986480,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",143,1,"",shellscript,content +19283,14986480,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",59,1,"",shellscript,content +19284,14986520,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",642,2,"",shellscript,content +19285,14986520,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",558,2,"",shellscript,content +19286,14986520,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",474,2,"",shellscript,content +19287,14986520,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",390,2,"",shellscript,content +19288,14986520,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",305,2,"",shellscript,content +19289,14986520,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",222,2,"",shellscript,content +19290,14986520,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",140,2,"",shellscript,content +19291,14986520,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",57,2,"",shellscript,content +19292,14986535,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",626,2,"",shellscript,content +19293,14986535,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",544,2,"",shellscript,content 
+19294,14986536,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",462,2,"",shellscript,content +19295,14986536,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",380,2,"",shellscript,content +19296,14986536,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",297,2,"",shellscript,content +19297,14986536,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",216,2,"",shellscript,content +19298,14986536,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",136,2,"",shellscript,content +19299,14986536,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",55,2,"",shellscript,content +19300,14986568,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",610,2,"",shellscript,content +19301,14986568,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",530,2,"",shellscript,content +19302,14986568,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",450,2,"",shellscript,content +19303,14986568,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",370,2,"",shellscript,content +19304,14986569,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",289,2,"",shellscript,content +19305,14986569,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",210,2,"",shellscript,content +19306,14986569,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",132,2,"",shellscript,content +19307,14986569,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",53,2,"",shellscript,content +19308,14986727,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",593,3,"",shellscript,content +19309,14986727,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",515,3,"",shellscript,content +19310,14986727,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",437,3,"",shellscript,content +19311,14986727,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",359,3,"",shellscript,content +19312,14986727,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",280,3,"",shellscript,content +19313,14986727,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",203,3,"",shellscript,content +19314,14986727,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",127,3,"",shellscript,content +19315,14986727,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",50,3,"",shellscript,content +19316,14986730,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",567,5,"",shellscript,content +19317,14986730,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",492,5,"",shellscript,content +19318,14986730,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",417,5,"",shellscript,content +19319,14986730,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",342,5,"",shellscript,content +19320,14986731,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",266,5,"",shellscript,content +19321,14986731,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",192,5,"",shellscript,content +19322,14986731,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",119,5,"",shellscript,content +19323,14986731,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",45,5,"",shellscript,content +19324,14986772,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",523,9,"",shellscript,content +19325,14986773,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",453,9,"",shellscript,content +19326,14986773,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",383,9,"",shellscript,content +19327,14986773,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",313,9,"",shellscript,content +19328,14986773,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",242,9,"",shellscript,content +19329,14986773,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",173,9,"",shellscript,content +19330,14986773,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",105,9,"",shellscript,content +19331,14986773,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",36,9,"",shellscript,content +19332,14986775,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",459,1,"",shellscript,content +19333,14986775,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",398,1,"",shellscript,content 
+19334,14986775,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",337,1,"",shellscript,content +19335,14986775,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",276,1,"",shellscript,content +19336,14986775,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",214,1,"",shellscript,content +19337,14986775,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",154,1,"",shellscript,content +19338,14986775,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",95,1,"",shellscript,content +19339,14986775,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",35,1,"",shellscript,content +19340,14986978,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",445,7,"",shellscript,content +19341,14986978,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",385,7,"",shellscript,content +19342,14986978,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",325,7,"",shellscript,content +19343,14986978,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",265,7,"",shellscript,content +19344,14986978,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",204,7,"",shellscript,content +19345,14986978,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",145,7,"",shellscript,content +19346,14986979,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",87,7,"",shellscript,content +19347,14986979,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",28,7,"",shellscript,content +19348,14987086,"TERMINAL",0,0,"6\t",,terminal_output +19349,14987233,"TERMINAL",0,0,"611",,terminal_output +19350,14987366,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",395,1,"",shellscript,content +19351,14987366,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",342,1,"",shellscript,content +19352,14987366,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",289,1,"",shellscript,content +19353,14987366,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",236,1,"",shellscript,content +19354,14987366,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",182,1,"",shellscript,content +19355,14987366,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",130,1,"",shellscript,content +19356,14987366,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",79,1,"",shellscript,content +19357,14987366,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",27,1,"",shellscript,content +19358,14987728,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",386,2,"",shellscript,content +19359,14987728,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",334,2,"",shellscript,content +19360,14987728,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",282,2,"",shellscript,content +19361,14987728,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",230,2,"",shellscript,content +19362,14987728,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",177,2,"",shellscript,content +19363,14987728,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",126,2,"",shellscript,content +19364,14987728,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",76,2,"",shellscript,content +19365,14987728,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",25,2,"",shellscript,content +19366,14988019,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",360,12,"",shellscript,content +19367,14988020,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",310,12,"",shellscript,content +19368,14988020,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",260,12,"",shellscript,content +19369,14988020,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",210,12,"",shellscript,content +19370,14988020,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",159,12,"",shellscript,content +19371,14988020,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",110,12,"",shellscript,content +19372,14988020,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",62,12,"",shellscript,content +19373,14988020,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",13,12,"",shellscript,content +19374,14988134,"TERMINAL",0,0,"7\t",,terminal_output +19375,14988309,"TERMINAL",0,0,"722",,terminal_output 
+19376,14988440,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",274,2,"",shellscript,content +19377,14988440,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",236,2,"",shellscript,content +19378,14988440,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",198,2,"",shellscript,content +19379,14988441,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",160,2,"",shellscript,content +19380,14988441,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",121,2,"",shellscript,content +19381,14988441,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",84,2,"",shellscript,content +19382,14988441,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",48,2,"",shellscript,content +19383,14988441,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",11,2,"",shellscript,content +19384,14988738,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",257,3,"",shellscript,content +19385,14988738,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",221,3,"",shellscript,content +19386,14988739,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",185,3,"",shellscript,content +19387,14988739,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",149,3,"",shellscript,content +19388,14988739,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",112,3,"",shellscript,content +19389,14988739,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",77,3,"",shellscript,content +19390,14988739,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",43,3,"",shellscript,content +19391,14988739,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",8,3,"",shellscript,content +19392,14989025,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",235,1,"",shellscript,content +19393,14989025,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",202,1,"",shellscript,content +19394,14989025,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",169,1,"",shellscript,content +19395,14989025,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",136,1,"",shellscript,content +19396,14989025,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",102,1,"",shellscript,content +19397,14989025,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",70,1,"",shellscript,content +19398,14989025,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",39,1,"",shellscript,content +19399,14989025,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",7,1,"",shellscript,content +19400,14989172,"TERMINAL",0,0,"8\t",,terminal_output +19401,14989324,"TERMINAL",0,0,"844",,terminal_output +19402,14989342,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",226,2,"",shellscript,content +19403,14989342,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",194,2,"",shellscript,content +19404,14989343,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",162,2,"",shellscript,content +19405,14989343,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",130,2,"",shellscript,content +19406,14989343,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",97,2,"",shellscript,content +19407,14989343,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",66,2,"",shellscript,content +19408,14989343,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",36,2,"",shellscript,content +19409,14989343,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",5,2,"",shellscript,content +19410,14989759,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",211,1,"",shellscript,content +19411,14989759,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",181,1,"",shellscript,content +19412,14989759,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",151,1,"",shellscript,content +19413,14989759,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",121,1,"",shellscript,content +19414,14989760,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",90,1,"",shellscript,content +19415,14989760,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",61,1,"",shellscript,content +19416,14989760,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",33,1,"",shellscript,content 
+19417,14989760,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",4,1,"",shellscript,content +19418,14990102,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",203,1,"",shellscript,content +19419,14990103,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",174,1,"",shellscript,content +19420,14990103,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",145,1,"",shellscript,content +19421,14990103,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",116,1,"",shellscript,content +19422,14990103,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",86,1,"",shellscript,content +19423,14990103,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",58,1,"",shellscript,content +19424,14990103,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",31,1,"",shellscript,content +19425,14990103,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",3,1,"",shellscript,content +19426,14990209,"TERMINAL",0,0,"9\t",,terminal_output +19427,14990365,"TERMINAL",0,0,"6:0055",,terminal_output +19428,14990524,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",195,1,"",shellscript,content +19429,14990524,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",167,1,"",shellscript,content +19430,14990525,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",139,1,"",shellscript,content +19431,14990525,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",111,1,"",shellscript,content +19432,14990525,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",82,1,"",shellscript,content +19433,14990525,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",55,1,"",shellscript,content +19434,14990525,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",29,1,"",shellscript,content +19435,14990525,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",2,1,"",shellscript,content +19436,14990660,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",187,1,"",shellscript,content +19437,14990660,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",160,1,"",shellscript,content +19438,14990660,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",133,1,"",shellscript,content +19439,14990660,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",106,1,"",shellscript,content +19440,14990660,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",78,1,"",shellscript,content +19441,14990660,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",52,1,"",shellscript,content +19442,14990660,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",27,1,"",shellscript,content +19443,14990660,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",1,1,"",shellscript,content +19444,14990851,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",179,1,"",shellscript,content +19445,14990851,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",153,1,"",shellscript,content +19446,14990852,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",127,1,"",shellscript,content +19447,14990852,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",101,1,"",shellscript,content +19448,14990852,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",74,1,"",shellscript,content +19449,14990852,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",49,1,"",shellscript,content +19450,14990852,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",25,1,"",shellscript,content +19451,14990852,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,1,"",shellscript,content +19452,14991251,"TERMINAL",0,0,"6:00\t",,terminal_output +19453,14991394,"TERMINAL",0,0,"166",,terminal_output +19454,14991487,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",172,0,"s",shellscript,content +19455,14991487,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",147,0,"s",shellscript,content +19456,14991487,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",122,0,"s",shellscript,content +19457,14991487,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",97,0,"s",shellscript,content +19458,14991488,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",71,0,"s",shellscript,content 
+19459,14991488,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",47,0,"s",shellscript,content +19460,14991488,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",24,0,"s",shellscript,content +19461,14991488,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,0,"s",shellscript,content +19462,14991488,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",1,0,"",shellscript,selection_keyboard +19463,14991615,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",180,0,"b",shellscript,content +19464,14991615,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",154,0,"b",shellscript,content +19465,14991615,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",128,0,"b",shellscript,content +19466,14991615,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",102,0,"b",shellscript,content +19467,14991615,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",75,0,"b",shellscript,content +19468,14991615,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",50,0,"b",shellscript,content +19469,14991615,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",26,0,"b",shellscript,content +19470,14991615,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",1,0,"b",shellscript,content +19471,14991616,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",2,0,"",shellscript,selection_keyboard +19472,14991743,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",188,0,"a",shellscript,content +19473,14991744,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",161,0,"a",shellscript,content +19474,14991744,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",134,0,"a",shellscript,content +19475,14991744,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",107,0,"a",shellscript,content +19476,14991744,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",79,0,"a",shellscript,content +19477,14991744,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",53,0,"a",shellscript,content +19478,14991744,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",28,0,"a",shellscript,content +19479,14991744,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",2,0,"a",shellscript,content +19480,14991745,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",3,0,"",shellscript,selection_keyboard +19481,14991818,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",196,0,"t",shellscript,content +19482,14991818,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",168,0,"t",shellscript,content +19483,14991819,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",140,0,"t",shellscript,content +19484,14991819,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",112,0,"t",shellscript,content +19485,14991819,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",83,0,"t",shellscript,content +19486,14991819,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",56,0,"t",shellscript,content +19487,14991819,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",30,0,"t",shellscript,content +19488,14991819,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",3,0,"t",shellscript,content +19489,14991819,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",4,0,"",shellscript,selection_keyboard +19490,14991975,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",204,0,"c",shellscript,content +19491,14991976,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",175,0,"c",shellscript,content +19492,14991976,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",146,0,"c",shellscript,content +19493,14991976,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",117,0,"c",shellscript,content +19494,14991976,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",87,0,"c",shellscript,content +19495,14991976,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",59,0,"c",shellscript,content +19496,14991976,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",32,0,"c",shellscript,content +19497,14991976,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",4,0,"c",shellscript,content +19498,14991976,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",5,0,"",shellscript,selection_keyboard 
+19499,14992025,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",212,0,"h",shellscript,content +19500,14992026,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",182,0,"h",shellscript,content +19501,14992026,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",152,0,"h",shellscript,content +19502,14992026,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",122,0,"h",shellscript,content +19503,14992026,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",91,0,"h",shellscript,content +19504,14992026,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",62,0,"h",shellscript,content +19505,14992026,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",34,0,"h",shellscript,content +19506,14992026,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",5,0,"h",shellscript,content +19507,14992026,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",6,0,"",shellscript,selection_keyboard +19508,14992150,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",220,0," ",shellscript,content +19509,14992150,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",189,0," ",shellscript,content +19510,14992150,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",158,0," ",shellscript,content +19511,14992150,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",127,0," ",shellscript,content +19512,14992150,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",95,0," ",shellscript,content +19513,14992150,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",65,0," ",shellscript,content +19514,14992150,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",36,0," ",shellscript,content +19515,14992150,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",6,0," ",shellscript,content +19516,14992151,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",7,0,"",shellscript,selection_keyboard +19517,14992296,"TERMINAL",0,0,"1\t",,terminal_output +19518,14992440,"TERMINAL",0,0,"277",,terminal_output +19519,14993328,"TERMINAL",0,0,"3\t",,terminal_output +19520,14993502,"TERMINAL",0,0,"388",,terminal_output +19521,14993891,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",6,0,"",shellscript,selection_command +19522,14994383,"TERMINAL",0,0,"4\t",,terminal_output +19523,14994524,"TERMINAL",0,0,"499",,terminal_output +19524,14994942,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,30,"sbatch coinrun_lam_133M.sbatch",shellscript,selection_command +19525,14995185,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,60,"sbatch coinrun_lam_133M.sbatch\nsbatch coinrun_lam_311.sbatch",shellscript,selection_command +19526,14995338,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,91,"sbatch coinrun_lam_133M.sbatch\nsbatch coinrun_lam_311.sbatch\nsbatch coinrun_lam_400M.sbatch",shellscript,selection_command +19527,14995419,"TERMINAL",0,0,"5\t",,terminal_output +19528,14995461,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,124,"sbatch coinrun_lam_133M.sbatch\nsbatch coinrun_lam_311.sbatch\nsbatch coinrun_lam_400M.sbatch\nsbatch coinrun_lam_as_128.sbatch",shellscript,selection_command +19529,14995618,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,156,"sbatch coinrun_lam_133M.sbatch\nsbatch coinrun_lam_311.sbatch\nsbatch coinrun_lam_400M.sbatch\nsbatch coinrun_lam_as_128.sbatch\nsbatch coinrun_lam_as_16.sbatch",shellscript,selection_command +19530,14995619,"TERMINAL",0,0,"51010",,terminal_output +19531,14995752,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,188,"sbatch coinrun_lam_133M.sbatch\nsbatch coinrun_lam_311.sbatch\nsbatch coinrun_lam_400M.sbatch\nsbatch coinrun_lam_as_128.sbatch\nsbatch coinrun_lam_as_16.sbatch\nsbatch coinrun_lam_as_32.sbatch",shellscript,selection_command +19532,14995909,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,220,"sbatch coinrun_lam_133M.sbatch\nsbatch coinrun_lam_311.sbatch\nsbatch coinrun_lam_400M.sbatch\nsbatch 
coinrun_lam_as_128.sbatch\nsbatch coinrun_lam_as_16.sbatch\nsbatch coinrun_lam_as_32.sbatch\nsbatch coinrun_lam_as_64.sbatch",shellscript,selection_command +19533,14996291,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,251,"sbatch coinrun_lam_133M.sbatch\nsbatch coinrun_lam_311.sbatch\nsbatch coinrun_lam_400M.sbatch\nsbatch coinrun_lam_as_128.sbatch\nsbatch coinrun_lam_as_16.sbatch\nsbatch coinrun_lam_as_32.sbatch\nsbatch coinrun_lam_as_64.sbatch\nsbatch coinrun_lam_base.sbatch",shellscript,selection_command +19534,14996451,"TERMINAL",0,0,"6\t",,terminal_output +19535,14996620,"TERMINAL",0,0,"611",,terminal_output +19536,14997580,"TERMINAL",0,0,"7\t",,terminal_output +19537,14997657,"TERMINAL",0,0,"722",,terminal_output +19538,14998518,"TERMINAL",0,0,"8\t",,terminal_output +19539,14998677,"TERMINAL",0,0,"833",,terminal_output +19540,14999673,"TERMINAL",0,0,"9\t",,terminal_output +19541,14999733,"TERMINAL",0,0,"944",,terminal_output +19542,15000633,"TERMINAL",0,0,"10\t",,terminal_output +19543,15000795,"TERMINAL",0,0,"1055",,terminal_output +19544,15001633,"TERMINAL",0,0,"1\t",,terminal_output +19545,15001823,"TERMINAL",0,0,"166",,terminal_output +19546,15002668,"TERMINAL",0,0,"2\t",,terminal_output +19547,15002838,"TERMINAL",0,0,"277",,terminal_output +19548,15003764,"TERMINAL",0,0,"3\t",,terminal_output +19549,15003916,"TERMINAL",0,0,"388",,terminal_output +19550,15004793,"TERMINAL",0,0,"4\t",,terminal_output +19551,15004972,"TERMINAL",0,0,"sync-runner",,terminal_command +19552,15005014,"TERMINAL",0,0,"499",,terminal_output +19553,15005057,"TERMINAL",0,0,"]633;Csending incremental file list\r\nslurm/jobs/mihir/horeka/lam/coinrun/\r\nslurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch\r\nslurm/jobs/mihir/horeka/lam/coinrun/tmp\r\n",,terminal_output +19554,15005148,"TERMINAL",0,0,"\r\nsent 34,426 bytes received 244 bytes 69,340.00 bytes/sec\r\ntotal size is 128,550,065 speedup is 3,707.82\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +19555,15005782,"TERMINAL",0,0,"5\t",,terminal_output +19556,15005965,"TERMINAL",0,0,"52020",,terminal_output +19557,15006842,"TERMINAL",0,0,"6\t",,terminal_output +19558,15007010,"TERMINAL",0,0,"611",,terminal_output +19559,15007862,"TERMINAL",0,0,"7\t",,terminal_output +19560,15008046,"TERMINAL",0,0,"722",,terminal_output +19561,15008902,"TERMINAL",0,0,"8\t",,terminal_output +19562,15009085,"TERMINAL",0,0,"833",,terminal_output +19563,15009943,"TERMINAL",0,0,"9\t",,terminal_output +19564,15010166,"TERMINAL",0,0,"944",,terminal_output +19565,15010867,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",0,0,"",shellscript,tab +19566,15011105,"TERMINAL",0,0,"20\t",,terminal_output +19567,15011191,"TERMINAL",0,0,"2055",,terminal_output +19568,15012027,"TERMINAL",0,0,"1\t",,terminal_output +19569,15012205,"TERMINAL",0,0,"166",,terminal_output +19570,15013053,"TERMINAL",0,0,"2\t",,terminal_output +19571,15013097,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",398,0,"",shellscript,selection_mouse +19572,15013283,"TERMINAL",0,0,"277",,terminal_output +19573,15013915,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",397,0,"",shellscript,selection_command +19574,15014088,"TERMINAL",0,0,"3\t",,terminal_output +19575,15014285,"TERMINAL",0,0,"399",,terminal_output +19576,15014344,"slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",393,4,"",shellscript,content +19577,15015130,"TERMINAL",0,0,"4\t",,terminal_output +19578,15015343,"TERMINAL",0,0,"53030",,terminal_output 
+19579,15016201,"TERMINAL",0,0,"5\t",,terminal_output +19580,15016389,"TERMINAL",0,0,"611",,terminal_output +19581,15017224,"TERMINAL",0,0,"6\t",,terminal_output +19582,15017495,"TERMINAL",0,0,"722",,terminal_output +19583,15018278,"TERMINAL",0,0,"7\t",,terminal_output +19584,15018457,"TERMINAL",0,0,"833",,terminal_output +19585,15019325,"TERMINAL",0,0,"sync-runner",,terminal_command +19586,15019409,"TERMINAL",0,0,"8\t",,terminal_output +19587,15019462,"TERMINAL",0,0,"]633;Csending incremental file list\r\nslurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch\r\n\r\nsent 34,126 bytes received 222 bytes 22,898.67 bytes/sec\r\ntotal size is 128,550,061 speedup is 3,742.58\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +19588,15019558,"TERMINAL",0,0,"944",,terminal_output +19589,15020317,"TERMINAL",0,0,"30\t",,terminal_output +19590,15020541,"TERMINAL",0,0,"3055",,terminal_output +19591,15021349,"TERMINAL",0,0,"1\t",,terminal_output +19592,15021584,"TERMINAL",0,0,"166",,terminal_output +19593,15022384,"TERMINAL",0,0,"2\t",,terminal_output +19594,15022623,"TERMINAL",0,0,"277",,terminal_output +19595,15023523,"TERMINAL",0,0,"3\t",,terminal_output +19596,15023686,"TERMINAL",0,0,"388",,terminal_output +19597,15023849,"TERMINAL",0,0,"sbatch coinrun_lam_133M.sbatchsbatch coinrun_lam_311.sbatchsbatch coinrun_lam_400M.sbatchsbatch coinrun_lam_as_128.sbatchsbatch coinrun_lam_as_16.sbatchsbatch coinrun_lam_as_32.sbatchsbatch coinrun_lam_as_64.sbatchsbatch coinrun_lam_base.sbatch",,terminal_command +19598,15024049,"TERMINAL",0,0,"]633;Csbatch: error: Unable to open file coinrun_lam_133M.sbatch\r\nsbatch: error: Unable to open file coinrun_lam_311.sbatch\r\nsbatch: error: Unable to open file coinrun_lam_400M.sbatch\r\nsbatch: error: Unable to open file coinrun_lam_as_128.sbatch\r\nsbatch: error: Unable to open file coinrun_lam_as_16.sbatch\r\nsbatch: error: Unable to open file coinrun_lam_as_32.sbatch\r\nsbatch: error: Unable to open file coinrun_lam_as_64.sbatch\r\nsbatch: error: Unable to open file coinrun_lam_base.sbatch\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +19599,15024453,"TERMINAL",0,0,"4\t",,terminal_output +19600,15024709,"TERMINAL",0,0,"499",,terminal_output +19601,15025576,"TERMINAL",0,0,"5\t",,terminal_output +19602,15025738,"TERMINAL",0,0,"54040",,terminal_output +19603,15026602,"TERMINAL",0,0,"6\t",,terminal_output +19604,15026779,"TERMINAL",0,0,"611",,terminal_output +19605,15027620,"TERMINAL",0,0,"7\t",,terminal_output +19606,15027862,"TERMINAL",0,0,"722",,terminal_output +19607,15028643,"TERMINAL",0,0,"8\t",,terminal_output +19608,15028859,"TERMINAL",0,0,"833",,terminal_output +19609,15029706,"TERMINAL",0,0,"9\t",,terminal_output +19610,15029902,"TERMINAL",0,0,"944",,terminal_output +19611,15030744,"TERMINAL",0,0,"40\t",,terminal_output +19612,15030984,"TERMINAL",0,0,"4055",,terminal_output +19613,15031818,"TERMINAL",0,0,"1\t",,terminal_output +19614,15031985,"TERMINAL",0,0,"166",,terminal_output +19615,15032776,"TERMINAL",0,0,"2\t",,terminal_output +19616,15033084,"TERMINAL",0,0,"277",,terminal_output +19617,15033865,"TERMINAL",0,0,"3\t",,terminal_output +19618,15034059,"TERMINAL",0,0,"388",,terminal_output +19619,15034974,"TERMINAL",0,0,"46",,terminal_output +19620,15035113,"TERMINAL",0,0,"499",,terminal_output +19621,15036068,"TERMINAL",0,0,"5\t",,terminal_output +19622,15036184,"TERMINAL",0,0,"55050",,terminal_output +19623,15036563,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,0,"",shellscript,tab 
+19624,15036563,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",251,0,"",shellscript,selection_mouse +19625,15036701,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",250,0,"",shellscript,selection_command +19626,15037054,"TERMINAL",0,0,"6\t",,terminal_output +19627,15037199,"TERMINAL",0,0,"611",,terminal_output +19628,15037618,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",221,30,"sbatch coinrun_lam_base.sbatch",shellscript,selection_command +19629,15037815,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",189,62,"sbatch coinrun_lam_as_64.sbatch\nsbatch coinrun_lam_base.sbatch",shellscript,selection_command +19630,15037941,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",157,94,"sbatch coinrun_lam_as_32.sbatch\nsbatch coinrun_lam_as_64.sbatch\nsbatch coinrun_lam_base.sbatch",shellscript,selection_command +19631,15038099,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",125,126,"sbatch coinrun_lam_as_16.sbatch\nsbatch coinrun_lam_as_32.sbatch\nsbatch coinrun_lam_as_64.sbatch\nsbatch coinrun_lam_base.sbatch",shellscript,selection_command +19632,15038141,"TERMINAL",0,0,"7\t",,terminal_output +19633,15038259,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",92,159,"sbatch coinrun_lam_as_128.sbatch\nsbatch coinrun_lam_as_16.sbatch\nsbatch coinrun_lam_as_32.sbatch\nsbatch coinrun_lam_as_64.sbatch\nsbatch coinrun_lam_base.sbatch",shellscript,selection_command +19634,15038259,"TERMINAL",0,0,"722",,terminal_output +19635,15038398,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",61,190,"sbatch coinrun_lam_400M.sbatch\nsbatch coinrun_lam_as_128.sbatch\nsbatch coinrun_lam_as_16.sbatch\nsbatch coinrun_lam_as_32.sbatch\nsbatch coinrun_lam_as_64.sbatch\nsbatch coinrun_lam_base.sbatch",shellscript,selection_command +19636,15038502,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",31,220,"sbatch coinrun_lam_311.sbatch\nsbatch coinrun_lam_400M.sbatch\nsbatch coinrun_lam_as_128.sbatch\nsbatch coinrun_lam_as_16.sbatch\nsbatch coinrun_lam_as_32.sbatch\nsbatch coinrun_lam_as_64.sbatch\nsbatch coinrun_lam_base.sbatch",shellscript,selection_command +19637,15038628,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,251,"sbatch coinrun_lam_133M.sbatch\nsbatch coinrun_lam_311.sbatch\nsbatch coinrun_lam_400M.sbatch\nsbatch coinrun_lam_as_128.sbatch\nsbatch coinrun_lam_as_16.sbatch\nsbatch coinrun_lam_as_32.sbatch\nsbatch coinrun_lam_as_64.sbatch\nsbatch coinrun_lam_base.sbatch",shellscript,selection_command +19638,15038952,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,0,"",shellscript,selection_command +19639,15039161,"TERMINAL",0,0,"8\t",,terminal_output +19640,15039347,"TERMINAL",0,0,"833",,terminal_output +19641,15039890,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",7,0,"",shellscript,selection_command +19642,15040162,"TERMINAL",0,0,"9\t",,terminal_output +19643,15040303,"TERMINAL",0,0,"955",,terminal_output +19644,15040808,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",228,0,"slurm/jobs/mihir/horeka/lam/coinrun",shellscript,content +19645,15040808,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",196,0,"slurm/jobs/mihir/horeka/lam/coinrun",shellscript,content +19646,15040808,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",164,0,"slurm/jobs/mihir/horeka/lam/coinrun",shellscript,content +19647,15040808,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",132,0,"slurm/jobs/mihir/horeka/lam/coinrun",shellscript,content +19648,15040808,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",99,0,"slurm/jobs/mihir/horeka/lam/coinrun",shellscript,content +19649,15040808,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",68,0,"slurm/jobs/mihir/horeka/lam/coinrun",shellscript,content 
+19650,15040808,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",38,0,"slurm/jobs/mihir/horeka/lam/coinrun",shellscript,content +19651,15040808,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",7,0,"slurm/jobs/mihir/horeka/lam/coinrun",shellscript,content +19652,15041203,"TERMINAL",0,0,"50\t",,terminal_output +19653,15041342,"TERMINAL",0,0,"5166",,terminal_output +19654,15042249,"TERMINAL",0,0,"1\t",,terminal_output +19655,15042365,"TERMINAL",0,0,"277",,terminal_output +19656,15042800,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",508,0,"/",shellscript,content +19657,15042800,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",441,0,"/",shellscript,content +19658,15042800,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",374,0,"/",shellscript,content +19659,15042800,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",307,0,"/",shellscript,content +19660,15042800,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",239,0,"/",shellscript,content +19661,15042800,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",173,0,"/",shellscript,content +19662,15042800,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",108,0,"/",shellscript,content +19663,15042800,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",42,0,"/",shellscript,content +19664,15042801,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",43,0,"",shellscript,selection_keyboard +19665,15043280,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",42,0,"",shellscript,selection_command +19666,15043324,"TERMINAL",0,0,"2\t",,terminal_output +19667,15043453,"TERMINAL",0,0,"388",,terminal_output +19668,15044319,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,66,"sbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch",shellscript,selection_command +19669,15044553,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,132,"sbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch",shellscript,selection_command +19670,15044696,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,199,"sbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_400M.sbatch",shellscript,selection_command +19671,15044829,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,268,"sbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_400M.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch",shellscript,selection_command +19672,15044836,"TERMINAL",0,0,"499",,terminal_output +19673,15044836,"TERMINAL",0,0,"400",,terminal_output +19674,15044915,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,336,"sbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_400M.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch",shellscript,selection_command +19675,15045052,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,404,"sbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_400M.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch\nsbatch 
slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch",shellscript,selection_command +19676,15045185,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,472,"sbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_400M.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch",shellscript,selection_command +19677,15045355,"slurm/jobs/mihir/horeka/lam/coinrun/tmp",0,539,"sbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_400M.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch\nsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",shellscript,selection_command +19678,15045866,"TERMINAL",0,0,"51:001:00",,terminal_output +19679,15045866,"TERMINAL",0,0,"5\t",,terminal_output +19680,15046910,"TERMINAL",0,0,"6\t",,terminal_output +19681,15046910,"TERMINAL",0,0,"611",,terminal_output +19682,15048166,"TERMINAL",0,0,"7\t",,terminal_output +19683,15048167,"TERMINAL",0,0,"722",,terminal_output +19684,15049048,"TERMINAL",0,0,"8\t",,terminal_output +19685,15049048,"TERMINAL",0,0,"833",,terminal_output +19686,15050045,"TERMINAL",0,0,"9\t",,terminal_output +19687,15050046,"TERMINAL",0,0,"944",,terminal_output +19688,15050786,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatchsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatchsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_400M.sbatchsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatchsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatchsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatchsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatchsbatch slurm/jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch",,terminal_command +19689,15050955,"TERMINAL",0,0,"]633;CSubmitted batch job 3469568\r\nSubmitted batch job 3469569\r\nSubmitted batch job 3469570\r\nSubmitted batch job 3469571\r\nSubmitted batch job 3469572\r\nSubmitted batch job 3469573\r\nSubmitted batch job 3469574\r\n",,terminal_output +19690,15051024,"TERMINAL",0,0,"Submitted batch job 3469575\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +19691,15051148,"TERMINAL",0,0,"7:00\t",,terminal_output +19692,15051149,"TERMINAL",0,0,"7:009575laPD 0:00(Priority)9574PD 0:00(Priority)3469573 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469572 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469571 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469570 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469569 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)\t",,terminal_output +19693,15052211,"TERMINAL",0,0,"1\t",,terminal_output +19694,15052211,"TERMINAL",0,0,"1\t",,terminal_output +19695,15053233,"TERMINAL",0,0,"2\t",,terminal_output +19696,15053233,"TERMINAL",0,0,"2\t",,terminal_output 
+19697,15054163,"TERMINAL",0,0,"3\t",,terminal_output +19698,15054194,"TERMINAL",0,0,"3\t",,terminal_output +19699,15055201,"TERMINAL",0,0,"4\t",,terminal_output +19700,15055258,"TERMINAL",0,0,"4\t",,terminal_output +19701,15056176,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1991.localdomain: Fri Sep 5 17:37:05 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 16 nodes idle\rPartition dev_accelerated:\t 3 nodes idle\rPartition accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 6 nodes idle\rPartition accelerated-h200:\t 0 nodes idle",,terminal_output +19702,15056179,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 17:37:05 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3469360 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3469575 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469574 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469573 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469572 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469571 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469570 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469569 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469568 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)\t",,terminal_output +19703,15057228,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1991.localdomain: Fri Sep 5 17:37:06 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 16 nodes idle\rPartition dev_accelerated:\t 3 nodes idle\rPartition accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 6 nodes idle\rPartition accelerated-h200:\t 0 nodes idle",,terminal_output +19704,15057229,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 17:37:06 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3469360 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3469575 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469574 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469573 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469572 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469571 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469570 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469569 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469568 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3466286 accelerat train_to tum_cte0 R 20:21:11\t 1 hkn07363466287 accelerat train_la tum_cte0 R 20:21:11\t 1 hkn0736\t",,terminal_output +19705,15058384,"TERMINAL",0,0,"7\t",,terminal_output +19706,15058385,"TERMINAL",0,0,"722",,terminal_output +19707,15059334,"TERMINAL",0,0,"8\t",,terminal_output +19708,15059335,"TERMINAL",0,0,"844",,terminal_output +19709,15060345,"TERMINAL",0,0,"10\t",,terminal_output +19710,15060345,"TERMINAL",0,0,"1055",,terminal_output +19711,15061052,"TERMINAL",0,0,"watch",,terminal_focus +19712,15061393,"TERMINAL",0,0,"1\t",,terminal_output +19713,15061393,"TERMINAL",0,0,"166",,terminal_output +19714,15063760,"TERMINAL",0,0,"bash",,terminal_focus +19715,15064177,"TERMINAL",0,0,"288",,terminal_output +19716,15064178,"TERMINAL",0,0,"24",,terminal_output +19717,15065129,"TERMINAL",0,0,"4\t",,terminal_output +19718,15065129,"TERMINAL",0,0,"41245687099",,terminal_output +19719,15066169,"TERMINAL",0,0,"5\t",,terminal_output +19720,15066169,"TERMINAL",0,0,"52020",,terminal_output 
+19721,15067201,"TERMINAL",0,0,"6\t",,terminal_output +19722,15067202,"TERMINAL",0,0,"611",,terminal_output +19723,15068300,"TERMINAL",0,0,"7\t",,terminal_output +19724,15068300,"TERMINAL",0,0,"722",,terminal_output +19725,15068429,"TERMINAL",0,0,"dev",,terminal_command +19726,15068544,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +19727,15069276,"TERMINAL",0,0,"844",,terminal_output +19728,15069285,"TERMINAL",0,0,"8\t",,terminal_output +19729,15070308,"TERMINAL",0,0,"2055",,terminal_output +19730,15070354,"TERMINAL",0,0,"20\t",,terminal_output +19731,15070412,"TERMINAL",0,0,"salloc_node",,terminal_command +19732,15070479,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 3469576\r\n",,terminal_output +19733,15070599,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +19734,15071449,"TERMINAL",0,0,"1663469576 dev_accel interact tum_cte0 R\t0:01\t 1 hkn0401\t",,terminal_output +19735,15071456,"TERMINAL",0,0,"12",,terminal_output +19736,15072382,"TERMINAL",0,0,"2772\t\t\t",,terminal_output +19737,15072394,"TERMINAL",0,0,"2\t",,terminal_output +19738,15073437,"TERMINAL",0,0,"3883\t\t\t",,terminal_output +19739,15073438,"TERMINAL",0,0,"3\t",,terminal_output +19740,15074464,"TERMINAL",0,0,"4\t",,terminal_output +19741,15074465,"TERMINAL",0,0,"4994\t\t\t",,terminal_output +19742,15075690,"TERMINAL",0,0,"5\t",,terminal_output +19743,15075691,"TERMINAL",0,0,"530305\t\t\t",,terminal_output +19744,15076546,"TERMINAL",0,0,"6\t",,terminal_output +19745,15076546,"TERMINAL",0,0,"6116\t\t\t",,terminal_output +19746,15077712,"TERMINAL",0,0,"7\t",,terminal_output +19747,15077717,"TERMINAL",0,0,"7227\t\t\t",,terminal_output +19748,15078625,"TERMINAL",0,0,"8\t",,terminal_output +19749,15078637,"TERMINAL",0,0,"8338\t\t\t",,terminal_output +19750,15079673,"TERMINAL",0,0,"9\t",,terminal_output +19751,15079674,"TERMINAL",0,0,"9449\t\t\t",,terminal_output +19752,15080003,"TERMINAL",0,0,"watch",,terminal_focus +19753,15080822,"TERMINAL",0,0,"30\t",,terminal_output +19754,15080823,"TERMINAL",0,0,"305510\t\t\t",,terminal_output +19755,15081789,"TERMINAL",0,0,"1\t",,terminal_output +19756,15081794,"TERMINAL",0,0,"1661\t\t\t",,terminal_output +19757,15082496,"TERMINAL",0,0,"salloc",,terminal_focus +19758,15082774,"TERMINAL",0,0,"2\t",,terminal_output +19759,15082803,"TERMINAL",0,0,"2772\t\t\t",,terminal_output +19760,15083841,"TERMINAL",0,0,"s",,terminal_output +19761,15083841,"TERMINAL",0,0,"3\t",,terminal_output +19762,15083861,"TERMINAL",0,0,"3883\t\t\t",,terminal_output +19763,15083934,"TERMINAL",0,0,"o",,terminal_output +19764,15084034,"TERMINAL",0,0,"u",,terminal_output +19765,15084117,"TERMINAL",0,0,"r",,terminal_output +19766,15084326,"TERMINAL",0,0,"c",,terminal_output +19767,15084471,"TERMINAL",0,0,"e",,terminal_output +19768,15084683,"TERMINAL",0,0," ",,terminal_output +19769,15084826,"TERMINAL",0,0,".",,terminal_output +19770,15084841,"TERMINAL",0,0,"4\t",,terminal_output +19771,15084924,"TERMINAL",0,0,"4994\t\t\t",,terminal_output +19772,15085023,"TERMINAL",0,0,"v",,terminal_output +19773,15085254,"TERMINAL",0,0,"e",,terminal_output +19774,15085358,"TERMINAL",0,0,"n",,terminal_output +19775,15085496,"TERMINAL",0,0,"v",,terminal_output +19776,15085887,"TERMINAL",0,0,"5\t",,terminal_output +19777,15085932,"TERMINAL",0,0,"540405\t\t\t",,terminal_output +19778,15086148,"TERMINAL",0,0,"/",,terminal_output +19779,15086438,"TERMINAL",0,0,"b",,terminal_output +19780,15086551,"TERMINAL",0,0,"i",,terminal_output 
+19781,15086689,"TERMINAL",0,0,"n",,terminal_output +19782,15086933,"TERMINAL",0,0,"6\t",,terminal_output +19783,15086979,"TERMINAL",0,0,"6116\t\t\t",,terminal_output +19784,15087225,"TERMINAL",0,0,"/",,terminal_output +19785,15087375,"TERMINAL",0,0,"a",,terminal_output +19786,15087485,"TERMINAL",0,0,"c",,terminal_output +19787,15087692,"TERMINAL",0,0,"t",,terminal_output +19788,15087875,"TERMINAL",0,0,"i",,terminal_output +19789,15087963,"TERMINAL",0,0,"7\t",,terminal_output +19790,15088047,"TERMINAL",0,0,"v",,terminal_output +19791,15088086,"TERMINAL",0,0,"7227\t\t\t",,terminal_output +19792,15088137,"TERMINAL",0,0,"a",,terminal_output +19793,15088219,"TERMINAL",0,0,"t",,terminal_output +19794,15088273,"TERMINAL",0,0,"e",,terminal_output +19795,15088513,"TERMINAL",0,0,"\r\n",,terminal_output +19796,15089023,"TERMINAL",0,0,"8\t",,terminal_output +19797,15089073,"TERMINAL",0,0,"8338\t\t\t",,terminal_output +19798,15089787,"TERMINAL",0,0,"(jasmine) [tum_cte0515@hkn1991 jasmine_jobs]$ dev\r\n(jasmine) [tum_cte0515@hkn1991 jasmine]$ salloc_node",,terminal_output +19799,15090102,"TERMINAL",0,0,"9\t",,terminal_output +19800,15090115,"TERMINAL",0,0,"9449\t\t\t",,terminal_output +19801,15091108,"TERMINAL",0,0,"40\t",,terminal_output +19802,15091162,"TERMINAL",0,0,"405520\t\t\t",,terminal_output +19803,15092155,"TERMINAL",0,0,"1\t",,terminal_output +19804,15092209,"TERMINAL",0,0,"1661\t\t\t",,terminal_output +19805,15093191,"TERMINAL",0,0,"2\t",,terminal_output +19806,15093245,"TERMINAL",0,0,"2772\t\t\t",,terminal_output +19807,15094282,"TERMINAL",0,0,"3\t",,terminal_output +19808,15094297,"TERMINAL",0,0,"3994\t\t\t",,terminal_output +19809,15095307,"TERMINAL",0,0,"4\t",,terminal_output +19810,15095335,"TERMINAL",0,0,"550505\t\t\t",,terminal_output +19811,15095941,"TERMINAL",0,0," ",,terminal_output +19812,15096131,"TERMINAL",0,0," ",,terminal_output +19813,15096241,"TERMINAL",0,0," ",,terminal_output +19814,15096459,"TERMINAL",0,0,"6\t",,terminal_output +19815,15096584,"TERMINAL",0,0,"6116\t\t\t",,terminal_output +19816,15096584,"TERMINAL",0,0,"  ",,terminal_output +19817,15096728,"TERMINAL",0,0," ",,terminal_output +19818,15096831,"TERMINAL",0,0," ",,terminal_output +19819,15097254,"TERMINAL",0,0,"    ",,terminal_output +19820,15097391,"TERMINAL",0,0,"7\t",,terminal_output +19821,15097434,"TERMINAL",0,0,"7227\t\t\t",,terminal_output +19822,15097589,"TERMINAL",0,0,"salloc: Nodes hkn0401 are ready for job\r\n",,terminal_output +19823,15097749,"TERMINAL",0,0,"source .venv/bin/activate\r\n(jasmine) [tum_cte0515@hkn1991 jasmine_jobs]$ dev\r\n(jasmine) [tum_cte0515@hkn1991 jasmine]$ ",,terminal_output +19824,15098456,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h[tum_cte0515@hkn0401 jasmine]$ source .venv/bin/activate\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ (jasmine) [tum_cte0515@hkn1991 jasmine_jobs]$ dev\r\n[?2004l\rbash: syntax error near unexpected token `[tum_cte0515@hkn1991'\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ (jasmine) [tum_cte0515@hkn1991 jasmine]$ ",,terminal_output +19825,15098484,"TERMINAL",0,0,"8\t",,terminal_output +19826,15098484,"TERMINAL",0,0,"8338\t\t\t",,terminal_output +19827,15099460,"TERMINAL",0,0,"9\t",,terminal_output +19828,15099518,"TERMINAL",0,0,"9449\t\t\t",,terminal_output +19829,15100528,"TERMINAL",0,0,"50\t",,terminal_output +19830,15100559,"TERMINAL",0,0,"505530\t\t\t",,terminal_output +19831,15101526,"TERMINAL",0,0,"1\t",,terminal_output 
+19832,15101622,"TERMINAL",0,0,"1661\t\t\t",,terminal_output +19833,15102679,"TERMINAL",0,0,"2\t",,terminal_output +19834,15102680,"TERMINAL",0,0,"2772\t\t\t",,terminal_output +19835,15103703,"TERMINAL",0,0,"3\t",,terminal_output +19836,15103703,"TERMINAL",0,0,"3883\t\t\t",,terminal_output +19837,15104082,"TERMINAL",0,0,"l",,terminal_output +19838,15104135,"TERMINAL",0,0,"s",,terminal_output +19839,15104318,"TERMINAL",0,0,"\r\n[?2004l\rbash: syntax error near unexpected token `[tum_cte0515@hkn1991'\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +19840,15104735,"TERMINAL",0,0,"4\t",,terminal_output +19841,15104776,"TERMINAL",0,0,"4994\t\t\t",,terminal_output +19842,15105679,"TERMINAL",0,0,"5\t",,terminal_output +19843,15105775,"TERMINAL",0,0,"52:002:005\t\t\t",,terminal_output +19844,15106407,"TERMINAL",0,0,"s",,terminal_output +19845,15106488,"TERMINAL",0,0,"m",,terminal_output +19846,15106585,"TERMINAL",0,0,"i",,terminal_output +19847,15106728,"TERMINAL",0,0,"65",,terminal_output +19848,15106829,"TERMINAL",0,0,"6116\t\t\t",,terminal_output +19849,15107248,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: nvidia-smihkn0401.localdomain: Fri Sep 5 17:37:56 2025Fri Sep 5 17:37:56 2025\r+-----------------------------------------------------------------------------------------+\r| NVIDIA-SMI 570.133.20Driver Version: 570.133.20 CUDA Version: 12.8|\r|-----------------------------------------+------------------------+----------------------+\r| GPU NamePersistence-M | Bus-IdDisp.A | Volatile Uncorr. ECC |\r| Fan Temp PerfPwr:Usage/Cap |Memory-Usage | GPU-Util Compute M. |\r|||MIG M. |\r|=========================================+========================+======================|\r| 0 NVIDIA A100-SXM4-40GBOn | 00000000:31:00.0 Off |\r0 |\r| N/A 44C P052W / 300W |\t 30MiB / 40960MiB |\t 0%\t Default |\r|||\t\t Disabled |\r+-----------------------------------------+------------------------+----------------------+\r+-----------------------------------------------------------------------------------------+",,terminal_output +19850,15107801,"TERMINAL",0,0,"7\t",,terminal_output +19851,15107862,"TERMINAL",0,0,"7227\t\t\t",,terminal_output +19852,15108311,"TERMINAL",0,0,"773",,terminal_output +19853,15108824,"TERMINAL",0,0,"8\t",,terminal_output +19854,15108918,"TERMINAL",0,0,"8338\t\t\t",,terminal_output +19855,15109307,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +19856,15109841,"TERMINAL",0,0,"9\t",,terminal_output +19857,15109963,"TERMINAL",0,0,"9449\t\t\t",,terminal_output +19858,15110384,"TERMINAL",0,0,"s",,terminal_output +19859,15110618,"TERMINAL",0,0,"c",,terminal_output +19860,15110714,"TERMINAL",0,0,"a",,terminal_output +19861,15110834,"TERMINAL",0,0,"n",,terminal_output +19862,15110925,"TERMINAL",0,0,"8:00\t",,terminal_output +19863,15110964,"TERMINAL",0,0,"c",,terminal_output +19864,15111013,"TERMINAL",0,0,"8:005540\t\t\t",,terminal_output +19865,15111042,"TERMINAL",0,0,"e",,terminal_output +19866,15111097,"TERMINAL",0,0,"l",,terminal_output +19867,15111193,"TERMINAL",0,0," ",,terminal_output +19868,15111498,"TERMINAL",0,0,"3469360",,terminal_output +19869,15111960,"TERMINAL",0,0,"1\t",,terminal_output +19870,15111992,"TERMINAL",0,0,"3469360\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +19871,15112025,"TERMINAL",0,0,"\r1661",,terminal_output 
+19872,15112949,"TERMINAL",0,0,"2\t",,terminal_output +19873,15113099,"TERMINAL",0,0,"2772",,terminal_output +19874,15114122,"TERMINAL",0,0,"3\t",,terminal_output +19875,15114215,"TERMINAL",0,0,"3883",,terminal_output +19876,15115079,"TERMINAL",0,0,"4\t",,terminal_output +19877,15115240,"TERMINAL",0,0,"4994",,terminal_output +19878,15116102,"TERMINAL",0,0,"5\t",,terminal_output +19879,15116265,"TERMINAL",0,0,"510105",,terminal_output +19880,15117202,"TERMINAL",0,0,"6\t",,terminal_output +19881,15117231,"TERMINAL",0,0,"6116",,terminal_output +19882,15118260,"TERMINAL",0,0,"7\t",,terminal_output +19883,15118272,"TERMINAL",0,0,"7227",,terminal_output +19884,15119373,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=00:20:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\n#SBATCH --job-name=train_dyn_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/interactive/3468835\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/interactive/3468835\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=10 \\n --log_checkpoint_interval=100 \\n --log \\n --name=coinrun-dyn-dev-$slurm_job_id \\n --tags dyn coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 200 \\n --val_data_dir $array_records_dir_val \\n --val_interval 10 \\n --val_steps 50 \\n --data_dir $array_records_dir_train \\n --lam_checkpoint $lam_checkpoint \\n --tokenizer_checkpoint $tokenizer_checkpoint\n",shellscript,tab +19885,15119547,"TERMINAL",0,0,"8\t",,terminal_output +19886,15119599,"TERMINAL",0,0,"8449",,terminal_output +19887,15120497,"TERMINAL",0,0,"9\t",,terminal_output +19888,15120497,"TERMINAL",0,0,"105550",,terminal_output +19889,15121294,"TERMINAL",0,0,"10\t",,terminal_output +19890,15121392,"TERMINAL",0,0,"1661",,terminal_output +19891,15122321,"TERMINAL",0,0,"1\t",,terminal_output +19892,15123102,"TERMINAL",0,0,"2772",,terminal_output +19893,15123334,"TERMINAL",0,0,"3\t",,terminal_output +19894,15123486,"TERMINAL",0,0,"3883",,terminal_output +19895,15124383,"TERMINAL",0,0,"4\t",,terminal_output +19896,15124525,"TERMINAL",0,0,"4994",,terminal_output +19897,15125407,"TERMINAL",0,0,"5\t",,terminal_output +19898,15125559,"TERMINAL",0,0,"520205",,terminal_output 
+19899,15126538,"TERMINAL",0,0,"6\t",,terminal_output +19900,15126600,"TERMINAL",0,0,"6116",,terminal_output +19901,15127563,"TERMINAL",0,0,"7\t",,terminal_output +19902,15127648,"TERMINAL",0,0,"7227",,terminal_output +19903,15128577,"TERMINAL",0,0,"8\t",,terminal_output +19904,15128711,"TERMINAL",0,0,"8338",,terminal_output +19905,15129610,"TERMINAL",0,0,"9\t",,terminal_output +19906,15129792,"TERMINAL",0,0,"9449",,terminal_output +19907,15130709,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1832,0,"",shellscript,selection_mouse +19908,15130710,"TERMINAL",0,0,"2032",,terminal_output +19909,15130800,"TERMINAL",0,0,"20551:00",,terminal_output +19910,15131677,"TERMINAL",0,0,"16",,terminal_output +19911,15131815,"TERMINAL",0,0,"1661",,terminal_output +19912,15132490,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1920,0,"",shellscript,selection_mouse +19913,15132670,"TERMINAL",0,0,"2\t",,terminal_output +19914,15132851,"TERMINAL",0,0,"2772",,terminal_output +19915,15133716,"TERMINAL",0,0,"3\t",,terminal_output +19916,15133921,"TERMINAL",0,0,"3883",,terminal_output +19917,15134755,"TERMINAL",0,0,"4\t",,terminal_output +19918,15134948,"TERMINAL",0,0,"4994",,terminal_output +19919,15135390,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1894,0,"",shellscript,selection_mouse +19920,15135429,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1893,0,"",shellscript,selection_command +19921,15135780,"TERMINAL",0,0,"51",,terminal_output +19922,15135948,"TERMINAL",0,0,"530305",,terminal_output +19923,15136199,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1894,0,"\n ",shellscript,content +19924,15136413,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1899,0,"-",shellscript,content +19925,15136414,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1900,0,"",shellscript,selection_keyboard +19926,15136730,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1900,0,"-",shellscript,content +19927,15136731,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1901,0,"",shellscript,selection_keyboard +19928,15136834,"TERMINAL",0,0,"6\t",,terminal_output +19929,15136926,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1901,0,"w",shellscript,content +19930,15136927,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1902,0,"",shellscript,selection_keyboard +19931,15137041,"TERMINAL",0,0,"6116",,terminal_output +19932,15137130,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1902,0,"a",shellscript,content +19933,15137131,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1903,0,"",shellscript,selection_keyboard +19934,15137221,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1903,0,"n",shellscript,content +19935,15137221,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1904,0,"",shellscript,selection_keyboard +19936,15137365,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1904,0,"d",shellscript,content +19937,15137366,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1905,0,"",shellscript,selection_keyboard +19938,15137433,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1905,0,"b",shellscript,content +19939,15137434,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1906,0,"",shellscript,selection_keyboard +19940,15137876,"TERMINAL",0,0,"7\t",,terminal_output +19941,15138023,"TERMINAL",0,0,"7227",,terminal_output +19942,15138889,"TERMINAL",0,0,"8\t",,terminal_output +19943,15139064,"TERMINAL",0,0,"8338",,terminal_output 
+19944,15139935,"TERMINAL",0,0,"9\t",,terminal_output +19945,15140137,"TERMINAL",0,0,"9449",,terminal_output +19946,15140647,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1906,0,"_",shellscript,content +19947,15140648,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1907,0,"",shellscript,selection_keyboard +19948,15140944,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1907,0,"i",shellscript,content +19949,15140945,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1908,0,"",shellscript,selection_keyboard +19950,15140985,"TERMINAL",0,0,"307",,terminal_output +19951,15141068,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1908,0,"d",shellscript,content +19952,15141068,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1909,0,"",shellscript,selection_keyboard +19953,15141136,"TERMINAL",0,0,"305510",,terminal_output +19954,15141656,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1909,0,"=",shellscript,content +19955,15141656,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1910,0,"",shellscript,selection_keyboard +19956,15142029,"TERMINAL",0,0,"1\t",,terminal_output +19957,15142183,"TERMINAL",0,0,"1661",,terminal_output +19958,15143091,"TERMINAL",0,0,"23",,terminal_output +19959,15143232,"TERMINAL",0,0,"2772",,terminal_output +19960,15144056,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1910,0,"§",shellscript,content +19961,15144059,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1911,0,"",shellscript,selection_keyboard +19962,15144118,"TERMINAL",0,0,"3\t",,terminal_output +19963,15144294,"TERMINAL",0,0,"3883",,terminal_output +19964,15144729,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1910,1,"",shellscript,content +19965,15145130,"TERMINAL",0,0,"4\t",,terminal_output +19966,15145317,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1910,0,"$",shellscript,content +19967,15145318,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1911,0,"",shellscript,selection_keyboard +19968,15145358,"TERMINAL",0,0,"440405",,terminal_output +19969,15146183,"TERMINAL",0,0,"5\t",,terminal_output +19970,15146356,"TERMINAL",0,0,"6116",,terminal_output +19971,15146595,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1911,0," ",shellscript,content +19972,15146596,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1912,0,"",shellscript,selection_keyboard +19973,15146808,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1912,0,"\",shellscript,content +19974,15146809,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1913,0,"",shellscript,selection_keyboard +19975,15147032,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1912,0,"",shellscript,selection_command +19976,15147199,"TERMINAL",0,0,"6\t",,terminal_output +19977,15147260,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1911,0,"",shellscript,selection_command +19978,15147397,"TERMINAL",0,0,"7227",,terminal_output +19979,15148267,"TERMINAL",0,0,"7\t",,terminal_output +19980,15148434,"TERMINAL",0,0,"8338",,terminal_output +19981,15149085,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1911,0,"s",shellscript,content +19982,15149086,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1912,0,"",shellscript,selection_keyboard +19983,15149143,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1912,0,"l",shellscript,content +19984,15149144,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1913,0,"",shellscript,selection_keyboard 
+19985,15149277,"TERMINAL",0,0,"8\t",,terminal_output +19986,15149484,"TERMINAL",0,0,"9449",,terminal_output +19987,15150355,"TERMINAL",0,0,"40\t",,terminal_output +19988,15150567,"TERMINAL",0,0,"405520",,terminal_output +19989,15151128,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1911,2,"slurm_job_id",shellscript,content +19990,15151341,"TERMINAL",0,0,"1\t",,terminal_output +19991,15151546,"TERMINAL",0,0,"1661",,terminal_output +19992,15152422,"TERMINAL",0,0,"2\t",,terminal_output +19993,15152600,"TERMINAL",0,0,"2772",,terminal_output +19994,15153470,"TERMINAL",0,0,"3\t",,terminal_output +19995,15153620,"TERMINAL",0,0,"3883",,terminal_output +19996,15154450,"TERMINAL",0,0,"4\t",,terminal_output +19997,15154659,"TERMINAL",0,0,"4994",,terminal_output +19998,15155501,"TERMINAL",0,0,"5\t",,terminal_output +19999,15155703,"TERMINAL",0,0,"550505",,terminal_output +20000,15156552,"TERMINAL",0,0,"6\t",,terminal_output +20001,15156746,"TERMINAL",0,0,"6116",,terminal_output +20002,15157560,"TERMINAL",0,0,"7\t",,terminal_output +20003,15157793,"TERMINAL",0,0,"7227",,terminal_output +20004,15158706,"TERMINAL",0,0,"88",,terminal_output +20005,15158849,"TERMINAL",0,0,"8338",,terminal_output +20006,15159658,"TERMINAL",0,0,"9\t",,terminal_output +20007,15159935,"TERMINAL",0,0,"9449",,terminal_output +20008,15160685,"TERMINAL",0,0,"s",,terminal_output +20009,15160717,"TERMINAL",0,0,"50\t",,terminal_output +20010,15160763,"TERMINAL",0,0,"h",,terminal_output +20011,15160852,"TERMINAL",0,0," ",,terminal_output +20012,15160945,"TERMINAL",0,0,"505530",,terminal_output +20013,15161194,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",,terminal_output +20014,15161726,"TERMINAL",0,0,"1\t",,terminal_output +20015,15162002,"TERMINAL",0,0,"1661",,terminal_output +20016,15162812,"TERMINAL",0,0,"2\t",,terminal_output +20017,15162999,"TERMINAL",0,0,"2772",,terminal_output +20018,15163274,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\r\n#SBATCH --job-name=train_dyn_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/interactive/3468835\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/interactive/3468835\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n 
--image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=100 \\r\n --log \\r\n --name=coinrun-dyn-dev-$slurm_job_id \\r\n --tags dyn coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 200 \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 10 \\r\n --val_steps 50 \\r\n --wandb_id=$slurm_job_id \\r\n --data_dir $array_records_dir_train \\r\n --lam_checkpoint $lam_checkpoint \\r\n --tokenizer_checkpoint $tokenizer_checkpoint\r\n",,terminal_output +20019,15163450,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3210454\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1757086640\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757090240\r\nSLURM_PMI2_SRUN_PORT=41191\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3469576\r\nSLURM_PTY_PORT=43733\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=29\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=88\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=34641\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3469576\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=34641\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +20020,15163586,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +20021,15163824,"TERMINAL",0,0,"3\t",,terminal_output +20022,15164044,"TERMINAL",0,0,"3883",,terminal_output +20023,15164847,"TERMINAL",0,0,"4\t",,terminal_output +20024,15165059,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +20025,15165282,"TERMINAL",0,0,"4994",,terminal_output +20026,15165880,"TERMINAL",0,0,"50",,terminal_output +20027,15166125,"TERMINAL",0,0,"53:003:005",,terminal_output +20028,15166964,"TERMINAL",0,0,"6\t",,terminal_output +20029,15167162,"TERMINAL",0,0,"6116",,terminal_output +20030,15167955,"TERMINAL",0,0,"7\t",,terminal_output +20031,15168215,"TERMINAL",0,0,"7227",,terminal_output +20032,15169036,"TERMINAL",0,0,"8\t",,terminal_output +20033,15169244,"TERMINAL",0,0,"8338",,terminal_output +20034,15170166,"TERMINAL",0,0,"9\t",,terminal_output +20035,15170311,"TERMINAL",0,0,"95540",,terminal_output +20036,15171085,"TERMINAL",0,0,"9:00\t",,terminal_output +20037,15171392,"TERMINAL",0,0,"9:01661",,terminal_output +20038,15172215,"TERMINAL",0,0,"1\t",,terminal_output +20039,15172434,"TERMINAL",0,0,"2772",,terminal_output 
+20040,15173188,"TERMINAL",0,0,"2\t",,terminal_output +20041,15173438,"TERMINAL",0,0,"3883",,terminal_output +20042,15174200,"TERMINAL",0,0,"3\t",,terminal_output +20043,15174463,"TERMINAL",0,0,"4994",,terminal_output +20044,15175275,"TERMINAL",0,0,"4\t",,terminal_output +20045,15175507,"TERMINAL",0,0,"510105",,terminal_output +20046,15176318,"TERMINAL",0,0,"5\t",,terminal_output +20047,15176538,"TERMINAL",0,0,"6116",,terminal_output +20048,15177317,"TERMINAL",0,0,"7\t",,terminal_output +20049,15177592,"TERMINAL",0,0,"7227",,terminal_output +20050,15178465,"TERMINAL",0,0,"8\t",,terminal_output +20051,15178639,"TERMINAL",0,0,"8338",,terminal_output +20052,15179398,"TERMINAL",0,0,"9\t",,terminal_output +20053,15179731,"TERMINAL",0,0,"9449",,terminal_output +20054,15180433,"TERMINAL",0,0,"10\t",,terminal_output +20055,15180717,"TERMINAL",0,0,"105550",,terminal_output +20056,15181499,"TERMINAL",0,0,"1\t",,terminal_output +20057,15181786,"TERMINAL",0,0,"1661",,terminal_output +20058,15182504,"TERMINAL",0,0,"2\t",,terminal_output +20059,15182796,"TERMINAL",0,0,"2772",,terminal_output +20060,15183575,"TERMINAL",0,0,"3\t",,terminal_output +20061,15183884,"TERMINAL",0,0,"3883",,terminal_output +20062,15184586,"TERMINAL",0,0,"4\t",,terminal_output +20063,15186804,"TERMINAL",0,0,"45421706821216",,terminal_output +20064,15186804,"TERMINAL",0,0,"542",,terminal_output +20065,15187740,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +20066,15187876,"TERMINAL",0,0,"723456971227",,terminal_output +20067,15187877,"TERMINAL",0,0,"71",,terminal_output +20068,15188700,"TERMINAL",0,0,"wandb: creating run\r\n",,terminal_output +20069,15188903,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_173917-3469576\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-dyn-dev-3469576\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/3469576\r\n",,terminal_output +20070,15188904,"TERMINAL",0,0,"8338",,terminal_output +20071,15188915,"TERMINAL",0,0,"8\t",,terminal_output +20072,15189960,"TERMINAL",0,0,"9449",,terminal_output +20073,15189960,"TERMINAL",0,0,"9\t",,terminal_output +20074,15190975,"TERMINAL",0,0,"20552:00",,terminal_output +20075,15190976,"TERMINAL",0,0,"20\t",,terminal_output +20076,15192503,"TERMINAL",0,0,"1661",,terminal_output +20077,15192504,"TERMINAL",0,0,"1\t",,terminal_output +20078,15192664,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1991.localdomain: Fri Sep 5 17:39:22 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 14 nodes idle\rPartition dev_accelerated:\t 2 nodes idle\rPartition accelerated: 32 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 6 nodes idle\rPartition accelerated-h200:\t 1 nodes idle",,terminal_output +20079,15192664,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 17:39:22 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3469572 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469573 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469574 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469575 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469569 accelerat train_la tum_cte0 PD\t0:00\t 1 
(Priority)3469570 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469571 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469568 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3466286 accelerat train_to tum_cte0 R 20:23:27\t 1 hkn07363466287 accelerat train_la tum_cte0 R 20:23:27\t 1 hkn07363469576 dev_accel interact tum_cte0 R\t2:02\t 1 hkn0401\t",,terminal_output +20080,15193733,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1991.localdomain: Fri Sep 5 17:39:23 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 14 nodes idle\rPartition dev_accelerated:\t 2 nodes idle\rPartition accelerated: 32 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 6 nodes idle\rPartition accelerated-h200:\t 1 nodes idle",,terminal_output +20081,15193734,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 17:39:23 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3469572 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469573 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469574 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469575 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469569 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469570 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469571 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469568 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3466286 accelerat train_to tum_cte0 R 20:23:28\t 1 hkn07363466287 accelerat train_la tum_cte0 R 20:23:28\t 1 hkn07363469576 dev_accel interact tum_cte0 R\t2:03\t 1 hkn0401\t",,terminal_output +20082,15194293,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1991.localdomain: Fri Sep 5 17:39:23 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 14 nodes idle\rPartition dev_accelerated:\t 2 nodes idle\rPartition accelerated: 32 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 6 nodes idle\rPartition accelerated-h200:\t 1 nodes idle",,terminal_output +20083,15194293,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 17:39:23 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3469572 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469573 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469574 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469575 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469569 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469570 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469571 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469568 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3466286 accelerat train_to tum_cte0 R 20:23:29\t 1 hkn07363466287 accelerat train_la tum_cte0 R 20:23:29\t 1 hkn07363469576 dev_accel interact tum_cte0 R\t2:04\t 1 hkn0401\t",,terminal_output +20084,15194700,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1991.localdomain: Fri Sep 5 17:39:24 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 14 nodes idle\rPartition dev_accelerated:\t 2 nodes idle\rPartition accelerated: 32 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 6 nodes idle\rPartition accelerated-h200:\t 1 nodes idle",,terminal_output +20085,15194708,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 17:39:24 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3469572 accelerat train_la tum_cte0 PD\t0:00\t 1 
(Priority)3469573 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469574 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469575 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469569 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469570 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469571 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469568 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3466286 accelerat train_to tum_cte0 R 20:23:29\t 1 hkn07363466287 accelerat train_la tum_cte0 R 20:23:29\t 1 hkn07363469576 dev_accel interact tum_cte0 R\t2:04\t 1 hkn0401\t",,terminal_output +20086,15194994,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +20087,15195565,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1991.localdomain: Fri Sep 5 17:39:25 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3469572 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469573 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469574 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469575 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469569 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469570 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469571 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3469568 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3466286 accelerat train_to tum_cte0 R 20:23:30\t 1 hkn07363466287 accelerat train_la tum_cte0 R 20:23:30\t 1 hkn07363469576 dev_accel interact tum_cte0 R\t2:05\t 1 hkn0401\t",,terminal_output +20088,15195566,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1991.localdomain: Fri Sep 5 17:39:25 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 14 nodes idle\rPartition dev_accelerated:\t 2 nodes idle\rPartition accelerated: 32 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 6 nodes idle\rPartition accelerated-h200:\t 1 nodes idle",,terminal_output +20089,15196489,"TERMINAL",0,0,"watch",,terminal_focus +20090,15196580,"TERMINAL",0,0,"6\t",,terminal_output +20091,15196581,"TERMINAL",0,0,"6116",,terminal_output +20092,15197595,"TERMINAL",0,0,"7\t",,terminal_output +20093,15197598,"TERMINAL",0,0,"7227",,terminal_output +20094,15198732,"TERMINAL",0,0,"8\t",,terminal_output +20095,15198732,"TERMINAL",0,0,"8338",,terminal_output +20096,15199728,"TERMINAL",0,0,"9\t",,terminal_output +20097,15199728,"TERMINAL",0,0,"9449",,terminal_output +20098,15200716,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35115232, 'tokenizer': 33750256, 'total': 95421392}\r\nStarting training from step 0...\r\n",,terminal_output +20099,15200759,"TERMINAL",0,0,"30\t",,terminal_output +20100,15200760,"TERMINAL",0,0,"305510",,terminal_output +20101,15201759,"TERMINAL",0,0,"1\t",,terminal_output +20102,15201764,"TERMINAL",0,0,"1661",,terminal_output +20103,15202800,"TERMINAL",0,0,"2\t",,terminal_output +20104,15202841,"TERMINAL",0,0,"2772",,terminal_output 
+20105,15203851,"TERMINAL",0,0,"3\t",,terminal_output +20106,15203892,"TERMINAL",0,0,"3883",,terminal_output +20107,15204883,"TERMINAL",0,0,"4\t",,terminal_output +20108,15204890,"TERMINAL",0,0,"4994",,terminal_output +20109,15205926,"TERMINAL",0,0,"5\t",,terminal_output +20110,15205934,"TERMINAL",0,0,"540405",,terminal_output +20111,15206968,"TERMINAL",0,0,"6\t",,terminal_output +20112,15206975,"TERMINAL",0,0,"6116",,terminal_output +20113,15208049,"TERMINAL",0,0,"7\t",,terminal_output +20114,15208051,"TERMINAL",0,0,"7227",,terminal_output +20115,15209049,"TERMINAL",0,0,"8\t",,terminal_output +20116,15209093,"TERMINAL",0,0,"8338",,terminal_output +20117,15210097,"TERMINAL",0,0,"9\t",,terminal_output +20118,15210127,"TERMINAL",0,0,"9449",,terminal_output +20119,15211236,"TERMINAL",0,0,"40\t",,terminal_output +20120,15211236,"TERMINAL",0,0,"405520",,terminal_output +20121,15212258,"TERMINAL",0,0,"1\t",,terminal_output +20122,15212264,"TERMINAL",0,0,"1661",,terminal_output +20123,15213182,"TERMINAL",0,0,"2025-09-05 17:39:42.857154: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 17:39:42.857574: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 17:39:42.858131: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 17:39:42.858656: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 17:39:42.859902: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-09-05 17:39:42.861225: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +20124,15213214,"TERMINAL",0,0,"2772",,terminal_output +20125,15213214,"TERMINAL",0,0,"2\t",,terminal_output +20126,15214249,"TERMINAL",0,0,"3\t",,terminal_output +20127,15214295,"TERMINAL",0,0,"3883",,terminal_output +20128,15215325,"TERMINAL",0,0,"4\t",,terminal_output +20129,15215325,"TERMINAL",0,0,"450505",,terminal_output +20130,15216345,"TERMINAL",0,0,"6\t",,terminal_output +20131,15216345,"TERMINAL",0,0,"6116",,terminal_output +20132,15217471,"TERMINAL",0,0,"7\t",,terminal_output +20133,15217471,"TERMINAL",0,0,"7227",,terminal_output +20134,15218496,"TERMINAL",0,0,"8\t",,terminal_output +20135,15218497,"TERMINAL",0,0,"8338",,terminal_output +20136,15219437,"TERMINAL",0,0,"9\t",,terminal_output +20137,15219444,"TERMINAL",0,0,"9449",,terminal_output +20138,15220474,"TERMINAL",0,0,"50\t",,terminal_output +20139,15220494,"TERMINAL",0,0,"505530",,terminal_output +20140,15221566,"TERMINAL",0,0,"1\t",,terminal_output +20141,15221567,"TERMINAL",0,0,"1661",,terminal_output +20142,15222630,"TERMINAL",0,0,"2\t",,terminal_output +20143,15222631,"TERMINAL",0,0,"2772",,terminal_output +20144,15223614,"TERMINAL",0,0,"3\t",,terminal_output +20145,15223615,"TERMINAL",0,0,"3883",,terminal_output +20146,15224741,"TERMINAL",0,0,"40",,terminal_output +20147,15224741,"TERMINAL",0,0,"4994",,terminal_output +20148,15225777,"TERMINAL",0,0,"5\t",,terminal_output +20149,15225777,"TERMINAL",0,0,"54:004:005",,terminal_output +20150,15226788,"TERMINAL",0,0,"6\t",,terminal_output +20151,15226831,"TERMINAL",0,0,"6116",,terminal_output +20152,15227744,"TERMINAL",0,0,"7\t",,terminal_output +20153,15227764,"TERMINAL",0,0,"7227",,terminal_output +20154,15228846,"TERMINAL",0,0,"8338",,terminal_output +20155,15228847,"TERMINAL",0,0,"8\t",,terminal_output +20156,15229862,"TERMINAL",0,0,"9\t",,terminal_output +20157,15229881,"TERMINAL",0,0,"9449",,terminal_output +20158,15230898,"TERMINAL",0,0,"40:00\t",,terminal_output +20159,15230898,"TERMINAL",0,0,"40:005540",,terminal_output +20160,15231919,"TERMINAL",0,0,"11",,terminal_output +20161,15231920,"TERMINAL",0,0,"1661",,terminal_output +20162,15232947,"TERMINAL",0,0,"2\t",,terminal_output +20163,15232950,"TERMINAL",0,0,"2772",,terminal_output +20164,15234059,"TERMINAL",0,0,"3\t",,terminal_output +20165,15234059,"TERMINAL",0,0,"3883",,terminal_output +20166,15235018,"TERMINAL",0,0,"4\t",,terminal_output +20167,15235030,"TERMINAL",0,0,"4994",,terminal_output +20168,15236060,"TERMINAL",0,0,"5\t",,terminal_output +20169,15236064,"TERMINAL",0,0,"510105",,terminal_output +20170,15237098,"TERMINAL",0,0,"6\t",,terminal_output +20171,15237104,"TERMINAL",0,0,"6116",,terminal_output +20172,15238166,"TERMINAL",0,0,"7\t",,terminal_output +20173,15238167,"TERMINAL",0,0,"7227",,terminal_output +20174,15239282,"TERMINAL",0,0,"8\t",,terminal_output +20175,15239283,"TERMINAL",0,0,"8338",,terminal_output +20176,15240261,"TERMINAL",0,0,"9\t",,terminal_output +20177,15240306,"TERMINAL",0,0,"9449",,terminal_output +20178,15241332,"TERMINAL",0,0,"10\t",,terminal_output +20179,15241332,"TERMINAL",0,0,"105550",,terminal_output +20180,15242355,"TERMINAL",0,0,"1\t",,terminal_output +20181,15242355,"TERMINAL",0,0,"2772",,terminal_output +20182,15243378,"TERMINAL",0,0,"3\t",,terminal_output +20183,15243379,"TERMINAL",0,0,"3883",,terminal_output +20184,15244344,"TERMINAL",0,0,"4\t",,terminal_output 
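The per-component tally logged by this run ({'dynamics': 26555904, 'lam': 35115232, 'tokenizer': 33750256, 'total': 95421392}) is the kind of count obtained by summing leaf sizes over each JAX parameter pytree. A minimal sketch under that assumption; count_params and the components mapping are illustrative names, not the repository's helpers:

```python
# Illustrative: per-component parameter counts over JAX/Flax param pytrees,
# producing a dict shaped like the "Parameter counts" log line above.
import jax

def count_params(params) -> int:
    return sum(int(leaf.size) for leaf in jax.tree_util.tree_leaves(params))

def report_counts(components: dict) -> dict:
    counts = {name: count_params(p) for name, p in components.items()}
    counts["total"] = sum(counts.values())
    return counts  # e.g. {'dynamics': ..., 'lam': ..., 'tokenizer': ..., 'total': ...}
```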
+20185,15244402,"TERMINAL",0,0,"4994",,terminal_output +20186,15245379,"TERMINAL",0,0,"55",,terminal_output +20187,15245429,"TERMINAL",0,0,"520205",,terminal_output +20188,15246414,"TERMINAL",0,0,"6\t",,terminal_output +20189,15246467,"TERMINAL",0,0,"6116",,terminal_output +20190,15247473,"TERMINAL",0,0,"7\t",,terminal_output +20191,15247511,"TERMINAL",0,0,"7227",,terminal_output +20192,15248600,"TERMINAL",0,0,"8\t",,terminal_output +20193,15248600,"TERMINAL",0,0,"8338",,terminal_output +20194,15249535,"TERMINAL",0,0,"9\t",,terminal_output +20195,15249631,"TERMINAL",0,0,"9449",,terminal_output +20196,15250599,"TERMINAL",0,0,"20\t",,terminal_output +20197,15250632,"TERMINAL",0,0,"20553:00",,terminal_output +20198,15251604,"TERMINAL",0,0,"1\t",,terminal_output +20199,15251702,"TERMINAL",0,0,"1661",,terminal_output +20200,15252661,"TERMINAL",0,0,"2\t",,terminal_output +20201,15252713,"TERMINAL",0,0,"2772",,terminal_output +20202,15253694,"TERMINAL",0,0,"3\t",,terminal_output +20203,15253746,"TERMINAL",0,0,"3883",,terminal_output +20204,15254784,"TERMINAL",0,0,"4\t",,terminal_output +20205,15254790,"TERMINAL",0,0,"4994",,terminal_output +20206,15255874,"TERMINAL",0,0,"5\t",,terminal_output +20207,15255875,"TERMINAL",0,0,"530305",,terminal_output +20208,15256801,"TERMINAL",0,0,"66",,terminal_output +20209,15256867,"TERMINAL",0,0,"6116",,terminal_output +20210,15257825,"TERMINAL",0,0,"7\t",,terminal_output +20211,15257901,"TERMINAL",0,0,"7227",,terminal_output +20212,15258861,"TERMINAL",0,0,"8\t",,terminal_output +20213,15258989,"TERMINAL",0,0,"8338",,terminal_output +20214,15259910,"TERMINAL",0,0,"9\t",,terminal_output +20215,15260000,"TERMINAL",0,0,"9449",,terminal_output +20216,15260934,"TERMINAL",0,0,"30\t",,terminal_output +20217,15261029,"TERMINAL",0,0,"305510",,terminal_output +20218,15262007,"TERMINAL",0,0,"1\t",,terminal_output +20219,15262129,"TERMINAL",0,0,"1661",,terminal_output +20220,15263111,"TERMINAL",0,0,"2\t",,terminal_output +20221,15263126,"TERMINAL",0,0,"2772",,terminal_output +20222,15264064,"TERMINAL",0,0,"3\t",,terminal_output +20223,15264160,"TERMINAL",0,0,"3883",,terminal_output +20224,15265093,"TERMINAL",0,0,"4\t",,terminal_output +20225,15265241,"TERMINAL",0,0,"4994",,terminal_output +20226,15266131,"TERMINAL",0,0,"5\t",,terminal_output +20227,15266236,"TERMINAL",0,0,"540405",,terminal_output +20228,15267247,"TERMINAL",0,0,"6\t",,terminal_output +20229,15267282,"TERMINAL",0,0,"6227",,terminal_output +20230,15268209,"TERMINAL",0,0,"7\t",,terminal_output +20231,15268309,"TERMINAL",0,0,"8338",,terminal_output +20232,15269286,"TERMINAL",0,0,"8\t",,terminal_output +20233,15269378,"TERMINAL",0,0,"9449",,terminal_output +20234,15270309,"TERMINAL",0,0,"9\t",,terminal_output +20235,15270407,"TERMINAL",0,0,"405520",,terminal_output +20236,15271318,"TERMINAL",0,0,"41\t",,terminal_output +20237,15271456,"TERMINAL",0,0,"1661",,terminal_output +20238,15272461,"TERMINAL",0,0,"2\t",,terminal_output +20239,15272486,"TERMINAL",0,0,"2772",,terminal_output +20240,15273485,"TERMINAL",0,0,"3\t",,terminal_output +20241,15273529,"TERMINAL",0,0,"3883",,terminal_output +20242,15274435,"TERMINAL",0,0,"4\t",,terminal_output +20243,15274565,"TERMINAL",0,0,"4994",,terminal_output +20244,15275564,"TERMINAL",0,0,"5\t",,terminal_output +20245,15275611,"TERMINAL",0,0,"550505",,terminal_output +20246,15276557,"TERMINAL",0,0,"6\t",,terminal_output +20247,15276673,"TERMINAL",0,0,"6116",,terminal_output +20248,15277558,"TERMINAL",0,0,"7\t",,terminal_output 
+20249,15277686,"TERMINAL",0,0,"7227",,terminal_output +20250,15278706,"TERMINAL",0,0,"8\t",,terminal_output +20251,15278734,"TERMINAL",0,0,"8338",,terminal_output +20252,15279735,"TERMINAL",0,0,"9\t",,terminal_output +20253,15279777,"TERMINAL",0,0,"9449",,terminal_output +20254,15280100,"TERMINAL",0,0,"Step 0, loss: 13.707112312316895\r\nStep 1, loss: 12.787692070007324\r\nStep 2, loss: 14.445314407348633\r\nStep 3, loss: 6.839857578277588\r\nStep 4, loss: 5.966392993927002\r\nStep 5, loss: 9.156126022338867\r\nStep 6, loss: 8.733874320983887\r\nStep 7, loss: 7.992668151855469\r\nStep 8, loss: 5.01971960067749\r\nStep 9, loss: 4.375117778778076\r\nCalculating validation metrics...\r\n",,terminal_output +20255,15280697,"TERMINAL",0,0,"50\t",,terminal_output +20256,15280833,"TERMINAL",0,0,"505530",,terminal_output +20257,15281778,"TERMINAL",0,0,"1\t",,terminal_output +20258,15281875,"TERMINAL",0,0,"1661",,terminal_output +20259,15282747,"TERMINAL",0,0,"2\t",,terminal_output +20260,15282914,"TERMINAL",0,0,"2772",,terminal_output +20261,15283838,"TERMINAL",0,0,"3\t",,terminal_output +20262,15283928,"TERMINAL",0,0,"3883",,terminal_output +20263,15284277,"TERMINAL",0,0,"srun",,terminal_focus +20264,15284917,"TERMINAL",0,0,"40",,terminal_output +20265,15284970,"TERMINAL",0,0,"4994",,terminal_output +20266,15285865,"TERMINAL",0,0,"5\t",,terminal_output +20267,15286006,"TERMINAL",0,0,"55:005:005",,terminal_output +20268,15286908,"TERMINAL",0,0,"6\t",,terminal_output +20269,15287078,"TERMINAL",0,0,"6116",,terminal_output +20270,15287945,"TERMINAL",0,0,"7\t",,terminal_output +20271,15288084,"TERMINAL",0,0,"7227",,terminal_output +20272,15288981,"TERMINAL",0,0,"8\t",,terminal_output +20273,15289134,"TERMINAL",0,0,"8338",,terminal_output +20274,15290015,"TERMINAL",0,0,"9\t",,terminal_output +20275,15290180,"TERMINAL",0,0,"9449",,terminal_output +20276,15291051,"TERMINAL",0,0,"1:00\t",,terminal_output +20277,15291210,"TERMINAL",0,0,"1:005540",,terminal_output +20278,15292120,"TERMINAL",0,0,"17",,terminal_output +20279,15292264,"TERMINAL",0,0,"1661",,terminal_output +20280,15293152,"TERMINAL",0,0,"2\t",,terminal_output +20281,15293296,"TERMINAL",0,0,"2883",,terminal_output +20282,15294271,"TERMINAL",0,0,"3\t",,terminal_output +20283,15294329,"TERMINAL",0,0,"4994",,terminal_output +20284,15295197,"TERMINAL",0,0,"4\t",,terminal_output +20285,15295368,"TERMINAL",0,0,"510105",,terminal_output +20286,15296230,"TERMINAL",0,0,"5\t",,terminal_output +20287,15296406,"TERMINAL",0,0,"6116",,terminal_output +20288,15297273,"TERMINAL",0,0,"6\t",,terminal_output +20289,15297445,"TERMINAL",0,0,"7227",,terminal_output +20290,15298372,"TERMINAL",0,0,"8\t",,terminal_output +20291,15298510,"TERMINAL",0,0,"8338",,terminal_output +20292,15299404,"TERMINAL",0,0,"98",,terminal_output +20293,15299543,"TERMINAL",0,0,"9449",,terminal_output +20294,15300385,"TERMINAL",0,0,"10\t",,terminal_output +20295,15300567,"TERMINAL",0,0,"105550",,terminal_output +20296,15301547,"TERMINAL",0,0,"1\t",,terminal_output +20297,15301607,"TERMINAL",0,0,"1661",,terminal_output +20298,15302573,"TERMINAL",0,0,"2\t",,terminal_output +20299,15302673,"TERMINAL",0,0,"2772",,terminal_output +20300,15303591,"TERMINAL",0,0,"3\t",,terminal_output +20301,15303692,"TERMINAL",0,0,"3883",,terminal_output +20302,15304532,"TERMINAL",0,0,"4\t",,terminal_output +20303,15304730,"TERMINAL",0,0,"4994",,terminal_output +20304,15305648,"TERMINAL",0,0,"5\t",,terminal_output +20305,15305831,"TERMINAL",0,0,"520205",,terminal_output 
+20306,15306662,"TERMINAL",0,0,"6\t",,terminal_output +20307,15306836,"TERMINAL",0,0,"6116",,terminal_output +20308,15307697,"TERMINAL",0,0,"7\t",,terminal_output +20309,15307860,"TERMINAL",0,0,"7227",,terminal_output +20310,15309734,"TERMINAL",0,0,"8449",,terminal_output +20311,15309744,"TERMINAL",0,0,"85",,terminal_output +20312,15310865,"TERMINAL",0,0,"20554:00",,terminal_output +20313,15310867,"TERMINAL",0,0,"20\t",,terminal_output +20314,15311902,"TERMINAL",0,0,"1661",,terminal_output +20315,15311902,"TERMINAL",0,0,"1\t",,terminal_output +20316,15312885,"TERMINAL",0,0,"2772",,terminal_output +20317,15312885,"TERMINAL",0,0,"2\t",,terminal_output +20318,15313895,"TERMINAL",0,0,"3883",,terminal_output +20319,15313923,"TERMINAL",0,0,"3\t",,terminal_output +20320,15314927,"TERMINAL",0,0,"4\t",,terminal_output +20321,15314930,"TERMINAL",0,0,"4994",,terminal_output +20322,15315969,"TERMINAL",0,0,"5\t",,terminal_output +20323,15316001,"TERMINAL",0,0,"530305",,terminal_output +20324,15316995,"TERMINAL",0,0,"6\t",,terminal_output +20325,15317003,"TERMINAL",0,0,"6116",,terminal_output +20326,15318029,"TERMINAL",0,0,"7\t",,terminal_output +20327,15318044,"TERMINAL",0,0,"7227",,terminal_output +20328,15319063,"TERMINAL",0,0,"8\t",,terminal_output +20329,15319081,"TERMINAL",0,0,"8338",,terminal_output +20330,15320178,"TERMINAL",0,0,"9\t",,terminal_output +20331,15320179,"TERMINAL",0,0,"9449",,terminal_output +20332,15321205,"TERMINAL",0,0,"30\t",,terminal_output +20333,15321205,"TERMINAL",0,0,"305510",,terminal_output +20334,15322235,"TERMINAL",0,0,"1\t",,terminal_output +20335,15322235,"TERMINAL",0,0,"1661",,terminal_output +20336,15323255,"TERMINAL",0,0,"2\t",,terminal_output +20337,15323260,"TERMINAL",0,0,"2772",,terminal_output +20338,15324278,"TERMINAL",0,0,"3\t",,terminal_output +20339,15324284,"TERMINAL",0,0,"3994",,terminal_output +20340,15325316,"TERMINAL",0,0,"46",,terminal_output +20341,15325327,"TERMINAL",0,0,"540405",,terminal_output +20342,15326375,"TERMINAL",0,0,"6\t",,terminal_output +20343,15326382,"TERMINAL",0,0,"6116",,terminal_output +20344,15327466,"TERMINAL",0,0,"7\t",,terminal_output +20345,15327485,"TERMINAL",0,0,"7227",,terminal_output +20346,15328429,"TERMINAL",0,0,"8\t",,terminal_output +20347,15328464,"TERMINAL",0,0,"8338",,terminal_output +20348,15329497,"TERMINAL",0,0,"9\t",,terminal_output +20349,15329533,"TERMINAL",0,0,"9449",,terminal_output +20350,15330645,"TERMINAL",0,0,"40\t",,terminal_output +20351,15330645,"TERMINAL",0,0,"405520",,terminal_output +20352,15331656,"TERMINAL",0,0,"1\t",,terminal_output +20353,15331656,"TERMINAL",0,0,"1661",,terminal_output +20354,15332681,"TERMINAL",0,0,"2\t",,terminal_output +20355,15332682,"TERMINAL",0,0,"2772",,terminal_output +20356,15333706,"TERMINAL",0,0,"3\t",,terminal_output +20357,15333708,"TERMINAL",0,0,"3883",,terminal_output +20358,15334721,"TERMINAL",0,0,"4\t",,terminal_output +20359,15334735,"TERMINAL",0,0,"4994",,terminal_output +20360,15335953,"TERMINAL",0,0,"550505",,terminal_output +20361,15335954,"TERMINAL",0,0,"5\t",,terminal_output +20362,15336953,"TERMINAL",0,0,"6\t",,terminal_output +20363,15336962,"TERMINAL",0,0,"6116",,terminal_output +20364,15337997,"TERMINAL",0,0,"7\t",,terminal_output +20365,15338012,"TERMINAL",0,0,"7227",,terminal_output +20366,15339031,"TERMINAL",0,0,"8\t",,terminal_output +20367,15339041,"TERMINAL",0,0,"8338",,terminal_output +20368,15340075,"TERMINAL",0,0,"9\t",,terminal_output +20369,15340085,"TERMINAL",0,0,"9449",,terminal_output 
+20370,15341120,"TERMINAL",0,0,"50\t",,terminal_output +20371,15341130,"TERMINAL",0,0,"505530",,terminal_output +20372,15342203,"TERMINAL",0,0,"1\t",,terminal_output +20373,15342206,"TERMINAL",0,0,"1661",,terminal_output +20374,15343227,"TERMINAL",0,0,"2\t",,terminal_output +20375,15343227,"TERMINAL",0,0,"2772",,terminal_output +20376,15344249,"TERMINAL",0,0,"37",,terminal_output +20377,15344271,"TERMINAL",0,0,"3883",,terminal_output +20378,15345369,"TERMINAL",0,0,"45",,terminal_output +20379,15345370,"TERMINAL",0,0,"46:006:005",,terminal_output +20380,15346393,"TERMINAL",0,0,"6\t",,terminal_output +20381,15346393,"TERMINAL",0,0,"6116",,terminal_output +20382,15347359,"TERMINAL",0,0,"7\t",,terminal_output +20383,15347388,"TERMINAL",0,0,"7227",,terminal_output +20384,15348401,"TERMINAL",0,0,"8\t",,terminal_output +20385,15348430,"TERMINAL",0,0,"8338",,terminal_output +20386,15349437,"TERMINAL",0,0,"9\t",,terminal_output +20387,15349469,"TERMINAL",0,0,"9449",,terminal_output +20388,15350477,"TERMINAL",0,0,"2:00\t",,terminal_output +20389,15350516,"TERMINAL",0,0,"2:005540",,terminal_output +20390,15351515,"TERMINAL",0,0,"1\t",,terminal_output +20391,15351558,"TERMINAL",0,0,"1661",,terminal_output +20392,15352556,"TERMINAL",0,0,"28",,terminal_output +20393,15352596,"TERMINAL",0,0,"2772",,terminal_output +20394,15353678,"TERMINAL",0,0,"3\t",,terminal_output +20395,15353678,"TERMINAL",0,0,"3883",,terminal_output +20396,15354697,"TERMINAL",0,0,"4\t",,terminal_output +20397,15354697,"TERMINAL",0,0,"4994",,terminal_output +20398,15355672,"TERMINAL",0,0,"5\t",,terminal_output +20399,15355774,"TERMINAL",0,0,"510105",,terminal_output +20400,15356743,"TERMINAL",0,0,"6\t",,terminal_output +20401,15356799,"TERMINAL",0,0,"6116",,terminal_output +20402,15357748,"TERMINAL",0,0,"7\t",,terminal_output +20403,15357818,"TERMINAL",0,0,"7227",,terminal_output +20404,15358790,"TERMINAL",0,0,"8\t",,terminal_output +20405,15358892,"TERMINAL",0,0,"8338",,terminal_output +20406,15359916,"TERMINAL",0,0,"9\t",,terminal_output +20407,15359918,"TERMINAL",0,0,"9449",,terminal_output +20408,15360867,"TERMINAL",0,0,"10\t",,terminal_output +20409,15360943,"TERMINAL",0,0,"105550",,terminal_output +20410,15361906,"TERMINAL",0,0,"1\t",,terminal_output +20411,15362021,"TERMINAL",0,0,"1661",,terminal_output +20412,15362954,"TERMINAL",0,0,"2\t",,terminal_output +20413,15363029,"TERMINAL",0,0,"2772",,terminal_output +20414,15363983,"TERMINAL",0,0,"3\t",,terminal_output +20415,15364077,"TERMINAL",0,0,"3883",,terminal_output +20416,15365024,"TERMINAL",0,0,"4\t",,terminal_output +20417,15365120,"TERMINAL",0,0,"4994",,terminal_output +20418,15366061,"TERMINAL",0,0,"5\t",,terminal_output +20419,15366205,"TERMINAL",0,0,"520205",,terminal_output +20420,15367192,"TERMINAL",0,0,"6\t",,terminal_output +20421,15367226,"TERMINAL",0,0,"6116",,terminal_output +20422,15368223,"TERMINAL",0,0,"7\t",,terminal_output +20423,15368255,"TERMINAL",0,0,"7227",,terminal_output +20424,15369381,"TERMINAL",0,0,"8449",,terminal_output +20425,15369382,"TERMINAL",0,0,"8\t",,terminal_output +20426,15370392,"TERMINAL",0,0,"20555:00",,terminal_output +20427,15370406,"TERMINAL",0,0,"20\t",,terminal_output +20428,15371490,"TERMINAL",0,0,"1661",,terminal_output +20429,15371491,"TERMINAL",0,0,"1\t",,terminal_output +20430,15372512,"TERMINAL",0,0,"2\t",,terminal_output +20431,15372512,"TERMINAL",0,0,"2772",,terminal_output +20432,15373514,"TERMINAL",0,0,"3\t",,terminal_output +20433,15373529,"TERMINAL",0,0,"3883",,terminal_output 
+20434,15374670,"TERMINAL",0,0,"4\t",,terminal_output +20435,15374670,"TERMINAL",0,0,"4994",,terminal_output +20436,15375681,"TERMINAL",0,0,"59",,terminal_output +20437,15375681,"TERMINAL",0,0,"530305",,terminal_output +20438,15376646,"TERMINAL",0,0,"6\t",,terminal_output +20439,15376647,"TERMINAL",0,0,"6116",,terminal_output +20440,15377670,"TERMINAL",0,0,"7\t",,terminal_output +20441,15377699,"TERMINAL",0,0,"7227",,terminal_output +20442,15378752,"TERMINAL",0,0,"8\t",,terminal_output +20443,15378752,"TERMINAL",0,0,"8338",,terminal_output +20444,15379748,"TERMINAL",0,0,"9\t",,terminal_output +20445,15379784,"TERMINAL",0,0,"9449",,terminal_output +20446,15380909,"TERMINAL",0,0,"30\t",,terminal_output +20447,15380947,"TERMINAL",0,0,"305510",,terminal_output +20448,15381851,"TERMINAL",0,0,"120",,terminal_output +20449,15381905,"TERMINAL",0,0,"1661",,terminal_output +20450,15382878,"TERMINAL",0,0,"2\t",,terminal_output +20451,15382924,"TERMINAL",0,0,"2772",,terminal_output +20452,15384082,"TERMINAL",0,0,"3\t",,terminal_output +20453,15384083,"TERMINAL",0,0,"3883",,terminal_output +20454,15384949,"TERMINAL",0,0,"4\t",,terminal_output +20455,15384985,"TERMINAL",0,0,"4994",,terminal_output +20456,15385967,"TERMINAL",0,0,"5\t",,terminal_output +20457,15386006,"TERMINAL",0,0,"540405",,terminal_output +20458,15387004,"TERMINAL",0,0,"6\t",,terminal_output +20459,15387047,"TERMINAL",0,0,"6116",,terminal_output +20460,15388038,"TERMINAL",0,0,"71",,terminal_output +20461,15388080,"TERMINAL",0,0,"7227",,terminal_output +20462,15389205,"TERMINAL",0,0,"82",,terminal_output +20463,15389206,"TERMINAL",0,0,"8338",,terminal_output +20464,15390231,"TERMINAL",0,0,"9\t",,terminal_output +20465,15390246,"TERMINAL",0,0,"9449",,terminal_output +20466,15391253,"TERMINAL",0,0,"40\t",,terminal_output +20467,15391254,"TERMINAL",0,0,"405520",,terminal_output +20468,15392269,"TERMINAL",0,0,"1\t",,terminal_output +20469,15392269,"TERMINAL",0,0,"1661",,terminal_output +20470,15393298,"TERMINAL",0,0,"2\t",,terminal_output +20471,15393298,"TERMINAL",0,0,"2883",,terminal_output +20472,15394317,"TERMINAL",0,0,"3\t",,terminal_output +20473,15394331,"TERMINAL",0,0,"4994",,terminal_output +20474,15395347,"TERMINAL",0,0,"4\t",,terminal_output +20475,15395376,"TERMINAL",0,0,"550505",,terminal_output +20476,15396379,"TERMINAL",0,0,"6\t",,terminal_output +20477,15396404,"TERMINAL",0,0,"6116",,terminal_output +20478,15397396,"TERMINAL",0,0,"7\t",,terminal_output +20479,15397448,"TERMINAL",0,0,"7227",,terminal_output +20480,15398401,"TERMINAL",0,0,"8\t",,terminal_output +20481,15398517,"TERMINAL",0,0,"8338",,terminal_output +20482,15399437,"TERMINAL",0,0,"9\t",,terminal_output +20483,15399524,"TERMINAL",0,0,"9449",,terminal_output +20484,15400483,"TERMINAL",0,0,"501",,terminal_output +20485,15400574,"TERMINAL",0,0,"505530",,terminal_output +20486,15401520,"TERMINAL",0,0,"1\t",,terminal_output +20487,15401597,"TERMINAL",0,0,"1661",,terminal_output +20488,15402572,"TERMINAL",0,0,"2\t",,terminal_output +20489,15402651,"TERMINAL",0,0,"2772",,terminal_output +20490,15403642,"TERMINAL",0,0,"3\t",,terminal_output +20491,15403680,"TERMINAL",0,0,"3883",,terminal_output +20492,15404742,"TERMINAL",0,0,"4\t",,terminal_output +20493,15404742,"TERMINAL",0,0,"4994",,terminal_output +20494,15405669,"TERMINAL",0,0,"5\t",,terminal_output +20495,15405759,"TERMINAL",0,0,"57:007:005",,terminal_output +20496,15406809,"TERMINAL",0,0,"6\t",,terminal_output +20497,15406814,"TERMINAL",0,0,"6116",,terminal_output 
+20498,15407735,"TERMINAL",0,0,"7\t",,terminal_output +20499,15407876,"TERMINAL",0,0,"7227",,terminal_output +20500,15408863,"TERMINAL",0,0,"8\t",,terminal_output +20501,15408884,"TERMINAL",0,0,"8338",,terminal_output +20502,15409892,"TERMINAL",0,0,"9\t",,terminal_output +20503,15409912,"TERMINAL",0,0,"9449",,terminal_output +20504,15410941,"TERMINAL",0,0,"3:00\t",,terminal_output +20505,15410950,"TERMINAL",0,0,"3:005540",,terminal_output +20506,15411919,"TERMINAL",0,0,"1\t",,terminal_output +20507,15411992,"TERMINAL",0,0,"1661",,terminal_output +20508,15412929,"TERMINAL",0,0,"2\t",,terminal_output +20509,15413030,"TERMINAL",0,0,"2772",,terminal_output +20510,15413990,"TERMINAL",0,0,"3\t",,terminal_output +20511,15414125,"TERMINAL",0,0,"3883",,terminal_output +20512,15415274,"TERMINAL",0,0,"4\t",,terminal_output +20513,15415275,"TERMINAL",0,0,"4994",,terminal_output +20514,15416142,"TERMINAL",0,0,"5\t",,terminal_output +20515,15416160,"TERMINAL",0,0,"510105",,terminal_output +20516,15417086,"TERMINAL",0,0,"6\t",,terminal_output +20517,15417224,"TERMINAL",0,0,"6116",,terminal_output +20518,15418140,"TERMINAL",0,0,"7\t",,terminal_output +20519,15418278,"TERMINAL",0,0,"7227",,terminal_output +20520,15419208,"TERMINAL",0,0,"8\t",,terminal_output +20521,15419347,"TERMINAL",0,0,"8449",,terminal_output +20522,15420326,"TERMINAL",0,0,"9\t",,terminal_output +20523,15420365,"TERMINAL",0,0,"105550",,terminal_output +20524,15421362,"TERMINAL",0,0,"10\t",,terminal_output +20525,15421399,"TERMINAL",0,0,"1661",,terminal_output +20526,15422376,"TERMINAL",0,0,"1\t",,terminal_output +20527,15422477,"TERMINAL",0,0,"2772",,terminal_output +20528,15423400,"TERMINAL",0,0,"3\t",,terminal_output +20529,15423489,"TERMINAL",0,0,"3883",,terminal_output +20530,15424386,"TERMINAL",0,0,"4\t",,terminal_output +20531,15424537,"TERMINAL",0,0,"4994",,terminal_output +20532,15425420,"TERMINAL",0,0,"5\t",,terminal_output +20533,15425581,"TERMINAL",0,0,"520205",,terminal_output +20534,15426475,"TERMINAL",0,0,"6\t",,terminal_output +20535,15426642,"TERMINAL",0,0,"6116",,terminal_output +20536,15427603,"TERMINAL",0,0,"7\t",,terminal_output +20537,15427702,"TERMINAL",0,0,"7227",,terminal_output +20538,15428537,"TERMINAL",0,0,"8\t",,terminal_output +20539,15428728,"TERMINAL",0,0,"8338",,terminal_output +20540,15429685,"TERMINAL",0,0,"91",,terminal_output +20541,15431348,"TERMINAL",0,0,"9666:01",,terminal_output +20542,15431367,"TERMINAL",0,0,"20140",,terminal_output +20543,15432410,"TERMINAL",0,0,"22772",,terminal_output +20544,15432410,"TERMINAL",0,0,"2\t",,terminal_output +20545,15433543,"TERMINAL",0,0,"3883",,terminal_output +20546,15433544,"TERMINAL",0,0,"31",,terminal_output +20547,15434467,"TERMINAL",0,0,"4994",,terminal_output +20548,15434488,"TERMINAL",0,0,"4\t",,terminal_output +20549,15435506,"TERMINAL",0,0,"530305",,terminal_output +20550,15435518,"TERMINAL",0,0,"5\t",,terminal_output +20551,15436538,"TERMINAL",0,0,"6116",,terminal_output +20552,15436560,"TERMINAL",0,0,"6\t",,terminal_output +20553,15437574,"TERMINAL",0,0,"7227",,terminal_output +20554,15437598,"TERMINAL",0,0,"7\t",,terminal_output +20555,15438648,"TERMINAL",0,0,"8338",,terminal_output +20556,15438656,"TERMINAL",0,0,"8\t",,terminal_output +20557,15439731,"TERMINAL",0,0,"9449",,terminal_output +20558,15439731,"TERMINAL",0,0,"9\t",,terminal_output +20559,15440709,"TERMINAL",0,0,"305510",,terminal_output +20560,15440709,"TERMINAL",0,0,"3050",,terminal_output +20561,15441882,"TERMINAL",0,0,"1661",,terminal_output 
+20562,15441882,"TERMINAL",0,0,"1\t",,terminal_output +20563,15442763,"TERMINAL",0,0,"2772",,terminal_output +20564,15442772,"TERMINAL",0,0,"2\t",,terminal_output +20565,15443884,"TERMINAL",0,0,"3883",,terminal_output +20566,15443896,"TERMINAL",0,0,"3\t",,terminal_output +20567,15444858,"TERMINAL",0,0,"4994",,terminal_output +20568,15444908,"TERMINAL",0,0,"4\t",,terminal_output +20569,15445870,"TERMINAL",0,0,"540405",,terminal_output +20570,15445879,"TERMINAL",0,0,"5\t",,terminal_output +20571,15446953,"TERMINAL",0,0,"6116",,terminal_output +20572,15446953,"TERMINAL",0,0,"6\t",,terminal_output +20573,15447950,"TERMINAL",0,0,"7\t",,terminal_output +20574,15447951,"TERMINAL",0,0,"7227",,terminal_output +20575,15449032,"TERMINAL",0,0,"8\t",,terminal_output +20576,15449038,"TERMINAL",0,0,"8338",,terminal_output +20577,15450023,"TERMINAL",0,0,"9\t",,terminal_output +20578,15450061,"TERMINAL",0,0,"9449",,terminal_output +20579,15450306,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +20580,15451102,"TERMINAL",0,0,"40\t",,terminal_output +20581,15451103,"TERMINAL",0,0,"405520",,terminal_output +20582,15451390,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1892,0,"",shellscript,selection_mouse +20583,15452100,"TERMINAL",0,0,"1\t",,terminal_output +20584,15452180,"TERMINAL",0,0,"1661",,terminal_output +20585,15452451,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1800,0,"",shellscript,selection_mouse +20586,15453157,"TERMINAL",0,0,"2\t",,terminal_output +20587,15453189,"TERMINAL",0,0,"2772",,terminal_output +20588,15453408,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1800,1,"",shellscript,content +20589,15453489,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1800,0,"3",shellscript,content +20590,15453490,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1801,0,"",shellscript,selection_keyboard +20591,15454183,"TERMINAL",0,0,"3\t",,terminal_output +20592,15454223,"TERMINAL",0,0,"3883",,terminal_output +20593,15455216,"TERMINAL",0,0,"4\t",,terminal_output +20594,15455254,"TERMINAL",0,0,"4994",,terminal_output +20595,15456251,"TERMINAL",0,0,"5\t",,terminal_output +20596,15456289,"TERMINAL",0,0,"551516",,terminal_output +20597,15457396,"TERMINAL",0,0,"6\t",,terminal_output +20598,15457396,"TERMINAL",0,0,"7227",,terminal_output +20599,15458373,"TERMINAL",0,0,"8\t",,terminal_output +20600,15458374,"TERMINAL",0,0,"8338",,terminal_output +20601,15459362,"TERMINAL",0,0,"9\t",,terminal_output +20602,15459399,"TERMINAL",0,0,"9449",,terminal_output +20603,15460427,"TERMINAL",0,0,"50\t",,terminal_output +20604,15460535,"TERMINAL",0,0,"505530",,terminal_output +20605,15461530,"TERMINAL",0,0,"1\t",,terminal_output +20606,15461532,"TERMINAL",0,0,"1661",,terminal_output +20607,15462520,"TERMINAL",0,0,"2\t",,terminal_output +20608,15462521,"TERMINAL",0,0,"2772",,terminal_output +20609,15463550,"TERMINAL",0,0,"3\t",,terminal_output +20610,15463566,"TERMINAL",0,0,"3883",,terminal_output +20611,15464772,"TERMINAL",0,0,"4994",,terminal_output +20612,15464773,"TERMINAL",0,0,"400",,terminal_output +20613,15465792,"TERMINAL",0,0,"5\t",,terminal_output +20614,15465793,"TERMINAL",0,0,"58:008:005",,terminal_output +20615,15466816,"TERMINAL",0,0,"6\t",,terminal_output +20616,15466817,"TERMINAL",0,0,"6116",,terminal_output +20617,15466902,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1925,0,"",shellscript,selection_mouse +20618,15467240,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1925,0,"\n ",shellscript,content 
+20619,15467815,"TERMINAL",0,0,"7\t",,terminal_output +20620,15467914,"TERMINAL",0,0,"7227",,terminal_output +20621,15468482,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1930,0,"-",shellscript,content +20622,15468484,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1931,0,"",shellscript,selection_keyboard +20623,15468747,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1931,0,"-",shellscript,content +20624,15468747,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1932,0,"",shellscript,selection_keyboard +20625,15468884,"TERMINAL",0,0,"8\t",,terminal_output +20626,15468893,"TERMINAL",0,0,"8338",,terminal_output +20627,15469030,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1932,0,"r",shellscript,content +20628,15469030,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1933,0,"",shellscript,selection_keyboard +20629,15469184,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1933,0,"e",shellscript,content +20630,15469184,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1934,0,"",shellscript,selection_keyboard +20631,15469392,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1934,0,"s",shellscript,content +20632,15469393,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1935,0,"",shellscript,selection_keyboard +20633,15469684,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1935,0,"t",shellscript,content +20634,15469685,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1936,0,"",shellscript,selection_keyboard +20635,15469799,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1936,0,"o",shellscript,content +20636,15469799,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1937,0,"",shellscript,selection_keyboard +20637,15469917,"TERMINAL",0,0,"9\t",,terminal_output +20638,15469937,"TERMINAL",0,0,"9449",,terminal_output +20639,15469969,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1937,0,"r",shellscript,content +20640,15469970,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1938,0,"",shellscript,selection_keyboard +20641,15470163,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1938,0,"e",shellscript,content +20642,15470164,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1939,0,"",shellscript,selection_keyboard +20643,15470446,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1939,0,"_",shellscript,content +20644,15470446,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1940,0,"",shellscript,selection_keyboard +20645,15470932,"TERMINAL",0,0,"4:00\t",,terminal_output +20646,15470950,"TERMINAL",0,0,"4:005540",,terminal_output +20647,15471337,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1940,0,"c",shellscript,content +20648,15471337,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1941,0,"",shellscript,selection_keyboard +20649,15471960,"TERMINAL",0,0,"1\t",,terminal_output +20650,15471981,"TERMINAL",0,0,"1661",,terminal_output +20651,15472244,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1941,0,"k",shellscript,content +20652,15472245,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1942,0,"",shellscript,selection_keyboard +20653,15472403,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1942,0,"p",shellscript,content +20654,15472403,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1943,0,"",shellscript,selection_keyboard +20655,15472501,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1943,0,"t",shellscript,content 
+20656,15472503,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1944,0,"",shellscript,selection_keyboard +20657,15472847,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1944,0," ",shellscript,content +20658,15472848,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1945,0,"",shellscript,selection_keyboard +20659,15473001,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1945,0,"\",shellscript,content +20660,15473001,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1946,0,"",shellscript,selection_keyboard +20661,15473042,"TERMINAL",0,0,"2\t",,terminal_output +20662,15473082,"TERMINAL",0,0,"2772",,terminal_output +20663,15473299,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1945,0,"",shellscript,selection_command +20664,15474036,"TERMINAL",0,0,"3\t",,terminal_output +20665,15474070,"TERMINAL",0,0,"3883",,terminal_output +20666,15475080,"TERMINAL",0,0,"4\t",,terminal_output +20667,15475122,"TERMINAL",0,0,"4994",,terminal_output +20668,15476125,"TERMINAL",0,0,"5\t",,terminal_output +20669,15476152,"TERMINAL",0,0,"510105",,terminal_output +20670,15477192,"TERMINAL",0,0,"6\t",,terminal_output +20671,15477192,"TERMINAL",0,0,"6116",,terminal_output +20672,15478215,"TERMINAL",0,0,"7\t",,terminal_output +20673,15478248,"TERMINAL",0,0,"7227",,terminal_output +20674,15479245,"TERMINAL",0,0,"8\t",,terminal_output +20675,15479287,"TERMINAL",0,0,"8338",,terminal_output +20676,15480265,"TERMINAL",0,0,"9\t",,terminal_output +20677,15480303,"TERMINAL",0,0,"95550",,terminal_output +20678,15481295,"TERMINAL",0,0,"10\t",,terminal_output +20679,15481337,"TERMINAL",0,0,"11661",,terminal_output +20680,15482393,"TERMINAL",0,0,"2\t",,terminal_output +20681,15482393,"TERMINAL",0,0,"2772",,terminal_output +20682,15483417,"TERMINAL",0,0,"3\t",,terminal_output +20683,15483432,"TERMINAL",0,0,"3883",,terminal_output +20684,15484424,"TERMINAL",0,0,"4\t",,terminal_output +20685,15484459,"TERMINAL",0,0,"4994",,terminal_output +20686,15485459,"TERMINAL",0,0,"5\t",,terminal_output +20687,15485498,"TERMINAL",0,0,"520205",,terminal_output +20688,15486587,"TERMINAL",0,0,"6\t",,terminal_output +20689,15486587,"TERMINAL",0,0,"6116",,terminal_output +20690,15487086,"TERMINAL",0,0,"Step 10, validation loss: 3.863799810409546\r\nStep 10, loss: 5.617998123168945\r\nStep 11, loss: 3.658097267150879\r\nStep 12, loss: 3.90524959564209\r\nStep 13, loss: 3.4213035106658936\r\nStep 14, loss: 3.9468839168548584\r\nStep 15, loss: 5.512921333312988\r\nStep 16, loss: 3.4130032062530518\r\nStep 17, loss: 3.3123934268951416\r\nStep 18, loss: 4.716584205627441\r\nStep 19, loss: 4.0172529220581055\r\nCalculating validation metrics...\r\nStep 20, validation loss: 2.7067415714263916\r\nStep 20, loss: 3.0353360176086426\r\nStep 21, loss: 2.1936519145965576\r\nStep 22, loss: 2.062737464904785\r\nStep 23, loss: 2.398864984512329\r\nStep 24, loss: 2.306864023208618\r\nStep 25, loss: 1.6384460926055908\r\nStep 26, loss: 1.5758771896362305\r\nStep 27, loss: 2.5450780391693115\r\nStep 28, loss: 1.8924871683120728\r\nStep 29, loss: 1.8137620687484741\r\nCalculating validation metrics...\r\nStep 30, validation loss: 1.903077483177185\r\nStep 30, loss: 1.6647374629974365\r\nStep 31, loss: 1.8452759981155396\r\nStep 32, loss: 1.7696956396102905\r\nStep 33, loss: 1.4142435789108276\r\nStep 34, loss: 1.6062434911727905\r\nStep 35, loss: 1.5516308546066284\r\nStep 36, loss: 1.6732888221740723\r\nStep 37, loss: 1.5981049537658691\r\nStep 38, loss: 1.840443730354309\r\nStep 39, loss: 
1.9034878015518188\r\nCalculating validation metrics...\r\nStep 40, validation loss: 1.6918954849243164\r\nStep 40, loss: 1.502794623374939\r\nStep 41, loss: 1.2753715515136719\r\nStep 42, loss: 1.7678720951080322\r\nStep 43, loss: 1.2166252136230469\r\nStep 44, loss: 1.5621460676193237\r\nStep 45, loss: 1.7861303091049194\r\nStep 46, loss: 1.5240368843078613\r\nStep 47, loss: 1.7457317113876343\r\nStep 48, loss: 1.2991492748260498\r\nStep 49, loss: 1.6742016077041626\r\nCalculating validation metrics...\r\nStep 50, validation loss: 1.4555304050445557\r\nStep 50, loss: 1.4094069004058838\r\nStep 51, loss: 1.2570157051086426\r\nStep 52, loss: 1.6190993785858154\r\nStep 53, loss: 1.2895220518112183\r\nStep 54, loss: 1.476709246635437\r\nStep 55, loss: 1.6371405124664307\r\nStep 56, loss: 2.12119460105896\r\nStep 57, loss: 1.7107138633728027\r\nStep 58, loss: 1.5534651279449463\r\nStep 59, loss: 1.8407435417175293\r\nCalculating validation metrics...\r\nStep 60, validation loss: 1.908073902130127\r\nStep 60, loss: 2.191432476043701\r\nStep 61, loss: 1.9911279678344727\r\nStep 62, loss: 1.6712697744369507\r\nStep 63, loss: 1.6706980466842651\r\nStep 64, loss: 1.4645335674285889\r\nStep 65, loss: 1.877730131149292\r\nStep 66, loss: 1.5621718168258667\r\nStep 67, loss: 1.8046575784683228\r\nStep 68, loss: 1.5368709564208984\r\nStep 69, loss: 1.966619849205017\r\nCalculating validation metrics...\r\nStep 70, validation loss: 1.6616240739822388\r\nStep 70, loss: 1.9972010850906372\r\nStep 71, loss: 1.4267817735671997\r\nStep 72, loss: 1.7085415124893188\r\nStep 73, loss: 1.3815670013427734\r\nStep 74, loss: 1.4460885524749756\r\nStep 75, loss: 1.4084631204605103\r\nStep 76, loss: 1.9376200437545776\r\nStep 77, loss: 1.2114768028259277\r\nStep 78, loss: 2.3607802391052246\r\nStep 79, loss: 2.775278091430664\r\nCalculating validation metrics...\r\nStep 80, validation loss: 1.6644264459609985\r\nStep 80, loss: 2.207951784133911\r\nStep 81, loss: 2.3890230655670166\r\nStep 82, loss: 1.4944252967834473\r\nStep 83, loss: 1.5494434833526611\r\nStep 84, loss: 2.5443668365478516\r\nStep 85, loss: 1.3939130306243896\r\nStep 86, loss: 1.364274263381958\r\nStep 87, loss: 1.5937334299087524\r\nStep 88, loss: 1.4439736604690552\r\nStep 89, loss: 1.0485661029815674\r\nCalculating validation metrics...\r\nStep 90, validation loss: 1.6059762239456177\r\nStep 90, loss: 1.3769913911819458\r\nStep 91, loss: 1.4757726192474365\r\nStep 92, loss: 1.205004096031189\r\nStep 93, loss: 1.3363841772079468\r\nStep 94, loss: 1.2155795097351074\r\nStep 95, loss: 1.397845983505249\r\nStep 96, loss: 1.311159372329712\r\nStep 97, loss: 1.3186339139938354\r\nStep 98, loss: 1.4804506301879883\r\nStep 99, loss: 1.1852830648422241\r\nCalculating validation metrics...\r\nStep 100, validation loss: 1.4663442373275757\r\nSaved checkpoint at step 100\r\nStep 100, loss: 2.8366661071777344\r\nStep 101, loss: 1.4848524332046509\r\nStep 102, loss: 1.3007668256759644\r\nStep 103, loss: 1.504318356513977\r\nStep 104, loss: 1.8867294788360596\r\nStep 105, loss: 1.3187334537506104\r\nStep 106, loss: 1.5957589149475098\r\nStep 107, loss: 1.4771620035171509\r\nStep 108, loss: 1.496765375137329\r\nStep 109, loss: 1.1947681903839111\r\nCalculating validation metrics...\r\nStep 110, validation loss: 1.4599555730819702\r\nStep 110, loss: 1.607440710067749\r\nStep 111, loss: 1.2147290706634521\r\nStep 112, loss: 0.82960444688797\r\nStep 113, loss: 1.6796694993972778\r\nStep 114, loss: 2.1049532890319824\r\nStep 115, loss: 1.247590184211731\r\nStep 116, 
loss: 1.6736983060836792\r\nStep 117, loss: 1.6991901397705078\r\nStep 118, loss: 1.0350857973098755\r\nStep 119, loss: 1.52377450466156\r\nCalculating validation metrics...\r\nStep 120, validation loss: 1.2502925395965576\r\nStep 120, loss: 1.1400285959243774\r\nStep 121, loss: 1.466750979423523\r\nStep 122, loss: 1.2491735219955444\r\nStep 123, loss: 1.1075115203857422\r\nStep 124, loss: 1.3452149629592896\r\nStep 125, loss: 1.0298758745193481\r\nStep 126, loss: 0.9970755577087402\r\nStep 127, loss: 1.1443158388137817\r\nStep 128, loss: 1.200229525566101\r\nStep 129, loss: 1.708919882774353\r\nCalculating validation metrics...\r\nStep 130, validation loss: 1.2616736888885498\r\nStep 130, loss: 0.9127894043922424\r\nStep 131, loss: 1.7274646759033203\r\nStep 132, loss: 1.3276433944702148\r\nStep 133, loss: 1.1401402950286865\r\nStep 134, loss: 1.1722933053970337\r\nStep 135, loss: 1.1719197034835815\r\nStep 136, loss: 1.1223397254943848\r\nStep 137, loss: 1.2594343423843384\r\nStep 138, loss: 1.0202430486679077\r\nStep 139, loss: 1.1647100448608398\r\nCalculating validation metrics...\r\nStep 140, validation loss: 1.1689294576644897\r\nStep 140, loss: 0.878560483455658\r\nStep 141, loss: 0.955134928226471\r\nStep 142, loss: 1.26587975025177\r\nStep 143, loss: 1.0154823064804077\r\nStep 144, loss: 0.9343233108520508\r\nStep 145, loss: 0.873361349105835\r\nStep 146, loss: 1.069072961807251\r\nStep 147, loss: 1.2155334949493408\r\nStep 148, loss: 1.0140929222106934\r\nStep 149, loss: 1.2737185955047607\r\nCalculating validation metrics...\r\nStep 150, validation loss: 1.079149603843689\r\nStep 150, loss: 0.8457826972007751\r\nStep 151, loss: 1.1264920234680176\r\nStep 152, loss: 0.9683992862701416\r\nStep 153, loss: 1.3294810056686401\r\nStep 154, loss: 1.1411707401275635\r\nStep 155, loss: 0.9880754947662354\r\nStep 156, loss: 1.513687014579773\r\nStep 157, loss: 0.9484049081802368\r\nStep 158, loss: 0.9942203760147095\r\nStep 159, loss: 0.7200573682785034\r\nCalculating validation metrics...\r\nStep 160, validation loss: 0.9179118871688843\r\nStep 160, loss: 0.9001950621604919\r\nStep 161, loss: 0.9635970592498779\r\nStep 162, loss: 0.857836902141571\r\nStep 163, loss: 1.269330382347107\r\nStep 164, loss: 1.1029819250106812\r\nStep 165, loss: 1.0277214050292969\r\nStep 166, loss: 1.0350062847137451\r\nStep 167, loss: 1.231576681137085\r\nStep 168, loss: 1.0444978475570679\r\nStep 169, loss: 1.1213150024414062\r\nCalculating validation metrics...\r\nStep 170, validation loss: 1.1309492588043213\r\nStep 170, loss: 0.9907813668251038\r\nStep 171, loss: 1.0109531879425049\r\nStep 172, loss: 0.9464053511619568\r\nStep 173, loss: 0.8962169885635376\r\nStep 174, loss: 1.0262359380722046\r\nStep 175, loss: 1.101137638092041\r\nStep 176, loss: 0.9471585750579834\r\nStep 177, loss: 0.9893763065338135\r\nStep 178, loss: 1.1201733350753784\r\nStep 179, loss: 1.0461798906326294\r\nCalculating validation metrics...\r\nStep 180, validation loss: 1.0496798753738403\r\nStep 180, loss: 1.0859482288360596\r\nStep 181, loss: 0.953528642654419\r\nStep 182, loss: 0.8893213272094727\r\nStep 183, loss: 0.8726180791854858\r\nStep 184, loss: 0.8556340932846069\r\nStep 185, loss: 0.8723524212837219\r\nStep 186, loss: 0.9212787747383118\r\nStep 187, loss: 0.7803460359573364\r\nStep 188, loss: 0.9376819133758545\r\nStep 189, loss: 0.9409928321838379\r\nCalculating validation metrics...\r\nStep 190, validation loss: 0.9504003524780273\r\nStep 190, loss: 1.1707098484039307\r\nStep 191, loss: 0.7748500108718872\r\nStep 
192, loss: 1.0091899633407593\r\nStep 193, loss: 0.911808967590332\r\nStep 194, loss: 0.7804062962532043\r\nStep 195, loss: 1.1316330432891846\r\nStep 196, loss: 0.7976279258728027\r\nStep 197, loss: 0.8320972919464111\r\nStep 198, loss: 0.8431113958358765\r\nStep 199, loss: 1.0006061792373657\r\nCalculating validation metrics...\r\nStep 200, validation loss: 0.9597995281219482\r\nSaved checkpoint at step 200\r\n",,terminal_output +20691,15487534,"TERMINAL",0,0,"7\t",,terminal_output +20692,15487576,"TERMINAL",0,0,"7227",,terminal_output +20693,15488570,"TERMINAL",0,0,"8\t",,terminal_output +20694,15488618,"TERMINAL",0,0,"8338",,terminal_output +20695,15489248,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-dyn-dev-3469576 at: https://wandb.ai/instant-uv/jafar/runs/3469576\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_173917-3469576/logs\r\n",,terminal_output +20696,15489663,"TERMINAL",0,0,"9\t",,terminal_output +20697,15489663,"TERMINAL",0,0,"9449",,terminal_output +20698,15490670,"TERMINAL",0,0,"20\t",,terminal_output +20699,15490694,"TERMINAL",0,0,"20557:00",,terminal_output +20700,15491598,"TERMINAL",0,0,"/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/multiprocessing/resource_tracker.py:224: UserWarning: resource_tracker: There appear to be 20 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +20701,15491699,"TERMINAL",0,0,"1\t",,terminal_output +20702,15491733,"TERMINAL",0,0,"1661",,terminal_output +20703,15491917,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh: line 61: tokenizer_checkpoint: command not found\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +20704,15492752,"TERMINAL",0,0,"26",,terminal_output +20705,15492792,"TERMINAL",0,0,"2772",,terminal_output +20706,15493761,"TERMINAL",0,0,"3\t",,terminal_output +20707,15493804,"TERMINAL",0,0,"3883",,terminal_output +20708,15494886,"TERMINAL",0,0,"47",,terminal_output +20709,15494893,"TERMINAL",0,0,"4994",,terminal_output +20710,15495910,"TERMINAL",0,0,"5\t",,terminal_output +20711,15495911,"TERMINAL",0,0,"530305",,terminal_output +20712,15496882,"TERMINAL",0,0,"69",,terminal_output +20713,15496925,"TERMINAL",0,0,"6116",,terminal_output +20714,15497959,"TERMINAL",0,0,"7\t",,terminal_output +20715,15497991,"TERMINAL",0,0,"7227",,terminal_output +20716,15498944,"TERMINAL",0,0,"8\t",,terminal_output +20717,15498997,"TERMINAL",0,0,"8338",,terminal_output +20718,15499978,"TERMINAL",0,0,"9\t",,terminal_output +20719,15500035,"TERMINAL",0,0,"9449",,terminal_output +20720,15501021,"TERMINAL",0,0,"301",,terminal_output +20721,15501076,"TERMINAL",0,0,"305510",,terminal_output +20722,15502062,"TERMINAL",0,0,"1\t",,terminal_output +20723,15502112,"TERMINAL",0,0,"1661",,terminal_output +20724,15503146,"TERMINAL",0,0,"2\t",,terminal_output +20725,15503164,"TERMINAL",0,0,"2772",,terminal_output +20726,15504136,"TERMINAL",0,0,"3\t",,terminal_output +20727,15504193,"TERMINAL",0,0,"3883",,terminal_output +20728,15505201,"TERMINAL",0,0,"4\t",,terminal_output +20729,15505251,"TERMINAL",0,0,"4994",,terminal_output +20730,15506219,"TERMINAL",0,0,"5\t",,terminal_output +20731,15506294,"TERMINAL",0,0,"540405",,terminal_output +20732,15506409,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",,terminal_output 
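The log above follows the flags from the sbatch script: a validation pass every 10 steps (--val_interval 10), a checkpoint every 100 steps (--log_checkpoint_interval=100), and a stop after 200 steps (--num_steps 200), with a final validation and checkpoint at step 200. A dummy sketch of that cadence, purely illustrative and not the project's actual training loop:

```python
# Reproduces only the logging cadence seen above; all values are stand-ins.
import random

def train_step() -> float:                # stand-in for one optimizer update
    return random.uniform(0.8, 2.0)

def evaluate() -> float:                  # stand-in for the validation pass
    return random.uniform(0.9, 1.5)

def save_checkpoint(step: int) -> None:   # stand-in for the checkpoint manager
    pass

val_interval, ckpt_interval, num_steps = 10, 100, 200
for step in range(num_steps + 1):
    if step > 0 and step % val_interval == 0:
        print("Calculating validation metrics...")
        print(f"Step {step}, validation loss: {evaluate()}")
    if step > 0 and step % ckpt_interval == 0:
        save_checkpoint(step)
        print(f"Saved checkpoint at step {step}")
    if step < num_steps:
        print(f"Step {step}, loss: {train_step()}")
```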
+20733,15507272,"TERMINAL",0,0,"6\t",,terminal_output +20734,15507309,"TERMINAL",0,0,"6227",,terminal_output +20735,15508317,"TERMINAL",0,0,"7\t",,terminal_output +20736,15508366,"TERMINAL",0,0,"8338",,terminal_output +20737,15508392,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\r\n#SBATCH --job-name=train_dyn_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/interactive/3468835\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/interactive/3468835\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=100 \\r\n --log \\r\n --name=coinrun-dyn-dev-$slurm_job_id \\r\n --tags dyn coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 300 \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 10 \\r\n --val_steps 50 \\r\n --wandb_id=$slurm_job_id \\r\n --restore_ckpt \\r\n --data_dir $array_records_dir_train \\r\n --lam_checkpoint $lam_checkpoint \\r\n --tokenizer_checkpoint $tokenizer_checkpoint\r\n",,terminal_output 
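Relative to the first dump of this script, the two recorded edits are now in effect: --num_steps is 300 instead of 200 and a --restore_ckpt flag has been added, so the relaunch resumes the W&B run and then tries to restore from the checkpoint directory written by the first run. That restore fails with a KeyError: the composite restore requests an item named "dataloader_state" that the existing checkpoint does not contain (traceback below). A hedged sketch of that failure mode using orbax's composite restore args; the item names and handlers here are assumptions, not the repository's actual code:

```python
# Hypothetical illustration: CompositeCheckpointHandler.restore raises KeyError
# when the requested items include one absent from the saved checkpoint.
import orbax.checkpoint as ocp

def restore_latest(manager: ocp.CheckpointManager, abstract_state, want_dataloader_state: bool):
    items = {"model_state": ocp.args.StandardRestore(abstract_state)}
    if want_dataloader_state:
        # Requesting this item against a checkpoint saved without it triggers
        # the 'Item "dataloader_state" was not found in the checkpoint' error.
        items["dataloader_state"] = ocp.args.JsonRestore()
    return manager.restore(manager.latest_step(), args=ocp.args.Composite(**items))
```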
+20738,15508534,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3210454\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1757086640\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757090240\r\nSLURM_PMI2_SRUN_PORT=41191\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3469576\r\nSLURM_PTY_PORT=43733\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=29\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=88\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=34641\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3469576\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=34641\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +20739,15508674,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +20740,15509386,"TERMINAL",0,0,"9\t",,terminal_output +20741,15509386,"TERMINAL",0,0,"9449",,terminal_output +20742,15510384,"TERMINAL",0,0,"40\t",,terminal_output +20743,15510430,"TERMINAL",0,0,"405520",,terminal_output +20744,15511470,"TERMINAL",0,0,"1\t",,terminal_output +20745,15511470,"TERMINAL",0,0,"1661",,terminal_output +20746,15512506,"TERMINAL",0,0,"2\t",,terminal_output +20747,15512517,"TERMINAL",0,0,"2772",,terminal_output +20748,15513626,"TERMINAL",0,0,"3\t",,terminal_output +20749,15513626,"TERMINAL",0,0,"3883",,terminal_output +20750,15514646,"TERMINAL",0,0,"4\t",,terminal_output +20751,15514647,"TERMINAL",0,0,"4994",,terminal_output +20752,15515664,"TERMINAL",0,0,"51",,terminal_output +20753,15515664,"TERMINAL",0,0,"550505",,terminal_output +20754,15516687,"TERMINAL",0,0,"6\t",,terminal_output +20755,15516687,"TERMINAL",0,0,"6116",,terminal_output +20756,15517711,"TERMINAL",0,0,"7\t",,terminal_output +20757,15517713,"TERMINAL",0,0,"7227",,terminal_output +20758,15518521,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +20759,15518718,"TERMINAL",0,0,"8\t",,terminal_output +20760,15518747,"TERMINAL",0,0,"8338",,terminal_output +20761,15519383,"TERMINAL",0,0,"wandb: creating run\r\n",,terminal_output +20762,15519524,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_174448-3469576\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Resuming run coinrun-dyn-dev-3469576\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/3469576\r\n",,terminal_output +20763,15519763,"TERMINAL",0,0,"92",,terminal_output +20764,15519802,"TERMINAL",0,0,"9449",,terminal_output +20765,15520799,"TERMINAL",0,0,"50\t",,terminal_output +20766,15520827,"TERMINAL",0,0,"505530",,terminal_output +20767,15521832,"TERMINAL",0,0,"1\t",,terminal_output +20768,15521865,"TERMINAL",0,0,"1661",,terminal_output +20769,15522932,"TERMINAL",0,0,"2\t",,terminal_output +20770,15522932,"TERMINAL",0,0,"2772",,terminal_output +20771,15523962,"TERMINAL",0,0,"3\t",,terminal_output +20772,15523967,"TERMINAL",0,0,"3883",,terminal_output +20773,15524199,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['dynamics', 'lam', 'tokenizer']\r\nParameter counts:\r\n{'dynamics': 26555904, 'lam': 35115232, 'tokenizer': 33750256, 'total': 95421392}\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_dynamics.py"", line 375, in \r\n restored = checkpoint_manager.restore(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/checkpoint_manager.py"", line 1666, in restore\r\n restored = self._checkpointer.restore(restore_directory, args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/async_checkpointer.py"", line 571, in restore\r\n return super().restore(directory, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 306, in restore\r\n restored = self._restore(directory, args=ckpt_args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 328, in _restore\r\n return self._handler.restore(directory, args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/composite_checkpoint_handler.py"", line 852, in restore\r\n raise KeyError(\r\nKeyError: 'Item ""dataloader_state"" was not found in the checkpoint. 
Available items: [\'model_state\', \'val_dataloader_state\', \'train_dataloader_state\']'\r\n",,terminal_output +20774,15524994,"TERMINAL",0,0,"4\t",,terminal_output +20775,15525002,"TERMINAL",0,0,"4994",,terminal_output +20776,15525418,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-dyn-dev-3469576 at: https://wandb.ai/instant-uv/jafar/runs/3469576\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250905_174448-3469576/logs\r\n",,terminal_output +20777,15525996,"TERMINAL",0,0,"5\t",,terminal_output +20778,15526079,"TERMINAL",0,0,"59:009:005",,terminal_output +20779,15526106,"TERMINAL",0,0,"srun: error: hkn0401: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +20780,15527021,"TERMINAL",0,0,"6\t",,terminal_output +20781,15527142,"TERMINAL",0,0,"6116",,terminal_output +20782,15528067,"TERMINAL",0,0,"7\t",,terminal_output +20783,15528110,"TERMINAL",0,0,"7227",,terminal_output +20784,15529100,"TERMINAL",0,0,"8\t",,terminal_output +20785,15529147,"TERMINAL",0,0,"8338",,terminal_output +20786,15530133,"TERMINAL",0,0,"9\t",,terminal_output +20787,15530187,"TERMINAL",0,0,"9449",,terminal_output +20788,15531175,"TERMINAL",0,0,"5:00\t",,terminal_output +20789,15531229,"TERMINAL",0,0,"5:005540",,terminal_output +20790,15532260,"TERMINAL",0,0,"1\t",,terminal_output +20791,15532273,"TERMINAL",0,0,"1661",,terminal_output +20792,15533280,"TERMINAL",0,0,"2\t",,terminal_output +20793,15533319,"TERMINAL",0,0,"2883",,terminal_output +20794,15534327,"TERMINAL",0,0,"3\t",,terminal_output +20795,15534343,"TERMINAL",0,0,"4994",,terminal_output +20796,15535334,"TERMINAL",0,0,"5\t",,terminal_output +20797,15535384,"TERMINAL",0,0,"510105",,terminal_output +20798,15536448,"TERMINAL",0,0,"6\t",,terminal_output +20799,15536449,"TERMINAL",0,0,"6116",,terminal_output +20800,15537390,"TERMINAL",0,0,"7\t",,terminal_output +20801,15537461,"TERMINAL",0,0,"7227",,terminal_output +20802,15538496,"TERMINAL",0,0,"8\t",,terminal_output +20803,15538512,"TERMINAL",0,0,"8338",,terminal_output +20804,15539475,"TERMINAL",0,0,"9\t",,terminal_output +20805,15539577,"TERMINAL",0,0,"9449",,terminal_output +20806,15540520,"TERMINAL",0,0,"103",,terminal_output +20807,15540576,"TERMINAL",0,0,"105550",,terminal_output +20808,15541544,"TERMINAL",0,0,"1\t",,terminal_output +20809,15541613,"TERMINAL",0,0,"1661",,terminal_output +20810,15542700,"TERMINAL",0,0,"2\t",,terminal_output +20811,15542701,"TERMINAL",0,0,"2772",,terminal_output +20812,15543726,"TERMINAL",0,0,"3\t",,terminal_output +20813,15543732,"TERMINAL",0,0,"3883",,terminal_output +20814,15544685,"TERMINAL",0,0,"4\t",,terminal_output +20815,15544739,"TERMINAL",0,0,"4994",,terminal_output +20816,15545768,"TERMINAL",0,0,"5\t",,terminal_output +20817,15545785,"TERMINAL",0,0,"520205",,terminal_output +20818,15546820,"TERMINAL",0,0,"6\t",,terminal_output +20819,15546832,"TERMINAL",0,0,"6116",,terminal_output +20820,15547775,"TERMINAL",0,0,"7\t",,terminal_output +20821,15547912,"TERMINAL",0,0,"7227",,terminal_output +20822,15548841,"TERMINAL",0,0,"821",,terminal_output +20823,15548929,"TERMINAL",0,0,"8338",,terminal_output +20824,15549970,"TERMINAL",0,0,"94",,terminal_output +20825,15549980,"TERMINAL",0,0,"9449",,terminal_output +20826,15550905,"TERMINAL",0,0,"20\t",,terminal_output +20827,15551006,"TERMINAL",0,0,"20558:00",,terminal_output +20828,15552925,"TERMINAL",0,0,"1772",,terminal_output 
+20829,15552925,"TERMINAL",0,0,"116",,terminal_output +20830,15554008,"TERMINAL",0,0,"3883",,terminal_output +20831,15554008,"TERMINAL",0,0,"3\t",,terminal_output +20832,15555007,"TERMINAL",0,0,"4994",,terminal_output +20833,15555008,"TERMINAL",0,0,"4\t",,terminal_output +20834,15556000,"TERMINAL",0,0,"530305",,terminal_output +20835,15556003,"TERMINAL",0,0,"5\t",,terminal_output +20836,15557038,"TERMINAL",0,0,"6\t",,terminal_output +20837,15557038,"TERMINAL",0,0,"6116",,terminal_output +20838,15558096,"TERMINAL",0,0,"7227",,terminal_output +20839,15558101,"TERMINAL",0,0,"7\t",,terminal_output +20840,15559105,"TERMINAL",0,0,"8338",,terminal_output +20841,15559105,"TERMINAL",0,0,"8\t",,terminal_output +20842,15560216,"TERMINAL",0,0,"9\t",,terminal_output +20843,15560232,"TERMINAL",0,0,"9449",,terminal_output +20844,15561447,"TERMINAL",0,0,"30\t",,terminal_output +20845,15561453,"TERMINAL",0,0,"305510",,terminal_output +20846,15562459,"TERMINAL",0,0,"1\t",,terminal_output +20847,15562459,"TERMINAL",0,0,"1661",,terminal_output +20848,15563287,"TERMINAL",0,0,"2\t",,terminal_output +20849,15563287,"TERMINAL",0,0,"2772",,terminal_output +20850,15564490,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\nfrom typing import cast\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import create_dataloader_iterator\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n val_data_dir: 
str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(\n model: Genie, inputs: dict\n) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n """"""Compute masked dynamics loss""""""\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training=True)\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt, recon)).mean()\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@nnx.jit\ndef train_step(\n model: Genie, optimizer: nnx.Optimizer, inputs: dict\n) -> tuple[jax.Array, jax.Array, dict]:\n """"""Update state and compute metrics""""""\n\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(model)\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n\n@nnx.jit\ndef val_step(genie: Genie, inputs: dict) -> tuple[jax.Array, jax.Array, dict]:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n return loss, recon, metrics\n\n\ndef calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n for videos in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n inputs = dict(videos=videos, mask_rng=_rng_mask)\n loss, recon, metrics = val_step(genie, inputs)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}"")\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([m[key] for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n return val_loss, val_metrics, inputs, recon\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n optimizer = nnx.Optimizer(genie, tx)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n 
nnx.update(optimizer, optimizer_sharded_state)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n image_shape = (args.image_height, args.image_width, args.image_channels)\n train_iterator = create_dataloader_iterator(args.data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n if args.val_data_dir:\n val_iterator = create_dataloader_iterator(args.val_data_dir, image_shape, args.seq_len, args.batch_size, args.seed)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if args.val_data_dir:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else: \n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if args.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n optimizer = restore_genie_components(optimizer, replicated_sharding, rng, args)\n # NOTE: We have to remove the (unused) tokenizer vq dropout due flax.nnx lazily initializing modules.\n # Specifically, the first dynamics model checkpoint will contain the vq dropout module,\n # but the first full restore will fail due to nnx not initializing the module when\n # dropout is set to 0.0.\n del optimizer.model.tokenizer.vq.drop\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n 
jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in train_iterator\n )\n if args.val_data_dir:\n dataloader_val = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in val_iterator\n )\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n for videos in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n inputs = dict(videos=videos, mask_rng=_rng_mask)\n loss, recon, metrics = train_step(optimizer.model, optimizer, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Validation loss ---\n if args.val_data_dir and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(f""Calculating validation metrics..."")\n val_loss, val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(dataloader_val, optimizer.model, _rng_mask_val)\n print(f""Step {step}, validation loss: {val_loss}"")\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {\n ""loss"": loss,\n ""step"": step,\n **metrics\n }\n if args.val_data_dir and step % args.val_interval == 0:\n log_dict.update({\n ""val_loss"": val_loss,\n **val_metrics\n })\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n gt_seq_val = val_gt_batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq_val = val_recon[0].clip(0, 1)\n val_comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n val_comparison_seq = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(np.asarray(gt_seq_val[0])),\n val_recon=wandb.Image(np.asarray(recon_seq_val[0])),\n val_true_vs_recon=wandb.Image(\n np.asarray(val_comparison_seq.astype(np.uint8))\n )\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n if args.val_data_dir:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n )\n )\n else: \n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n )\n )\n checkpoint_manager.save(\n step,\n args=ckpt_manager_args\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n 
checkpoint_manager.close()\n",python,tab +20851,15564633,"TERMINAL",0,0,"3\t",,terminal_output +20852,15564634,"TERMINAL",0,0,"3994",,terminal_output +20853,15565479,"TERMINAL",0,0,"5\t",,terminal_output +20854,15565479,"TERMINAL",0,0,"540405",,terminal_output +20855,15566517,"TERMINAL",0,0,"6\t",,terminal_output +20856,15566517,"TERMINAL",0,0,"6116",,terminal_output +20857,15566685,"train_dynamics.py",1367,0,"",python,selection_mouse +20858,15567448,"TERMINAL",0,0,"7\t",,terminal_output +20859,15567449,"TERMINAL",0,0,"7227",,terminal_output +20860,15567952,"train_dynamics.py",1311,0,"",python,selection_mouse +20861,15568418,"train_dynamics.py",2500,0,"",python,selection_command +20862,15568461,"TERMINAL",0,0,"8\t",,terminal_output +20863,15568503,"TERMINAL",0,0,"8338",,terminal_output +20864,15569486,"TERMINAL",0,0,"97",,terminal_output +20865,15569509,"TERMINAL",0,0,"9449",,terminal_output +20866,15570532,"TERMINAL",0,0,"40\t",,terminal_output +20867,15570564,"TERMINAL",0,0,"405520",,terminal_output +20868,15571575,"TERMINAL",0,0,"1\t",,terminal_output +20869,15571598,"TERMINAL",0,0,"1661",,terminal_output +20870,15572699,"TERMINAL",0,0,"2\t",,terminal_output +20871,15572700,"TERMINAL",0,0,"2772",,terminal_output +20872,15573575,"train_dynamics.py",10360,0,"",python,selection_command +20873,15573651,"TERMINAL",0,0,"3\t",,terminal_output +20874,15573689,"TERMINAL",0,0,"3883",,terminal_output +20875,15574101,"train_dynamics.py",10549,0,"",python,selection_command +20876,15574395,"train_dynamics.py",10773,0,"",python,selection_command +20877,15574700,"TERMINAL",0,0,"4\t",,terminal_output +20878,15574710,"TERMINAL",0,0,"4994",,terminal_output +20879,15575385,"train_dynamics.py",10980,0,"",python,selection_command +20880,15575734,"TERMINAL",0,0,"5\t",,terminal_output +20881,15575751,"TERMINAL",0,0,"550505",,terminal_output +20882,15575833,"train_dynamics.py",12356,0,"",python,selection_command +20883,15576795,"TERMINAL",0,0,"6\t",,terminal_output +20884,15576796,"TERMINAL",0,0,"6116",,terminal_output +20885,15577796,"TERMINAL",0,0,"7\t",,terminal_output +20886,15577828,"TERMINAL",0,0,"7227",,terminal_output +20887,15578973,"TERMINAL",0,0,"8\t",,terminal_output +20888,15578979,"TERMINAL",0,0,"8338",,terminal_output +20889,15579245,"train_dynamics.py",12356,0,"t",python,content +20890,15579247,"train_dynamics.py",12357,0,"",python,selection_keyboard +20891,15579284,"train_dynamics.py",12357,0,"r",python,content +20892,15579284,"train_dynamics.py",12358,0,"",python,selection_keyboard +20893,15579471,"train_dynamics.py",12358,0,"a",python,content +20894,15579473,"train_dynamics.py",12359,0,"",python,selection_keyboard +20895,15579511,"train_dynamics.py",12359,0,"i",python,content +20896,15579512,"train_dynamics.py",12360,0,"",python,selection_keyboard +20897,15579601,"train_dynamics.py",12360,0,"n",python,content +20898,15579602,"train_dynamics.py",12361,0,"",python,selection_keyboard +20899,15579829,"train_dynamics.py",12361,0,"_",python,content +20900,15579830,"train_dynamics.py",12362,0,"",python,selection_keyboard +20901,15579921,"TERMINAL",0,0,"9\t",,terminal_output +20902,15579921,"TERMINAL",0,0,"9449",,terminal_output +20903,15580317,"train_dynamics.py",12473,0,"",python,selection_command +20904,15580517,"train_dynamics.py",12571,0,"",python,selection_command +20905,15580655,"train_dynamics.py",12586,0,"",python,selection_command +20906,15580812,"train_dynamics.py",12601,0,"",python,selection_command +20907,15580936,"TERMINAL",0,0,"50\t",,terminal_output 
+20908,15580966,"TERMINAL",0,0,"505530",,terminal_output +20909,15581001,"train_dynamics.py",12648,0,"",python,selection_command +20910,15581437,"train_dynamics.py",12746,0,"",python,selection_command +20911,15581979,"TERMINAL",0,0,"1\t",,terminal_output +20912,15581997,"TERMINAL",0,0,"1661",,terminal_output +20913,15583012,"TERMINAL",0,0,"2\t",,terminal_output +20914,15583058,"TERMINAL",0,0,"2772",,terminal_output +20915,15583098,"train_dynamics.py",12752,0,"t",python,content +20916,15583099,"train_dynamics.py",12753,0,"",python,selection_keyboard +20917,15583189,"train_dynamics.py",12753,0,"r",python,content +20918,15583190,"train_dynamics.py",12754,0,"",python,selection_keyboard +20919,15583374,"train_dynamics.py",12754,0,"a",python,content +20920,15583375,"train_dynamics.py",12755,0,"",python,selection_keyboard +20921,15583486,"train_dynamics.py",12755,0,"i",python,content +20922,15583487,"train_dynamics.py",12756,0,"",python,selection_keyboard +20923,15583512,"train_dynamics.py",12756,0,"n",python,content +20924,15583513,"train_dynamics.py",12757,0,"",python,selection_keyboard +20925,15583825,"train_dynamics.py",12757,0,"_",python,content +20926,15583826,"train_dynamics.py",12758,0,"",python,selection_keyboard +20927,15584048,"TERMINAL",0,0,"3\t",,terminal_output +20928,15584089,"TERMINAL",0,0,"3883",,terminal_output +20929,15585095,"TERMINAL",0,0,"48",,terminal_output +20930,15585142,"TERMINAL",0,0,"4994",,terminal_output +20931,15585505,"train_dynamics.py",12477,0,"",python,selection_mouse +20932,15585718,"train_dynamics.py",12467,20,"val_dataloader_state",python,selection_mouse +20933,15586156,"TERMINAL",0,0,"59",,terminal_output +20934,15586165,"TERMINAL",0,0,"530:0030:005",,terminal_output +20935,15586242,"train_dynamics.py",12368,0,"",python,selection_mouse +20936,15586412,"train_dynamics.py",12356,22,"train_dataloader_state",python,selection_mouse +20937,15587138,"train_dynamics.py",12377,0,"",python,selection_command +20938,15587168,"TERMINAL",0,0,"6\t",,terminal_output +20939,15587215,"TERMINAL",0,0,"6116",,terminal_output +20940,15588183,"TERMINAL",0,0,"7\t",,terminal_output +20941,15588244,"TERMINAL",0,0,"7227",,terminal_output +20942,15588371,"train_dynamics.py",12756,0,"",python,selection_mouse +20943,15589083,"train_dynamics.py",12366,0,"",python,selection_mouse +20944,15589212,"TERMINAL",0,0,"8\t",,terminal_output +20945,15589307,"TERMINAL",0,0,"8449",,terminal_output +20946,15590119,"train_dynamics.py",12471,0,"",python,selection_command +20947,15590251,"TERMINAL",0,0,"920",,terminal_output +20948,15590345,"TERMINAL",0,0,"6:005540",,terminal_output +20949,15590502,"train_dynamics.py",12758,0,"",python,selection_command +20950,15591015,"train_dynamics.py",13150,0,"",python,selection_command +20951,15591301,"TERMINAL",0,0,"6:00\t",,terminal_output +20952,15591381,"TERMINAL",0,0,"1661",,terminal_output +20953,15592274,"train_dynamics.py",13240,0,"",python,selection_command +20954,15592363,"TERMINAL",0,0,"26",,terminal_output +20955,15592406,"TERMINAL",0,0,"2772",,terminal_output +20956,15592952,"train_dynamics.py",18324,0,"",python,selection_command +20957,15593364,"TERMINAL",0,0,"31",,terminal_output +20958,15593443,"TERMINAL",0,0,"3883",,terminal_output +20959,15593964,"train_dynamics.py",18504,0,"",python,selection_command +20960,15594403,"TERMINAL",0,0,"4\t",,terminal_output +20961,15594501,"TERMINAL",0,0,"4994",,terminal_output +20962,15594784,"train_dynamics.py",18878,0,"",python,selection_command +20963,15595444,"TERMINAL",0,0,"5\t",,terminal_output 
+20964,15595554,"TERMINAL",0,0,"510105",,terminal_output +20965,15595853,"train_dynamics.py",10360,0,"",python,selection_command +20966,15596497,"TERMINAL",0,0,"6\t",,terminal_output +20967,15596572,"TERMINAL",0,0,"6116",,terminal_output +20968,15597516,"TERMINAL",0,0,"7\t",,terminal_output +20969,15597817,"TERMINAL",0,0,"7227",,terminal_output +20970,15598553,"TERMINAL",0,0,"8\t",,terminal_output +20971,15598566,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",,terminal_output +20972,15598657,"TERMINAL",0,0,"8338",,terminal_output +20973,15599560,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\r\n#SBATCH --job-name=train_dyn_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/interactive/3468835\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/interactive/3468835\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=10 \\r\n --log_checkpoint_interval=100 \\r\n --log \\r\n --name=coinrun-dyn-dev-$slurm_job_id \\r\n --tags dyn coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 300 \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 10 \\r\n --val_steps 50 \\r\n --wandb_id=$slurm_job_id \\r\n --restore_ckpt \\r\n --data_dir $array_records_dir_train \\r\n --lam_checkpoint $lam_checkpoint \\r\n --tokenizer_checkpoint $tokenizer_checkpoint\r\n",,terminal_output +20974,15599595,"TERMINAL",0,0,"9\t",,terminal_output 
+20975,15599728,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3210454\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1757086640\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1757090240\r\nSLURM_PMI2_SRUN_PORT=41191\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3469576\r\nSLURM_PTY_PORT=43733\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=29\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=88\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=34641\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3469576\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=34641\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +20976,15599728,"TERMINAL",0,0,"9449",,terminal_output +20977,15599828,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-825aa81a-f8dc-4fd3-8ed5-69638fcbfc5f1759823186564-2025_10_07-09.46.57.798/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-825aa81a-f8dc-4fd3-8ed5-69638fcbfc5f1759823186564-2025_10_07-09.46.57.798/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..aff10a96f02eeffe30fd72968fff4fb7fa1dd534 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-825aa81a-f8dc-4fd3-8ed5-69638fcbfc5f1759823186564-2025_10_07-09.46.57.798/source.csv @@ -0,0 +1,751 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,356,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"9:46:57 AM [info] Activating crowd-code\n9:46:57 AM [info] Recording started\n9:46:57 AM [info] Initializing git provider using file system watchers...\n",Log,tab +3,593,"extension-output-pdoom-org.crowd-code-#1-crowd-code",150,0,"9:46:58 AM [info] Git repository found\n9:46:58 AM [info] Git provider initialized successfully\n9:46:58 AM [info] Initial git state: [object Object]\n",Log,content +4,4425,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_command +5,4829,"TERMINAL",0,0,"ls",,terminal_command +6,4836,"TERMINAL",0,0,"]633;C ali-old-branch.diff diff2.diff frames killer.sh models requirements-franz.txt tests\r\n' checklist.md' diff.diff gifs LICENSE overfit_dir.zip samples utils\r\n data doom_job_starter.sh input_pipeline log.log __pycache__ scripts_cremers uv.lock\r\n dataset_duplicates.ipynb frame-knoms.png jasmine logs pyproject.toml slurm wandb\r\n debug frame.png killer_partition.sh message.md README.md 
test.py\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +7,11707,"TERMINAL",0,0,"queue",,terminal_command +8,11747,"TERMINAL",0,0,"]633;C",,terminal_output +9,11825,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Tue Oct 7 09:47:09 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3547802 accelerat tokenize tum_cte0 R 10:45:17\t 1 hkn06353543728 accelerat preproce tum_cte0 R 1-14:09:39\t 1 hkn04253543730 accelerat preproce tum_cte0 R 1-14:09:39\t 1 hkn0632",,terminal_output +10,12920,"TERMINAL",0,0,"1084040",,terminal_output +11,13162,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +12,17398,"TERMINAL",0,0,"tmux a",,terminal_command +13,17423,"TERMINAL",0,0,"]633;C[?1049h[?1h=[?12l[?25h[?1000l[?1002l[?1003l[?1006l[?1005l(B[?12l[?25h[?1006l[?1000l[?1002l[?1003l[?2004l[>c[>q[?2004h[?25l(jasmine) [tum_cte0515@hkn1991 jasmine]$ sh doom_job_starter.sh\r\nSubmitted batch job 3548816\r\nSubmitted batch job 3548817\r\nscheduled 60x80 job\r\nSubmitted batch job 3548890\r\nSubmitted batch job 3548891\r\n(jasmine) [tum_cte0515@hkn1991 jasmine]$ \r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n[0] 0:bash* ""hkn1991.localdomain"" 09:47 07-Oct-25(B[?12l[?25h\r(jasmine) [tum_cte0515@hkn1991 jasmine]$ (B[?12l[?25h[?1006l[?1000l[?1002l[?1003l[?2004l[?2004h[?25l(jasmine) [tum_cte0515@hkn1991 jasmine]$ sh doom_job_starter.sh\r\nSubmitted batch job 3548816\r\nSubmitted batch job 3548817\r\nscheduled 60x80 job\r\nSubmitted batch job 3548890\r\nSubmitted batch job 3548891\r\n(jasmine) [tum_cte0515@hkn1991 jasmine]$ \r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n[0] 0:bash* ""hkn1991.localdomain"" 09:47 07-Oct-25(B[?12l[?25h",,terminal_output +14,17587,"TERMINAL",0,0,"[?7727h",,terminal_output +15,29337,"TERMINAL",0,0,"\r\nlogout\r\n[?2004l(B[?1l>[?12l[?25h[?1000l[?1002l[?1003l[?1006l[?1005l[?7727l[?1004l[?1049l[exited]\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +16,31046,"TERMINAL",0,0,"queue",,terminal_command +17,31124,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Tue Oct 7 09:47:28 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3547802 accelerat tokenize tum_cte0 R 10:45:36\t 1 hkn06353543728 accelerat preproce tum_cte0 R 1-14:09:58\t 1 hkn04253543730 accelerat preproce tum_cte0 R 1-14:09:58\t 1 hkn0632",,terminal_output +18,32146,"TERMINAL",0,0,"9799",,terminal_output +19,33191,"TERMINAL",0,0,"30810:0010:00",,terminal_output +20,33519,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +21,35207,"TERMINAL",0,0,"logs",,terminal_command +22,35234,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir",,terminal_output +23,35601,"TERMINAL",0,0,"ls",,terminal_command +24,35681,"TERMINAL",0,0,"]633;C",,terminal_output +25,35721,"TERMINAL",0,0,"atari train_lam_action_space_scaling_50_3329789.log\r\nbig_run train_lam_action_space_scaling_50_3329804.log\r\nbig-runs train_lam_action_space_scaling_50_3331286.log\r\nbreakout train_lam_action_space_scaling_6_3318549.log\r\ncausal train_lam_action_space_scaling_6_3320178.log\r\ncoinrun train_lam_action_space_scaling_6_3321528.log\r\ndata_coinrun train_lam_action_space_scaling_6_3329790.log\r\ndoom train_lam_action_space_scaling_6_3329805.log\r\njafar_og_reproduction train_lam_action_space_scaling_6_3331287.log\r\nlam 
train_lam_action_space_scaling_8_3318550.log\r\nmaskgit train_lam_action_space_scaling_8_3329791.log\r\nmaskgit-maskprob-fix train_lam_action_space_scaling_8_3329806.log\r\npreprocess train_lam_action_space_scaling_8_3331288.log\r\ntrain_dyn_causal_180M_3372931.log train_lam_batch_size_scaling_sqrt_lr_8_nodes_3528968.log\r\ntrain_dyn_causal_180M_3372963.log train_lam_minecraft_overfit_sample_3309655.log\r\ntrain_dyn_causal_180M_3372969.log train_lam_model_size_scaling_38M_3317098.log\r\ntrain_dyn_causal_180M_3373107.log train_lam_model_size_scaling_38M_3317115.log\r\ntrain_dyn_causal_255M_3372932.log train_lam_model_size_scaling_38M_3317231.log\r\ntrain_dyn_causal_255M_3372970.log train_tokenizer_batch_size_scaling_16_node_3321526.log\r\ntrain_dyn_causal_255M_3373108.log train_tokenizer_batch_size_scaling_1_node_3318551.log\r\ntrain_dyn_causal_356M_3372934.log train_tokenizer_batch_size_scaling_2_node_3318552.log\r\ntrain_dyn_causal_356M_3372971.log train_tokenizer_batch_size_scaling_2_node_3330806.log\r\ntrain_dyn_causal_356M_3373109.log train_tokenizer_batch_size_scaling_2_node_3330848.log\r\ntrain_dyn_causal_500M_3372936.log train_tokenizer_batch_size_scaling_2_node_3331282.log\r\ntrain_dyn_causal_500M_3372972.log train_tokenizer_batch_size_scaling_4_node_3318553.log\r\ntrain_dyn_causal_500M_3373110.log train_tokenizer_batch_size_scaling_4_node_3320175.log\r\ntrain_dyn_new_arch-bugfixed-spatial-shift_3359343.log train_tokenizer_batch_size_scaling_4_node_3321524.log\r\ntrain_dyn_new_arch-bugfixed-temporal-shift_3359349.log train_tokenizer_batch_size_scaling_8_node_3320176.log\r\ntrain_dyn_yolorun_3333026.log train_tokenizer_batch_size_scaling_8_node_3321525.log\r\ntrain_dyn_yolorun_3333448.log train_tokenizer_batch_size_scaling_8_node_3528969.log\r\ntrain_dyn_yolorun_3335345.log train_tokenizer_minecraft_overfit_sample_3309656.log\r\ntrain_dyn_yolorun_3335362.log train_tokenizer_model_size_scaling_127M_3317233.log\r\ntrain_dyn_yolorun_3348592.log train_tokenizer_model_size_scaling_127M_3318554.log\r\ntrain_dyn_yolorun_new_arch_3351743.log train_tokenizer_model_size_scaling_140M_3313562.log\r\ntrain_dyn_yolorun_new_arch_3352103.log train_tokenizer_model_size_scaling_140M_3316019.log\r\ntrain_dyn_yolorun_new_arch_3352115.log train_tokenizer_model_size_scaling_200M_3313563.log\r\ntrain_dyn_yolorun_new_arch_3358457.log train_tokenizer_model_size_scaling_200M_3316020.log\r\ntrain_lam_action_space_scaling_10_3320179.log train_tokenizer_model_size_scaling_227M_3317234.log\r\ntrain_lam_action_space_scaling_10_3321529.log train_tokenizer_model_size_scaling_227M_3318555.log\r\ntrain_lam_action_space_scaling_10_3329786.log train_tokenizer_model_size_scaling_227M_3320173.log\r\ntrain_lam_action_space_scaling_10_3329801.log train_tokenizer_model_size_scaling_227M_3321523.log\r\ntrain_lam_action_space_scaling_10_3331283.log train_tokenizer_model_size_scaling_37M_3313565.log\r\ntrain_lam_action_space_scaling_12_3318546.log train_tokenizer_model_size_scaling_37M_3316022.log\r\ntrain_lam_action_space_scaling_12_3320177.log train_tokenizer_model_size_scaling_37M_3317232.log\r\ntrain_lam_action_space_scaling_12_3321527.log train_tokenizer_model_size_scaling_37M_3317239.log\r\ntrain_lam_action_space_scaling_12_3329787.log train_tokenizer_model_size_scaling_37M_3318556.log\r\ntrain_lam_action_space_scaling_12_3329802.log train_tokenizer_model_size_scaling_74M_3318557.log\r\ntrain_lam_action_space_scaling_12_3331284.log 
train_tokenizer_model_size_scaling_74M_3320174.log\r\ntrain_lam_action_space_scaling_20_3318547.log train_tokenizer_model_size_scaling_74M_3321522.log\r\ntrain_lam_action_space_scaling_20_3329788.log train_tokenizer_model_size_scaling_80M_3313564.log\r\ntrain_lam_action_space_scaling_20_3329803.log train_tokenizer_model_size_scaling_80M_3316026.log\r\ntrain_lam_action_space_scaling_20_3331285.log yoloruns\r\ntrain_lam_action_space_scaling_50_3320180.log\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir",,terminal_output +26,37704,"TERMINAL",0,0,"cd doom/",,terminal_command +27,37912,"TERMINAL",0,0,"ls",,terminal_command +28,40667,"TERMINAL",0,0,"cd dynamics/",,terminal_command +29,43113,"TERMINAL",0,0,"ls",,terminal_command +30,45384,"TERMINAL",0,0,"cd maskgit/",,terminal_command +31,45780,"TERMINAL",0,0,"ls",,terminal_command +32,49271,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_120x160_3548890.log",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=dynamics_doom_120x160\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/doom/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/train_tokenizer_default_single_node_120x160/3546530\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=doom-dynamics-120x160-$slurm_job_id \\n --image_height=120 \\n --image_width=160 \\n --tags doom dynamics maskgit default 120x160 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait 
$child_pidSLURM_JOB_USER=tum_cte0515\nSLURM_TASKS_PER_NODE=1\nSLURM_JOB_UID=999226\nSLURM_TASK_PID=3020545\nSLURM_JOB_GPUS=2\nSLURM_LOCALID=0\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\nSLURMD_NODENAME=hkn0735\nSLURM_JOB_START_TIME=1759814102\nSLURM_CLUSTER_NAME=hk\nSLURM_JOB_END_TIME=1759986902\nSLURM_CPUS_ON_NODE=6\nSLURM_JOB_CPUS_PER_NODE=6\nSLURM_GPUS_ON_NODE=1\nSLURM_GTIDS=0\nSLURM_JOB_PARTITION=accelerated\nSLURM_TRES_PER_TASK=cpu=5\nSLURM_OOM_KILL_STEP=0\nSLURM_JOB_NUM_NODES=1\nSLURM_JOBID=3548890\nSLURM_JOB_QOS=normal\nSLURM_PROCID=0\nSLURM_CPUS_PER_TASK=5\nSLURM_NTASKS=1\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e9.hkn0735\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\nSLURM_SCRIPT_CONTEXT=prolog_task\nSLURM_NODELIST=hkn0735\nSLURM_JOB_ACCOUNT=hk-project-p0023960\nSLURM_PRIO_PROCESS=0\nSLURM_NPROCS=1\nSLURM_NNODES=1\nSLURM_SUBMIT_HOST=hkn1991.localdomain\nSLURM_JOB_ID=3548890\nSLURM_NODEID=0\nSLURM_CONF=/etc/slurm/slurm.conf\nSLURM_JOB_NAME=dynamics_doom_120x160\nSLURM_NTASKS_PER_NODE=1\nSLURM_JOB_GID=502226\nSLURM_JOB_NODELIST=hkn0735\nGpuFreq=control_disabled\nE1007 07:15:33.328609 3020671 cuda_platform.cc:51] failed call to cuInit: INTERNAL: CUDA error: Failed call to cuInit: CUDA_ERROR_UNKNOWN: unknown error\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 812, in backends\n backend = _init_backend(platform)\n ^^^^^^^^^^^^^^^^^^^^^^^\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 896, in _init_backend\n backend = registration.factory()\n ^^^^^^^^^^^^^^^^^^^^^^\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 559, in make_pjrt_c_api_client\n return xla_client.make_c_api_client(\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jaxlib/xla_client.py"", line 156, in make_c_api_client\n return _xla.get_c_api_client(\n ^^^^^^^^^^^^^^^^^^^^^^\njaxlib._jax.XlaRuntimeError: FAILED_PRECONDITION: No visible GPU devices.\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics.py"", line 826, in \n main(args)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics.py"", line 413, in main\n num_devices = jax.device_count()\n ^^^^^^^^^^^^^^^^^^\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 975, in device_count\n return int(get_backend(backend).device_count())\n ^^^^^^^^^^^^^^^^^^^^\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 944, in get_backend\n return _get_backend_uncached(platform)\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 923, in _get_backend_uncached\n bs = backends()\n ^^^^^^^^^^\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 828, in backends\n raise RuntimeError(err_msg)\nRuntimeError: Unable to initialize backend 'cuda': FAILED_PRECONDITION: No visible GPU devices. (you may need to uninstall the failing plugin package, or set JAX_PLATFORMS=cpu to skip this backend.)\nsrun: error: hkn0735: task 0: Exited with exit code 1\n\n============================= JOB FEEDBACK =============================\n\nJob ID: 3548890\nCluster: hk\nUser/Group: tum_cte0515/hk-project-p0023960\nAccount: hk-project-p0023960\nState: FAILED (exit code 1)\nPartition: accelerated\nNodes: 1\nCores per node: 6\nNodelist: hkn0735\nCPU Utilized: 00:00:03\nCPU Efficiency: 1.61% of 00:03:06 core-walltime\nJob Wall-clock time: 00:00:31\nStarttime: Tue Oct 7 07:15:02 2025\nEndtime: Tue Oct 7 07:15:33 2025\nMemory Utilized: 267.94 MB\nMemory Efficiency: 0.22% of 121.46 GB (121.46 GB/node)\nEnergy Consumed: 646 Joule / 0.179444444444444 Watthours\nAverage node power draw: 20.8387096774194 Watt\n",log,tab +33,58338,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_120x160_3548891.log",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=dynamics_doom_120x160\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/doom/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/train_tokenizer_default_single_node_120x160/3546530\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=doom-dynamics-120x160-$slurm_job_id \\n --image_height=120 \\n --image_width=160 \\n --tags doom dynamics maskgit default 120x160 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir 
$array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pidSLURM_JOB_USER=tum_cte0515\nSLURM_TASKS_PER_NODE=1\nSLURM_JOB_UID=999226\nSLURM_TASK_PID=3020544\nSLURM_JOB_GPUS=3\nSLURM_LOCALID=0\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\nSLURMD_NODENAME=hkn0735\nSLURM_JOB_START_TIME=1759814102\nSLURM_CLUSTER_NAME=hk\nSLURM_JOB_END_TIME=1759986902\nSLURM_CPUS_ON_NODE=6\nSLURM_JOB_CPUS_PER_NODE=6\nSLURM_GPUS_ON_NODE=1\nSLURM_GTIDS=0\nSLURM_JOB_PARTITION=accelerated\nSLURM_TRES_PER_TASK=cpu=5\nSLURM_OOM_KILL_STEP=0\nSLURM_JOB_NUM_NODES=1\nSLURM_JOBID=3548891\nSLURM_JOB_QOS=normal\nSLURM_PROCID=0\nSLURM_CPUS_PER_TASK=5\nSLURM_NTASKS=1\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e9.hkn0735\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\nSLURM_SCRIPT_CONTEXT=prolog_task\nSLURM_NODELIST=hkn0735\nSLURM_JOB_ACCOUNT=hk-project-p0023960\nSLURM_PRIO_PROCESS=0\nSLURM_NPROCS=1\nSLURM_NNODES=1\nSLURM_SUBMIT_HOST=hkn1991.localdomain\nSLURM_JOB_ID=3548891\nSLURM_NODEID=0\nSLURM_CONF=/etc/slurm/slurm.conf\nSLURM_JOB_NAME=dynamics_doom_120x160\nSLURM_NTASKS_PER_NODE=1\nSLURM_JOB_GID=502226\nSLURM_JOB_NODELIST=hkn0735\nGpuFreq=control_disabled\nE1007 07:15:33.331609 3020669 cuda_platform.cc:51] failed call to cuInit: INTERNAL: CUDA error: Failed call to cuInit: CUDA_ERROR_UNKNOWN: unknown error\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 812, in backends\n backend = _init_backend(platform)\n ^^^^^^^^^^^^^^^^^^^^^^^\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 896, in _init_backend\n backend = registration.factory()\n ^^^^^^^^^^^^^^^^^^^^^^\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 559, in make_pjrt_c_api_client\n return xla_client.make_c_api_client(\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jaxlib/xla_client.py"", line 156, in make_c_api_client\n return _xla.get_c_api_client(\n ^^^^^^^^^^^^^^^^^^^^^^\njaxlib._jax.XlaRuntimeError: FAILED_PRECONDITION: No visible GPU devices.\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics.py"", line 826, in \n main(args)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics.py"", line 413, in main\n num_devices = jax.device_count()\n ^^^^^^^^^^^^^^^^^^\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 975, in device_count\n return int(get_backend(backend).device_count())\n ^^^^^^^^^^^^^^^^^^^^\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 944, in get_backend\n return _get_backend_uncached(platform)\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 923, in _get_backend_uncached\n bs = backends()\n ^^^^^^^^^^\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 828, in backends\n raise RuntimeError(err_msg)\nRuntimeError: Unable to initialize backend 'cuda': FAILED_PRECONDITION: No visible GPU devices. (you may need to uninstall the failing plugin package, or set JAX_PLATFORMS=cpu to skip this backend.)\nsrun: error: hkn0735: task 0: Exited with exit code 1\n\n============================= JOB FEEDBACK =============================\n\nJob ID: 3548891\nCluster: hk\nUser/Group: tum_cte0515/hk-project-p0023960\nAccount: hk-project-p0023960\nState: FAILED (exit code 1)\nPartition: accelerated\nNodes: 1\nCores per node: 6\nNodelist: hkn0735\nCPU Utilized: 00:00:03\nCPU Efficiency: 1.61% of 00:03:06 core-walltime\nJob Wall-clock time: 00:00:31\nStarttime: Tue Oct 7 07:15:02 2025\nEndtime: Tue Oct 7 07:15:33 2025\nMemory Utilized: 268.09 MB\nMemory Efficiency: 0.22% of 121.46 GB (121.46 GB/node)\nEnergy Consumed: 7161 Joule / 1.98916666666667 Watthours\nAverage node power draw: 231 Watt\n",log,tab +34,71131,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_120x160_3548891.log",3295,0,"",log,selection_mouse +35,71173,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_120x160_3548891.log",3294,0,"",log,selection_command +36,79445,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_60x80_3548817.log",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=dynamics_doom_60x80\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/doom/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/train_tokenizer_default_single_gpu_60x80/3547697\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n 
--wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=doom-dynamics-60x80-$slurm_job_id \\n --image_height=60 \\n --image_width=80 \\n --tags doom dynamics maskgit default 60x80 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pidSLURM_JOB_USER=tum_cte0515\nSLURM_TASKS_PER_NODE=1\nSLURM_JOB_UID=999226\nSLURM_TASK_PID=2881333\nSLURM_JOB_GPUS=3\nSLURM_LOCALID=0\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\nSLURMD_NODENAME=hkn0735\nSLURM_JOB_START_TIME=1759799779\nSLURM_CLUSTER_NAME=hk\nSLURM_JOB_END_TIME=1759972579\nSLURM_CPUS_ON_NODE=6\nSLURM_JOB_CPUS_PER_NODE=6\nSLURM_GPUS_ON_NODE=1\nSLURM_GTIDS=0\nSLURM_JOB_PARTITION=accelerated\nSLURM_TRES_PER_TASK=cpu=5\nSLURM_OOM_KILL_STEP=0\nSLURM_JOB_NUM_NODES=1\nSLURM_JOBID=3548817\nSLURM_JOB_QOS=normal\nSLURM_PROCID=0\nSLURM_CPUS_PER_TASK=5\nSLURM_NTASKS=1\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e9.hkn0735\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\nSLURM_SCRIPT_CONTEXT=prolog_task\nSLURM_NODELIST=hkn0735\nSLURM_JOB_ACCOUNT=hk-project-p0023960\nSLURM_PRIO_PROCESS=0\nSLURM_NPROCS=1\nSLURM_NNODES=1\nSLURM_SUBMIT_HOST=hkn1991.localdomain\nSLURM_JOB_ID=3548817\nSLURM_NODEID=0\nSLURM_CONF=/etc/slurm/slurm.conf\nSLURM_JOB_NAME=dynamics_doom_60x80\nSLURM_NTASKS_PER_NODE=1\nSLURM_JOB_GID=502226\nSLURM_JOB_NODELIST=hkn0735\nGpuFreq=control_disabled\nE1007 03:16:53.613773 2881460 cuda_platform.cc:51] failed call to cuInit: INTERNAL: CUDA error: Failed call to cuInit: CUDA_ERROR_UNKNOWN: unknown error\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 812, in backends\n backend = _init_backend(platform)\n ^^^^^^^^^^^^^^^^^^^^^^^\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 896, in _init_backend\n backend = registration.factory()\n ^^^^^^^^^^^^^^^^^^^^^^\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 559, in make_pjrt_c_api_client\n return xla_client.make_c_api_client(\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jaxlib/xla_client.py"", line 156, in make_c_api_client\n return _xla.get_c_api_client(\n ^^^^^^^^^^^^^^^^^^^^^^\njaxlib._jax.XlaRuntimeError: FAILED_PRECONDITION: No visible GPU devices.\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics.py"", line 826, in \n main(args)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics.py"", line 413, in main\n num_devices = jax.device_count()\n ^^^^^^^^^^^^^^^^^^\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 975, in device_count\n return int(get_backend(backend).device_count())\n ^^^^^^^^^^^^^^^^^^^^\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 944, in get_backend\n return 
_get_backend_uncached(platform)\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 923, in _get_backend_uncached\n bs = backends()\n ^^^^^^^^^^\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/jax/_src/xla_bridge.py"", line 828, in backends\n raise RuntimeError(err_msg)\nRuntimeError: Unable to initialize backend 'cuda': FAILED_PRECONDITION: No visible GPU devices. (you may need to uninstall the failing plugin package, or set JAX_PLATFORMS=cpu to skip this backend.)\nsrun: error: hkn0735: task 0: Exited with exit code 1\n\n============================= JOB FEEDBACK =============================\n\nJob ID: 3548817\nCluster: hk\nUser/Group: tum_cte0515/hk-project-p0023960\nAccount: hk-project-p0023960\nState: FAILED (exit code 1)\nPartition: accelerated\nNodes: 1\nCores per node: 6\nNodelist: hkn0735\nCPU Utilized: 00:00:03\nCPU Efficiency: 1.43% of 00:03:30 core-walltime\nJob Wall-clock time: 00:00:35\nStarttime: Tue Oct 7 03:16:19 2025\nEndtime: Tue Oct 7 03:16:54 2025\nMemory Utilized: 265.33 MB\nMemory Efficiency: 0.21% of 121.46 GB (121.46 GB/node)\nEnergy Consumed: 651 Joule / 0.180833333333333 Watthours\nAverage node power draw: 18.6 Watt\n",log,tab +37,86112,"TERMINAL",0,0,"queue",,terminal_command +38,86175,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Tue Oct 7 09:48:23 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3547802 accelerat tokenize tum_cte0 R 10:46:31\t 1 hkn06353543728 accelerat preproce tum_cte0 R 1-14:10:53\t 1 hkn04253543730 accelerat preproce tum_cte0 R 1-14:10:53\t 1 hkn0632",,terminal_output +39,86918,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit",,terminal_output +40,87871,"TERMINAL",0,0,"idling",,terminal_command +41,87928,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Tue Oct 7 09:48:25 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 37 nodes idle\rPartition dev_accelerated:\t 3 nodes idle\rPartition accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 6 nodes idle\rPartition accelerated-h200:\t 2 nodes idle",,terminal_output +42,89012,"TERMINAL",0,0,"6",,terminal_output +43,90018,"TERMINAL",0,0,"7",,terminal_output +44,91053,"TERMINAL",0,0,"8",,terminal_output +45,92087,"TERMINAL",0,0,"9",,terminal_output +46,93165,"TERMINAL",0,0,"30",,terminal_output +47,93781,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_60x80_3548817.log",0,0,"",log,tab +48,94203,"TERMINAL",0,0,"1",,terminal_output +49,95215,"TERMINAL",0,0,"2",,terminal_output +50,96307,"TERMINAL",0,0,"3 4",,terminal_output +51,97068,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_60x80_3548817.log",2977,0,"",log,selection_mouse +52,97212,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_60x80_3548817.log",2976,5,"hkibb",log,selection_mouse +53,97365,"TERMINAL",0,0,"5",,terminal_output +54,97503,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_60x80_3548817.log",2976,6,"hkibb.",log,selection_mouse 
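Jobs 3548890, 3548891, and 3548817 all fail the same way on hkn0735: cuInit returns CUDA_ERROR_UNKNOWN, JAX then reports "No visible GPU devices", and train_dynamics.py exits at jax.device_count() after roughly 30 seconds of wall-clock time. The traceback's suggestion to set JAX_PLATFORMS=cpu does not apply here (the jobs need the GPU); the practical fix recorded in the later edits is to keep new submissions off that node. A fail-fast guard along these lines (a sketch only, not part of the recorded sbatch scripts) would surface the broken node before the Python traceback:

# sketch only: abort early when the allocated GPU is unusable
if ! nvidia-smi >/dev/null 2>&1; then
    echo "[$(date)] no usable GPU on $(hostname) (SLURM_JOB_NODELIST=$SLURM_JOB_NODELIST); exiting" >&2
    exit 1
fi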
+55,97504,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_60x80_3548817.log",2976,13,"hkibb.hkibbi1",log,selection_mouse +56,97504,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_60x80_3548817.log",2976,32,"hkibb.hkibbi1.hkibbi1e9.hkn0735\n",log,selection_mouse +57,97513,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_60x80_3548817.log",2976,66,"hkibb.hkibbi1.hkibbi1e9.hkn0735\nSLURM_TOPOLOGY_ADDR_PATTERN=switch",log,selection_mouse +58,97598,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_60x80_3548817.log",2976,67,"hkibb.hkibbi1.hkibbi1e9.hkn0735\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.",log,selection_mouse +59,97599,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_60x80_3548817.log",2976,73,"hkibb.hkibbi1.hkibbi1e9.hkn0735\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch",log,selection_mouse +60,97699,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_60x80_3548817.log",2976,74,"hkibb.hkibbi1.hkibbi1e9.hkn0735\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.",log,selection_mouse +61,97733,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_60x80_3548817.log",2976,80,"hkibb.hkibbi1.hkibbi1e9.hkn0735\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch",log,selection_mouse +62,97867,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_60x80_3548817.log",2976,31,"hkibb.hkibbi1.hkibbi1e9.hkn0735",log,selection_mouse +63,98354,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_60x80_3548817.log",3005,0,"",log,selection_mouse +64,98386,"TERMINAL",0,0,"6",,terminal_output +65,99421,"TERMINAL",0,0,"7",,terminal_output +66,99855,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_120x160_3548891.log",0,0,"",log,tab +67,100480,"TERMINAL",0,0,"8",,terminal_output +68,101536,"TERMINAL",0,0,"9",,terminal_output +69,102574,"TERMINAL",0,0,"40",,terminal_output +70,103596,"TERMINAL",0,0,"1",,terminal_output +71,104283,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_60x80_3548817.log",0,0,"",log,tab +72,104633,"TERMINAL",0,0,"2",,terminal_output +73,105702,"TERMINAL",0,0,"3",,terminal_output +74,105954,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_120x160_3548891.log",0,0,"",log,tab +75,106713,"TERMINAL",0,0,"4",,terminal_output +76,107749,"TERMINAL",0,0,"5",,terminal_output +77,108141,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_120x160_3548890.log",0,0,"",log,tab +78,108791,"TERMINAL",0,0,"6",,terminal_output +79,109831,"TERMINAL",0,0,"7",,terminal_output +80,110875,"TERMINAL",0,0,"8",,terminal_output +81,111957,"TERMINAL",0,0,"9",,terminal_output +82,112990,"TERMINAL",0,0,"50",,terminal_output +83,114000,"TERMINAL",0,0,"1",,terminal_output +84,115024,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_120x160_3548890.log",3015,0,"",log,selection_mouse +85,115051,"TERMINAL",0,0,"2",,terminal_output 
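The selections above step through SLURM_TOPOLOGY_ADDR and the node name in each failed log; together with the squeue and sinfo_t_idle output they show that every failed job landed on hkn0735, while the still-running jobs sit on other nodes. Assuming SLURM accounting (sacct) is available on the cluster, the same check can be made without opening the logs:

sacct -j 3548890,3548891,3548817 --format=JobID,JobName%30,NodeList,State,ExitCode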
+86,115275,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_120x160_3548890.log",3015,1,"h",log,selection_mouse +87,115310,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_120x160_3548890.log",3015,2,"hk",log,selection_mouse +88,115342,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_120x160_3548890.log",3015,3,"hkn",log,selection_mouse +89,115427,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_120x160_3548890.log",3015,4,"hkn0",log,selection_mouse +90,115432,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_120x160_3548890.log",3015,5,"hkn07",log,selection_mouse +91,115514,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_120x160_3548890.log",3015,6,"hkn073",log,selection_mouse +92,115583,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_120x160_3548890.log",3015,7,"hkn0735",log,selection_mouse +93,116095,"TERMINAL",0,0,"3",,terminal_output +94,117177,"TERMINAL",0,0,"4",,terminal_output +95,118167,"TERMINAL",0,0,"5",,terminal_output +96,119211,"TERMINAL",0,0,"6",,terminal_output +97,120253,"TERMINAL",0,0,"7",,terminal_output +98,121287,"TERMINAL",0,0,"8",,terminal_output +99,122084,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/dynamics_doom_120x160_3548891.log",0,0,"",log,tab +100,122324,"TERMINAL",0,0,"9",,terminal_output +101,123422,"TERMINAL",0,0,"9:01",,terminal_output +102,123952,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"",Log,tab +103,123953,"extension-output-pdoom-org.crowd-code-#1-crowd-code",242,0,"",Log,selection_mouse +104,124410,"TERMINAL",0,0,"2",,terminal_output +105,124795,"TERMINAL",0,0,"bash",,terminal_focus +106,125433,"TERMINAL",0,0,"3",,terminal_output +107,126496,"TERMINAL",0,0,"4 7",,terminal_output +108,127517,"TERMINAL",0,0,"5",,terminal_output +109,128557,"TERMINAL",0,0,"6",,terminal_output +110,129611,"TERMINAL",0,0,"7",,terminal_output +111,130640,"TERMINAL",0,0,"85",,terminal_output +112,131684,"TERMINAL",0,0,"9",,terminal_output +113,132710,"TERMINAL",0,0,"10",,terminal_output +114,133759,"TERMINAL",0,0,"1",,terminal_output +115,134792,"TERMINAL",0,0,"2",,terminal_output +116,135830,"TERMINAL",0,0,"3",,terminal_output +117,136876,"TERMINAL",0,0,"4",,terminal_output +118,137919,"TERMINAL",0,0,"5",,terminal_output +119,138955,"TERMINAL",0,0,"6",,terminal_output +120,139991,"TERMINAL",0,0,"7",,terminal_output +121,141098,"TERMINAL",0,0,"8",,terminal_output +122,142069,"TERMINAL",0,0,"9",,terminal_output +123,143106,"TERMINAL",0,0,"20",,terminal_output +124,144187,"TERMINAL",0,0,"1",,terminal_output +125,145198,"TERMINAL",0,0,"2",,terminal_output +126,146309,"TERMINAL",0,0,"3",,terminal_output +127,147270,"TERMINAL",0,0,"4",,terminal_output +128,148386,"TERMINAL",0,0,"5",,terminal_output +129,149366,"TERMINAL",0,0,"7",,terminal_output +130,150398,"TERMINAL",0,0,"8",,terminal_output +131,151431,"TERMINAL",0,0,"9",,terminal_output +132,152466,"TERMINAL",0,0,"30",,terminal_output +133,153097,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH 
--partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=dynamics_doom_60x80\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/doom/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/train_tokenizer_default_single_gpu_60x80/3547697\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=doom-dynamics-60x80-$slurm_job_id \\n --image_height=60 \\n --image_width=80 \\n --tags doom dynamics maskgit default 60x80 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +134,153533,"TERMINAL",0,0,"1",,terminal_output +135,154612,"TERMINAL",0,0,"2",,terminal_output +136,155089,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",467,0,"",shellscript,selection_mouse +137,155093,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",466,0,"",shellscript,selection_command +138,155606,"TERMINAL",0,0,"3",,terminal_output +139,155973,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",449,0,"",shellscript,selection_mouse +140,155974,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",448,0,"",shellscript,selection_command +141,156658,"TERMINAL",0,0,"4",,terminal_output +142,156832,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",449,0,"\n",shellscript,content +143,157670,"TERMINAL",0,0,"5",,terminal_output +144,158628,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",450,0,"#",shellscript,content +145,158630,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",451,0,"",shellscript,selection_keyboard +146,158714,"TERMINAL",0,0,"6",,terminal_output +147,159095,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",451,0,"S",shellscript,content 
+148,159097,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",452,0,"",shellscript,selection_keyboard +149,159207,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",452,0,"B",shellscript,content +150,159208,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",453,0,"",shellscript,selection_keyboard +151,159384,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",453,0,"A",shellscript,content +152,159385,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",454,0,"",shellscript,selection_keyboard +153,159533,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",454,0,"T",shellscript,content +154,159534,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",455,0,"",shellscript,selection_keyboard +155,159751,"TERMINAL",0,0,"7",,terminal_output +156,160534,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",455,0,"C",shellscript,content +157,160536,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",456,0,"",shellscript,selection_keyboard +158,160627,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",456,0,"H",shellscript,content +159,160628,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",457,0,"",shellscript,selection_keyboard +160,160814,"TERMINAL",0,0,"8",,terminal_output +161,160973,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",457,0," ",shellscript,content +162,160974,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",458,0,"",shellscript,selection_keyboard +163,161267,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",458,0,"-",shellscript,content +164,161268,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",459,0,"",shellscript,selection_keyboard +165,161413,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",459,0,"-",shellscript,content +166,161414,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",460,0,"",shellscript,selection_keyboard +167,161834,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",460,0,"j",shellscript,content +168,161835,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",461,0,"",shellscript,selection_keyboard +169,161840,"TERMINAL",0,0,"9",,terminal_output +170,162047,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",461,0,"o",shellscript,content +171,162048,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",462,0,"",shellscript,selection_keyboard +172,162522,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",461,1,"",shellscript,content +173,162641,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",460,1,"",shellscript,content +174,162832,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",460,0,"e",shellscript,content +175,162833,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",461,0,"",shellscript,selection_keyboard +176,162900,"TERMINAL",0,0,"40",,terminal_output +177,163075,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",461,0,"x",shellscript,content +178,163076,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",462,0,"",shellscript,selection_keyboard +179,163321,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",462,0,"c",shellscript,content +180,163322,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",463,0,"",shellscript,selection_keyboard 
+181,163710,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",463,0,"l",shellscript,content +182,163711,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",464,0,"",shellscript,selection_keyboard +183,163938,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",464,0,"u",shellscript,content +184,163938,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",465,0,"",shellscript,selection_keyboard +185,163939,"TERMINAL",0,0,"1",,terminal_output +186,164004,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",465,0,"d",shellscript,content +187,164005,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",466,0,"",shellscript,selection_keyboard +188,164113,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",466,0,"e",shellscript,content +189,164114,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",467,0,"",shellscript,selection_keyboard +190,164987,"TERMINAL",0,0,"2",,terminal_output +191,165609,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",467,0,"=",shellscript,content +192,165611,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",468,0,"",shellscript,selection_keyboard +193,166041,"TERMINAL",0,0,"3",,terminal_output +194,166214,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",468,0,"[]",shellscript,content +195,166215,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",469,0,"",shellscript,selection_keyboard +196,166637,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",469,0,"hkn0735",shellscript,content +197,167055,"TERMINAL",0,0,"4",,terminal_output +198,168075,"TERMINAL",0,0,"5",,terminal_output +199,169148,"TERMINAL",0,0,"6",,terminal_output +200,169651,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_dyn_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=dynamics_doom_120x160\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/doom/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/train_tokenizer_default_single_node_120x160/3546530\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=doom-dynamics-120x160-$slurm_job_id \\n --image_height=120 \\n --image_width=160 \\n --tags doom dynamics maskgit default 120x160 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +201,170183,"TERMINAL",0,0,"7",,terminal_output +202,171022,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",0,0,"",shellscript,tab +203,171212,"TERMINAL",0,0,"8",,terminal_output +204,172021,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",470,0,"",shellscript,selection_mouse +205,172167,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",469,0,"",shellscript,selection_command +206,172257,"TERMINAL",0,0,"9",,terminal_output +207,173274,"TERMINAL",0,0,"50",,terminal_output +208,174284,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_dyn_default.sh",0,0,"",shellscript,tab +209,174328,"TERMINAL",0,0,"1",,terminal_output +210,175343,"TERMINAL",0,0,"2",,terminal_output +211,176411,"TERMINAL",0,0,"4",,terminal_output +212,176444,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_dyn_default.sh",434,0,"",shellscript,selection_mouse +213,176865,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_dyn_default.sh",451,0,"\n#SBATCH --exclude=[hkn0735]",shellscript,content +214,176872,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_dyn_default.sh",452,0,"",shellscript,selection_command +215,177461,"TERMINAL",0,0,"5",,terminal_output +216,178498,"TERMINAL",0,0,"6",,terminal_output +217,179494,"TERMINAL",0,0,"7",,terminal_output +218,180534,"TERMINAL",0,0,"8",,terminal_output +219,181572,"TERMINAL",0,0,"9",,terminal_output +220,182609,"TERMINAL",0,0,"50:00",,terminal_output +221,183650,"TERMINAL",0,0,"1",,terminal_output +222,184693,"TERMINAL",0,0,"2",,terminal_output +223,185736,"TERMINAL",0,0,"3",,terminal_output +224,186773,"TERMINAL",0,0,"4",,terminal_output +225,187809,"TERMINAL",0,0,"5",,terminal_output +226,188847,"TERMINAL",0,0,"6",,terminal_output +227,190227,"TERMINAL",0,0,"7",,terminal_output +228,191276,"TERMINAL",0,0,"8",,terminal_output +229,192341,"TERMINAL",0,0,"9",,terminal_output +230,192677,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_dyn_default.sh",479,0,"",shellscript,selection_mouse +231,192729,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_dyn_default.sh",478,0,"",shellscript,selection_command +232,193348,"TERMINAL",0,0,"11",,terminal_output +233,193679,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_dyn_default.sh",479,0,"",shellscript,selection_command 
+234,193715,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_dyn_default.sh",478,1,"",shellscript,content +235,194384,"TERMINAL",0,0,"2",,terminal_output +236,194487,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_dyn_default.sh",470,1,"",shellscript,content +237,195419,"TERMINAL",0,0,"3",,terminal_output +238,195534,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",0,0,"",shellscript,tab +239,196458,"TERMINAL",0,0,"4",,terminal_output +240,196858,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",477,0,"",shellscript,selection_command +241,197072,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",476,1,"",shellscript,content +242,197556,"TERMINAL",0,0,"5",,terminal_output +243,197917,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",468,1,"",shellscript,content +244,198527,"TERMINAL",0,0,"6",,terminal_output +245,199565,"TERMINAL",0,0,"7",,terminal_output +246,200086,"TERMINAL",0,0,"watch",,terminal_focus +247,200605,"TERMINAL",0,0,"88",,terminal_output +248,201644,"TERMINAL",0,0,"9",,terminal_output +249,201800,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/dynamics/maskgit",,terminal_output +250,204049,"TERMINAL",0,0,"dev",,terminal_command +251,205306,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",0,0,"",shellscript,tab +252,212572,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",,terminal_command +253,212581,"TERMINAL",0,0,"]633;CSubmitted batch job 3549093\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +254,214217,"TERMINAL",0,0,"queue",,terminal_command +255,214298,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Tue Oct 7 09:50:31 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3549093 accelerat dynamics tum_cte0 PD\t0:00\t 1 (Priority)3547802 accelerat tokenize tum_cte0 R 10:48:39\t 1 hkn06353543728 accelerat preproce tum_cte0 R 1-14:13:01\t 1 hkn04253543730 accelerat preproce tum_cte0 R 1-14:13:01\t 1 hkn0632",,terminal_output +256,215315,"TERMINAL",0,0,"24133",,terminal_output +257,215893,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +258,231750,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --job-name=tokenizer_60x80_4_patchsize\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/val\n\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_tokenizer.py \\n --save_ckpt \\n 
--patch_size=4 \\n --image_height=60 \\n --image_width=80 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --name=doom-tokenizer-default-60x80-patchsize-4-$slurm_job_id \\n --tags tokenizer doom default 60x80 patchsize-4 \\n --entity instant-uv \\n --project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val\n",shellscript,tab +259,233775,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",194,0,"",shellscript,selection_mouse +260,234513,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",310,0,"",shellscript,selection_mouse +261,235829,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",396,0,"\n#SBATCH --exclude=[hkn0735]",shellscript,content +262,235839,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",397,0,"",shellscript,selection_command +263,237222,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",424,0,"",shellscript,selection_mouse +264,237223,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",423,0,"",shellscript,selection_command +265,237789,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",471,0,"",shellscript,selection_mouse +266,237790,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",470,0,"",shellscript,selection_command +267,248209,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",504,0,"",shellscript,selection_mouse +268,250215,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",0,0,"",shellscript,tab +269,251420,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",512,0,"",shellscript,selection_mouse +270,253019,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",511,0,"",shellscript,selection_command +271,253880,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",492,0,"",shellscript,selection_command +272,255464,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_lam_default_1gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=08:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/lam/%x_%j.log\n#SBATCH --job-name=train_lam_default_single_gpu_60x80\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_60x80_fixed/val\n\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_lam.py \\n --save_ckpt \\n --num_latents=18 \\n --image_height=60 \\n --image_width=80 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --name=doom-lam-default-60x80-$slurm_job_id \\n --tags lam doom default 60x80 \\n --entity instant-uv \\n --project 
jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val\n",shellscript,tab +273,257113,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_lam_default_1gpu.sh",396,0,"",shellscript,selection_mouse +274,257627,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_lam_default_1gpu.sh",438,0,"\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout",shellscript,content +275,257629,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_lam_default_1gpu.sh",439,0,"",shellscript,selection_command +276,259578,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",0,0,"",shellscript,tab +277,261271,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",550,0,"",shellscript,selection_mouse +278,261815,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,51,"# --- signal trap to requeue job before timeout ---",shellscript,selection_command +279,262048,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,67,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {",shellscript,selection_command +280,262548,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,160,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""",shellscript,selection_command +281,262592,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,207,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here",shellscript,selection_command +282,262630,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,257,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger",shellscript,selection_command +283,262631,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,292,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID",shellscript,selection_command +284,262672,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,303,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0",shellscript,selection_command +285,262723,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,305,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}",shellscript,selection_command +286,262723,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,306,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout 
warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n",shellscript,selection_command +287,262768,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,331,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1",shellscript,selection_command +288,262810,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,332,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n",shellscript,selection_command +289,262811,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,377,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count",shellscript,selection_command +290,262853,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,470,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)",shellscript,selection_command +291,262894,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,471,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n",shellscript,selection_command +292,262935,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,505,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then",shellscript,selection_command +293,262935,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,547,"# --- signal trap to requeue job before 
timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""",shellscript,selection_command +294,262978,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,552,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse",shellscript,selection_command +295,263020,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,591,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""",shellscript,selection_command +296,263320,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,594,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi",shellscript,selection_command +297,263488,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,595,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n",shellscript,selection_command +298,263910,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,596,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught 
sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,selection_command +299,264292,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",546,0,"",shellscript,selection_command +300,265800,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",0,0,"",shellscript,tab +301,267054,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",472,0,"",shellscript,selection_mouse +302,268433,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",472,0,"\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,content +303,268442,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",473,0,"",shellscript,selection_command +304,270157,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_lam_default_1gpu.sh",0,0,"",shellscript,tab +305,271090,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_lam_default_1gpu.sh",508,0,"",shellscript,selection_mouse +306,271714,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_lam_default_1gpu.sh",457,0,"",shellscript,selection_command +307,272528,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_lam_default_1gpu.sh",439,69,"",shellscript,content +308,273692,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",0,0,"",shellscript,tab +309,274462,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",472,0,"",shellscript,selection_mouse +310,275207,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",425,0,"",shellscript,selection_command +311,275435,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",471,0,"\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout",shellscript,content +312,275440,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",472,0,"",shellscript,selection_command +313,278538,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",0,0,"",shellscript,tab +314,281603,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",0,0,"",shellscript,tab +315,284397,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",0,0,"",shellscript,tab +316,288603,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2385,0,"",shellscript,selection_command 
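The edits above and below propagate the requeue machinery from train_dyn_default.sh into train_tokenizer_default_1gpu_patch_size_4.sh: the --requeue and --signal=b:usr1@300 directives, the requeue_job trap, the restart-count check, and finally the background launch. The backgrounding matters because bash only services a trapped signal once the foreground command yields; running srun in the background and waiting on it lets SIGUSR1 (delivered to the batch shell about 300 s before the time limit) interrupt the wait so requeue_job can requeue the job before it is killed. The shape of the pattern being copied is:

srun python jasmine/train_tokenizer.py ... &
child_pid=$!
wait $child_pid   # returns early on SIGUSR1 so the trap can requeue the job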
+317,290742,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2370,15,"wait $child_pid",shellscript,selection_command +318,291212,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2369,16,"\nwait $child_pid",shellscript,selection_command +319,291351,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2356,29,"child_pid=$!\n\nwait $child_pid",shellscript,selection_command +320,291652,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2355,30,"\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_command +321,292560,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2355,0,"",shellscript,selection_command +322,297254,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",0,0,"",shellscript,tab +323,298966,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2164,0,"",shellscript,selection_mouse +324,299898,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2164,0,"\n\nchild_pid=$!\n\nwait $child_pid",shellscript,content +325,299943,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2165,0,"",shellscript,selection_command +326,300341,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2164,0,"",shellscript,selection_command +327,300498,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2123,0,"",shellscript,selection_command +328,300779,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2163,0,"",shellscript,selection_command +329,301305,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2163,0," ",shellscript,content +330,301306,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2164,0,"",shellscript,selection_keyboard +331,302331,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2164,0,"&",shellscript,content +332,302332,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2165,0,"",shellscript,selection_keyboard +333,302938,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2164,0,"",shellscript,selection_command +334,303123,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2166,0,"",shellscript,selection_command +335,303682,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2166,1,"",shellscript,content +336,369829,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",1854,0,"",shellscript,selection_mouse +337,369833,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",1853,0,"",shellscript,selection_command +338,374344,"TERMINAL",0,0,"queue",,terminal_command +339,374403,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Tue Oct 7 09:53:12 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3549093 accelerat dynamics tum_cte0 PD\t0:00\t 1 (Priority)3547802 accelerat tokenize tum_cte0 R 10:51:20\t 1 hkn06353543728 accelerat preproce tum_cte0 R 1-14:15:42\t 1 hkn04253543730 accelerat preproce tum_cte0 R 1-14:15:42\t 1 hkn0632",,terminal_output +340,375453,"TERMINAL",0,0,"3133",,terminal_output 
+341,376426,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +342,387736,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",0,0,"",shellscript,tab +343,397202,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",0,0,"",shellscript,tab +344,402900,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_dyn_default.sh",0,0,"",shellscript,tab +345,404390,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",0,0,"",shellscript,tab +346,405370,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",454,0,"",shellscript,selection_mouse +347,405890,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",424,0,"",shellscript,selection_mouse +348,405895,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",423,0,"",shellscript,selection_command +349,406909,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",424,0,"",shellscript,selection_command +350,407043,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",423,1,"",shellscript,content +351,407780,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",415,1,"",shellscript,content +352,413722,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",487,0,"",shellscript,selection_mouse +353,417677,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",486,0,"",shellscript,selection_command +354,433370,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",,terminal_command +355,433388,"TERMINAL",0,0,"]633;CSubmitted batch job 3549097\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +356,435047,"TERMINAL",0,0,"queue",,terminal_command +357,435133,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Tue Oct 7 09:54:12 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3549093 accelerat dynamics tum_cte0 PD\t0:00\t 1 (Priority)3549097 accelerat tokenize tum_cte0 PD\t0:00\t 1 (Priority)3547802 accelerat tokenize tum_cte0 R 10:52:20\t 1 hkn06353543730 accelerat preproce tum_cte0 R 1-14:16:42\t 1 hkn0632",,terminal_output +358,436159,"TERMINAL",0,0,"313",,terminal_output +359,437198,"TERMINAL",0,0,"424",,terminal_output +360,438263,"TERMINAL",0,0,"535",,terminal_output +361,438302,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +362,474963,"TERMINAL",0,0,"cd $ws_dir",,terminal_command +363,475542,"TERMINAL",0,0,"ls",,terminal_command +364,475575,"TERMINAL",0,0,"]633;C",,terminal_output +365,475714,"TERMINAL",0,0,"checkpoints data data_breakout data_doom data_new logs scripts\r\ncount_items.sh data_atari data_coinrun data_minecraft huggingface possibly_corrupt_files_in_this_workspace.txt\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared",,terminal_output +366,489121,"TERMINAL",0,0,"cd /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/train_tokenizer_default_single_node_120x160/3546530",,terminal_command +367,489136,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/train_tokenizer_default_single_node_120x160/3546530",,terminal_output 
+368,489712,"TERMINAL",0,0,"ls",,terminal_command +369,489750,"TERMINAL",0,0,"]633;C020000 040000 060000 080000 100000 120000 140000 160000 180000 200000 220000 240000 260000 280000 298000 299000 300000\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/train_tokenizer_default_single_node_120x160/3546530",,terminal_output +370,505875,"TERMINAL",0,0,"rm -rf 260000",,terminal_command +371,505927,"TERMINAL",0,0,"]633;C",,terminal_output +372,507001,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/train_tokenizer_default_single_node_120x160/3546530",,terminal_output +373,510047,"TERMINAL",0,0,"rm -rf 280000",,terminal_command +374,510098,"TERMINAL",0,0,"]633;C",,terminal_output +375,510989,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/train_tokenizer_default_single_node_120x160/3546530",,terminal_output +376,520347,"TERMINAL",0,0,"rm -rf 298000/",,terminal_command +377,520397,"TERMINAL",0,0,"]633;C",,terminal_output +378,521441,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/train_tokenizer_default_single_node_120x160/3546530",,terminal_output +379,526599,"TERMINAL",0,0,"rm -rf 299000/",,terminal_command +380,526648,"TERMINAL",0,0,"]633;C",,terminal_output +381,527766,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/train_tokenizer_default_single_node_120x160/3546530",,terminal_output +382,533131,"TERMINAL",0,0,"rm -rf 300000/",,terminal_command +383,533181,"TERMINAL",0,0,"]633;C",,terminal_output +384,533840,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/train_tokenizer_default_single_node_120x160/3546530",,terminal_output +385,577845,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_default_single_node_120x160\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/val\n\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_tokenizer.py \\n --save_ckpt \\n --image_height=120 \\n --image_width=160 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --name=doom-tokenizer-default-120x160-$slurm_job_id \\n --tags tokenizer doom default 120x160 \\n --entity instant-uv \\n --project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir 
$array_records_dir_val",shellscript,tab +386,583832,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",849,0,"",shellscript,selection_mouse +387,583969,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",844,14,"SLURM_JOB_NAME",shellscript,selection_mouse +388,586313,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",876,0,"",shellscript,selection_mouse +389,586458,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",873,12,"SLURM_JOB_ID",shellscript,selection_mouse +390,630836,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1259,0,"",shellscript,selection_mouse +391,631478,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1308,0,"",shellscript,selection_mouse +392,632126,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",922,0,"",shellscript,selection_mouse +393,633204,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1318,0,"",shellscript,selection_mouse +394,633206,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1317,0,"",shellscript,selection_command +395,633809,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",1267,0,"",shellscript,selection_mouse +396,645377,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node copy.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_default_single_node_120x160\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/val\n\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_tokenizer.py \\n --save_ckpt \\n --image_height=120 \\n --image_width=160 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --name=doom-tokenizer-default-120x160-$slurm_job_id \\n --tags tokenizer doom default 120x160 \\n --entity instant-uv \\n --project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val",shellscript,tab +397,654338,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/doom/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_default_single_node_120x160\n\n# Log 
the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_120x160_fixed/val\n\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/mihir/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_tokenizer.py \\n --save_ckpt \\n --image_height=120 \\n --image_width=160 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --name=doom-tokenizer-default-120x160-$slurm_job_id \\n --tags tokenizer doom default 120x160 \\n --entity instant-uv \\n --project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val",shellscript,tab +398,656175,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",169,0,"",shellscript,selection_mouse +399,656192,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",168,0,"",shellscript,selection_command +400,657853,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_dyn_default.sh",0,0,"",shellscript,tab +401,658908,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_dyn_default.sh",468,0,"",shellscript,selection_mouse +402,659424,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_dyn_default.sh",467,0,"",shellscript,selection_command +403,661726,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",0,0,"",shellscript,tab +404,662755,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",297,0,"",shellscript,selection_mouse +405,664171,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",396,0,"\n#SBATCH --exclude=hkn0735",shellscript,content +406,664183,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",397,0,"",shellscript,selection_command +407,664911,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",423,0,"",shellscript,selection_command +408,665411,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",486,0,"",shellscript,selection_command +409,665434,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",487,0,"",shellscript,selection_command +410,665460,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",511,0,"",shellscript,selection_command +411,665502,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",518,0,"",shellscript,selection_command +412,665529,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",519,0,"",shellscript,selection_command +413,665569,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",549,0,"",shellscript,selection_command +414,665593,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",579,0,"",shellscript,selection_command +415,665641,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",605,0,"",shellscript,selection_command 
+416,665673,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",606,0,"",shellscript,selection_command +417,665695,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",734,0,"",shellscript,selection_command +418,665728,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",858,0,"",shellscript,selection_command +419,665755,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",859,0,"",shellscript,selection_command +420,665797,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",860,0,"",shellscript,selection_command +421,665832,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",885,0,"",shellscript,selection_command +422,665833,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",912,0,"",shellscript,selection_command +423,665876,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",913,0,"",shellscript,selection_command +424,665917,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1036,0,"",shellscript,selection_command +425,665920,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1061,0,"",shellscript,selection_command +426,665958,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1062,0,"",shellscript,selection_command +427,666239,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1079,0,"",shellscript,selection_command +428,666743,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1080,0,"",shellscript,selection_command +429,666767,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1121,0,"",shellscript,selection_command +430,666783,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1139,0,"",shellscript,selection_command +431,666829,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1164,0,"",shellscript,selection_command +432,666988,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1139,0,"",shellscript,selection_command +433,667103,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1121,0,"",shellscript,selection_command +434,667294,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1080,0,"",shellscript,selection_command +435,667400,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1079,0,"",shellscript,selection_command +436,667536,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1062,0,"",shellscript,selection_command +437,667678,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1061,0,"",shellscript,selection_command +438,667951,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1036,0,"",shellscript,selection_command +439,668100,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",913,0,"",shellscript,selection_command +440,668254,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",912,0,"",shellscript,selection_command 
+441,668370,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",885,0,"",shellscript,selection_command +442,676439,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",912,0,"",shellscript,selection_mouse +443,677091,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",908,0,"",shellscript,selection_mouse +444,677485,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",911,0,"",shellscript,selection_mouse +445,677486,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",910,0,"",shellscript,selection_command +446,677745,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",911,0,"",shellscript,selection_command +447,678093,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",899,12,"",shellscript,content +448,678250,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",897,2,"",shellscript,content +449,679410,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",897,0,"=",shellscript,content +450,679411,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",898,0,"",shellscript,selection_keyboard +451,679571,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",898,0,"3546530",shellscript,content +452,695981,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1470,0,"",shellscript,selection_mouse +453,707750,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1291,0,"",shellscript,selection_mouse +454,708685,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1292,0,"",shellscript,selection_mouse +455,709706,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1292,0,"-",shellscript,content +456,709707,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1293,0,"",shellscript,selection_keyboard +457,710033,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1293,0,"r",shellscript,content +458,710034,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1294,0,"",shellscript,selection_keyboard +459,710220,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1294,0,"e",shellscript,content +460,710220,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1295,0,"",shellscript,selection_keyboard +461,710411,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1295,0,"q",shellscript,content +462,710412,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1296,0,"",shellscript,selection_keyboard +463,710495,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1296,0,"u",shellscript,content +464,710496,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1297,0,"",shellscript,selection_keyboard +465,710574,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1297,0,"e",shellscript,content 
+466,710575,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1298,0,"",shellscript,selection_keyboard +467,710657,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1298,0,"u",shellscript,content +468,710658,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1299,0,"",shellscript,selection_keyboard +469,710730,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1299,0,"e",shellscript,content +470,710731,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1300,0,"",shellscript,selection_keyboard +471,729914,"TERMINAL",0,0,"dev",,terminal_command +472,732820,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",,terminal_command +473,732861,"TERMINAL",0,0,"]633;CSubmitted batch job 3549139\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +474,733940,"TERMINAL",0,0,"queue",,terminal_command +475,734011,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Tue Oct 7 09:59:11 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3549093 accelerat dynamics tum_cte0 PD\t0:00\t 1 (Priority)3549097 accelerat tokenize tum_cte0 PD\t0:00\t 1 (Priority)3549139 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3547802 accelerat tokenize tum_cte0 R 10:57:19\t 1 hkn06353543730 accelerat preproce tum_cte0 R 1-14:21:41\t 1 hkn0632",,terminal_output +476,735044,"TERMINAL",0,0,"2202",,terminal_output +477,736080,"TERMINAL",0,0,"313",,terminal_output +478,737118,"TERMINAL",0,0,"424",,terminal_output +479,737538,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +480,743469,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",0,0,"",shellscript,tab +481,780170,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2119,0,"",shellscript,selection_mouse +482,780171,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2118,0,"",shellscript,selection_command +483,780850,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2164,0,"",shellscript,selection_mouse +484,781497,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2163,0,"",shellscript,selection_mouse +485,781498,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2162,0,"",shellscript,selection_command +486,782233,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2119,0,"",shellscript,selection_mouse +487,782274,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2118,0,"",shellscript,selection_command +488,783120,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2163,0,"",shellscript,selection_mouse +489,783121,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2162,0,"",shellscript,selection_command +490,783740,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2119,0,"",shellscript,selection_mouse +491,783741,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2118,0,"",shellscript,selection_command 
+492,784492,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2163,0,"",shellscript,selection_mouse +493,784494,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2162,0,"",shellscript,selection_command +494,785160,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2119,0,"",shellscript,selection_mouse +495,785161,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_tokenizer_default_1gpu_patch_size_4.sh",2118,0,"",shellscript,selection_command +496,798310,"TERMINAL",0,0,"scancel 3549139",,terminal_command +497,801395,"jasmine/train_tokenizer.py",0,0,"import os\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.90"")\n\nfrom dataclasses import dataclass, field\nfrom typing import cast, Optional\n\nimport einops\nimport itertools\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n init_lr: float = 0.0\n max_lr: float = 3e-4\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 30_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 1000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[TokenizerVQVAE, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n return (\n TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n ),\n rng,\n )\n\n\ndef build_optimizer(model: TokenizerVQVAE, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n 
args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(model, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n return mesh, replicated_sharding, videos_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n 
return None\n\n\ndef restore_checkpoint_if_needed(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int, nnx.ModelAndOptimizer, grain.DataLoaderIterator, grain.DataLoaderIterator\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(restore_step, args=restore_args)\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = restore_step or 0\n print(f""Restored dataloader and model state from step {step}"")\n return step, optimizer, train_iterator, val_iterator\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n tokenizer, rng = build_model(args, rng)\n\n _, params, _ = nnx.split(tokenizer, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(tokenizer, args)\n del tokenizer\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding = build_mesh_and_sharding(num_devices)\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator = restore_checkpoint_if_needed(\n args, checkpoint_manager, optimizer, 
train_iterator, val_iterator\n )\n\n # --- Define loss and train step (close over args) ---\n def tokenizer_loss_fn(\n model: TokenizerVQVAE, inputs: dict, training: bool = False\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training=training)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n mse = jnp.square(gt - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n gt_clipped = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_clipped, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_clipped, recon)).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(\n tokenizer: TokenizerVQVAE, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs, training=False)\n return loss, recon, metrics\n\n def calculate_validation_metrics(val_dataloader, tokenizer):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n batch = None\n recon = None\n for batch in val_dataloader:\n loss, recon, metrics = val_step(tokenizer, batch)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}""\n )\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = val_loss\n return val_metrics, batch, recon\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n 
val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +498,804473,"jasmine/train_tokenizer copy.py",0,0,"import os\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.90"")\n\nfrom dataclasses import dataclass, field\nfrom typing import cast, Optional\n\nimport einops\nimport itertools\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n init_lr: float = 0.0\n max_lr: float = 3e-4\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 30_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 1000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[TokenizerVQVAE, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n return (\n TokenizerVQVAE(\n in_dim=args.image_channels,\n 
model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n ),\n rng,\n )\n\n\ndef build_optimizer(model: TokenizerVQVAE, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(model, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n return mesh, replicated_sharding, videos_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n 
grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_checkpoint_if_needed(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int, nnx.ModelAndOptimizer, grain.DataLoaderIterator, grain.DataLoaderIterator\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(restore_step, args=restore_args)\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = restore_step or 0\n print(f""Restored dataloader and model state from step {step}"")\n return step, optimizer, train_iterator, val_iterator\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n tokenizer, rng = build_model(args, rng)\n\n _, params, _ = nnx.split(tokenizer, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(tokenizer, args)\n del tokenizer\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding = 
build_mesh_and_sharding(num_devices)\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator = restore_checkpoint_if_needed(\n args, checkpoint_manager, optimizer, train_iterator, val_iterator\n )\n\n # --- Define loss and train step (close over args) ---\n def tokenizer_loss_fn(\n model: TokenizerVQVAE, inputs: dict, training: bool = False\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training=training)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n mse = jnp.square(gt - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n gt_clipped = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_clipped, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_clipped, recon)).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(\n tokenizer: TokenizerVQVAE, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs, training=False)\n return loss, recon, metrics\n\n def calculate_validation_metrics(val_dataloader, tokenizer):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n batch = None\n recon = None\n for batch in val_dataloader:\n loss, recon, metrics = val_step(tokenizer, batch)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}""\n )\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = val_loss\n return val_metrics, batch, recon\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n 
val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +499,815216,"jasmine/train_tokenizer copy.py",279,0,"",python,selection_mouse +500,829654,"jasmine/train_tokenizer_full_precision.py",0,0,"import os\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.90"")\n\nfrom dataclasses import dataclass, field\nfrom typing import cast, Optional\n\nimport einops\nimport itertools\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n init_lr: float = 0.0\n max_lr: float = 3e-4\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 30_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 1000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[TokenizerVQVAE, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = 
nnx.Rngs(_rng)\n return (\n TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n ),\n rng,\n )\n\n\ndef build_optimizer(model: TokenizerVQVAE, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(model, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n return mesh, replicated_sharding, videos_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, 
grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_checkpoint_if_needed(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int, nnx.ModelAndOptimizer, grain.DataLoaderIterator, grain.DataLoaderIterator\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(restore_step, args=restore_args)\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = restore_step or 0\n print(f""Restored dataloader and model state from step {step}"")\n return step, optimizer, train_iterator, val_iterator\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n tokenizer, rng = build_model(args, rng)\n\n _, params, _ = nnx.split(tokenizer, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(tokenizer, args)\n del tokenizer\n\n # FIXME: switch to create_hybrid_device_mesh for runs 
spanning multiple nodes\n _, replicated_sharding, videos_sharding = build_mesh_and_sharding(num_devices)\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator = restore_checkpoint_if_needed(\n args, checkpoint_manager, optimizer, train_iterator, val_iterator\n )\n\n # --- Define loss and train step (close over args) ---\n def tokenizer_loss_fn(\n model: TokenizerVQVAE, inputs: dict, training: bool = False\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training=training)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n mse = jnp.square(gt - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n gt_clipped = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_clipped, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_clipped, recon)).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(\n tokenizer: TokenizerVQVAE, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs, training=False)\n return loss, recon, metrics\n\n def calculate_validation_metrics(val_dataloader, tokenizer):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n batch = None\n recon = None\n for batch in val_dataloader:\n loss, recon, metrics = val_step(tokenizer, batch)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}""\n )\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = val_loss\n return val_metrics, batch, recon\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n 
val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +501,832981,"jasmine/train_tokenizer_full_precision.py",1683,0,"",python,selection_mouse +502,832982,"jasmine/train_tokenizer_full_precision.py",1682,0,"",python,selection_command +503,833406,"jasmine/train_tokenizer_full_precision.py",1683,0,"",python,selection_command +504,833641,"jasmine/train_tokenizer_full_precision.py",1682,1,"",python,content +505,833781,"jasmine/train_tokenizer_full_precision.py",1681,1,"",python,content +506,833919,"jasmine/train_tokenizer_full_precision.py",1680,1,"",python,content +507,834066,"jasmine/train_tokenizer_full_precision.py",1679,1,"",python,content +508,834212,"jasmine/train_tokenizer_full_precision.py",1678,1,"",python,content +509,834351,"jasmine/train_tokenizer_full_precision.py",1677,1,"",python,content +510,834551,"jasmine/train_tokenizer_full_precision.py",1676,1,"",python,content +511,834609,"jasmine/train_tokenizer_full_precision.py",1675,1,"",python,content +512,834859,"jasmine/train_tokenizer_full_precision.py",1675,0,"f",python,content +513,834861,"jasmine/train_tokenizer_full_precision.py",1676,0,"",python,selection_keyboard +514,835012,"jasmine/train_tokenizer_full_precision.py",1676,0,"l",python,content +515,835014,"jasmine/train_tokenizer_full_precision.py",1677,0,"",python,selection_keyboard +516,835142,"jasmine/train_tokenizer_full_precision.py",1677,0,"a",python,content +517,835144,"jasmine/train_tokenizer_full_precision.py",1678,0,"",python,selection_keyboard +518,835405,"jasmine/train_tokenizer_full_precision.py",1678,0,"t",python,content +519,835407,"jasmine/train_tokenizer_full_precision.py",1679,0,"",python,selection_keyboard +520,835756,"jasmine/train_tokenizer_full_precision.py",1678,1,"",python,content +521,835891,"jasmine/train_tokenizer_full_precision.py",1677,1,"",python,content +522,836077,"jasmine/train_tokenizer_full_precision.py",1677,0,"o",python,content +523,836079,"jasmine/train_tokenizer_full_precision.py",1678,0,"",python,selection_keyboard +524,836323,"jasmine/train_tokenizer_full_precision.py",1678,0,"a",python,content +525,836325,"jasmine/train_tokenizer_full_precision.py",1679,0,"",python,selection_keyboard +526,836576,"jasmine/train_tokenizer_full_precision.py",1679,0,"t",python,content +527,836577,"jasmine/train_tokenizer_full_precision.py",1680,0,"",python,selection_keyboard +528,837639,"jasmine/train_tokenizer_full_precision.py",1680,0,"3",python,content 
+529,837641,"jasmine/train_tokenizer_full_precision.py",1681,0,"",python,selection_keyboard +530,837760,"jasmine/train_tokenizer_full_precision.py",1681,0,"2",python,content +531,837761,"jasmine/train_tokenizer_full_precision.py",1682,0,"",python,selection_keyboard +532,838125,"jasmine/train_tokenizer_full_precision.py",1681,0,"",python,selection_command +533,843655,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node.sh",0,0,"",shellscript,tab +534,847604,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",0,0,"",shellscript,tab +535,849424,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1109,0,"",shellscript,selection_mouse +536,850372,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1109,0,"_",shellscript,content +537,850374,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1110,0,"",shellscript,selection_keyboard +538,850661,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1110,0,"f",shellscript,content +539,850662,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1111,0,"",shellscript,selection_keyboard +540,850836,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1111,0,"u",shellscript,content +541,850837,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1112,0,"",shellscript,selection_keyboard +542,851053,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1112,0,"l",shellscript,content +543,851054,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1113,0,"",shellscript,selection_keyboard +544,851178,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1113,0,"l",shellscript,content +545,851179,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1114,0,"",shellscript,selection_keyboard +546,851436,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1114,0,"_",shellscript,content +547,851438,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1115,0,"",shellscript,selection_keyboard +548,851764,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1115,0,"p",shellscript,content +549,851766,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1116,0,"",shellscript,selection_keyboard +550,851961,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1116,0,"r",shellscript,content +551,851962,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1117,0,"",shellscript,selection_keyboard +552,852447,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1117,0,"e",shellscript,content +553,852449,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1118,0,"",shellscript,selection_keyboard +554,852658,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1118,0,"c",shellscript,content +555,852659,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1119,0,"",shellscript,selection_keyboard 
+556,852841,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1119,0,"i",shellscript,content +557,852842,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1120,0,"",shellscript,selection_keyboard +558,852956,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1120,0,"s",shellscript,content +559,852957,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1121,0,"",shellscript,selection_keyboard +560,853114,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1121,0,"i",shellscript,content +561,853116,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1122,0,"",shellscript,selection_keyboard +562,853197,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1122,0,"o",shellscript,content +563,853198,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1123,0,"",shellscript,selection_keyboard +564,853357,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1123,0,"n",shellscript,content +565,853358,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",1124,0,"",shellscript,selection_keyboard +566,872119,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/doom/resolution120x160/train_tokenizer_default_1node_requeue.sh",,terminal_command +567,872125,"TERMINAL",0,0,"]633;CSubmitted batch job 3549146\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +568,874809,"TERMINAL",0,0,"queue",,terminal_command +569,874891,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Tue Oct 7 10:01:32 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3549093 accelerat dynamics tum_cte0 PD\t0:00\t 1 (Priority)3549097 accelerat tokenize tum_cte0 PD\t0:00\t 1 (Priority)3549146 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3547802 accelerat tokenize tum_cte0 R 10:59:40\t 1 hkn06353543730 accelerat preproce tum_cte0 R 1-14:24:02\t 1 hkn0632",,terminal_output +570,875944,"TERMINAL",0,0,"313",,terminal_output +571,876980,"TERMINAL",0,0,"424",,terminal_output +572,878012,"TERMINAL",0,0,"535",,terminal_output +573,878736,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +574,989719,"TERMINAL",0,0,"fqueue",,terminal_command +575,989776,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue -o ""%.10i %.16P %.30j %.8u %.8T %.10M %.9l %.6D %R""hkn1991.localdomain: Tue Oct 7 10:03:27 2025JOBIDPARTITIONNAME USER STATE\t TIME TIME_LIMI NODES NODELIST(REASON)3549093\taccelerateddynamics_doom_60x80 tum_cte0 PENDING\t 0:00 2-00:00:00\t1 (Priority)3549097\taccelerated tokenizer_60x80_4_patchsize tum_cte0 PENDING\t 0:00 2-00:00:00\t1 (Priority)3549146\taccelerated train_tokenizer_default_single tum_cte0 PENDING\t 0:00 2-00:00:00\t1 (Priority)3547802\taccelerated tokenizer_120x160_4_patch_size tum_cte0 RUNNING 11:01:35 2-00:00:00\t1 hkn06353543730\tacceleratedpreprocess_doom_50m tum_cte0 RUNNING 1-14:25:57 1-18:00:00\t1 hkn0632",,terminal_output +576,990809,"TERMINAL",0,0,"868",,terminal_output +577,991884,"TERMINAL",0,0,"979",,terminal_output +578,992878,"TERMINAL",0,0,"3086:00",,terminal_output +579,993934,"TERMINAL",0,0,"191",,terminal_output +580,994952,"TERMINAL",0,0,"2402",,terminal_output +581,995983,"TERMINAL",0,0,"313",,terminal_output 
+582,997043,"TERMINAL",0,0,"424",,terminal_output +583,998047,"TERMINAL",0,0,"535",,terminal_output +584,999075,"TERMINAL",0,0,"646",,terminal_output +585,1000108,"TERMINAL",0,0,"757",,terminal_output +586,1001151,"TERMINAL",0,0,"868",,terminal_output +587,1002176,"TERMINAL",0,0,"979",,terminal_output +588,1003207,"TERMINAL",0,0,"40810",,terminal_output +589,1004238,"TERMINAL",0,0,"191",,terminal_output +590,1005272,"TERMINAL",0,0,"2502",,terminal_output +591,1006305,"TERMINAL",0,0,"324",,terminal_output +592,1007341,"TERMINAL",0,0,"535",,terminal_output +593,1008374,"TERMINAL",0,0,"646",,terminal_output +594,1009412,"TERMINAL",0,0,"757",,terminal_output +595,1010446,"TERMINAL",0,0,"868",,terminal_output +596,1011480,"TERMINAL",0,0,"979",,terminal_output +597,1012511,"TERMINAL",0,0,"50820",,terminal_output +598,1013541,"TERMINAL",0,0,"191",,terminal_output +599,1014581,"TERMINAL",0,0,"22:002",,terminal_output +600,1015650,"TERMINAL",0,0,"313",,terminal_output +601,1016645,"TERMINAL",0,0,"424",,terminal_output +602,1017678,"TERMINAL",0,0,"535",,terminal_output +603,1018706,"TERMINAL",0,0,"646",,terminal_output +604,1019737,"TERMINAL",0,0,"757",,terminal_output +605,1020770,"TERMINAL",0,0,"868",,terminal_output +606,1021827,"TERMINAL",0,0,"979",,terminal_output +607,1022832,"TERMINAL",0,0,"4:00830",,terminal_output +608,1023869,"TERMINAL",0,0,"191",,terminal_output +609,1024902,"TERMINAL",0,0,"2102",,terminal_output +610,1025935,"TERMINAL",0,0,"313",,terminal_output +611,1026968,"TERMINAL",0,0,"424",,terminal_output +612,1028001,"TERMINAL",0,0,"535",,terminal_output +613,1029030,"TERMINAL",0,0,"646",,terminal_output +614,1030063,"TERMINAL",0,0,"757",,terminal_output +615,1031118,"TERMINAL",0,0,"868",,terminal_output +616,1032130,"TERMINAL",0,0,"979",,terminal_output +617,1033160,"TERMINAL",0,0,"10840",,terminal_output +618,1034189,"TERMINAL",0,0,"191",,terminal_output +619,1035224,"TERMINAL",0,0,"2202",,terminal_output +620,1036258,"TERMINAL",0,0,"313",,terminal_output +621,1037290,"TERMINAL",0,0,"424",,terminal_output +622,1038324,"TERMINAL",0,0,"546",,terminal_output +623,1039354,"TERMINAL",0,0,"757",,terminal_output +624,1040389,"TERMINAL",0,0,"868",,terminal_output +625,1041419,"TERMINAL",0,0,"979",,terminal_output +626,1042457,"TERMINAL",0,0,"20850",,terminal_output +627,1043488,"TERMINAL",0,0,"191",,terminal_output +628,1044524,"TERMINAL",0,0,"2302",,terminal_output +629,1045558,"TERMINAL",0,0,"313",,terminal_output +630,1046594,"TERMINAL",0,0,"424",,terminal_output +631,1047624,"TERMINAL",0,0,"535",,terminal_output +632,1048709,"TERMINAL",0,0,"646",,terminal_output +633,1049687,"TERMINAL",0,0,"757",,terminal_output +634,1050718,"TERMINAL",0,0,"868",,terminal_output +635,1051747,"TERMINAL",0,0,"979",,terminal_output +636,1052780,"TERMINAL",0,0,"3087:00",,terminal_output +637,1053843,"TERMINAL",0,0,"191",,terminal_output +638,1054866,"TERMINAL",0,0,"2402",,terminal_output +639,1055890,"TERMINAL",0,0,"313",,terminal_output +640,1056915,"TERMINAL",0,0,"424",,terminal_output +641,1057393,"slurm/jobs/mihir/horeka/doom/resolution120x160/train_dyn_default.sh",0,0,"",shellscript,tab +642,1057949,"TERMINAL",0,0,"535",,terminal_output +643,1058985,"TERMINAL",0,0,"646",,terminal_output +644,1060063,"TERMINAL",0,0,"757",,terminal_output +645,1061060,"TERMINAL",0,0,"868",,terminal_output +646,1062083,"TERMINAL",0,0,"979",,terminal_output +647,1062453,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",0,0,"",shellscript,tab +648,1063118,"TERMINAL",0,0,"40810",,terminal_output 
+649,1064184,"TERMINAL",0,0,"191",,terminal_output +650,1066304,"TERMINAL",0,0,"2513",,terminal_output +651,1067339,"TERMINAL",0,0,"435",,terminal_output +652,1068370,"TERMINAL",0,0,"646",,terminal_output +653,1069428,"TERMINAL",0,0,"757",,terminal_output +654,1070436,"TERMINAL",0,0,"868",,terminal_output +655,1071469,"TERMINAL",0,0,"979",,terminal_output +656,1072034,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2122,0,"",shellscript,selection_mouse +657,1072035,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2121,0,"",shellscript,selection_command +658,1072511,"TERMINAL",0,0,"50820",,terminal_output +659,1072741,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2099,0,"",shellscript,selection_mouse +660,1072742,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2098,0,"",shellscript,selection_command +661,1073536,"TERMINAL",0,0,"191",,terminal_output +662,1074567,"TERMINAL",0,0,"23:002",,terminal_output +663,1075598,"TERMINAL",0,0,"313",,terminal_output +664,1076635,"TERMINAL",0,0,"424",,terminal_output +665,1077723,"TERMINAL",0,0,"535",,terminal_output +666,1078699,"TERMINAL",0,0,"646",,terminal_output +667,1079643,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",1995,0,"",shellscript,selection_mouse +668,1079685,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",1994,0,"",shellscript,selection_command +669,1079800,"TERMINAL",0,0,"757",,terminal_output +670,1080764,"TERMINAL",0,0,"868",,terminal_output +671,1081858,"TERMINAL",0,0,"979",,terminal_output +672,1082838,"TERMINAL",0,0,"5:00830",,terminal_output +673,1083555,"jasmine/train_dynamics.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 16\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # 
supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n max_noise_level: float = 0.7\n noise_buckets: int = 10\n dropout: float = 0.0\n mask_limit: float = 0.5\n z_loss_weight: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 5000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = True\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n max_noise_level=args.max_noise_level,\n noise_buckets=args.noise_buckets,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n 
model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: 
ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_top_k_accuracy(\n token_logits_BTNV: jax.Array,\n video_tokens_BTN: jax.Array,\n mask_BTN: jax.Array,\n k: int,\n) -> jax.Array:\n _, topk_indices_BTNK = jax.lax.top_k(token_logits_BTNV, k)\n topk_correct = jnp.any(\n topk_indices_BTNK == video_tokens_BTN[..., jnp.newaxis], axis=-1\n )\n topk_acc = (mask_BTN * topk_correct).sum() / mask_BTN.sum()\n return topk_acc\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask_BTN = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask_BTN * ce_loss).sum() / mask_BTN.sum()\n z_val = jax.nn.logsumexp(outputs[""token_logits""], axis=-1)\n z_loss_metric = (mask_BTN * (z_val**2)).sum() / mask_BTN.sum()\n\n masked_token_top_1_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 1\n )\n masked_token_top_2_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 2\n )\n masked_token_top_5_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 5\n )\n masked_token_top_16_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 16\n )\n\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_top1_accuracy=masked_token_top_1_acc,\n masked_token_top2_accuracy=masked_token_top_2_acc,\n masked_token_top5_accuracy=masked_token_top_5_acc,\n masked_token_top16_accuracy=masked_token_top_16_acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n z_loss=z_loss_metric,\n psnr=psnr,\n ssim=ssim,\n 
codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n z_loss = metrics[""z_loss""]\n total_loss = ce_loss + args.z_loss_weight * z_loss\n metrics[""total_loss""] = total_loss\n return total_loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model 
and compute metrics""""""\n genie.eval()\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n inputs[""videos""] = gt.astype(args.dtype)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices_E = None\n if not args.use_gt_actions:\n lam_indices_E = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices_E\n inputs[""videos""] = inputs[""videos""][\n :, :-1\n ] # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n batch=inputs,\n seq_len=args.seq_len,\n noise_level=0.0,\n temperature=args.val_temperature,\n sample_argmax=args.val_sample_argmax,\n maskgit_steps=args.val_maskgit_steps,\n )\n # Calculate metrics for the last frame only\n step_outputs = {\n ""recon"": recon_full_frame[:, -1],\n ""token_logits"": logits_full_frame[:, -1],\n ""video_tokens"": tokens_full_frame[:, -1],\n ""mask"": jnp.ones_like(tokens_full_frame[:, -1]),\n }\n if lam_indices_E is not None:\n lam_indices_B = lam_indices_E.reshape((-1, args.seq_len - 1))[:, -1]\n step_outputs[""lam_indices""] = lam_indices_B\n\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt[:, -1], args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_full_frame_loss""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = 
einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +674,1083916,"TERMINAL",0,0,"191",,terminal_output +675,1084919,"TERMINAL",0,0,"2102",,terminal_output +676,1085940,"TERMINAL",0,0,"313",,terminal_output +677,1086995,"TERMINAL",0,0,"424",,terminal_output +678,1088000,"TERMINAL",0,0,"535",,terminal_output +679,1089081,"TERMINAL",0,0,"646",,terminal_output +680,1090067,"TERMINAL",0,0,"757",,terminal_output +681,1091100,"TERMINAL",0,0,"868",,terminal_output +682,1091381,"jasmine/train_dynamics.py",1734,0,"",python,selection_mouse +683,1091522,"jasmine/train_dynamics.py",1727,11,"num_actions",python,selection_mouse +684,1092135,"TERMINAL",0,0,"979",,terminal_output +685,1093165,"TERMINAL",0,0,"10840",,terminal_output +686,1094199,"TERMINAL",0,0,"191",,terminal_output +687,1094804,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",0,0,"",shellscript,tab 
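The train_dynamics.py contents above report masked top-k token accuracy: a position counts as correct when the ground-truth token index appears among the k largest logits, and only masked positions enter the average (see _calculate_top_k_accuracy). A self-contained toy version of the metric, with made-up arrays purely for illustration:

    import jax
    import jax.numpy as jnp

    def masked_top_k_accuracy(logits, targets, mask, k):
        # logits: (..., vocab); targets and mask share the leading shape
        _, topk_idx = jax.lax.top_k(logits, k)                      # (..., k)
        correct = jnp.any(topk_idx == targets[..., None], axis=-1)  # (...)
        return (mask * correct).sum() / mask.sum()

    logits = jnp.array([[2.0, 0.5, 1.0, -1.0],
                        [0.1, 3.0, 0.2, 0.0]])
    targets = jnp.array([2, 1])
    mask = jnp.array([1.0, 1.0])
    print(masked_top_k_accuracy(logits, targets, mask, k=2))  # 1.0: both targets fall in the top-2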
+688,1095275,"TERMINAL",0,0,"2202",,terminal_output +689,1096295,"TERMINAL",0,0,"313",,terminal_output +690,1097146,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",1995,0,"\n ",shellscript,content +691,1097296,"TERMINAL",0,0,"424",,terminal_output +692,1097999,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2000,0,"-",shellscript,content +693,1098001,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2001,0,"",shellscript,selection_keyboard +694,1098142,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2001,0,"-",shellscript,content +695,1098143,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2002,0,"",shellscript,selection_keyboard +696,1098332,"TERMINAL",0,0,"546",,terminal_output +697,1098485,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2002,0,"num_actions",shellscript,content +698,1099187,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2013,0,"=",shellscript,content +699,1099189,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2014,0,"",shellscript,selection_keyboard +700,1099367,"TERMINAL",0,0,"757",,terminal_output +701,1100400,"TERMINAL",0,0,"868",,terminal_output +702,1101268,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2014,0,"1",shellscript,content +703,1101269,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2015,0,"",shellscript,selection_keyboard +704,1101332,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2015,0,"8",shellscript,content +705,1101333,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2016,0,"",shellscript,selection_keyboard +706,1101449,"TERMINAL",0,0,"979",,terminal_output +707,1102056,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2016,0," ",shellscript,content +708,1102057,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2017,0,"",shellscript,selection_keyboard +709,1102226,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2017,0,"\",shellscript,content +710,1102227,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2018,0,"",shellscript,selection_keyboard +711,1102475,"TERMINAL",0,0,"20850",,terminal_output +712,1102515,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2017,0,"",shellscript,selection_command +713,1102899,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2016,0,"",shellscript,selection_command +714,1103519,"TERMINAL",0,0,"191",,terminal_output +715,1103918,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2015,1,"",shellscript,content +716,1104034,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2014,1,"",shellscript,content +717,1104184,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2014,0,"2",shellscript,content +718,1104185,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2015,0,"",shellscript,selection_keyboard +719,1104255,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2015,0,"0",shellscript,content +720,1104256,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2016,0,"",shellscript,selection_keyboard +721,1104532,"slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",2015,0,"",shellscript,selection_command +722,1104571,"TERMINAL",0,0,"2302",,terminal_output +723,1105638,"TERMINAL",0,0,"313",,terminal_output +724,1106621,"TERMINAL",0,0,"424",,terminal_output 
+725,1107644,"TERMINAL",0,0,"535",,terminal_output +726,1108673,"TERMINAL",0,0,"646",,terminal_output +727,1109744,"TERMINAL",0,0,"757",,terminal_output +728,1110751,"TERMINAL",0,0,"868",,terminal_output +729,1111807,"TERMINAL",0,0,"979",,terminal_output +730,1112848,"TERMINAL",0,0,"3088:00",,terminal_output +731,1113037,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +732,1116788,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/doom/resolution60x80/train_dyn_default.sh",,terminal_command +733,1116815,"TERMINAL",0,0,"]633;CSubmitted batch job 3549151\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +734,1131793,"TERMINAL",0,0,"git status",,terminal_command +735,1131843,"TERMINAL",0,0,"]633;C",,terminal_output +736,1131871,"TERMINAL",0,0,"On branch vizdoom-dataset\r\nYour branch is ahead of 'origin/vizdoom-dataset' by 1 commit.\r\n (use ""git push"" to publish your local commits)\r\n\r\nAll conflicts fixed but you are still merging.\r\n (use ""git commit"" to conclude merge)\r\n\r\nChanges to be committed:\r\n\tnew file: data/jasmine_data/atari/atari_utils.py\r\n\tnew file: data/jasmine_data/atari/generate_atari_dataset.py\r\n\tnew file: data/jasmine_data/atari/visualize_array_record.py\r\n\tnew file: data/jasmine_data/minatar/generate_minatar_breakout_dataset.py\r\n\tmodified: data/pyproject.toml\r\n\tmodified: jasmine/genie.py\r\n\tmodified: jasmine/models/dynamics.py\r\n\tmodified: jasmine/train_dynamics.py\r\n\tmodified: jasmine/train_lam.py\r\n\tmodified: jasmine/train_tokenizer.py\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: jasmine/train_lam.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\t checklist.md\r\n\tali-old-branch.diff\r\n\tdata/_vizdoom.ini\r\n\tdata/jasmine_data/ViZDoomPPO/_vizdoom.ini\r\n\tdata/jasmine_data/ViZDoomPPO/load_model_generate_dataset_fast.py\r\n\tdata/jasmine_data/ViZDoomPPO/logs/tensorboard/\r\n\tdata/jasmine_data/_vizdoom/\r\n\tdata/uv.lock\r\n\tdataset_duplicates.ipynb\r\n\tdiff.diff\r\n\tdiff2.diff\r\n\tdoom_job_starter.sh\r\n\tgifs/\r\n\tinput_pipeline/\r\n\tjasmine/train_tokenizer_full_precision.py\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\tmessage.md\r\n\toverfit_dir.zip\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\ttest.py\r\n\tutils/\r\n\tuv.lock\r\n\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +737,1140491,"jasmine/train_tokenizer_full_precision.py",0,0,"",python,tab +738,1145904,"jasmine/train_tokenizer.py",0,0,"",python,tab +739,1150175,"TERMINAL",0,0,"clear",,terminal_command +740,1150208,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +741,1392272,"TERMINAL",0,0,"queue",,terminal_command +742,1392341,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Tue Oct 7 10:10:09 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3549093 accelerat dynamics tum_cte0 PD\t0:00\t 1 (Priority)3549097 accelerat tokenize tum_cte0 PD\t0:00\t 1 (Priority)3549146 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3549151 accelerat dynamics tum_cte0 PD\t0:00\t 1 (Priority)3547802 accelerat tokenize tum_cte0 R 11:08:18\t 1 hkn06353543730 accelerat preproce tum_cte0 R 1-14:32:40\t 1 hkn0632",,terminal_output +743,1393385,"TERMINAL",0,0,"1191",,terminal_output 
+744,1394430,"TERMINAL",0,0,"2202",,terminal_output +745,1394567,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +746,1395459,"TERMINAL",0,0,"idling",,terminal_command +747,1395514,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Tue Oct 7 10:10:13 2025Partition dev_cpuonly: 12 nodes idle\rPartition cpuonly:\t 6 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 6 nodes idle\rPartition accelerated-h200:\t 2 nodes idle",,terminal_output +748,1396549,"TERMINAL",0,0,"4",,terminal_output +749,1397585,"TERMINAL",0,0,"5",,terminal_output +750,1398622,"TERMINAL",0,0,"6",,terminal_output +751,1399056,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-9cdba2ed-e3b9-400c-aa61-3ca40652e83b1753717763365-2025_07_28-17.49.33.649/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-9cdba2ed-e3b9-400c-aa61-3ca40652e83b1753717763365-2025_07_28-17.49.33.649/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..72046c7946e35d5edec2286ed18cb926ad4fe8d6 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-9cdba2ed-e3b9-400c-aa61-3ca40652e83b1753717763365-2025_07_28-17.49.33.649/source.csv @@ -0,0 +1,400 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,344,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"5:49:33 PM [info] Activating crowd-code\n5:49:33 PM [info] Recording started\n5:49:33 PM [info] Initializing git provider using file system watchers...\n5:49:33 PM [error] Not a git repository: EntryNotFound (FileSystemError): Error: ENOENT: no such file or directory, stat '/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/.git'\n",Log,tab +3,2291,"extension-output-pdoom-org.crowd-code-#1-crowd-code",336,0,"5:49:35 PM [info] Retrying git provider initialization...\n5:49:35 PM [error] Not a git repository: EntryNotFound (FileSystemError): Error: ENOENT: no such file or directory, stat '/home/hk-project-p0023960/tum_cte0515/Projects/jafar_jobs/.git'\n",Log,content +4,13045,"utils/nn.py",0,0,"import math\nfrom typing import Tuple\n\nfrom flax import linen as nn\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\nclass PositionalEncoding(nn.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n d_model: int # Hidden dimensionality of the input.\n max_len: int = 5000 # Maximum length of a sequence to expect.\n\n def setup(self):\n # Create matrix of [SeqLen, HiddenDim] representing the positional encoding for max_len inputs\n self.pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def __call__(self, x):\n x = x + self.pe[: x.shape[2]]\n return x\n\n\nclass STBlock(nn.Module):\n dim: int\n ffn_dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.remat\n @nn.compact\n def 
__call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=False\n ),\n )(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n # FIXME (f.srambical): check whether we should still pass the mask if we set is_causal=True\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n z = nn.Dense(\n self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nn.Module):\n model_dim: int\n ffn_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = STBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\ndef normalize(x):\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nn.Module):\n latent_dim: int\n num_latents: int\n dropout: float\n\n def setup(self):\n self.codebook = normalize(\n self.param(\n ""codebook"",\n nn.initializers.lecun_uniform(),\n (self.num_latents, self.latent_dim),\n )\n )\n self.drop = nn.Dropout(self.dropout, deterministic=False)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n codebook = normalize(self.codebook)\n distance = -jnp.matmul(x, codebook.T)\n if training:\n dropout_key = self.make_rng(""dropout"")\n distance = self.drop(distance, rng=dropout_key)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array):\n return self.codebook[indices]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool):\n """"""\n Create an attention function that uses flash attention if enabled.\n\n Flax 
MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\n\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\n multiple of 4 and mask accordingly.\n """"""\n\n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\n implementation = ""cudnn"" if use_flash_attention else None\n\n def _rearrange(x):\n return einops.rearrange(x, ""... l h d -> (...) l h d"")\n\n def _pad(x):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )\n return jnp.logical_and(attention_mask, expanded_mask)\n\n original_shape = query.shape\n original_seq_len = query.shape[-3]\n\n # Pad to nearest multiple of 4\n target_seq_len = ((original_seq_len + 3) // 4) * 4\n pad_size = target_seq_len - original_seq_len\n\n query_4d = _pad(_rearrange(query))\n key_4d = _pad(_rearrange(key))\n value_4d = _pad(_rearrange(value))\n\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n mask_4d = (\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n )\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n\n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\n\n output_4d = jax.nn.dot_product_attention(\n query=query_4d,\n key=key_4d,\n value=value_4d,\n bias=bias_4d,\n mask=mask_4d,\n implementation=implementation,\n is_causal=is_causal,\n **kwargs\n )\n return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n\n return attention_fn\n",python,tab +5,26550,"TERMINAL",0,0,"",,terminal_focus +6,28662,"TERMINAL",0,0,"",,terminal_focus +7,34654,"TERMINAL",0,0,"ime=01:00:00 --partition=accelerated-h100 --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +8,34754,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]633;E;;e1833c94-a8b3-4524-9e80-61ed159495e5]633;C]0;tum_cte0515@hkn1993:~/Projects/jafar_jobs]633;D",,terminal_output +9,72762,"sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nfrom orbax.checkpoint import PyTreeCheckpointer\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n 
lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=False,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\nckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n\n\ndef _sampling_wrapper(module, batch):\n return module.sample(\n batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax\n )\n\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie))\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(params, batch)\n return generated_vid\n\n\n# --- Get video + latent actions ---\narray_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n]\ndataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n)\nvideo_batch = next(iter(dataloader))\n# Get latent actions for all videos in the batch\nbatch = dict(videos=video_batch)\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(video_batch.shape[0], args.seq_len - 1, 1)\n\n# --- Sample + evaluate video ---\nvid = _autoreg_sample(rng, video_batch, action_batch)\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\ntrue_videos = (video_batch * 255).astype(np.uint8)\npred_videos = (vid * 255).astype(np.uint8)\nvideo_comparison = np.zeros((2, *vid.shape), dtype=np.uint8)\nvideo_comparison[0] = true_videos[:, : args.seq_len]\nvideo_comparison[1] = 
pred_videos\nframes = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n# --- Save video ---\nimgs = [Image.fromarray(img) for img in frames]\n# Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\nfor t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(action_batch.shape[0]):\n action = action_batch[row, t, 0]\n y_offset = row * video_batch.shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +10,174652,"sample.py",4736,0,"",python,selection_mouse +11,175135,"sample.py",4885,0,"",python,selection_mouse +12,175670,"sample.py",4799,0,"",python,selection_mouse +13,176257,"sample.py",4887,0,"",python,selection_mouse +14,176617,"sample.py",5047,0,"",python,selection_mouse +15,177208,"sample.py",5054,0,"",python,selection_mouse +16,177668,"sample.py",5136,0,"",python,selection_mouse +17,178079,"sample.py",5194,0,"",python,selection_mouse +18,178390,"sample.py",5228,0,"",python,selection_mouse +19,178395,"sample.py",5227,0,"",python,selection_command +20,548314,"sample.py",0,0,"",python,tab +21,612430,"sample.py",0,0,"",python,tab +22,637465,"sample.py",3924,0,"",python,selection_mouse +23,638094,"sample.py",3883,0,"",python,selection_mouse +24,638114,"sample.py",3882,0,"",python,selection_command +25,654272,"sample.py",3883,0," ",python,content +26,654279,"sample.py",3883,0,"",python,selection_command +27,656338,"sample.py",3883,1,"",python,content +28,656376,"sample.py",3882,0,"",python,selection_command +29,656753,"sample.py",3883,0,"\n",python,content +30,657598,"sample.py",3884,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/train_dynamics_maskprob_fix_8_node/3371237",python,content +31,658806,"sample.py",3884,131,"",python,content +32,659478,"sample.py",3884,0," ",python,content +33,659502,"sample.py",3884,0,"",python,selection_command +34,660696,"sample.py",3926,0,"",python,selection_command +35,662321,"sample.py",3885,0,"",python,selection_mouse +36,662325,"sample.py",3884,0,"",python,selection_command +37,678461,"sample.py",3884,0,"video_batch = jnp.array(video_batch)\nprint(video_batch.dtype)\nvideo_batch = video_batch.astype(args.dtype) # / 255.0\nprint(video_batch.dtype)\nvideo_batch = video_batch / 255.0\nprint(video_batch.dtype)",python,content +38,679700,"sample.py",4083,0,"",python,selection_command +39,683326,"sample.py",3638,0,"",python,selection_mouse +40,684400,"utils/dataloader.py",0,0,"import jax\nimport numpy as np\nimport grain\nfrom typing import Any\nimport pickle\n\n\nclass EpisodeLengthFilter(grain.transforms.Filter):\n """"""\n A Grain Filter that keeps only episodes with sufficient length.\n """"""\n\n def __init__(self, seq_len: int, image_h: int, image_w: int, image_c: int):\n """"""Initializes the filter with sequence length requirements.""""""\n self.seq_len = seq_len\n self.image_h = image_h\n self.image_w = image_w\n self.image_c = image_c\n\n def filter(self, element: Any) -> bool:\n """"""\n Filters episodes based on length.\n\n Args:\n element: A dictionary representing one record from the DataSource.\n Expected to contain 'raw_video' (bytes) and 'sequence_length' (int)\n\n Returns:\n True if the episode has sufficient length, False otherwise.\n """"""\n assert isinstance(element, bytes)\n element = pickle.loads(element)\n\n current_episode_len = element[""sequence_length""]\n if 
current_episode_len < self.seq_len:\n print(\n f""Filtering out episode with length {current_episode_len}, which is ""\n f""shorter than the requested sequence length {self.seq_len}.""\n )\n return False\n\n return True\n\n\nclass ProcessEpisodeAndSlice(grain.transforms.RandomMap):\n """"""\n A Grain Transformation that combines parsing, slicing, and normalizing.\n """"""\n\n def __init__(self, seq_len: int, image_h: int, image_w: int, image_c: int):\n """"""Initializes the transformation with processing parameters.""""""\n self.seq_len = seq_len\n self.image_h = image_h\n self.image_w = image_w\n self.image_c = image_c\n\n def random_map(self, element: dict, rng: np.random.Generator) -> Any:\n """"""\n Processes a single raw episode from the data source.\n\n Args:\n element: A dictionary representing one record from the DataSource.\n Expected to contain 'raw_video' (bytes) and 'sequence_length' (int)\n rng: A per-record random number generator provided by the Grain sampler.\n\n Returns:\n A processed video sequence as a NumPy array with shape\n (seq_len, height, width, channels) and dtype float32.\n """"""\n assert isinstance(element, bytes)\n element = pickle.loads(element)\n\n video_shape = (\n element[""sequence_length""],\n self.image_h,\n self.image_w,\n self.image_c,\n )\n episode_tensor = np.frombuffer(element[""raw_video""], dtype=np.uint8)\n episode_tensor = episode_tensor.reshape(video_shape)\n\n current_episode_len = episode_tensor.shape[0]\n if current_episode_len < self.seq_len:\n raise ValueError(\n f""Episode length {current_episode_len} is shorter than ""\n f""requested sequence length {self.seq_len}. This should ""\n f""have been filtered out.""\n )\n\n max_start_idx = current_episode_len - self.seq_len\n\n start_idx = rng.integers(0, max_start_idx + 1)\n\n seq = episode_tensor[start_idx : start_idx + self.seq_len]\n\n return seq\n\n\ndef get_dataloader(\n array_record_paths: list[str],\n seq_len: int,\n global_batch_size: int,\n image_h: int,\n image_w: int,\n image_c: int,\n num_workers: int = 1,\n prefetch_buffer_size: int = 1,\n seed: int = 42,\n):\n """"""\n Creates a data loading pipeline using Grain.\n """"""\n if not array_record_paths:\n raise ValueError(""array_record_paths list cannot be empty."")\n\n num_processes = jax.process_count()\n\n if global_batch_size % num_processes != 0:\n raise ValueError(\n f""Global batch size {global_batch_size} must be divisible by ""\n f""the number of JAX processes {num_processes} for proper sharding.""\n )\n per_process_batch_size = global_batch_size // num_processes\n\n source = grain.sources.ArrayRecordDataSource(array_record_paths)\n\n sampler = grain.samplers.IndexSampler(\n num_records=len(source),\n shard_options=grain.sharding.ShardByJaxProcess(drop_remainder=True),\n shuffle=True,\n num_epochs=None,\n seed=seed,\n )\n\n operations = [\n EpisodeLengthFilter(\n seq_len=seq_len, image_h=image_h, image_w=image_w, image_c=image_c\n ),\n ProcessEpisodeAndSlice(\n seq_len=seq_len, image_h=image_h, image_w=image_w, image_c=image_c\n ),\n grain.transforms.Batch(batch_size=per_process_batch_size, drop_remainder=True),\n ]\n\n read_options = grain.ReadOptions(\n prefetch_buffer_size=prefetch_buffer_size,\n num_threads=1,\n )\n dataloader = grain.DataLoader(\n data_source=source,\n sampler=sampler,\n operations=operations,\n worker_count=num_workers,\n worker_buffer_size=1,\n read_options=read_options,\n )\n\n return dataloader\n",python,tab +41,687591,"sample.py",0,0,"",python,tab 
+42,699155,"sample.py",3992,0,"",python,selection_mouse +43,699359,"sample.py",3992,2," /",python,selection_mouse +44,699375,"sample.py",3992,3," / ",python,selection_mouse +45,699391,"sample.py",3992,5," / 25",python,selection_mouse +46,699406,"sample.py",3992,7," / 255.",python,selection_mouse +47,699424,"sample.py",3992,8," / 255.0",python,selection_mouse +48,700379,"sample.py",3992,8,"",python,content +49,700396,"sample.py",3991,0,"",python,selection_command +50,700520,"sample.py",3991,1,"",python,content +51,700526,"sample.py",3990,0,"",python,selection_command +52,700698,"sample.py",3990,1,"",python,content +53,700704,"sample.py",3989,0,"",python,selection_command +54,831446,"sample.py",2895,0,"",python,selection_mouse +55,831625,"sample.py",2890,5,"kpt)\n",python,selection_mouse +56,831636,"sample.py",2886,9,"te(ckpt)\n",python,selection_mouse +57,831650,"sample.py",2801,94,"pointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n",python,selection_mouse +58,831668,"sample.py",2797,98,"heckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n",python,selection_mouse +59,831678,"sample.py",2796,99,"Checkpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n",python,selection_mouse +60,831703,"sample.py",2794,101,"eeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n",python,selection_mouse +61,831711,"sample.py",2792,103,"TreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n",python,selection_mouse +62,831745,"sample.py",2789,106," PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n",python,selection_mouse +63,831746,"sample.py",2788,107,"= PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n",python,selection_mouse +64,831829,"sample.py",2787,108," = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n",python,selection_mouse +65,831831,"sample.py",2786,109,"t = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n",python,selection_mouse +66,831831,"sample.py",2785,110,"pt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n",python,selection_mouse +67,831831,"sample.py",2784,111,"kpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n",python,selection_mouse +68,831911,"sample.py",2783,112,"ckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n",python,selection_mouse +69,832707,"sample.py",2783,113,"",python,content +70,833712,"sample.py",2783,0,"\n",python,content +71,834084,"sample.py",2784,0,"\ndummy_train_state = TrainState.create(\n apply_fn=genie.apply,\n params=params,\n tx=optax.adamw(\n optax.warmup_cosine_decay_schedule(\n 0, 0, 1, 2 # dummy values\n )\n ), \n)\nhandler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\nhandler_registry.add('model_state', ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler)\ncheckpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=ocp.CheckpointManagerOptions(step_format_fixed_length=6),\n 
handler_registry=handler_registry\n)\nabstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, dummy_train_state\n)\n\nrestored = checkpoint_manager.restore(\n args.checkpoint_step or checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n ),\n)\nrestored_train_state = restored[""model_state""]\nparams = restored_train_state.params",python,content +72,835493,"sample.py",3729,0,"",python,selection_command +73,838304,"sample.py",2784,0,"",python,selection_mouse +74,849552,"sample.py",250,0,"",python,selection_mouse +75,849558,"sample.py",249,0,"",python,selection_command +76,851771,"sample.py",201,0,"",python,selection_command +77,852002,"sample.py",202,0,"ckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n\n",python,content +78,852026,"sample.py",202,0,"",python,selection_command +79,853606,"sample.py",202,113,"",python,content +80,853623,"sample.py",201,0,"",python,selection_command +81,854162,"sample.py",217,0,"\n",python,content +82,854375,"sample.py",218,0,"import optax\n",python,content +83,863874,"sample.py",231,0,"from flax.training.train_state import TrainState\n",python,content +84,864734,"sample.py",279,1,"",python,content +85,865699,"sample.py",206,0,"",python,selection_mouse +86,865744,"sample.py",205,0,"",python,selection_command +87,866346,"sample.py",170,48,"",python,content +88,877297,"sample.py",182,0,"",python,selection_mouse +89,877303,"sample.py",181,0,"",python,selection_command +90,878057,"sample.py",182,0,"\n",python,content +91,878214,"sample.py",183,0,"import orbax.checkpoint as ocp\n",python,content +92,878935,"sample.py",213,1,"",python,content +93,886469,"sample.py",0,0,"",python,tab +94,1017820,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_ffn_dim: int = 
2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n use_flash_attention: bool = True\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n 
tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n 
handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +95,1038040,"train_dynamics.py",9442,0,"",python,selection_mouse +96,1039475,"train_dynamics.py",9578,0,"",python,selection_mouse +97,1040030,"train_dynamics.py",9599,0,"",python,selection_mouse +98,1041040,"train_dynamics.py",9561,0,"",python,selection_mouse +99,1041786,"train_dynamics.py",9598,0,"",python,selection_mouse 
+100,1041787,"train_dynamics.py",9597,0,"",python,selection_command +101,1042107,"train_dynamics.py",9597,1,")",python,selection_mouse +102,1042112,"train_dynamics.py",9598,0,"",python,selection_command +103,1042486,"train_dynamics.py",9556,42," handler_registry=handler_registry,\n )",python,selection_mouse +104,1042521,"train_dynamics.py",9555,43," handler_registry=handler_registry,\n )",python,selection_mouse +105,1049993,"train_dynamics.py",10588,0,"",python,selection_mouse +106,1050114,"train_dynamics.py",10574,18,"checkpoint_manager",python,selection_mouse +107,1051453,"sample.py",0,0,"",python,tab +108,1055431,"sample.py",3549,0,"",python,selection_mouse +109,1056015,"sample.py",3540,0,"",python,selection_mouse +110,1056169,"sample.py",3533,15,"checkpoint_step",python,selection_mouse +111,1056463,"sample.py",3532,16,".checkpoint_step",python,selection_mouse +112,1056507,"sample.py",3528,20,"args.checkpoint_step",python,selection_mouse +113,1057117,"sample.py",3531,0,"",python,selection_mouse +114,1057118,"sample.py",3528,4,"args",python,selection_mouse +115,1057359,"sample.py",3528,5,"args.",python,selection_mouse +116,1057392,"sample.py",3528,20,"args.checkpoint_step",python,selection_mouse +117,1060630,"sample.py",3528,20,"",python,content +118,1060952,"sample.py",3528,1,"",python,content +119,1061195,"sample.py",3528,1,"",python,content +120,1061241,"sample.py",3528,1,"",python,content +121,1061419,"sample.py",3528,1,"",python,content +122,1201521,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",0,0,"\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer-lr-scaling/train_tokenizer_lr_sweep_1e-4\ndynamics_ckpt_dir=$1\necho $dynamics_ckpt_dir\n\nenv | grep SLURM\n\nsrun python sample.py \\n --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=10 \\n --start_frame=0 \\n --data_dir $array_records_dir\n\n# srun python sample.py \\n # --checkpoint $dynamics_ckpt_dir \\n # --start_frame=0 \\n # --batch_size=12 \\n # --seq_len=2 \\n # --data_dir $array_records_dir\n",shellscript,tab +123,1205330,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",794,0,"",shellscript,selection_mouse +124,1208070,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",793,1,"",shellscript,content +125,1208232,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",792,1,"",shellscript,content +126,1209872,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",792,0,"5",shellscript,content +127,1209874,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",793,0,"",shellscript,selection_keyboard +128,1341561,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",792,1,"",shellscript,content +129,1341666,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",792,0,"1",shellscript,content +130,1341667,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",793,0,"",shellscript,selection_keyboard +131,1342833,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",753,0,"",shellscript,selection_mouse 
+132,1342965,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",750,4,"4096",shellscript,selection_mouse +133,1343118,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",749,5,"=4096",shellscript,selection_mouse +134,1343136,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",711,43,"dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +135,1343170,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",684,70,"dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +136,1343286,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",662,92,"dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096",shellscript,selection_mouse +137,1343722,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",662,0,"",shellscript,selection_mouse +138,1343723,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",662,8,"dyna_dim",shellscript,selection_mouse +139,1343914,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",662,37,"dyna_dim=1024 \\n --dyna_num_blocks",shellscript,selection_mouse +140,1343934,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",662,63,"dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads",shellscript,selection_mouse +141,1343970,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",662,64,"dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=",shellscript,selection_mouse +142,1344014,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",662,66,"dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16",shellscript,selection_mouse +143,1344015,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",662,67,"dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 ",shellscript,selection_mouse +144,1344051,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",662,68,"dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \",shellscript,selection_mouse +145,1344085,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",662,94,"dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +146,1344432,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",756,0,"",shellscript,selection_mouse +147,1344677,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",754,2," \",shellscript,selection_mouse +148,1344714,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",724,32,"s=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +149,1344719,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",722,34,"ads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +150,1344719,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",719,37,"_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +151,1344725,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",690,66,"um_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +152,1344768,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",686,70,"na_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +153,1344769,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",685,71,"yna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +154,1344807,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",683,73,"-dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +155,1344864,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",682,74,"--dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse 
+156,1344864,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",681,75," --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +157,1344865,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",658,98," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +158,1344902,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",657,99," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +159,1344949,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",619,137," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \",shellscript,selection_mouse +160,1345397,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",619,0,"",shellscript,selection_mouse +161,1345398,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,4," ",shellscript,selection_mouse +162,1345595,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,40," --checkpoint $dynamics_ckpt_dir \\n ",shellscript,selection_mouse +163,1345611,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,64," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n ",shellscript,selection_mouse +164,1345629,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,93," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --",shellscript,selection_mouse +165,1345654,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,131," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim",shellscript,selection_mouse +166,1345675,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,152," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len",shellscript,selection_mouse +167,1345688,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,173," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size",shellscript,selection_mouse +168,1345708,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,175," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1",shellscript,selection_mouse +169,1345720,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,177," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \",shellscript,selection_mouse +170,1345740,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,199," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \",shellscript,selection_mouse +171,1345834,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,233," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +172,1346251,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",851,0,"",shellscript,selection_mouse +173,1346419,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",834,17,"array_records_dir",shellscript,selection_mouse 
+174,1346604,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",817,34,"\n --data_dir $array_records_dir",shellscript,selection_mouse +175,1346631,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",795,56,"\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +176,1346663,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",793,58," \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +177,1346701,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",773,78,"\\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +178,1346702,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",770,81,"=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +179,1346733,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",763,88,"seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +180,1346734,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",737,114,"dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +181,1346811,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",710,141,"-dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +182,1346820,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",709,142,"--dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +183,1346857,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",708,143," --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +184,1346890,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",681,170," --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +185,1346923,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",680,171," --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +186,1346954,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",658,193," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +187,1346998,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",657,194," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +188,1347077,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",619,232," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +189,1347479,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",619,0,"",shellscript,selection_mouse +190,1347480,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,4," ",shellscript,selection_mouse 
+191,1347663,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,42," --checkpoint $dynamics_ckpt_dir \\n ",shellscript,selection_mouse +192,1347702,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,81," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks",shellscript,selection_mouse +193,1347702,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,107," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads",shellscript,selection_mouse +194,1347737,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,131," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim",shellscript,selection_mouse +195,1347738,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,156," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \",shellscript,selection_mouse +196,1347771,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,177," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \",shellscript,selection_mouse +197,1347803,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,199," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \",shellscript,selection_mouse +198,1347839,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,233," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +199,1348177,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",851,0,"",shellscript,selection_mouse +200,1348310,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",834,17,"array_records_dir",shellscript,selection_mouse +201,1348487,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",817,34,"\n --data_dir $array_records_dir",shellscript,selection_mouse +202,1348530,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",795,56,"\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +203,1348566,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",774,77,"\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +204,1348567,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",771,80,"2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +205,1348571,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",737,114,"dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +206,1348657,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",736,115,"-dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +207,1348689,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",735,116,"--dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +208,1348725,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",709,142,"--dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir 
$array_records_dir",shellscript,selection_mouse +209,1348762,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",708,143," --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +210,1348841,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",680,171," --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +211,1348852,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",679,172," --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +212,1348870,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",657,194," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +213,1348906,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,233," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +214,1350172,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",619,0,"",shellscript,selection_mouse +215,1350172,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,4," ",shellscript,selection_mouse +216,1350393,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,40," --checkpoint $dynamics_ckpt_dir \\n ",shellscript,selection_mouse +217,1350403,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,64," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n ",shellscript,selection_mouse +218,1350433,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,66," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --",shellscript,selection_mouse +219,1350438,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,107," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads",shellscript,selection_mouse +220,1350466,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,131," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim",shellscript,selection_mouse +221,1350557,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,154," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2",shellscript,selection_mouse +222,1350557,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,155," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 ",shellscript,selection_mouse +223,1350557,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,156," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \",shellscript,selection_mouse +224,1350612,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,177," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \",shellscript,selection_mouse 
+225,1350645,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,199," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \",shellscript,selection_mouse +226,1350827,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,233," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +227,1351509,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",851,0,"",shellscript,selection_mouse +228,1351729,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",834,17,"array_records_dir",shellscript,selection_mouse +229,1351973,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",817,34,"\n --data_dir $array_records_dir",shellscript,selection_mouse +230,1351990,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",814,37,"0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +231,1352001,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",813,38,"=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +232,1352025,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",781,70,"batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +233,1352110,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",763,88,"seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +234,1352117,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",737,114,"dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +235,1352149,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",711,140,"dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +236,1352228,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",684,167,"dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +237,1352233,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",683,168,"-dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +238,1352265,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",682,169,"--dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +239,1352299,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",659,192," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +240,1352377,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",658,193," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +241,1352382,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",657,194," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n 
--batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +242,1352421,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",618,233," --checkpoint $dynamics_ckpt_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096 \\n --seq_len=2 \\n --batch_size=1 \\n --start_frame=0 \\n --data_dir $array_records_dir",shellscript,selection_mouse +243,1511438,"sample.py",0,0,"",python,tab +244,1512708,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",0,0,"",shellscript,tab +245,1532243,"sample.py",0,0,"",python,tab +246,1538054,"sample.py",3729,0,"",python,selection_mouse +247,1555508,"sample.py",2724,0,"",python,selection_mouse +248,1557391,"sample.py",2727,0,"",python,selection_mouse +249,1557517,"sample.py",2723,7,"float32",python,selection_mouse +250,1557873,"sample.py",2722,8,".float32",python,selection_mouse +251,1558088,"sample.py",2719,11,"jnp.float32",python,selection_mouse +252,1558943,"sample.py",2720,0,"",python,selection_mouse +253,1558944,"sample.py",2719,3,"jnp",python,selection_mouse +254,1559182,"sample.py",2719,4,"jnp.",python,selection_mouse +255,1559194,"sample.py",2719,11,"jnp.float32",python,selection_mouse +256,1560674,"sample.py",2719,11,"a",python,content +257,1560676,"sample.py",2720,0,"",python,selection_keyboard +258,1560951,"sample.py",2720,0,"r",python,content +259,1560952,"sample.py",2721,0,"",python,selection_keyboard +260,1561136,"sample.py",2721,0,"g",python,content +261,1561137,"sample.py",2722,0,"",python,selection_keyboard +262,1561224,"sample.py",2722,0,"s",python,content +263,1561226,"sample.py",2723,0,"",python,selection_keyboard +264,1561387,"sample.py",2723,0,".",python,content +265,1561388,"sample.py",2724,0,"",python,selection_keyboard +266,1562325,"sample.py",2724,0,"d",python,content +267,1562326,"sample.py",2725,0,"",python,selection_keyboard +268,1562870,"sample.py",2725,0,"t",python,content +269,1562870,"sample.py",2726,0,"",python,selection_keyboard +270,1563177,"sample.py",2726,0,"y",python,content +271,1563178,"sample.py",2727,0,"",python,selection_keyboard +272,1563351,"sample.py",2727,0,"p",python,content +273,1563351,"sample.py",2728,0,"",python,selection_keyboard +274,1563459,"sample.py",2728,0,"e",python,content +275,1563459,"sample.py",2729,0,"",python,selection_keyboard +276,1564448,"sample.py",2731,0,"",python,selection_mouse +277,1564753,"sample.py",2729,2,"),",python,selection_mouse +278,1564760,"sample.py",2731,19,"\n mask_rng=_rng,",python,selection_mouse +279,1564880,"sample.py",2660,71,"zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),",python,selection_mouse +280,1564880,"sample.py",2656,75,"jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),",python,selection_mouse +281,1564884,"sample.py",2655,76,"=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),",python,selection_mouse +282,1564900,"sample.py",2649,82,"videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),",python,selection_mouse +283,1565113,"sample.py",2624,107,"dummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),",python,selection_mouse +284,1565702,"sample.py",2626,0,"",python,selection_mouse +285,1565702,"sample.py",2624,12,"dummy_inputs",python,selection_mouse +286,1565884,"sample.py",2624,23,"dummy_inputs = dict(\n ",python,selection_mouse +287,1565916,"sample.py",2624,24,"dummy_inputs = dict(\n ",python,selection_mouse 
+288,1565916,"sample.py",2624,120,"dummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),\n mask_rng",python,selection_mouse +289,1565994,"sample.py",2624,128,"dummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),\n mask_rng=_rng,\n)",python,selection_mouse +290,1566231,"sample.py",2752,0,"",python,selection_mouse +291,1566618,"sample.py",2745,7,"_rng,\n)",python,selection_mouse +292,1566637,"sample.py",2736,16,"mask_rng=_rng,\n)",python,selection_mouse +293,1566670,"sample.py",2649,103,"videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),\n mask_rng=_rng,\n)",python,selection_mouse +294,1566765,"sample.py",2624,128,"dummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),\n mask_rng=_rng,\n)",python,selection_mouse +295,1566806,"sample.py",2551,201,"image_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),\n mask_rng=_rng,\n)",python,selection_mouse +296,1566903,"sample.py",2517,235,"rng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),\n mask_rng=_rng,\n)",python,selection_mouse +297,1567458,"sample.py",2518,0,"",python,selection_mouse +298,1567458,"sample.py",2517,3,"rng",python,selection_mouse +299,1567681,"sample.py",2517,45,"rng, _rng = jax.random.split(rng)\nimage_shape",python,selection_mouse +300,1567700,"sample.py",2517,119,"rng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs",python,selection_mouse +301,1567720,"sample.py",2517,132,"rng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n ",python,selection_mouse +302,1567752,"sample.py",2517,227,"rng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),\n mask_rng",python,selection_mouse +303,1567788,"sample.py",2517,235,"rng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),\n mask_rng=_rng,\n)",python,selection_mouse +304,1567823,"sample.py",2517,246,"rng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),\n mask_rng=_rng,\n)\nrng, _rng ",python,selection_mouse +305,1567856,"sample.py",2517,284,"rng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie",python,selection_mouse +306,1567889,"sample.py",2517,310,"rng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),\n 
mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\n",python,selection_mouse +307,1568001,"sample.py",2517,311,"rng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\n\n",python,selection_mouse +308,1568793,"sample.py",2828,0,"",python,selection_mouse +309,1569175,"sample.py",2827,1,"\n",python,selection_mouse +310,1569209,"sample.py",2811,17,", dummy_inputs)\n\n",python,selection_mouse +311,1569247,"sample.py",2807,21,"_rng, dummy_inputs)\n\n",python,selection_mouse +312,1569280,"sample.py",2769,59,"random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\n\n",python,selection_mouse +313,1569280,"sample.py",2752,76,"\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\n\n",python,selection_mouse +314,1569319,"sample.py",2745,83,"_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\n\n",python,selection_mouse +315,1569351,"sample.py",2744,84,"=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\n\n",python,selection_mouse +316,1569352,"sample.py",2656,172,"jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\n\n",python,selection_mouse +317,1569352,"sample.py",2655,173,"=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\n\n",python,selection_mouse +318,1569383,"sample.py",2624,204,"dummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\n\n",python,selection_mouse +319,1569525,"sample.py",2551,277,"image_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\n\n",python,selection_mouse +320,1569602,"sample.py",2520,308,", _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\n\n",python,selection_mouse +321,1569603,"sample.py",2517,311,"rng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=args.dtype),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\n\n",python,selection_mouse +322,1569952,"sample.py",2519,0,"",python,selection_mouse +323,1587102,"sample.py",2752,0,"",python,selection_mouse +324,1587947,"sample.py",2752,0,"u",python,content +325,1587948,"sample.py",2753,0,"",python,selection_keyboard +326,1588356,"sample.py",2753,0,"u",python,content +327,1588358,"sample.py",2754,0,"",python,selection_keyboard +328,1588945,"sample.py",2753,1,"",python,content 
+329,1589092,"sample.py",2752,1,"",python,content +330,1589664,"sample.py",2751,0,"",python,selection_command +331,1589815,"sample.py",3526,0," args.checkpoint_step or",python,content +332,1589816,"sample.py",2719,10,"jnp.float32",python,content +333,1589816,"sample.py",183,31,"",python,content +334,1589848,"sample.py",181,0,"",python,selection_command +335,1592180,"sample.py",183,0,"import orbax.checkpoint as ocp\n",python,content +336,1592185,"sample.py",2719,11,"args.dtype",python,content +337,1592186,"sample.py",3526,24,"",python,content +338,1595319,"sample.py",2720,0,"",python,selection_mouse +339,1596497,"sample.py",3526,0," args.checkpoint_step or",python,content +340,1597293,"sample.py",2719,10,"jnp.float32",python,content +341,1597620,"sample.py",183,31,"",python,content +342,1598176,"sample.py",183,0,"import orbax.checkpoint as ocp\n",python,content +343,1599549,"sample.py",2719,11,"args.dtype",python,content +344,1600700,"sample.py",3526,24,"",python,content +345,1601110,"sample.py",2752,0,"uu",python,content +346,1601626,"sample.py",2752,2,"",python,content +347,1602108,"sample.py",2719,10,"jnp.float32",python,content +348,1999166,"sample.py",3484,0,"",python,selection_mouse +349,1999740,"sample.py",3269,0,"",python,selection_mouse +350,2000462,"sample.py",3415,0,"",python,selection_mouse +351,2001193,"sample.py",3379,0,"",python,selection_mouse +352,2004722,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",0,0,"",shellscript,tab +353,2006170,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",700,0,"",shellscript,selection_mouse +354,2006591,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",794,0,"",shellscript,selection_mouse +355,2007314,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",744,0,"",shellscript,selection_mouse +356,2007886,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",772,0,"",shellscript,selection_mouse +357,2009255,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",743,0,"",shellscript,selection_mouse +358,2009397,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",737,12,"dyna_ffn_dim",shellscript,selection_mouse +359,2010286,"sample.py",0,0,"",python,tab +360,2013566,"sample.py",1062,0,"",python,selection_mouse +361,2013720,"sample.py",1056,11,"lam_ffn_dim",python,selection_mouse +362,2024615,"sample.py",2727,0,"",python,selection_mouse +363,2024769,"sample.py",2723,7,"float32",python,selection_mouse +364,2025005,"sample.py",2722,8,".float32",python,selection_mouse +365,2025040,"sample.py",2719,11,"jnp.float32",python,selection_mouse +366,2028574,"sample.py",2719,11,"a",python,content +367,2028575,"sample.py",2720,0,"",python,selection_keyboard +368,2028803,"sample.py",2720,0,"r",python,content +369,2028807,"sample.py",2721,0,"",python,selection_keyboard +370,2028904,"sample.py",2721,0,"g",python,content +371,2028905,"sample.py",2722,0,"",python,selection_keyboard +372,2029735,"sample.py",2719,3,"args",python,content +373,2032082,"sample.py",2723,0,".",python,content +374,2032083,"sample.py",2724,0,"",python,selection_keyboard +375,2032804,"sample.py",2724,0,"s",python,content +376,2032805,"sample.py",2725,0,"",python,selection_keyboard +377,2033140,"sample.py",2725,0,"t",python,content +378,2033142,"sample.py",2726,0,"",python,selection_keyboard +379,2033345,"sample.py",2725,1,"",python,content +380,2033512,"sample.py",2724,1,"",python,content +381,2033548,"sample.py",2724,0,"d",python,content +382,2033550,"sample.py",2725,0,"",python,selection_keyboard +383,2033785,"sample.py",2725,0,"t",python,content 
+384,2033785,"sample.py",2726,0,"",python,selection_keyboard +385,2034041,"sample.py",2726,0,"p",python,content +386,2034042,"sample.py",2727,0,"",python,selection_keyboard +387,2034395,"sample.py",2726,1,"",python,content +388,2034418,"sample.py",2726,0,"y",python,content +389,2034420,"sample.py",2727,0,"",python,selection_keyboard +390,2034483,"sample.py",2727,0,"p",python,content +391,2034484,"sample.py",2728,0,"",python,selection_keyboard +392,2034619,"sample.py",2728,0,"e",python,content +393,2034620,"sample.py",2729,0,"",python,selection_keyboard +394,2153137,"sample.py",2692,0,"",python,selection_mouse +395,2153277,"sample.py",2689,7,"seq_len",python,selection_mouse +396,2156276,"slurm/dev/mihir/horeka/yolo-runs/sampling_dev.sh",0,0,"",shellscript,tab +397,4512967,"utils/nn.py",0,0,"",python,tab +398,4514214,"utils/nn.py",836,0,"",python,selection_mouse +399,4514542,"utils/nn.py",927,0,"",python,selection_mouse +400,4515109,"utils/nn.py",896,0,"",python,selection_mouse diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-bebf29de-c50f-45f7-b90b-66f518a4cf1c1758196766807-2025_09_18-14.00.11.582/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-bebf29de-c50f-45f7-b90b-66f518a4cf1c1758196766807-2025_09_18-14.00.11.582/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..3d056efb0a5ac5751670039a515ec678e11f2cab --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-bebf29de-c50f-45f7-b90b-66f518a4cf1c1758196766807-2025_09_18-14.00.11.582/source.csv @@ -0,0 +1,71 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,781,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"2:00:11 PM [info] Activating crowd-code\n2:00:11 PM [info] Recording started\n2:00:11 PM [info] Initializing git provider using file system watchers...\n2:00:11 PM [info] Git repository found\n2:00:11 PM [info] Git provider initialized successfully\n2:00:11 PM [info] Initial git state: [object Object]\n",Log,tab +3,67473,"TERMINAL",0,0,"bash",,terminal_focus +4,69307,"TERMINAL",0,0,"queue",,terminal_command +5,69398,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Sep 18 14:01:20 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3501894 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3501895 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3501896 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)3501898 accelerat interact tum_cte0 PD\t0:00\t 1 (Priority)",,terminal_output +6,70446,"TERMINAL",0,0,"1",,terminal_output +7,71471,"TERMINAL",0,0,"2",,terminal_output +8,72014,"TERMINAL",0,0,"bash",,terminal_focus +9,72519,"TERMINAL",0,0,"3",,terminal_output +10,73560,"TERMINAL",0,0,"5",,terminal_output +11,73838,"TERMINAL",0,0,"watch",,terminal_focus +12,74606,"TERMINAL",0,0,"6",,terminal_output +13,75684,"TERMINAL",0,0,"7",,terminal_output +14,76588,"TERMINAL",0,0,"bash",,terminal_focus +15,76705,"TERMINAL",0,0,"8",,terminal_output +16,77780,"TERMINAL",0,0,"9",,terminal_output +17,78769,"TERMINAL",0,0,"scancel 3501898",,terminal_command +18,78780,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +19,78821,"TERMINAL",0,0,"30",,terminal_output +20,79916,"TERMINAL",0,0,"\r1",,terminal_output +21,80049,"TERMINAL",0,0,"watch",,terminal_focus +22,80596,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output 
+23,81931,"TERMINAL",0,0,"queue",,terminal_command +24,81994,"TERMINAL",0,0,"]633;C",,terminal_output +25,82072,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Sep 18 14:01:33 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3501894 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3501895 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3501896 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)",,terminal_output +26,83171,"TERMINAL",0,0,"4",,terminal_output +27,83321,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +28,175951,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_command +29,176011,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +30,177065,"TERMINAL",0,0,"queue",,terminal_command +31,177170,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Sep 18 14:03:08 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3501894 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3501895 accelerat train_la tum_cte0 PD\t0:00\t 1 (Priority)3501896 accelerat train_to tum_cte0 PD\t0:00\t 1 (Priority)",,terminal_output +32,178187,"TERMINAL",0,0,"9",,terminal_output +33,178743,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +34,179749,"TERMINAL",0,0,"bash",,terminal_focus +35,297446,"input_pipeline/generate_coinrun_dataset.py",0,0,"""""""\nGenerates a dataset of random-action CoinRun episodes.\nEpisodes are saved individually as memory-mapped files for efficient loading.\n""""""\n\nfrom dataclasses import dataclass\n\nfrom gym3 import types_np\nimport numpy as np\nfrom procgen import ProcgenGym3Env\nimport tyro\nimport json\nimport os\nfrom utils import save_chunks\n\n\n@dataclass\nclass Args:\n num_episodes_train: int = 10000\n num_episodes_val: int = 500\n num_episodes_test: int = 500\n output_dir: str = ""data/coinrun_episodes""\n min_episode_length: int = 1000\n max_episode_length: int = 1000\n chunk_size: int = 100\n chunks_per_file: int = 100\n seed: int = 0\n\n\nargs = tyro.cli(Args)\nassert (\n args.max_episode_length >= args.min_episode_length\n), ""Maximum episode length must be greater than or equal to minimum episode length.""\n\nif args.min_episode_length < args.chunk_size:\n print(\n ""Warning: Minimum episode length is smaller than chunk size. 
Note that episodes shorter than the chunk size will be discarded.""\n )\n\n\n# --- Generate episodes ---\ndef generate_episodes(num_episodes, split):\n episode_idx = 0\n episode_metadata = []\n obs_chunks = []\n act_chunks = []\n file_idx = 0\n output_dir_split = os.path.join(args.output_dir, split)\n while episode_idx < num_episodes:\n seed = np.random.randint(0, 10000)\n env = ProcgenGym3Env(num=1, env_name=""coinrun"", start_level=seed)\n\n observations_seq = []\n actions_seq = []\n episode_obs_chunks = []\n episode_act_chunks = []\n\n # --- Run episode ---\n step_t = 0\n for step_t in range(args.max_episode_length):\n action = types_np.sample(env.ac_space, bshape=(env.num,))\n env.act(action)\n _, obs, first = env.observe()\n observations_seq.append(obs[""rgb""])\n actions_seq.append(action)\n if len(observations_seq) == args.chunk_size:\n episode_obs_chunks.append(observations_seq)\n episode_act_chunks.append(actions_seq)\n observations_seq = []\n actions_seq = []\n if first:\n break\n\n # --- Save episode ---\n if step_t + 1 >= args.min_episode_length:\n if observations_seq:\n if len(observations_seq) < args.chunk_size:\n print(\n f""Warning: Inconsistent chunk_sizes. Episode has {len(observations_seq)} frames, ""\n f""which is smaller than the requested chunk_size: {args.chunk_size}. ""\n ""This might lead to performance degradation during training.""\n )\n episode_obs_chunks.append(observations_seq)\n episode_act_chunks.append(actions_seq)\n\n obs_chunks_data = [\n np.concatenate(seq, axis=0).astype(np.uint8)\n for seq in episode_obs_chunks\n ]\n act_chunks_data = [\n np.concatenate(act, axis=0) for act in episode_act_chunks\n ]\n obs_chunks.extend(obs_chunks_data)\n act_chunks.extend(act_chunks_data)\n\n ep_metadata, obs_chunks, file_idx, act_chunks = save_chunks(\n obs_chunks, file_idx, args.chunks_per_file, output_dir_split, act_chunks\n )\n episode_metadata.extend(ep_metadata)\n\n print(f""Episode {episode_idx} completed, length: {step_t + 1}."")\n episode_idx += 1\n else:\n print(f""Episode too short ({step_t + 1}), resampling..."")\n\n if len(obs_chunks) > 0:\n print(\n f""Warning: Dropping {len(obs_chunks)} chunks for consistent number of chunks per file."",\n ""Consider changing the chunk_size and chunks_per_file parameters to prevent data-loss."",\n )\n\n print(f""Done generating {split} split"")\n return episode_metadata\n\n\ndef get_action_space():\n env = ProcgenGym3Env(num=1, env_name=""coinrun"", start_level=0)\n return env.ac_space.eltype.n\n\n\ndef main():\n # Set random seed and create dataset directories\n np.random.seed(args.seed)\n # --- Generate episodes ---\n train_episode_metadata = generate_episodes(args.num_episodes_train, ""train"")\n val_episode_metadata = generate_episodes(args.num_episodes_val, ""val"")\n test_episode_metadata = generate_episodes(args.num_episodes_test, ""test"")\n\n # --- Save metadata ---\n metadata = {\n ""env"": ""coinrun"",\n ""num_actions"": get_action_space(),\n ""num_episodes_train"": args.num_episodes_train,\n ""num_episodes_val"": args.num_episodes_val,\n ""num_episodes_test"": args.num_episodes_test,\n ""avg_episode_len_train"": np.mean(\n [ep[""avg_seq_len""] for ep in train_episode_metadata]\n ),\n ""avg_episode_len_val"": np.mean(\n [ep[""avg_seq_len""] for ep in val_episode_metadata]\n ),\n ""avg_episode_len_test"": np.mean(\n [ep[""avg_seq_len""] for ep in test_episode_metadata]\n ),\n ""episode_metadata_train"": train_episode_metadata,\n ""episode_metadata_val"": val_episode_metadata,\n ""episode_metadata_test"": 
test_episode_metadata,\n }\n with open(os.path.join(args.output_dir, ""metadata.json""), ""w"") as f:\n json.dump(metadata, f)\n\n print(f""Done generating dataset."")\n\n\nif __name__ == ""__main__"":\n main()\n",python,tab +36,301969,"input_pipeline/generate_coinrun_dataset.py",1352,0,"",python,selection_mouse +37,303278,"input_pipeline/generate_coinrun_dataset.py",1350,0,"",python,selection_mouse +38,303463,"input_pipeline/generate_coinrun_dataset.py",1343,14,"ProcgenGym3Env",python,selection_mouse +39,306091,"input_pipeline/generate_coinrun_dataset.py",231,0,"",python,selection_mouse +40,306321,"input_pipeline/generate_coinrun_dataset.py",227,7,"procgen",python,selection_mouse +41,306897,"input_pipeline/generate_coinrun_dataset.py",184,0,"",python,selection_mouse +42,307088,"input_pipeline/generate_coinrun_dataset.py",182,4,"gym3",python,selection_mouse +43,311458,"input_pipeline/generate_coinrun_dataset.py",197,0,"",python,selection_mouse +44,311594,"input_pipeline/generate_coinrun_dataset.py",194,8,"types_np",python,selection_mouse +45,315008,"input_pipeline/generate_coinrun_dataset.py",1654,0,"",python,selection_mouse +46,315082,"input_pipeline/generate_coinrun_dataset.py",1648,8,"types_np",python,selection_mouse +47,317287,"input_pipeline/generate_coinrun_dataset.py",184,0,"",python,selection_mouse +48,317761,".venv/lib/python3.10/site-packages/gym3/__init__.py",0,0,"from gym3 import libenv, testing, types, types_np\nfrom gym3.asynchronous import AsynchronousWrapper\nfrom gym3.concat import ConcatEnv\nfrom gym3.env import Env\nfrom gym3.interactive import Interactive\nfrom gym3.interop import (\n FromBaselinesVecEnv,\n FromGymEnv,\n ToBaselinesVecEnv,\n ToGymEnv,\n vectorize_gym,\n)\nfrom gym3.subproc import SubprocEnv, SubprocError\nfrom gym3.trajectory_recorder import TrajectoryRecorderWrapper\nfrom gym3.util import call_func\nfrom gym3.video_recorder import VideoRecorderWrapper\nfrom gym3.viewer import ViewerWrapper\nfrom gym3.wrapper import Wrapper, unwrap\nfrom gym3.extract_dict_ob import ExtractDictObWrapper\n\n__all__ = [\n ""AsynchronousWrapper"",\n ""call_func"",\n ""ConcatEnv"",\n ""Env"",\n ""ExtractDictObWrapper"",\n ""FromBaselinesVecEnv"",\n ""FromGymEnv"",\n ""Interactive"",\n ""libenv"",\n ""SubprocEnv"",\n ""SubprocError"",\n ""testing"",\n ""ToBaselinesVecEnv"",\n ""ToGymEnv"",\n ""TrajectoryRecorderWrapper"",\n ""types_np"",\n ""types"",\n ""unwrap"",\n ""vectorize_gym"",\n ""VideoRecorderWrapper"",\n ""ViewerWrapper"",\n ""Wrapper"",\n ""wrappers"",\n]\n",python,tab +49,320860,".venv/lib/python3.10/site-packages/gym3/__init__.py",713,0,"",python,selection_mouse +50,320880,".venv/lib/python3.10/site-packages/gym3/__init__.py",712,0,"",python,selection_command +51,321055,".venv/lib/python3.10/site-packages/gym3/__init__.py",712,1,",",python,selection_mouse +52,321138,".venv/lib/python3.10/site-packages/gym3/__init__.py",696,16,"\n ""call_func""",python,selection_mouse +53,321139,".venv/lib/python3.10/site-packages/gym3/__init__.py",669,43,"\n ""AsynchronousWrapper"",\n ""call_func""",python,selection_mouse +54,321139,".venv/lib/python3.10/site-packages/gym3/__init__.py",657,55,"\n__all__ = [\n ""AsynchronousWrapper"",\n ""call_func""",python,selection_mouse +55,321140,".venv/lib/python3.10/site-packages/gym3/__init__.py",642,70,"tDictObWrapper\n\n__all__ = [\n ""AsynchronousWrapper"",\n ""call_func""",python,selection_mouse +56,321140,".venv/lib/python3.10/site-packages/gym3/__init__.py",713,0,"",python,selection_command 
+57,321184,".venv/lib/python3.10/site-packages/gym3/__init__.py",601,112,"p\nfrom gym3.extract_dict_ob import ExtractDictObWrapper\n\n__all__ = [\n ""AsynchronousWrapper"",\n ""call_func"",",python,selection_mouse +58,321230,".venv/lib/python3.10/site-packages/gym3/__init__.py",561,152,"\nfrom gym3.wrapper import Wrapper, unwrap\nfrom gym3.extract_dict_ob import ExtractDictObWrapper\n\n__all__ = [\n ""AsynchronousWrapper"",\n ""call_func"",",python,selection_mouse +59,321746,".venv/lib/python3.10/site-packages/gym3/__init__.py",561,0,"",python,selection_mouse +60,321747,".venv/lib/python3.10/site-packages/gym3/__init__.py",560,0,"",python,selection_command +61,322629,".venv/lib/python3.10/site-packages/gym3/__init__.py",657,0,"",python,selection_mouse +62,323244,".venv/lib/python3.10/site-packages/gym3/__init__.py",636,0,"",python,selection_mouse +63,329407,"input_pipeline/generate_coinrun_dataset.py",0,0,"",python,tab +64,333462,"input_pipeline/generate_coinrun_dataset copy.py",0,0,"""""""\nGenerates a dataset of random-action CoinRun episodes.\nEpisodes are saved individually as memory-mapped files for efficient loading.\n""""""\n\nfrom dataclasses import dataclass\n\nfrom gym3 import types_np\nimport numpy as np\nfrom procgen import ProcgenGym3Env\nimport tyro\nimport json\nimport os\nfrom utils import save_chunks\n\n\n@dataclass\nclass Args:\n num_episodes_train: int = 10000\n num_episodes_val: int = 500\n num_episodes_test: int = 500\n output_dir: str = ""data/coinrun_episodes""\n min_episode_length: int = 1000\n max_episode_length: int = 1000\n chunk_size: int = 100\n chunks_per_file: int = 100\n seed: int = 0\n\n\nargs = tyro.cli(Args)\nassert (\n args.max_episode_length >= args.min_episode_length\n), ""Maximum episode length must be greater than or equal to minimum episode length.""\n\nif args.min_episode_length < args.chunk_size:\n print(\n ""Warning: Minimum episode length is smaller than chunk size. Note that episodes shorter than the chunk size will be discarded.""\n )\n\n\n# --- Generate episodes ---\ndef generate_episodes(num_episodes, split):\n episode_idx = 0\n episode_metadata = []\n obs_chunks = []\n act_chunks = []\n file_idx = 0\n output_dir_split = os.path.join(args.output_dir, split)\n while episode_idx < num_episodes:\n seed = np.random.randint(0, 10000)\n env = ProcgenGym3Env(num=1, env_name=""coinrun"", start_level=seed)\n\n observations_seq = []\n actions_seq = []\n episode_obs_chunks = []\n episode_act_chunks = []\n\n # --- Run episode ---\n step_t = 0\n for step_t in range(args.max_episode_length):\n action = types_np.sample(env.ac_space, bshape=(env.num,))\n env.act(action)\n _, obs, first = env.observe()\n observations_seq.append(obs[""rgb""])\n actions_seq.append(action)\n if len(observations_seq) == args.chunk_size:\n episode_obs_chunks.append(observations_seq)\n episode_act_chunks.append(actions_seq)\n observations_seq = []\n actions_seq = []\n if first:\n break\n\n # --- Save episode ---\n if step_t + 1 >= args.min_episode_length:\n if observations_seq:\n if len(observations_seq) < args.chunk_size:\n print(\n f""Warning: Inconsistent chunk_sizes. Episode has {len(observations_seq)} frames, ""\n f""which is smaller than the requested chunk_size: {args.chunk_size}. 
""\n ""This might lead to performance degradation during training.""\n )\n episode_obs_chunks.append(observations_seq)\n episode_act_chunks.append(actions_seq)\n\n obs_chunks_data = [\n np.concatenate(seq, axis=0).astype(np.uint8)\n for seq in episode_obs_chunks\n ]\n act_chunks_data = [\n np.concatenate(act, axis=0) for act in episode_act_chunks\n ]\n obs_chunks.extend(obs_chunks_data)\n act_chunks.extend(act_chunks_data)\n\n ep_metadata, obs_chunks, file_idx, act_chunks = save_chunks(\n obs_chunks, file_idx, args.chunks_per_file, output_dir_split, act_chunks\n )\n episode_metadata.extend(ep_metadata)\n\n print(f""Episode {episode_idx} completed, length: {step_t + 1}."")\n episode_idx += 1\n else:\n print(f""Episode too short ({step_t + 1}), resampling..."")\n\n if len(obs_chunks) > 0:\n print(\n f""Warning: Dropping {len(obs_chunks)} chunks for consistent number of chunks per file."",\n ""Consider changing the chunk_size and chunks_per_file parameters to prevent data-loss."",\n )\n\n print(f""Done generating {split} split"")\n return episode_metadata\n\n\ndef get_action_space():\n env = ProcgenGym3Env(num=1, env_name=""coinrun"", start_level=0)\n return env.ac_space.eltype.n\n\n\ndef main():\n # Set random seed and create dataset directories\n np.random.seed(args.seed)\n # --- Generate episodes ---\n train_episode_metadata = generate_episodes(args.num_episodes_train, ""train"")\n val_episode_metadata = generate_episodes(args.num_episodes_val, ""val"")\n test_episode_metadata = generate_episodes(args.num_episodes_test, ""test"")\n\n # --- Save metadata ---\n metadata = {\n ""env"": ""coinrun"",\n ""num_actions"": get_action_space(),\n ""num_episodes_train"": args.num_episodes_train,\n ""num_episodes_val"": args.num_episodes_val,\n ""num_episodes_test"": args.num_episodes_test,\n ""avg_episode_len_train"": np.mean(\n [ep[""avg_seq_len""] for ep in train_episode_metadata]\n ),\n ""avg_episode_len_val"": np.mean(\n [ep[""avg_seq_len""] for ep in val_episode_metadata]\n ),\n ""avg_episode_len_test"": np.mean(\n [ep[""avg_seq_len""] for ep in test_episode_metadata]\n ),\n ""episode_metadata_train"": train_episode_metadata,\n ""episode_metadata_val"": val_episode_metadata,\n ""episode_metadata_test"": test_episode_metadata,\n }\n with open(os.path.join(args.output_dir, ""metadata.json""), ""w"") as f:\n json.dump(metadata, f)\n\n print(f""Done generating dataset."")\n\n\nif __name__ == ""__main__"":\n main()\n",python,tab +65,350130,"input_pipeline/generate_breakout_dataset.py",0,0,"""""""\nGenerates a dataset of random-action CoinRun episodes.\nEpisodes are saved individually as memory-mapped files for efficient loading.\n""""""\n\nfrom dataclasses import dataclass\n\nfrom gym3 import types_np\nimport numpy as np\nfrom procgen import ProcgenGym3Env\nimport tyro\nimport json\nimport os\nfrom utils import save_chunks\n\n\n@dataclass\nclass Args:\n num_episodes_train: int = 10000\n num_episodes_val: int = 500\n num_episodes_test: int = 500\n output_dir: str = ""data/coinrun_episodes""\n min_episode_length: int = 1000\n max_episode_length: int = 1000\n chunk_size: int = 100\n chunks_per_file: int = 100\n seed: int = 0\n\n\nargs = tyro.cli(Args)\nassert (\n args.max_episode_length >= args.min_episode_length\n), ""Maximum episode length must be greater than or equal to minimum episode length.""\n\nif args.min_episode_length < args.chunk_size:\n print(\n ""Warning: Minimum episode length is smaller than chunk size. 
Note that episodes shorter than the chunk size will be discarded.""\n )\n\n\n# --- Generate episodes ---\ndef generate_episodes(num_episodes, split):\n episode_idx = 0\n episode_metadata = []\n obs_chunks = []\n act_chunks = []\n file_idx = 0\n output_dir_split = os.path.join(args.output_dir, split)\n while episode_idx < num_episodes:\n seed = np.random.randint(0, 10000)\n env = ProcgenGym3Env(num=1, env_name=""coinrun"", start_level=seed)\n\n observations_seq = []\n actions_seq = []\n episode_obs_chunks = []\n episode_act_chunks = []\n\n # --- Run episode ---\n step_t = 0\n for step_t in range(args.max_episode_length):\n action = types_np.sample(env.ac_space, bshape=(env.num,))\n env.act(action)\n _, obs, first = env.observe()\n observations_seq.append(obs[""rgb""])\n actions_seq.append(action)\n if len(observations_seq) == args.chunk_size:\n episode_obs_chunks.append(observations_seq)\n episode_act_chunks.append(actions_seq)\n observations_seq = []\n actions_seq = []\n if first:\n break\n\n # --- Save episode ---\n if step_t + 1 >= args.min_episode_length:\n if observations_seq:\n if len(observations_seq) < args.chunk_size:\n print(\n f""Warning: Inconsistent chunk_sizes. Episode has {len(observations_seq)} frames, ""\n f""which is smaller than the requested chunk_size: {args.chunk_size}. ""\n ""This might lead to performance degradation during training.""\n )\n episode_obs_chunks.append(observations_seq)\n episode_act_chunks.append(actions_seq)\n\n obs_chunks_data = [\n np.concatenate(seq, axis=0).astype(np.uint8)\n for seq in episode_obs_chunks\n ]\n act_chunks_data = [\n np.concatenate(act, axis=0) for act in episode_act_chunks\n ]\n obs_chunks.extend(obs_chunks_data)\n act_chunks.extend(act_chunks_data)\n\n ep_metadata, obs_chunks, file_idx, act_chunks = save_chunks(\n obs_chunks, file_idx, args.chunks_per_file, output_dir_split, act_chunks\n )\n episode_metadata.extend(ep_metadata)\n\n print(f""Episode {episode_idx} completed, length: {step_t + 1}."")\n episode_idx += 1\n else:\n print(f""Episode too short ({step_t + 1}), resampling..."")\n\n if len(obs_chunks) > 0:\n print(\n f""Warning: Dropping {len(obs_chunks)} chunks for consistent number of chunks per file."",\n ""Consider changing the chunk_size and chunks_per_file parameters to prevent data-loss."",\n )\n\n print(f""Done generating {split} split"")\n return episode_metadata\n\n\ndef get_action_space():\n env = ProcgenGym3Env(num=1, env_name=""coinrun"", start_level=0)\n return env.ac_space.eltype.n\n\n\ndef main():\n # Set random seed and create dataset directories\n np.random.seed(args.seed)\n # --- Generate episodes ---\n train_episode_metadata = generate_episodes(args.num_episodes_train, ""train"")\n val_episode_metadata = generate_episodes(args.num_episodes_val, ""val"")\n test_episode_metadata = generate_episodes(args.num_episodes_test, ""test"")\n\n # --- Save metadata ---\n metadata = {\n ""env"": ""coinrun"",\n ""num_actions"": get_action_space(),\n ""num_episodes_train"": args.num_episodes_train,\n ""num_episodes_val"": args.num_episodes_val,\n ""num_episodes_test"": args.num_episodes_test,\n ""avg_episode_len_train"": np.mean(\n [ep[""avg_seq_len""] for ep in train_episode_metadata]\n ),\n ""avg_episode_len_val"": np.mean(\n [ep[""avg_seq_len""] for ep in val_episode_metadata]\n ),\n ""avg_episode_len_test"": np.mean(\n [ep[""avg_seq_len""] for ep in test_episode_metadata]\n ),\n ""episode_metadata_train"": train_episode_metadata,\n ""episode_metadata_val"": val_episode_metadata,\n ""episode_metadata_test"": 
test_episode_metadata,\n }\n with open(os.path.join(args.output_dir, ""metadata.json""), ""w"") as f:\n json.dump(metadata, f)\n\n print(f""Done generating dataset."")\n\n\nif __name__ == ""__main__"":\n main()\n",python,tab +66,489770,"input_pipeline/generate_breakout_dataset.py",1485,0,"",python,selection_mouse +67,551302,"input_pipeline/generate_coinrun_dataset.py",0,0,"",python,tab +68,551809,"input_pipeline/generate_coinrun_dataset.py",1060,0,"",python,selection_mouse +69,552660,"input_pipeline/generate_coinrun_dataset.py",0,0,"",python,selection_command +70,553135,"input_pipeline/generate_coinrun_dataset.py",0,3,"""""""",python,selection_command +71,553385,"input_pipeline/generate_coinrun_dataset.py",0,5351,"""""""\nGenerates a dataset of random-action CoinRun episodes.\nEpisodes are saved individually as memory-mapped files for efficient loading.\n""""""\n\nfrom dataclasses import dataclass\n\nfrom gym3 import types_np\nimport numpy as np\nfrom procgen import ProcgenGym3Env\nimport tyro\nimport json\nimport os\nfrom utils import save_chunks\n\n\n@dataclass\nclass Args:\n num_episodes_train: int = 10000\n num_episodes_val: int = 500\n num_episodes_test: int = 500\n output_dir: str = ""data/coinrun_episodes""\n min_episode_length: int = 1000\n max_episode_length: int = 1000\n chunk_size: int = 100\n chunks_per_file: int = 100\n seed: int = 0\n\n\nargs = tyro.cli(Args)\nassert (\n args.max_episode_length >= args.min_episode_length\n), ""Maximum episode length must be greater than or equal to minimum episode length.""\n\nif args.min_episode_length < args.chunk_size:\n print(\n ""Warning: Minimum episode length is smaller than chunk size. Note that episodes shorter than the chunk size will be discarded.""\n )\n\n\n# --- Generate episodes ---\ndef generate_episodes(num_episodes, split):\n episode_idx = 0\n episode_metadata = []\n obs_chunks = []\n act_chunks = []\n file_idx = 0\n output_dir_split = os.path.join(args.output_dir, split)\n while episode_idx < num_episodes:\n seed = np.random.randint(0, 10000)\n env = ProcgenGym3Env(num=1, env_name=""coinrun"", start_level=seed)\n\n observations_seq = []\n actions_seq = []\n episode_obs_chunks = []\n episode_act_chunks = []\n\n # --- Run episode ---\n step_t = 0\n for step_t in range(args.max_episode_length):\n action = types_np.sample(env.ac_space, bshape=(env.num,))\n env.act(action)\n _, obs, first = env.observe()\n observations_seq.append(obs[""rgb""])\n actions_seq.append(action)\n if len(observations_seq) == args.chunk_size:\n episode_obs_chunks.append(observations_seq)\n episode_act_chunks.append(actions_seq)\n observations_seq = []\n actions_seq = []\n if first:\n break\n\n # --- Save episode ---\n if step_t + 1 >= args.min_episode_length:\n if observations_seq:\n if len(observations_seq) < args.chunk_size:\n print(\n f""Warning: Inconsistent chunk_sizes. Episode has {len(observations_seq)} frames, ""\n f""which is smaller than the requested chunk_size: {args.chunk_size}. 
""\n ""This might lead to performance degradation during training.""\n )\n episode_obs_chunks.append(observations_seq)\n episode_act_chunks.append(actions_seq)\n\n obs_chunks_data = [\n np.concatenate(seq, axis=0).astype(np.uint8)\n for seq in episode_obs_chunks\n ]\n act_chunks_data = [\n np.concatenate(act, axis=0) for act in episode_act_chunks\n ]\n obs_chunks.extend(obs_chunks_data)\n act_chunks.extend(act_chunks_data)\n\n ep_metadata, obs_chunks, file_idx, act_chunks = save_chunks(\n obs_chunks, file_idx, args.chunks_per_file, output_dir_split, act_chunks\n )\n episode_metadata.extend(ep_metadata)\n\n print(f""Episode {episode_idx} completed, length: {step_t + 1}."")\n episode_idx += 1\n else:\n print(f""Episode too short ({step_t + 1}), resampling..."")\n\n if len(obs_chunks) > 0:\n print(\n f""Warning: Dropping {len(obs_chunks)} chunks for consistent number of chunks per file."",\n ""Consider changing the chunk_size and chunks_per_file parameters to prevent data-loss."",\n )\n\n print(f""Done generating {split} split"")\n return episode_metadata\n\n\ndef get_action_space():\n env = ProcgenGym3Env(num=1, env_name=""coinrun"", start_level=0)\n return env.ac_space.eltype.n\n\n\ndef main():\n # Set random seed and create dataset directories\n np.random.seed(args.seed)\n # --- Generate episodes ---\n train_episode_metadata = generate_episodes(args.num_episodes_train, ""train"")\n val_episode_metadata = generate_episodes(args.num_episodes_val, ""val"")\n test_episode_metadata = generate_episodes(args.num_episodes_test, ""test"")\n\n # --- Save metadata ---\n metadata = {\n ""env"": ""coinrun"",\n ""num_actions"": get_action_space(),\n ""num_episodes_train"": args.num_episodes_train,\n ""num_episodes_val"": args.num_episodes_val,\n ""num_episodes_test"": args.num_episodes_test,\n ""avg_episode_len_train"": np.mean(\n [ep[""avg_seq_len""] for ep in train_episode_metadata]\n ),\n ""avg_episode_len_val"": np.mean(\n [ep[""avg_seq_len""] for ep in val_episode_metadata]\n ),\n ""avg_episode_len_test"": np.mean(\n [ep[""avg_seq_len""] for ep in test_episode_metadata]\n ),\n ""episode_metadata_train"": train_episode_metadata,\n ""episode_metadata_val"": val_episode_metadata,\n ""episode_metadata_test"": test_episode_metadata,\n }\n with open(os.path.join(args.output_dir, ""metadata.json""), ""w"") as f:\n json.dump(metadata, f)\n\n print(f""Done generating dataset."")\n\n\nif __name__ == ""__main__"":\n main()\n",python,selection_command diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-d4ecca31-879c-4879-b2a7-b7463e4327b91757416440874-2025_09_09-13.15.15.617/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-d4ecca31-879c-4879-b2a7-b7463e4327b91757416440874-2025_09_09-13.15.15.617/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..6cb954a838f5a589391ebf8b032eee26c70a526b --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-d4ecca31-879c-4879-b2a7-b7463e4327b91757416440874-2025_09_09-13.15.15.617/source.csv @@ -0,0 +1,7 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,1249,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"1:15:15 PM [info] Activating crowd-code\n1:15:15 PM [info] Recording started\n1:15:15 PM [info] Initializing git provider using file system watchers...\n1:15:16 PM [info] Git repository found\n1:15:16 PM [info] Git provider initialized successfully\n1:15:16 PM [info] Initial git state: [object 
Object]\n",Log,tab +3,436098,"train_lam.py",0,0,"from dataclasses import dataclass, field\nimport os\nfrom typing import cast\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.lam import LatentActionModel\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n vq_beta: float = 0.25\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n vq_reset_thresh: int = 50\n # LAM\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 6\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_lam""\n tags: list[str] = field(default_factory=lambda: [""lam""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_checkpoint_keep_period: int = 20000\n wandb_id: str = """"\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\n\ndef lam_loss_fn(\n model: LatentActionModel, inputs: dict\n) -> tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n # --- Compute loss ---\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n model.train()\n outputs = model(inputs, training=True)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n gt_future_frames = gt[:, 1:]\n mse = jnp.square(gt_future_frames - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = gt_future_frames.clip(0, 1).reshape(-1, *gt_future_frames.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt, recon)).mean()\n count_fn = jax.vmap(lambda i: (outputs[""indices""] == i).sum())\n index_counts = count_fn(jnp.arange(args.num_latents))\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),\n )\n return loss, (outputs[""recon""], index_counts, metrics)\n\n\n@nnx.jit\ndef train_step(\n lam: LatentActionModel,\n optimizer: nnx.Optimizer,\n inputs: dict,\n action_last_active: jax.Array,\n rng: jax.Array,\n) -> tuple[jax.Array, jax.Array, jax.Array, dict]:\n def loss_fn(\n model: 
LatentActionModel,\n ) -> tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n return lam_loss_fn(model, inputs)\n\n # --- Update model ---\n (loss, (recon, idx_counts, metrics)), grads = nnx.value_and_grad(\n loss_fn, has_aux=True\n )(lam)\n optimizer.update(grads)\n\n # --- Reset inactive latent actions ---\n codebook = lam.vq.codebook\n num_codes = len(codebook)\n active_codes = idx_counts != 0.0\n action_last_active = jnp.where(active_codes, 0, action_last_active + 1)\n p_code = active_codes / active_codes.sum()\n reset_idxs = jax.random.choice(rng, num_codes, shape=(num_codes,), p=p_code)\n do_reset = action_last_active >= args.vq_reset_thresh\n new_codebook = jnp.where(\n jnp.expand_dims(do_reset, -1), codebook[reset_idxs], codebook.value\n )\n lam.vq.codebook.value = new_codebook\n action_last_active = jnp.where(do_reset, 0, action_last_active)\n return loss, recon, action_last_active, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n )\n\n # Count parameters\n _, params, _ = nnx.split(lam, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n optimizer = nnx.Optimizer(lam, tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = 
jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n\n # --- TRAIN LOOP ---\n dataloader = (\n jax.make_array_from_process_local_data(videos_sharding, elem)\n for elem in grain_iterator\n )\n print(f""Starting training from step {step}..."")\n action_last_active = jnp.zeros(args.num_latents, dtype=jnp.int32)\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng = jax.random.split(rng)\n\n inputs = dict(videos=videos, rng=_rng)\n rng, _rng = jax.random.split(rng)\n loss, recon, action_last_active, metrics = train_step(\n lam, optimizer, inputs, action_last_active, _rng\n )\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n 
comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n grain_iterator # type: ignore\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +4,440483,"train_lam.py",2637,0,"",python,selection_mouse +5,441103,"train_lam.py",2648,0,"",python,selection_mouse +6,441819,"train_lam.py",2673,0,"",python,selection_mouse +7,443158,"train_lam.py",2683,0,"",python,selection_mouse